/*
 * GStreamer ENRIGIN Hardware Decoder Plugin
 * Based on FFmpeg vsv decoder
 */

#ifndef PACKAGE
#define PACKAGE "enrigindec"
#endif

#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/video/gstvideodecoder.h>

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/hwcontext.h>
#include <libavutil/imgutils.h>
#include <libavutil/opt.h>
#include <libavutil/dict.h>
}

/* GObject boilerplate: type accessor plus checked instance/class casts. */
#define GST_TYPE_ENRIGINDEC (gst_enrigindec_get_type())
#define GST_ENRIGINDEC(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_ENRIGINDEC, GstEnriginDec))
#define GST_ENRIGINDEC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_ENRIGINDEC, GstEnriginDecClass))

/* Instance structure: wraps an FFmpeg VSV hardware decoder behind the
 * GstVideoDecoder base class. */
typedef struct _GstEnriginDec {
    GstVideoDecoder parent;
    
    // FFmpeg decoder context
    AVCodecContext *avctx;           // owned; created in set_format(), freed in stop()
    const AVCodec *decoder;          // borrowed from libavcodec's decoder registry
    AVBufferRef *hw_device_ctx;      // owned ref to the VSV hardware device context
    enum AVPixelFormat hw_pix_fmt;   // HW surface format discovered in set_format()
    
    // Configuration
    gchar *codec_name;  // h264, hevc, vp9, mjpeg
    gint card_id;
    gint vpu_id;
    gchar *output_format;  // yuv420p, nv12
    gint buffer_type;      // 0=L3, 1=L4
    
    // State
    gboolean decoder_initialized;  // TRUE once avcodec_open2() succeeded
    gint width;                    // seeded from caps in set_format()
    gint height;
    gint64 frame_count;            // frames pushed downstream this session
    
} GstEnriginDec;

typedef struct _GstEnriginDecClass {
    GstVideoDecoderClass parent_class;
} GstEnriginDecClass;

GType gst_enrigindec_get_type(void);

/* Property IDs used by set_property()/get_property(). */
enum {
    PROP_0,
    PROP_CODEC,
    PROP_CARD_ID,
    PROP_VPU_ID,
    PROP_OUTPUT_FORMAT,
    PROP_BUFFER_TYPE,
};

/* Sink pad: accepts byte-stream/AU-aligned H.264 and H.265, plus VP9 and
 * JPEG.  The mime type chosen at negotiation selects the VSV decoder name
 * in set_format(). */
static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE(
    "sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS(
        "video/x-h264, stream-format=(string)byte-stream, alignment=(string)au; "
        "video/x-h265, stream-format=(string)byte-stream, alignment=(string)au; "
        "video/x-vp9; "
        "image/jpeg"
    )
);

/* Src pad: raw video in the formats the "output-format" property can map
 * to (yuv420p->I420, nv12->NV12, bgr24->BGR). */
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE(
    "src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS(
        "video/x-raw, "
        "format = (string) { I420, NV12, BGR }, "
        "width = (int) [ 1, MAX ], "
        "height = (int) [ 1, MAX ], "
        "framerate = (fraction) [ 0/1, MAX ]"
    )
);

/* Registers the type and provides parent_class for chaining up. */
#define gst_enrigindec_parent_class parent_class
G_DEFINE_TYPE(GstEnriginDec, gst_enrigindec, GST_TYPE_VIDEO_DECODER);

/* Forward declarations */
static void gst_enrigindec_set_property(GObject *object, guint prop_id,
    const GValue *value, GParamSpec *pspec);
static void gst_enrigindec_get_property(GObject *object, guint prop_id,
    GValue *value, GParamSpec *pspec);
static void gst_enrigindec_finalize(GObject *object);
static gboolean gst_enrigindec_start(GstVideoDecoder *decoder);
static gboolean gst_enrigindec_stop(GstVideoDecoder *decoder);
static gboolean gst_enrigindec_set_format(GstVideoDecoder *decoder, GstVideoCodecState *state);
static GstFlowReturn gst_enrigindec_handle_frame(GstVideoDecoder *decoder, GstVideoCodecFrame *frame);
static gboolean gst_enrigindec_flush(GstVideoDecoder *decoder);

/* Helper functions */
static char* generate_vpu_parameters(int vid, int cid)
{
    char* buffer = (char*)g_malloc(100);
    g_snprintf(buffer, 100, "dec=/dev/ecu%dvid%d,enc=/dev/ecu%dvid%d,mem=/dev/ecu%d,mapped_io=1", 
               cid, vid, cid, vid, cid);
    return buffer;
}

/*
 * avcodec get_format callback: pick the hardware surface format that
 * set_format() stored in hw_pix_fmt, or fail negotiation.
 * ctx->opaque carries the GstEnriginDec pointer (set in set_format()).
 */
static enum AVPixelFormat get_hw_format(AVCodecContext* ctx, const enum AVPixelFormat* pix_fmts)
{
    GstEnriginDec *dec = (GstEnriginDec*)ctx->opaque;
    const enum AVPixelFormat* p;
    
    /* The list is terminated with AV_PIX_FMT_NONE — compare against the
     * named sentinel rather than a bare -1. */
    for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) {
        if (*p == dec->hw_pix_fmt) return *p;
    }
    
    GST_ERROR_OBJECT(dec, "Failed to get HW surface format");
    return AV_PIX_FMT_NONE;
}

/*
 * Create the VSV AVHWDeviceContext from the card-id/vpu-id properties and
 * attach it to dec->avctx.  Must run before avcodec_open2().
 *
 * Returns TRUE on success; on failure dec->hw_device_ctx stays NULL.
 */
static gboolean init_hw_decoder(GstEnriginDec *dec)
{
    int ret;
    char *vpu_device;
    AVDictionary *opts = NULL;
    enum AVHWDeviceType type;
    
    type = av_hwdevice_find_type_by_name("vsv");
    if (type == AV_HWDEVICE_TYPE_NONE) {
        GST_ERROR_OBJECT(dec, "VSV device type not supported");
        return FALSE;
    }
    
    // Generate VPU device parameters
    vpu_device = generate_vpu_parameters(dec->vpu_id, dec->card_id);
    ret = av_dict_parse_string(&opts, vpu_device, "=", ",", 0);
    g_free(vpu_device);
    
    if (ret < 0) {
        /* On failure the dict may still hold entries added before the
         * error — free it to avoid a leak. */
        av_dict_free(&opts);
        GST_ERROR_OBJECT(dec, "Failed to parse device options");
        return FALSE;
    }
    
    // Create hardware device context
    ret = av_hwdevice_ctx_create(&dec->hw_device_ctx, type, NULL, opts, 0);
    av_dict_free(&opts);
    
    if (ret < 0) {
        GST_ERROR_OBJECT(dec, "Failed to create HW device context");
        return FALSE;
    }
    
    /* avctx takes its own reference; dec keeps the master ref until stop(). */
    dec->avctx->hw_device_ctx = av_buffer_ref(dec->hw_device_ctx);
    return TRUE;
}

/* Class init: wire GObject property handling, install the five element
 * properties, register pad templates/metadata, and hook the
 * GstVideoDecoder virtual methods.  Property nicks/blurbs and install
 * order are externally visible via introspection — keep them stable. */
static void gst_enrigindec_class_init(GstEnriginDecClass *klass)
{
    GObjectClass *gobject_class = G_OBJECT_CLASS(klass);
    GstElementClass *element_class = GST_ELEMENT_CLASS(klass);
    GstVideoDecoderClass *decoder_class = GST_VIDEO_DECODER_CLASS(klass);
    
    gobject_class->set_property = gst_enrigindec_set_property;
    gobject_class->get_property = gst_enrigindec_get_property;
    gobject_class->finalize = gst_enrigindec_finalize;
    
    /* NOTE(review): the "codec" property is stored but set_format()
     * derives the decoder from caps, not from this property — confirm
     * whether it is still needed. */
    g_object_class_install_property(gobject_class, PROP_CODEC,
        g_param_spec_string("codec", "Codec", "Codec type (h264/hevc/vp9/mjpeg)",
            "h264", (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
    
    g_object_class_install_property(gobject_class, PROP_CARD_ID,
        g_param_spec_int("card-id", "Card ID", "Card ID",
            0, 15, 0, (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
    
    g_object_class_install_property(gobject_class, PROP_VPU_ID,
        g_param_spec_int("vpu-id", "VPU ID", "VPU ID",
            0, 1, 0, (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
    
    g_object_class_install_property(gobject_class, PROP_OUTPUT_FORMAT,
        g_param_spec_string("output-format", "Output Format", "Output pixel format (yuv420p/nv12/bgr24)",
            "nv12", (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
    
    g_object_class_install_property(gobject_class, PROP_BUFFER_TYPE,
        g_param_spec_int("buffer-type", "Buffer Type", "Buffer type (0=L3, 1=L4)",
            0, 1, 1, (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
    
    gst_element_class_set_static_metadata(element_class,
        "ENRIGIN Hardware Video Decoder",
        "Codec/Decoder/Video",
        "Hardware accelerated video decoder using ENRIGIN VSV",
        "Cascade AI");
    
    gst_element_class_add_static_pad_template(element_class, &sink_template);
    gst_element_class_add_static_pad_template(element_class, &src_template);
    
    decoder_class->start = GST_DEBUG_FUNCPTR(gst_enrigindec_start);
    decoder_class->stop = GST_DEBUG_FUNCPTR(gst_enrigindec_stop);
    decoder_class->set_format = GST_DEBUG_FUNCPTR(gst_enrigindec_set_format);
    decoder_class->handle_frame = GST_DEBUG_FUNCPTR(gst_enrigindec_handle_frame);
    decoder_class->flush = GST_DEBUG_FUNCPTR(gst_enrigindec_flush);
}

/* Instance init: export the VSV runtime library path, set property
 * defaults and clear all runtime state. */
static void gst_enrigindec_init(GstEnriginDec *dec)
{
    /* Point the VSV runtime at its codec library before any decoder is
     * opened; overwrite=0 keeps a user-provided value intact. */
    if (getenv("VCD_SHARED_LIB") == NULL) {
        setenv("VCD_SHARED_LIB", "/opt/rivs/lib/lib_vcd.so", 0);
        GST_INFO_OBJECT(dec, "Set VCD_SHARED_LIB=/opt/rivs/lib/lib_vcd.so");
    }
    
    /* Property defaults (mirror the param specs in class_init). */
    dec->codec_name = g_strdup("h264");
    dec->output_format = g_strdup("nv12");
    dec->card_id = 0;
    dec->vpu_id = 0;
    dec->buffer_type = 1;
    
    /* Runtime state; the FFmpeg contexts are created lazily in
     * set_format(). */
    dec->avctx = NULL;
    dec->decoder = NULL;
    dec->hw_device_ctx = NULL;
    dec->decoder_initialized = FALSE;
    dec->frame_count = 0;
    
    /* Sink caps guarantee complete encoded units (alignment=au). */
    gst_video_decoder_set_packetized(GST_VIDEO_DECODER(dec), TRUE);
}

/* GObject property setter; string properties replace the previous copy. */
static void gst_enrigindec_set_property(GObject *object, guint prop_id,
    const GValue *value, GParamSpec *pspec)
{
    GstEnriginDec *self = GST_ENRIGINDEC(object);
    
    switch (prop_id) {
        case PROP_CODEC:
            g_free(self->codec_name);
            self->codec_name = g_value_dup_string(value);
            break;
        case PROP_OUTPUT_FORMAT:
            g_free(self->output_format);
            self->output_format = g_value_dup_string(value);
            break;
        case PROP_CARD_ID:
            self->card_id = g_value_get_int(value);
            break;
        case PROP_VPU_ID:
            self->vpu_id = g_value_get_int(value);
            break;
        case PROP_BUFFER_TYPE:
            self->buffer_type = g_value_get_int(value);
            break;
        default:
            G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
            break;
    }
}

/* GObject property getter; mirrors set_property above. */
static void gst_enrigindec_get_property(GObject *object, guint prop_id,
    GValue *value, GParamSpec *pspec)
{
    GstEnriginDec *self = GST_ENRIGINDEC(object);
    
    switch (prop_id) {
        case PROP_CODEC:
            g_value_set_string(value, self->codec_name);
            break;
        case PROP_OUTPUT_FORMAT:
            g_value_set_string(value, self->output_format);
            break;
        case PROP_CARD_ID:
            g_value_set_int(value, self->card_id);
            break;
        case PROP_VPU_ID:
            g_value_set_int(value, self->vpu_id);
            break;
        case PROP_BUFFER_TYPE:
            g_value_set_int(value, self->buffer_type);
            break;
        default:
            G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
            break;
    }
}

/* Final teardown.  The FFmpeg contexts are released in stop(); only the
 * property string copies remain to free here. */
static void gst_enrigindec_finalize(GObject *object)
{
    GstEnriginDec *self = GST_ENRIGINDEC(object);
    
    g_free(self->codec_name);
    g_free(self->output_format);
    
    G_OBJECT_CLASS(parent_class)->finalize(object);
}

/* Stream start: reset the per-session frame counter; the decoder itself
 * is created lazily in set_format(). */
static gboolean gst_enrigindec_start(GstVideoDecoder *decoder)
{
    GST_ENRIGINDEC(decoder)->frame_count = 0;
    return TRUE;
}

/* Stream stop: log the session frame count and release the FFmpeg
 * decoder and hardware device contexts. */
static gboolean gst_enrigindec_stop(GstVideoDecoder *decoder)
{
    GstEnriginDec *dec = GST_ENRIGINDEC(decoder);
    
    /* %ld is wrong for gint64 on 32-bit targets (gint64 is long long
     * there) — use GLib's portable format macro. */
    GST_INFO_OBJECT(dec, "Stopping decoder, decoded %" G_GINT64_FORMAT " frames",
                    dec->frame_count);
    
    /* Both helpers are NULL-safe and set the pointer to NULL themselves,
     * so no guards or manual re-NULLing are needed. */
    avcodec_free_context(&dec->avctx);
    av_buffer_unref(&dec->hw_device_ctx);
    
    dec->decoder_initialized = FALSE;
    dec->frame_count = 0;
    
    return TRUE;
}

/*
 * Negotiate input caps: select the VSV decoder matching the sink mime
 * type, create and open the AVCodecContext with the VSV hardware device
 * attached, and publish the raw output state downstream.
 *
 * Fixes over the previous version: the half-built avctx/hw_device_ctx
 * were leaked on every failure path after allocation, and a second
 * set_format() call (caps renegotiation) leaked the previous contexts.
 */
static gboolean gst_enrigindec_set_format(GstVideoDecoder *decoder, GstVideoCodecState *state)
{
    GstEnriginDec *dec = GST_ENRIGINDEC(decoder);
    GstStructure *structure;
    const gchar *mime_type;
    const char *decoder_name = NULL;
    GstVideoCodecState *output_state;
    GstVideoFormat format;
    char buffer_type_str[8];
    int ret;
    
    structure = gst_caps_get_structure(state->caps, 0);
    mime_type = gst_structure_get_name(structure);
    
    GST_INFO_OBJECT(dec, "Setting format: %s", mime_type);
    
    /* Renegotiation: drop contexts from any previous set_format() so
     * they are not leaked (both helpers NULL the pointer). */
    avcodec_free_context(&dec->avctx);
    av_buffer_unref(&dec->hw_device_ctx);
    dec->decoder_initialized = FALSE;
    
    // Determine decoder based on mime type
    if (g_str_has_prefix(mime_type, "video/x-h264")) {
        decoder_name = "h264_vsv_decoder";
    } else if (g_str_has_prefix(mime_type, "video/x-h265")) {
        decoder_name = "hevc_vsv_decoder";
    } else if (g_str_has_prefix(mime_type, "video/x-vp9")) {
        decoder_name = "vp9_vsv_decoder";
    } else if (g_str_has_prefix(mime_type, "image/jpeg")) {
        decoder_name = "jpeg_vsv_decoder";
    } else {
        GST_ERROR_OBJECT(dec, "Unsupported codec type: %s", mime_type);
        return FALSE;
    }
    
    dec->decoder = avcodec_find_decoder_by_name(decoder_name);
    if (!dec->decoder) {
        GST_ERROR_OBJECT(dec, "Decoder %s not found", decoder_name);
        return FALSE;
    }
    
    dec->avctx = avcodec_alloc_context3(dec->decoder);
    if (!dec->avctx) {
        GST_ERROR_OBJECT(dec, "Failed to allocate decoder context");
        return FALSE;
    }
    
    /* Seed dimensions from caps when available.  NOTE(review): if the
     * caps carry no size, width/height stay 0 until the decoder reports
     * them — confirm downstream copes with a 0x0 output state. */
    if (state->info.width > 0 && state->info.height > 0) {
        dec->width = state->info.width;
        dec->height = state->info.height;
        dec->avctx->width = dec->width;
        dec->avctx->height = dec->height;
    }
    
    // Find hardware config supporting a VSV device context
    for (int i = 0;; i++) {
        const AVCodecHWConfig* config = avcodec_get_hw_config(dec->decoder, i);
        if (!config) {
            GST_ERROR_OBJECT(dec, "Decoder does not support VSV device");
            goto error;
        }
        
        if ((config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX) &&
            config->device_type == AV_HWDEVICE_TYPE_VSV) {
            dec->hw_pix_fmt = config->pix_fmt;
            break;
        }
    }
    
    dec->avctx->get_format = get_hw_format;
    dec->avctx->opaque = dec;  /* so get_hw_format() can reach us */
    
    // Pass the buffer-type property through as a private codec option
    g_snprintf(buffer_type_str, sizeof(buffer_type_str), "%d", dec->buffer_type);
    av_opt_set(dec->avctx->priv_data, "buffer_type", buffer_type_str, 0);
    
    // Initialize hardware decoder
    if (!init_hw_decoder(dec)) {
        goto error;
    }
    
    // Set output format AFTER hw_device_ctx is set, BEFORE avcodec_open2
    // This allows the decoder to output directly in the requested format
    if (g_strcmp0(dec->output_format, "yuv420p") == 0) {
        dec->avctx->pix_fmt = AV_PIX_FMT_YUV420P;
    } else if (g_strcmp0(dec->output_format, "bgr24") == 0) {
        dec->avctx->pix_fmt = AV_PIX_FMT_BGR24;
    } else {
        dec->avctx->pix_fmt = AV_PIX_FMT_NV12;
    }
    
    // Open decoder
    ret = avcodec_open2(dec->avctx, dec->decoder, NULL);
    if (ret < 0) {
        GST_ERROR_OBJECT(dec, "Failed to open decoder");
        goto error;
    }
    
    dec->decoder_initialized = TRUE;
    
    // Advertise the matching GStreamer raw format downstream
    if (dec->avctx->pix_fmt == AV_PIX_FMT_YUV420P) {
        format = GST_VIDEO_FORMAT_I420;
    } else if (dec->avctx->pix_fmt == AV_PIX_FMT_BGR24) {
        format = GST_VIDEO_FORMAT_BGR;
    } else {
        format = GST_VIDEO_FORMAT_NV12;
    }
    
    output_state = gst_video_decoder_set_output_state(decoder, format,
                                                       dec->width, dec->height, state);
    gst_video_codec_state_unref(output_state);
    
    GST_INFO_OBJECT(dec, "Decoder initialized: %dx%d, format=%s", 
                    dec->width, dec->height, dec->output_format);
    
    return TRUE;
    
error:
    /* Don't leak the half-built contexts on failure. */
    avcodec_free_context(&dec->avctx);
    av_buffer_unref(&dec->hw_device_ctx);
    return FALSE;
}

/*
 * Decode one encoded input frame: feed it to the FFmpeg decoder, receive
 * a frame, download it from the hardware surface if needed, and push a
 * packed copy downstream via finish_frame().
 *
 * Fixes over the previous version: av_image_get_buffer_size() errors
 * (negative) were stored in an unsigned gsize, gst_buffer_new_allocate()
 * was not checked, and a failed gst_buffer_map() still attached the
 * unfilled buffer as the frame's output.
 *
 * NOTE(review): one receive per send, and finish_frame() pairs the
 * decoded picture with the CURRENT input frame — with a reordering
 * decoder the timestamps may belong to an earlier frame; confirm the
 * VSV decoders output in presentation order.
 */
static GstFlowReturn gst_enrigindec_handle_frame(GstVideoDecoder *decoder, GstVideoCodecFrame *frame)
{
    GstEnriginDec *dec = GST_ENRIGINDEC(decoder);
    GstMapInfo map;
    GstMapInfo out_map;
    AVPacket *pkt = NULL;
    AVFrame *hw_frame = NULL;
    AVFrame *sw_frame = NULL;
    AVFrame *output_frame = NULL;
    GstBuffer *out_buf = NULL;
    int img_size;
    int ret;
    
    if (!gst_buffer_map(frame->input_buffer, &map, GST_MAP_READ)) {
        gst_video_codec_frame_unref(frame);
        return GST_FLOW_ERROR;
    }
    
    // Wrap the mapped input in an AVPacket; the decoder copies
    // non-refcounted packet data, so unmapping after send is safe.
    pkt = av_packet_alloc();
    if (!pkt) {
        gst_buffer_unmap(frame->input_buffer, &map);
        gst_video_codec_frame_unref(frame);
        return GST_FLOW_ERROR;
    }
    
    pkt->data = map.data;
    pkt->size = map.size;
    
    // Send packet to decoder
    ret = avcodec_send_packet(dec->avctx, pkt);
    gst_buffer_unmap(frame->input_buffer, &map);
    av_packet_free(&pkt);
    
    if (ret < 0) {
        GST_ERROR_OBJECT(dec, "Error sending packet to decoder");
        gst_video_codec_frame_unref(frame);
        return GST_FLOW_ERROR;
    }
    
    // Receive decoded frame
    hw_frame = av_frame_alloc();
    sw_frame = av_frame_alloc();
    if (!hw_frame || !sw_frame)
        goto error;
    
    ret = avcodec_receive_frame(dec->avctx, hw_frame);
    if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
        /* Decoder needs more input (or is drained): nothing to push yet. */
        av_frame_free(&hw_frame);
        av_frame_free(&sw_frame);
        gst_video_codec_frame_unref(frame);
        return GST_FLOW_OK;
    } else if (ret < 0) {
        GST_ERROR_OBJECT(dec, "Error receiving frame from decoder");
        goto error;
    }
    
    // Transfer data from the hardware surface to system memory if the
    // decoder returned a HW frame; otherwise use it directly.
    output_frame = hw_frame;
    if (hw_frame->format == dec->hw_pix_fmt) {
        ret = av_hwframe_transfer_data(sw_frame, hw_frame, 0);
        if (ret < 0) {
            GST_ERROR_OBJECT(dec, "Error transferring frame data");
            goto error;
        }
        output_frame = sw_frame;
    }
    
    // Copy the (possibly padded) AVFrame planes into one packed buffer
    img_size = av_image_get_buffer_size((AVPixelFormat)output_frame->format,
                                        output_frame->width, output_frame->height, 1);
    if (img_size < 0) {
        GST_ERROR_OBJECT(dec, "Failed to compute output buffer size");
        goto error;
    }
    
    out_buf = gst_buffer_new_allocate(NULL, (gsize)img_size, NULL);
    if (!out_buf) {
        GST_ERROR_OBJECT(dec, "Failed to allocate output buffer");
        goto error;
    }
    
    if (!gst_buffer_map(out_buf, &out_map, GST_MAP_WRITE)) {
        gst_buffer_unref(out_buf);
        goto error;
    }
    av_image_copy_to_buffer(out_map.data, out_map.size,
                           (const uint8_t * const *)output_frame->data,
                           (const int *)output_frame->linesize,
                           (AVPixelFormat)output_frame->format,
                           output_frame->width, output_frame->height, 1);
    gst_buffer_unmap(out_buf, &out_map);
    
    frame->output_buffer = out_buf;
    dec->frame_count++;
    
    av_frame_free(&hw_frame);
    av_frame_free(&sw_frame);
    
    return gst_video_decoder_finish_frame(decoder, frame);
    
error:
    /* av_frame_free() is NULL-safe. */
    av_frame_free(&hw_frame);
    av_frame_free(&sw_frame);
    gst_video_codec_frame_unref(frame);
    return GST_FLOW_ERROR;
}

/* Flush (seek/segment change): drop any frames buffered inside the
 * FFmpeg decoder so stale pictures are not emitted afterwards. */
static gboolean gst_enrigindec_flush(GstVideoDecoder *decoder)
{
    GstEnriginDec *self = GST_ENRIGINDEC(decoder);
    
    if (self->avctx != NULL)
        avcodec_flush_buffers(self->avctx);
    
    return TRUE;
}

/* Plugin entry: register the element.  PRIMARY rank lets autoplugging
 * (decodebin/playbin) prefer this hardware decoder. */
static gboolean plugin_init(GstPlugin *plugin)
{
    return gst_element_register(plugin, "enrigindec",
                                GST_RANK_PRIMARY, GST_TYPE_ENRIGINDEC);
}

/* Plugin descriptor consumed by the GStreamer registry scanner; the
 * PACKAGE define at the top of the file supplies the package name. */
GST_PLUGIN_DEFINE(
    GST_VERSION_MAJOR,
    GST_VERSION_MINOR,
    enrigindec,
    "ENRIGIN Hardware Video Decoder",
    plugin_init,
    "1.0",
    "LGPL",
    "GStreamer",
    "http://gstreamer.net/"
)
