﻿// ref:https://github.com/FFmpeg/FFmpeg/blob/master/doc/examples/hw_decode.c
// ref: https://github.com/chinahbcq/ffmpeg_hw_decode
// ref: https://www.jianshu.com/p/3ea9ef713211

#include "decode_video.h"

#include <stdio.h>
#include <opencv2/core.hpp>
#include <opencv2/highgui.hpp>

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/hwcontext.h>
#include <libavutil/imgutils.h>
#include <libswscale/swscale.h>
#include <libavutil/opt.h>
}

// Global demuxer/decoder state shared by init_ctx()/clear_ctx()/ffmpeg_video_decode().
// This module is single-session: only one stream can be open at a time.
static AVFormatContext *s_input_ctx = NULL;   // demuxer context for the currently opened URL
static AVCodecContext *s_decoder_ctx = NULL;  // decoder context for the selected video stream
static int s_video_stream = -1;               // index of the video stream within s_input_ctx
static bool s_is_init = false;                // guards against double init / double teardown
// Set to a positive value (e.g. from a signal handler or another thread)
// to make FFmpeg abort its current blocking I/O operation.
static volatile int DISTURBE_SIGNALS = 0;

// AVIOInterruptCB hook: a non-zero return tells FFmpeg to abort the
// blocking call in progress. The opaque pointer is registered as NULL
// (see int_cb below) and is unused.
static int decode_interrupt_cb(void *opaque)
{
    (void)opaque;
    return (0 < DISTURBE_SIGNALS) ? 1 : 0;
}
// Interrupt callback descriptor installed on the demuxer in init_ctx().
static const AVIOInterruptCB int_cb = { decode_interrupt_cb, NULL };

static AVBufferRef *s_hw_device_ctx = NULL;  // HW device context created by hw_decoder_init()
static AVPixelFormat s_hw_pix_fmt;           // pixel format the HW decoder emits; set in init_ctx()

static AVPixelFormat s_pixel_format = AV_PIX_FMT_BGR24;  // AV_PIX_FMT_BGR24 matches OpenCV's CV_8UC3
static int s_thread_count = 4;               // software-decoding thread count
static const char* s_hwdevice_name = "cuda"; // HW device type name; defaults to CUDA

/**
 * Create a hardware device context of the given type and attach a reference
 * to it on the decoder context.
 *
 * On success, s_hw_device_ctx holds one reference (released in clear_ctx())
 * and ctx->hw_device_ctx holds another (released with the codec context).
 *
 * @param ctx  decoder context that will use the HW device
 * @param type HW device type (e.g. AV_HWDEVICE_TYPE_CUDA)
 * @return 0 on success, a negative AVERROR code on failure
 */
static int hw_decoder_init(AVCodecContext *ctx, const AVHWDeviceType type)
{
    int err = av_hwdevice_ctx_create(&s_hw_device_ctx, type, NULL, NULL, 0);
    if (err < 0)
    {
        fprintf(stderr, "Failed to create specified HW device.\n");
        return err;
    }

    // FIX: av_buffer_ref() can return NULL on OOM; the original code
    // attached the result unchecked, leaving the decoder without a device.
    ctx->hw_device_ctx = av_buffer_ref(s_hw_device_ctx);
    if (!ctx->hw_device_ctx)
    {
        av_buffer_unref(&s_hw_device_ctx);
        return AVERROR(ENOMEM);
    }

    return 0;
}

/**
 * get_format callback installed on the decoder context: select the pixel
 * format matching the hardware configuration chosen in init_ctx().
 *
 * @param ctx      decoder context (unused)
 * @param pix_fmts AV_PIX_FMT_NONE-terminated list of formats the decoder offers
 * @return s_hw_pix_fmt if it is offered, otherwise AV_PIX_FMT_NONE
 *         (which makes the decode fail)
 */
static AVPixelFormat get_hw_format(AVCodecContext *ctx, const AVPixelFormat *pix_fmts)
{
    (void)ctx;

    // FIX(idiom): the list terminator is AV_PIX_FMT_NONE; compare against
    // the named constant instead of the magic number -1.
    for (const AVPixelFormat *p = pix_fmts; *p != AV_PIX_FMT_NONE; p++)
    {
        if (*p == s_hw_pix_fmt)
            return *p;
    }

    fprintf(stderr, "Failed to get HW surface format.\n");
    return AV_PIX_FMT_NONE;
}

///////////////////////////////////////////
int init_ctx(const char* url, bool use_hw_decode)
{
    if (s_is_init)
        return 0;

    int ret = -1;

    s_input_ctx = avformat_alloc_context();
    if (!s_input_ctx)
    {
        ret = AVERROR(ENOMEM);
        return -1;
    }
    // 设置input_ctx的初始化参数
    s_input_ctx->video_codec_id = AV_CODEC_ID_NONE;
    s_input_ctx->audio_codec_id = AV_CODEC_ID_NONE;
    s_input_ctx->flags |= AVFMT_FLAG_NONBLOCK;
    s_input_ctx->interrupt_callback = int_cb;

    AVHWDeviceType type;
    if (use_hw_decode)
    {
        type = av_hwdevice_find_type_by_name(s_hwdevice_name);
        if (type == AV_HWDEVICE_TYPE_NONE)
        {
            fprintf(stderr, "Device type %s is not supported.\n", s_hwdevice_name);
            fprintf(stderr, "Available device types:");
            while ((type = av_hwdevice_iterate_types(type)) != AV_HWDEVICE_TYPE_NONE)
                fprintf(stderr, " %s", av_hwdevice_get_type_name(type));
            fprintf(stderr, "\n");
            return -1;
        }
    }

    // open the input file 
    if (avformat_open_input(&s_input_ctx, url, NULL, NULL) != 0)
    {
        fprintf(stderr, "Cannot open input file '%s'\n", url);
        return -1;
    }

    if (avformat_find_stream_info(s_input_ctx, NULL) < 0)
    {
        fprintf(stderr, "Cannot find input stream information.\n");
        return -1;
    }

    // find the video stream information 
    AVCodec *decoder = NULL;
    ret = av_find_best_stream(s_input_ctx, AVMEDIA_TYPE_VIDEO, -1, -1, &decoder, 0);
    if (ret < 0)
    {
        fprintf(stderr, "Cannot find a video stream in the input file\n");
        return -1;
    }
    s_video_stream = ret;
    AVCodecParameters *codecpar = s_input_ctx->streams[s_video_stream]->codecpar;

    if (use_hw_decode)
    {
        for (int i = 0;; i++)
        {
            const AVCodecHWConfig *config = avcodec_get_hw_config(decoder, i);
            if (!config)
            {
                fprintf(stderr, "Decoder %s does not support device type %s.\n", decoder->name, av_hwdevice_get_type_name(type));
                return -1;
            }
            if (config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX && config->device_type == type)
            {
                s_hw_pix_fmt = config->pix_fmt;
                break;
            }
        }
    }
    else
    {
        decoder = avcodec_find_decoder(codecpar->codec_id);
        if (!decoder)
        {
            fprintf(stderr, "Codec not found\n");
            return -1;
        }
    }

    // duration
    int64_t duration = s_input_ctx->duration / AV_TIME_BASE;
    // fps
    int fps_num = s_input_ctx->streams[s_video_stream]->r_frame_rate.num;
    int fps_den = s_input_ctx->streams[s_video_stream]->r_frame_rate.den;
    double fps = 0.0;
    if (fps_den > 0)
    {
        fps = fps_num / fps_den;
    }
    av_log(NULL, AV_LOG_INFO, "duration:%ld,\nfps:%f,\n", duration, fps);

    s_decoder_ctx = avcodec_alloc_context3(decoder);
    if (!s_decoder_ctx)
    {
        fprintf(stderr, "Could not allocate video codec context\n");
        return -1;
    }

    if (use_hw_decode)
    {
        if (avcodec_parameters_to_context(s_decoder_ctx, codecpar) < 0)
            return -1;

        s_decoder_ctx->get_format = get_hw_format;
        // 硬解码不需要赋值,从硬件读取, AV_PIX_FMT_NV12;
        s_decoder_ctx->pix_fmt = AV_PIX_FMT_NV12;

        if (hw_decoder_init(s_decoder_ctx, type) < 0)
        {
            fprintf(stderr, "hw decoder init failed\n");
            return -1;
        }
    }
    else
    {
        // 软编码需要赋值
        // FIX: FFmpeg deprecated pixel format used, ref:https://blog.csdn.net/xionglifei2014/article/details/90710797
        switch (s_decoder_ctx->pix_fmt) {
        case AV_PIX_FMT_YUVJ420P:
            s_decoder_ctx->pix_fmt = AV_PIX_FMT_YUV420P;
            break;
        case AV_PIX_FMT_YUVJ422P:
            s_decoder_ctx->pix_fmt = AV_PIX_FMT_YUV422P;
            break;
        case AV_PIX_FMT_YUVJ444P:
            s_decoder_ctx->pix_fmt = AV_PIX_FMT_YUV444P;
            break;
        case AV_PIX_FMT_YUVJ440P:
            s_decoder_ctx->pix_fmt = AV_PIX_FMT_YUV440P;
            break;
        default:
            s_decoder_ctx->pix_fmt = AVPixelFormat(codecpar->format);
        }

        s_decoder_ctx->height = codecpar->height;
        s_decoder_ctx->width = codecpar->width;
        s_decoder_ctx->thread_count = s_thread_count; // 设置解码线程数目
        s_decoder_ctx->thread_type = FF_THREAD_FRAME; // 设置解码type
    }

    av_opt_set_int(s_decoder_ctx, "refcounted_frames", 1, 0);

    if ((ret = avcodec_open2(s_decoder_ctx, decoder, NULL)) < 0)
    {
        fprintf(stderr, "Failed to open codec for stream #%u\n", s_video_stream);
        return -1;
    }
    
    av_log(NULL, AV_LOG_INFO, "codecpar->codec_id:%d,\ndecoder_ctx->codec_id:%d,\n", codecpar->codec_id, s_decoder_ctx->codec_id);
    av_log(NULL, AV_LOG_INFO, "codecpar->format:%d,\ndecoder_ctx->pix_fmt:%d,\nformat:%d,\n", codecpar->format, s_decoder_ctx->pix_fmt, s_pixel_format);

    s_is_init = true;
    return 0;
}


void clear_ctx()
{
    if (s_is_init)
    {
        avcodec_close(s_decoder_ctx);
        av_freep(&s_decoder_ctx);
        avformat_close_input(&s_input_ctx);
        s_video_stream = -1;
        s_is_init = false;
    }
}


/**
 * Report whether the configured HW device type (s_hwdevice_name) is
 * available in this FFmpeg build. When it is not, print the list of
 * device types that are available to stderr.
 */
bool support_hwdevice()
{
    AVHWDeviceType type = av_hwdevice_find_type_by_name(s_hwdevice_name);
    if (type != AV_HWDEVICE_TYPE_NONE)
        return true;

    fprintf(stderr, "Device type %s is not supported.\n", s_hwdevice_name);
    fprintf(stderr, "Available device types:");
    while ((type = av_hwdevice_iterate_types(type)) != AV_HWDEVICE_TYPE_NONE)
        fprintf(stderr, " %s", av_hwdevice_get_type_name(type));
    fprintf(stderr, "\n");
    return false;
}

int ffmpeg_video_decode(const char* url, void(*frame_callback)(AVFrame *frame_bgr), bool use_hw_decode, bool only_key_frame)
{
    av_log(NULL, AV_LOG_INFO, "stream path:%s, \nuse_hw_decode:%s, \nonly_key_frame:%s,\n", 
        url, use_hw_decode ? "true" : "false", only_key_frame ? "true" : "false");
    

    if (0 != init_ctx(url, use_hw_decode))
    {
        fprintf(stderr, "init failed!\n");
        return -1;
    }

    // 视频流中解码的frame
    AVFrame *frame = av_frame_alloc();
    AVFrame *frame_bgr = av_frame_alloc();
    AVFrame *sw_frame = NULL;
    SwsContext *sws_ctx = NULL;
    AVPacket packet;
    uint8_t *buffer = NULL;
    int buffer_size;
    int ret = -1;
    int err = -1;
    AVFrame *tmp_frame = NULL;

    if (use_hw_decode)
        sw_frame = av_frame_alloc(); // transfer frame data from GPU to CPU
     
    int align = 1; // 字节对齐的方式
    av_log(NULL, AV_LOG_INFO, "width:%d,\nheight:%d,\n", s_decoder_ctx->width, s_decoder_ctx->height);
    // format ：缓冲区格式, align：字节对齐的方式(通用设置：1)
    buffer_size = av_image_get_buffer_size(s_pixel_format, s_decoder_ctx->width, s_decoder_ctx->height, align);
    buffer = (uint8_t*)av_malloc(buffer_size);
    if (!buffer)
    {
        fprintf(stderr, "Can not alloc buffer\n");
        ret = AVERROR(ENOMEM);
        return -4;
    }
    av_image_fill_arrays(frame_bgr->data, frame_bgr->linesize, buffer, s_pixel_format, s_decoder_ctx->width, s_decoder_ctx->height, align);

    // 初始化图像pixer format进行转换的context	
    sws_ctx = sws_getContext(s_decoder_ctx->width, s_decoder_ctx->height, s_decoder_ctx->pix_fmt,
        s_decoder_ctx->width, s_decoder_ctx->height, s_pixel_format, SWS_FAST_BILINEAR, NULL, NULL, NULL);

    // 开始解码
    while (av_read_frame(s_input_ctx, &packet) >= 0) 
    {
        if (packet.stream_index != s_video_stream || (only_key_frame && !(packet.flags & AV_PKT_FLAG_KEY)) || // 不是关键帧就会跳过
            packet.size < 1) 
        {
            goto discard_packet;
        }

        // 解码packet
        err = avcodec_send_packet(s_decoder_ctx, &packet);
        if (err != AVERROR(EAGAIN) && err != AVERROR_EOF && err < 0) 
        {
            fprintf(stderr, "Error during decoding\n");
            goto discard_packet;
        }
        err = avcodec_receive_frame(s_decoder_ctx, frame);
        if (err == AVERROR(EAGAIN))
        {
            goto discard_packet;
        }
        else if (err == AVERROR_EOF) 
        {
            av_packet_unref(&packet);
            break;
        }
        else if (err < 0)
        {
            fprintf(stderr, "Error while decoding\n");
            goto discard_packet;
        }

        if (use_hw_decode)
        {
            if (frame->format == s_hw_pix_fmt)
            {
                // retrieve data from GPU to CPU 
                if ((ret = av_hwframe_transfer_data(sw_frame, frame, 0)) < 0)
                {
                    fprintf(stderr, "Error transferring the data to system memory\n");
                    goto discard_packet;
                }
                tmp_frame = sw_frame;
            }
            else
                tmp_frame = frame;
        }
        else
        { 
            tmp_frame = frame;
        }

        memset(buffer, 0, buffer_size);
        sws_scale(sws_ctx, tmp_frame->data, tmp_frame->linesize, 0, tmp_frame->height, frame_bgr->data, frame_bgr->linesize);
        frame_bgr->height = tmp_frame->height;
        frame_bgr->width = tmp_frame->width;

        frame_callback(frame_bgr); // execute callback function

    discard_packet:
        av_frame_unref(frame);
        if (use_hw_decode)
            av_frame_unref(sw_frame);
        av_packet_unref(&packet);
    }

    // clear
    sws_freeContext(sws_ctx);
    av_frame_unref(frame);
    av_frame_unref(frame_bgr);
    av_freep(&frame);
    av_freep(&frame_bgr);
    av_freep(&buffer);
    clear_ctx();

    return 0;
}
