/*
* Copyright (c) Huawei Technologies Co., Ltd. 2022-2022. All rights reserved.
 * Description: H.264/H.265 sending implementation (h264 h265发送实现)
*/

#ifndef IMAGE_SENDER_VIDEO_SENDER_H_
#define IMAGE_SENDER_VIDEO_SENDER_H_

#include <stdio.h>
#include "mdc/cam/camera/cameradecodedmbufserviceinterface_skeleton.h"
#include "util.h"
#include "image_sender_base.h"

#ifdef __cplusplus
extern "C"
{
#endif
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#ifdef __cplusplus
};
#endif
// Alias the legacy FFmpeg pixel-format name onto the modern AV_-prefixed one.
#define  PIX_FMT_YUV420P  AV_PIX_FMT_YUV420P
// NOTE(review): using-directives at header scope leak into every includer and
// are an anti-pattern; the class body below depends on them (string, cout,
// chrono, Mat), so they are kept — consider qualifying names in a follow-up.
using namespace std;
using namespace cv;
using Skeleton = mdc::cam::camera::skeleton::CameraDecodedMbufServiceInterfaceSkeleton;
/**
 * Decodes H.264/H.265 files with FFmpeg, converts each frame to planar
 * YUV420, and publishes it through the CameraDecodedMbufServiceInterface
 * skeleton. Lifecycle per input file: FfmpegInit() -> FillImageAndSend()
 * -> FfmpegUninit().
 */
class VideoSender : public ImageSenderBase<Skeleton>
{
private:
    // FFmpeg demux/decode state; valid only between FfmpegInit() and FfmpegUninit().
    AVFormatContext *pFormatCtx { nullptr };
    int32_t i { 0 };                      // stream-scan index
    int32_t videoindex { 0 };             // index of first video stream, -1 if none found
    AVCodecContext *pCodecCtx { nullptr };// owned by pFormatCtx; closed, not freed, by us
    AVCodec *pCodec { nullptr };
    AVFrame *pFrame { nullptr };          // decoded frame in the codec's native pix_fmt
    AVFrame *pFrameYUV { nullptr };       // frame after conversion to YUV420P
    uint8_t *outBuffer { nullptr };       // av_malloc'd pixel buffer backing pFrameYUV
    AVPacket *packet { nullptr };         // av_malloc'd reusable compressed-packet struct
    int32_t ySize { 0 };                  // width * height: luma plane size in bytes
    int32_t ret { 0 };
    int32_t gotPicture { 0 };             // nonzero when avcodec_decode_video2 yields a frame
    struct SwsContext *imgConvertCtx { nullptr };

    std::string sourceYuvPath { };        // debug dump path: <dir>/videoSource.yuv
    FILE *fpYuv { nullptr };              // open only when SAVE_SENDER_FILE is defined
    chrono::high_resolution_clock::time_point  sec { }; // construction time; base for -t offset
public:
    VideoSender() = default;

    /**
     * @param id   frame/sender id forwarded to the base class
     * @param s    image size forwarded to the base class
     * @param dir  directory holding the input video files (and the YUV dump)
     */
    VideoSender(uint32_t id, cv::Size s, std::string dir): ImageSenderBase<Skeleton>(id, s, dir)
    {
        sourceYuvPath = dir;
        // Fix: guard dir.back() — calling back() on an empty string is UB.
        if (!dir.empty() && dir.back() != '/') {
            sourceYuvPath += "/";
        }
        sourceYuvPath += "videoSource.yuv";
#ifdef SAVE_SENDER_FILE
        fpYuv = fopen(sourceYuvPath.c_str(), "wb+");
        if (fpYuv == nullptr) {
            std::cerr << "Failed to open yuv dump file:" << sourceYuvPath << std::endl;
        }
#endif
        sec = chrono::high_resolution_clock::now();
    }

    ~VideoSender()
    {
#ifdef SAVE_SENDER_FILE
        // Fix: close the dump file if VideoDecodedAndSend() never ran (FILE* leak).
        if (fpYuv != nullptr) {
            fclose(fpYuv);
            fpYuv = nullptr;
        }
#endif
    }

    /** Registers this sender via the base class; returns the base's result. */
    bool RegisterSender()
    {
        std::cout << "Begin register normal image sender." << endl;
        auto result = ImageSenderBase<Skeleton>::RegisterSender();
        std::cout << "Finished to register normal image sender." << endl;
        return result;
    }

    /**
     * Opens videoPath, locates the first video stream, opens its decoder and
     * allocates the frame/packet/scaler state used by FillImageAndSend().
     * @return 0 on success, -1 on failure (all partially-acquired resources
     *         are released before returning).
     */
    int32_t FfmpegInit(std::string videoPath)
    {
        av_register_all();        // register all muxers/demuxers/codecs
        avformat_network_init();  // init networking (harmless for local files)
        pFormatCtx = avformat_alloc_context();
        if (avformat_open_input(&pFormatCtx, videoPath.c_str(), NULL, NULL) != 0) { // open input file
            printf("Couldn't open input stream.\n");
            return -1;
        }
        if (avformat_find_stream_info(pFormatCtx, NULL) < 0) { // probe stream info
            printf("Couldn't find stream information.\n");
            avformat_close_input(&pFormatCtx); // fix: don't leak the demuxer on error
            return -1;
        }
        videoindex = -1;
        // Fix: nb_streams is unsigned — cast once to avoid a signed/unsigned compare.
        for (i = 0; i < static_cast<int32_t>(pFormatCtx->nb_streams); i++) {
            if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
                videoindex = i;
                break;
            }
        }
        if (videoindex == -1) {
            printf("Didn't find a video stream.\n");
            avformat_close_input(&pFormatCtx); // fix: release demuxer on error
            return -1;
        }
        pCodecCtx = pFormatCtx->streams[videoindex]->codec;
        pCodec = avcodec_find_decoder(pCodecCtx->codec_id); // look up the decoder
        printf("pCodecCtx->codec_id : %d\n", pCodecCtx->codec_id);
        printf("AV_CODEC_ID_H264:%d\n", AV_CODEC_ID_H264);
        printf("AV_CODEC_ID_H265:%d\n", AV_CODEC_ID_H265);
        if (pCodec == NULL) {
            printf("Codec not found.\n");
            avformat_close_input(&pFormatCtx); // fix: release demuxer on error
            return -1;
        }
        if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) { // open the decoder
            printf("Could not open codec.\n");
            avformat_close_input(&pFormatCtx); // fix: release demuxer on error
            return -1;
        }
        pFrame = av_frame_alloc();
        pFrameYUV = av_frame_alloc();
        outBuffer = (uint8_t *)av_malloc(avpicture_get_size(PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
        avpicture_fill((AVPicture *)pFrameYUV, outBuffer, PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
        packet = (AVPacket *)av_malloc(sizeof(AVPacket));
        printf("--------------- File Information ----------------\n");
        av_dump_format(pFormatCtx, 0, videoPath.c_str(), 0);
        printf("-------------------------------------------------\n");
        imgConvertCtx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
                                       pCodecCtx->width, pCodecCtx->height, PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
        return 0;
    }

    /** Releases everything acquired by FfmpegInit(); safe to call once per init. */
    void FfmpegUninit()
    {
        sws_freeContext(imgConvertCtx);
        imgConvertCtx = nullptr;
        av_free(packet);      // fix: packet struct was av_malloc'd and leaked
        packet = nullptr;
        av_free(outBuffer);   // fix: pFrameYUV's pixel buffer was leaked
        outBuffer = nullptr;
        av_frame_free(&pFrameYUV);
        av_frame_free(&pFrame);
        avcodec_close(pCodecCtx);
        pCodecCtx = nullptr;  // owned by pFormatCtx, which is freed next
        avformat_close_input(&pFormatCtx);
    }

    /**
     * Enumerates the video files in this sender's directory (sorted), then
     * decodes and publishes each one; repeats forever when -loop is set.
     * @param videoPath unused here (per-file paths come from the directory scan)
     * @param seq       sequence number stamped onto every published frame
     * @param flag      opaque flag forwarded to FillImageAndSend()
     */
    void VideoDecodedAndSend(std::string videoPath, uint32_t seq, void *flag)
    {
        std::string path = dir;
        std::string postfix = comPara.fileType;
        bool isLoop = comPara.loop;
        cout << "dir : " << dir << "\t" << "postfix :" << postfix << endl;
        std::vector<std::string> files;
        int32_t fileCnt = GetFilesBySort(path, files, comPara.fileType);
        if (fileCnt == 0) {
            cout << "PATH:" << path << " has no files!" << endl;
            return;
        }
        do {
            for (auto &file : files) {
                int32_t initRet = FfmpegInit(file);
                if (initRet != 0) {
                    std::cerr << "ffmpeg init err.";
                    return;
                }
                FillImageAndSend(file, seq, flag);
                FfmpegUninit();
            }
        } while (isLoop);
#ifdef SAVE_SENDER_FILE
        // Fix: null-check and reset so the destructor cannot double-close it.
        if (fpYuv != nullptr) {
            fclose(fpYuv);
            fpYuv = nullptr;
        }
#endif
    }

    /**
     * Reads packets from the already-initialized demuxer, decodes each video
     * frame, converts it to planar YUV420 (I420: Y plane, then U, then V) and
     * publishes it through the mbuf event, pacing at comPara.frequency Hz
     * (default 30 Hz).
     * @param videoPath used only for logging
     * @param seq       sequence number stamped onto every published frame
     * @param flag      unused
     */
    void FillImageAndSend(std::string videoPath, uint32_t seq, void *flag) override
    {
        (void)flag;
        if (videoPath.empty()) {
            std::cerr << "File not exist,or corrupt.path:" << videoPath << std::endl;
            return ;
        }
        while (av_read_frame(pFormatCtx, packet) >= 0) { // read one compressed packet
            if (packet->stream_index == videoindex) {
                ret = avcodec_decode_video2(pCodecCtx, pFrame, &gotPicture, packet); // decode it
                if (ret < 0) {
                    printf("Decode Error.\n");
                    av_free_packet(packet); // fix: packet payload leaked on the error path
                    return ;
                }
                if (gotPicture) {
                    sws_scale(imgConvertCtx, (const uint8_t *const *)pFrame->data, pFrame->linesize, 0,
                              pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
                    printf("Succeed to decode 1 frame!\n");
                    ySize = pCodecCtx->width * pCodecCtx->height;
#ifdef SAVE_SENDER_FILE
                    if (fpYuv != nullptr) { // fix: fopen in the ctor may have failed
                        fwrite(pFrameYUV->data[0], 1, ySize, fpYuv);    // Y
                        fwrite(pFrameYUV->data[1], 1, ySize / 4, fpYuv);  // U
                        fwrite(pFrameYUV->data[2], 1, ySize / 4, fpYuv);  // V
                    }
#endif
                    auto imagePub = dataSkeleton->cameraDecodedMbufEvent.Allocate();
                    imagePub->CameraHeader.FrameId = frameID;
                    imagePub->CameraHeader.Seq = seq;
                    imagePub->Width = pCodecCtx->width;
                    imagePub->Height = pCodecCtx->height;
                    size_t sizeInBytes = pCodecCtx->width * pCodecCtx->height * 3 / 2;
                    imagePub->DataSize = sizeInBytes;
                    // NOTE(review): the planes are copied sequentially (Y, U, V), which is
                    // planar I420 rather than interleaved NV12 despite the variable name.
                    Mat YUV420NV12;
                    YUV420NV12.create(pCodecCtx->height * 1.5, pCodecCtx->width, CV_8UC1);
                    memcpy(YUV420NV12.data, pFrameYUV->data[0], ySize);
                    memcpy(YUV420NV12.data + ySize, pFrameYUV->data[1], ySize / 4);
                    memcpy(YUV420NV12.data + ySize * 5 / 4, pFrameYUV->data[2], ySize / 4);
                    imagePub->UdpData.assign(YUV420NV12.data, YUV420NV12.data + imagePub->DataSize);
                    if (comPara.time.empty()) {
                        timeval now;
                        gettimeofday(&now, NULL);
                        imagePub->CameraHeader.Stamp.Sec = now.tv_sec;
                        imagePub->CameraHeader.Stamp.Nsec = now.tv_usec * 1000U;
                    } else {
                        time_t timeStamp = ConvertTimeStr2TimeStamp(comPara.time);
                        auto t1 = chrono::high_resolution_clock::now();
                        int64_t elapsedNs = (t1 - sec).count();
                        int64_t duration = elapsedNs / 1000000000;
                        imagePub->CameraHeader.Stamp.Sec = timeStamp + duration;
                        // Fix: Nsec was left unset on this branch.
                        imagePub->CameraHeader.Stamp.Nsec = elapsedNs % 1000000000;
                        printf("timeStamp=%ld\n", timeStamp + duration);
                    }
                    dataSkeleton->cameraDecodedMbufEvent.Send(move(imagePub));
                    cout << "Begin send image. seq:" << seq << " frameID:" << frameID << " path:" << videoPath << endl;
                    if (comPara.frequency != "") {
                        int32_t freq = atoi(comPara.frequency.c_str());
                        if (freq <= 0) {
                            freq = 30; // fix: atoi yields 0 for bad input -> division by zero
                        }
                        int32_t sleepTime = 1000000 / freq;
                        usleep(sleepTime);
                    } else {
                        usleep(1000000 / 30); // 30hz
                    }
                }
            }
            av_free_packet(packet); // release this packet's payload before the next read
        }
    }
};

#endif //IMAGE_SENDER_VIDEO_SENDER_H_
