/*
 * Copyright (c) Huawei Technologies Co., Ltd. 2022-2022. All rights reserved.
 * Description: h264/h265 sender implementation
 */

#ifndef IMAGE_SENDER_VIDEO_SENDER_H
#define IMAGE_SENDER_VIDEO_SENDER_H

#include <cstdio>
#include "mdc/cam/camera/cameradecodedmbufserviceinterface_skeleton.h"
#include "util.h"
#include "image_sender_base.h"

#ifdef __cplusplus
extern "C" {
#endif
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#ifdef __cplusplus
};
#endif
#define PIX_FMT_YUV420P AV_PIX_FMT_YUV420P
using Skeleton = mdc::cam::camera::skeleton::CameraDecodedMbufServiceInterfaceSkeleton;

/*
 * VideoSender decodes h264/h265 files with FFmpeg and publishes each decoded
 * frame as a planar YUV420 image through the CameraDecodedMbufServiceInterface
 * skeleton event.
 *
 * Lifecycle per input file: FfmpegInit() -> FillImageAndSend() -> FfmpegUninit().
 */
class VideoSender : public ImageSenderBase<Skeleton> {
public:
    VideoSender() = default;
    /*
     * id/s/dir are forwarded to ImageSenderBase; dir additionally names the
     * directory where the optional YUV dump file "videoSource.yuv" is created
     * (only in SAVE_SENDER_FILE builds).
     */
    VideoSender(uint32_t id, cv::Size s, std::string dir) : ImageSenderBase<Skeleton>(id, s, dir)
    {
        // Build "<dir>/videoSource.yuv", tolerating a trailing '/' on dir.
        sourceYuvPath = dir;
        if (dir.back() != '/') {
            sourceYuvPath += "/";
        }
        sourceYuvPath += "videoSource.yuv";
#ifdef SAVE_SENDER_FILE
        fpYuv = fopen(sourceYuvPath.c_str(), "wb+");
        if (fpYuv == nullptr) { // previously unchecked: later fwrite/fclose would crash
            std::cerr << "Failed to open yuv dump file: " << sourceYuvPath << std::endl;
        }
#endif
        sec = std::chrono::high_resolution_clock::now(); // reference point for the -t timestamp option
    }
    // Owns raw FFmpeg resources and a FILE*; copying would double-free them.
    VideoSender(const VideoSender &) = delete;
    VideoSender &operator=(const VideoSender &) = delete;
    ~VideoSender()
    {
        if (fpYuv != nullptr) { // close the dump file if VideoDecodedAndSend() did not run to completion
            (void)fclose(fpYuv);
            fpYuv = nullptr;
        }
    }

    // Registers this sender with the communication layer; returns the base-class result.
    bool RegisterSender()
    {
        std::cout << "Begin register normal image sender." << std::endl;
        auto result = ImageSenderBase<Skeleton>::RegisterSender();
        std::cout << "Finished to register normal image sender." << std::endl;
        return result;
    }

    /*
     * Opens videoPath, locates its first video stream and prepares the decoder,
     * frame buffers, packet and the YUV420P conversion context.
     * Returns 0 on success, -1 on failure; on failure all FFmpeg resources
     * acquired so far are released again (previously they leaked).
     * SwitchFlag (defined elsewhere) injects faults for testing:
     *   1 -> behave as if no video stream exists, 2 -> behave as if no decoder exists.
     */
    int32_t FfmpegInit(std::string videoPath)
    {
        av_register_all();                                                          // register all components
        avformat_network_init();                                                    // initialize networking
        pFormatCtx = avformat_alloc_context();                                      // allocate an AVFormatContext
        if (avformat_open_input(&pFormatCtx, videoPath.c_str(), NULL, NULL) != 0) { // open the input video file
            printf("Couldn't open input stream.\n");
            return -1; // avformat_open_input() frees pFormatCtx itself on failure
        }
        if (avformat_find_stream_info(pFormatCtx, NULL) < 0) { // read stream information
            printf("Couldn't find stream information.\n");
            avformat_close_input(&pFormatCtx);
            return -1;
        }
        videoindex = -1;
        for (uint32_t idx = 0; idx < pFormatCtx->nb_streams; idx++) { // unsigned loop matches nb_streams
            if (pFormatCtx->streams[idx]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
                videoindex = static_cast<int32_t>(idx);
                break;
            }
        }
        if (1 == SwitchFlag) { // fault injection: simulate "no video stream"
            videoindex = -1;
        }
        if (videoindex == -1) {
            avformat_close_input(&pFormatCtx);
            return -1;
        }
        pCodecCtx = pFormatCtx->streams[videoindex]->codec;
        pCodec = avcodec_find_decoder(pCodecCtx->codec_id); // look up the matching decoder
        printf("pCodecCtx->codec_id : %d\n", pCodecCtx->codec_id);
        printf("AV_CODEC_ID_H264:%d\n,  AV_CODEC_ID_H265:%d\n", AV_CODEC_ID_H264, AV_CODEC_ID_H265);
        const int ciTWO = 2;
        if (ciTWO == SwitchFlag) { // fault injection: simulate "decoder not found"
            pCodec = NULL;
        }
        if (pCodec == NULL) {
            printf("Codec not found.\n");
            avformat_close_input(&pFormatCtx);
            return -1;
        }
        if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) { // open the decoder
            printf("Could not open codec.\n");
            avformat_close_input(&pFormatCtx);
            return -1;
        }
        pFrame = av_frame_alloc();
        pFrameYUV = av_frame_alloc();
        outBuffer = (uint8_t *)av_malloc(avpicture_get_size(PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
        avpicture_fill((AVPicture *)pFrameYUV, outBuffer, PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
        packet = (AVPacket *)av_malloc(sizeof(AVPacket));
        av_init_packet(packet); // previously missing: packet fields were left uninitialized
        packet->data = NULL;
        packet->size = 0;
        printf("--------------- File Information ----------------\n");
        av_dump_format(pFormatCtx, 0, videoPath.c_str(), 0);
        printf("-------------------------------------------------\n");
        imgConvertCtx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width,
            pCodecCtx->height, PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
        return 0;
    }

    /*
     * Releases everything acquired by FfmpegInit(). Safe to call repeatedly:
     * every pointer is reset to nullptr. outBuffer and packet were previously
     * leaked here.
     */
    void FfmpegUninit()
    {
        sws_freeContext(imgConvertCtx);
        imgConvertCtx = nullptr;
        if (packet != nullptr) {
            av_free(packet);
            packet = nullptr;
        }
        av_frame_free(&pFrameYUV);
        if (outBuffer != nullptr) { // owned by us: avpicture_fill() does not transfer ownership to pFrameYUV
            av_free(outBuffer);
            outBuffer = nullptr;
        }
        av_frame_free(&pFrame);
        if (pCodecCtx != nullptr) {
            avcodec_close(pCodecCtx);
            pCodecCtx = nullptr; // context storage itself belongs to pFormatCtx
        }
        avformat_close_input(&pFormatCtx);
    }

    // Result of the last VideoDecodedAndSend() call: 0 = success, -1 = failure.
    int retVideoSender1 = -1;
    /*
     * Scans this->dir for files of the configured type, then decodes and sends
     * every file (repeating forever when g_comPara.loop is set).
     * videoPath is not used for discovery; seq/flag are forwarded to FillImageAndSend().
     */
    void VideoDecodedAndSend(std::string videoPath, uint32_t seq, int32_t *flag)
    {
        std::string path = dir;
        std::string postfix = g_comPara.fileType;
        bool isLoop = g_comPara.loop;
        std::cout << "dir : " << dir << "\t" <<
            "postfix : " << postfix << std::endl;
        std::vector<std::string> files;
        int32_t fileCnt = GetFilesBySort(path, files, g_comPara.fileType);
        if (fileCnt == 0) {
            std::cout << "PATH:" << path << " has no files!" << std::endl;
            retVideoSender1 = -1;
            return;
        }
        do {
            for (auto &file : files) {
                int32_t initRet = FfmpegInit(file);
                if (initRet != 0) {
                    std::cerr << "ffmpeg init err.";
                    retVideoSender1 = -1;
                    return;
                }
                FillImageAndSend(file, seq, flag);
                FfmpegUninit();
            }
        } while (isLoop);
#ifdef SAVE_SENDER_FILE
        if (fpYuv != nullptr) { // the constructor's fopen may have failed
            int32_t closeRet = fclose(fpYuv);
            fpYuv = nullptr;     // prevent a double close in the destructor
            if (closeRet != 0) { // fclose() signals errors with EOF, not only -1
                retVideoSender1 = -1;
                return;
            }
        }
#endif
        retVideoSender1 = 0;
    }

    // Result of the last FillImageAndSend() call: 0 = success, -1 = failure.
    int retVideoSender2 = -1;
    /*
     * Demuxes videoPath packet by packet, decodes each packet of the selected
     * video stream and hands every decoded picture to HanderImageAndSend().
     */
    void FillImageAndSend(std::string videoPath, uint32_t seq, int32_t *flag) override
    {
        (void)flag;
        if (videoPath.empty()) {
            std::cerr << "File not exist,or corrupt.path:" << videoPath << std::endl;
            retVideoSender2 = -1;
            return;
        }
        while (av_read_frame(pFormatCtx, packet) >= 0) { // read one compressed packet
            if (packet->stream_index == videoindex) {
                ret = avcodec_decode_video2(pCodecCtx, pFrame, &gotPicture, packet); // decode the packet
                if (ret < 0) {
                    printf("Decode Error.\n");
                    av_free_packet(packet); // previously leaked on this error path
                    retVideoSender2 = -1;
                    return;
                }
                HanderImageAndSend(videoPath, seq);
            }
            av_free_packet(packet);
        }
        retVideoSender2 = 0;
    }

private:
    // Throttles sending to g_comPara.frequency Hz (defaults to 30 Hz when unset).
    void HandleFrequency()
    {
        const int32_t sleepTimeIntervalMs = 1000000; // one second, in microseconds (usleep units)
        if (g_comPara.frequency != "") {
            int32_t freq = atoi(g_comPara.frequency.c_str());
            if (freq != 0) {
                int32_t sleepTime = sleepTimeIntervalMs / freq;
                usleep(sleepTime);
            }
        } else {
            usleep(sleepTimeIntervalMs / 30); // 30hz
        }
    }

    /*
     * Converts the decoded frame to planar YUV420P, optionally appends it to
     * the dump file, copies it into a publish buffer, stamps it (wall-clock
     * time, or the -t start time plus elapsed whole seconds) and sends it.
     * NOTE(review): the local buffer is named NV12 but the copied layout is
     * planar I420 (separate Y, U, V planes) - confirm receivers expect this.
     */
    void HanderImageAndSend(std::string videoPath, uint32_t seq)
    {
        if (gotPicture) {
            sws_scale(imgConvertCtx, (const uint8_t *const *)pFrame->data, pFrame->linesize, 0,
                pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
            ySize = pCodecCtx->width * pCodecCtx->height;
#ifdef SAVE_SENDER_FILE
            if (fpYuv != nullptr) { // guard against a failed fopen in the constructor
                fwrite(pFrameYUV->data[0], 1, ySize, fpYuv);    // Y
                fwrite(pFrameYUV->data[1], 1, ySize / intSize, fpYuv);  // U
                fwrite(pFrameYUV->data[frameDataYuvIndex], 1, ySize / intSize, fpYuv);  // V
            }
#endif
            auto imagePub = dataSkeleton->cameraDecodedMbufEvent.Allocate();
            imagePub->CameraHeader.FrameId = frameID;
            imagePub->CameraHeader.Seq = seq;
            imagePub->Width = pCodecCtx->width;
            imagePub->Height = pCodecCtx->height;
            size_t sizeInBytes = pCodecCtx->width * pCodecCtx->height * 3 / 2; // YUV420: 1.5 bytes per pixel
            imagePub->DataSize = sizeInBytes;
            cv::Mat YUV420NV12;
            // yuv w:h compare - 3/2, computed in integer math (no float rounding)
            YUV420NV12.create(pCodecCtx->height * 3 / 2, pCodecCtx->width, CV_8UC1);
            memcpy_s(YUV420NV12.data, ySize, pFrameYUV->data[0], ySize);
            memcpy_s(YUV420NV12.data + ySize, ySize / intSize, pFrameYUV->data[1], ySize / intSize);
            memcpy_s(YUV420NV12.data + ySize * frameDataYuvSize / intSize, ySize / intSize,
                pFrameYUV->data[frameDataYuvIndex], ySize / intSize);
            imagePub->UdpData.assign(YUV420NV12.data, YUV420NV12.data + imagePub->DataSize);
            if (g_comPara.time.empty()) {
                timeval now;
                gettimeofday(&now, NULL);
                imagePub->CameraHeader.Stamp.Sec = now.tv_sec;
                imagePub->CameraHeader.Stamp.Nsec = now.tv_usec * 1000U;
            } else {
                // -t mode: user-supplied start time plus seconds elapsed since construction.
                time_t timeStamp = ConvertTimeStr2TimeStamp(g_comPara.time);
                auto t1 = std::chrono::high_resolution_clock::now();
                int64_t duration = std::chrono::duration_cast<std::chrono::seconds>(t1 - sec).count();
                imagePub->CameraHeader.Stamp.Sec = timeStamp + duration;
            }
            dataSkeleton->cameraDecodedMbufEvent.Send(move(imagePub));
            HandleFrequency();
        }
    }

private:
    AVFormatContext *pFormatCtx { nullptr }; // demuxer context; owns pCodecCtx's storage
    int32_t videoindex { 0 };                // index of the selected video stream, -1 = none
    AVCodecContext *pCodecCtx { nullptr };
    AVCodec *pCodec { nullptr };
    AVFrame *pFrame { nullptr };             // raw decoder output
    AVFrame *pFrameYUV { nullptr };          // converted YUV420P frame; pixels live in outBuffer
    uint8_t *outBuffer { nullptr };          // pixel storage behind pFrameYUV, owned by this class
    AVPacket *packet { nullptr };
    int32_t ySize { 0 };                     // width * height of the current frame
    int32_t ret { 0 };
    int32_t gotPicture { 0 };                // nonzero when the decoder produced a complete picture
    struct SwsContext *imgConvertCtx { nullptr };

    std::string sourceYuvPath {};            // "<dir>/videoSource.yuv" dump path
    FILE *fpYuv { nullptr };                 // dump file handle (SAVE_SENDER_FILE builds only)
    std::chrono::high_resolution_clock::time_point sec {}; // -t used
    const int32_t intSize { 4 };             // each chroma plane is ySize / 4 bytes
    const int32_t frameDataYuvIndex { 2 };   // V plane index in AVFrame::data
    const int32_t frameDataYuvSize { 5 };    // ySize * 5 / 4 = byte offset of the V plane
};

#endif // IMAGE_SENDER_VIDEO_SENDER_H
