﻿#ifndef C_REMOTECAMERA_H
#define C_REMOTECAMERA_H

#include "../../includes/common/common.h"

#ifdef ENABLE_REMOTECAMERA

/**
 * Build notes
 *
 * ffmpeg install: https://github.com/BtbN/FFmpeg-Builds/releases/ffmpeg-n5.0-latest-win64-lgpl-5.0.zip
 * opencv download: https://opencv.org/releases/
 * darknet download: https://github.com/AlexeyAB/darknet
 */

/// 1. FFmpeg headers
#ifdef __cplusplus             // Tell the compiler to compile this section with C linkage, not C++
extern "C"{
#endif

#include <libavcodec/avcodec.h>
#include <libavutil/channel_layout.h>
#include <libavutil/common.h>
#include <libavutil/frame.h>
#include <libavutil/samplefmt.h>
#include <libavutil/opt.h>
#include <libavutil/imgutils.h>
#include <libavutil/parseutils.h>
#include <libavutil/error.h>
#include <libavutil/buffer.h>
#include <libavutil/hwcontext.h>
#include <libavutil/hwcontext_qsv.h>
#include <libavutil/mem.h>
#include <libswscale/swscale.h>
#include <libavformat/avformat.h>

#ifdef __cplusplus
}
#endif

#ifdef REMOTECAMERA_ENABLE_DARKNET
#define GPU
#define OPENCV
#define HAVE_STRUCT_TIMESPEC
#include <opencv2/opencv.hpp>
#include <include/yolo_v2_class.hpp>
#include <include/darknet.h>
#endif

#include <QObject>
#include <QThread>
#include <QThreadPool>
#include <QMutex>
#include <QImage>
#include <QSize>
#include <QList>
#include <QHash>
#include <QVector>

#include <QOpenGLWidget>
#include <QOpenGLShaderProgram>
#include <QOpenGLFunctions>
#include <QOpenGLTexture>

#include "../../includes/common/NedAllocatedObject.h"
#include "../../includes/common/singleton.h"
#include "../../includes/common/NedAllocatorImpl.h"

#ifdef REMOTECAMERA_ENABLE_DARKNET
/// Per-capture detection state: tracks the elapsed time since the last
/// darknet/YOLO detection and caches the bounding boxes it produced.
struct tagVideoCaptureBbox
{
    /// maxtime: detection interval in milliseconds (default 60000 = once a minute,
    /// matching CVideoCapManager's default DetectorMaxTime).
    /// Merged the former default and one-arg constructors into a single
    /// constructor with a default argument; both call forms still work.
    tagVideoCaptureBbox(quint64 maxtime = 60000)
        : curTime(0),curMaxTime(maxtime) {}

    quint64 curTime;              // elapsed time since the last detection (ms)
    quint64 curMaxTime;           // run a detection every curMaxTime ms
    std::vector<bbox_t> bboxs;    // bounding boxes from the last detection
};
#endif

/// Capture/decode thread for one network camera: pulls the stream with
/// FFmpeg, decodes it (hardware or software path) and hands decoded
/// frames to consumers via signals.
class CVideoCapture : public QThread ,public NedAllocatedObject
{
    Q_OBJECT

public:
    explicit CVideoCapture(void);
    ~CVideoCapture();

    /// Open a network camera at the given URL (hardware decoding by default)
    bool openUrl(QString url,bool isHarddecoding=true);
    /// Set whether the thread should exit
    inline void setIsExit(bool isexit) { m_isExit = isexit; }
    /// Set whether video playback is running
    inline void setIsPlaying(bool isplaying) { m_isPlaying = isplaying; }
    /// Query whether video playback is running
    inline bool isPlaying(void) { return m_isPlaying; }
    /// Get the video size (width/height)
    inline QSize getVideoSize(void) { return m_VideoSize; }
    /// Set whether the stream is pushed out (RTMP)
    inline void setIsRtmpSend(bool isSend) { m_isRtmpSend = isSend; }
    /// Query whether the stream is pushed out (RTMP)
    inline bool isRtmpSend(void) { return m_isRtmpSend; }
    /// Set whether decoded frames are displayed
    inline void setDisplayFrame(bool isShow) { m_isDisplayFrame = isShow; }
    /// Query whether decoded frames are displayed
    inline bool isDisplayFrame(void) { return m_isDisplayFrame; }

private:
    /// Initialize hardware decoding for the given URL
    bool InitHarddecoding(QString url);
    /// Initialize software decoding for the given URL
    bool InitSoftdecoding(QString url);

    // Callback handed to FFmpeg to pick the hardware pixel format
    // (signature matches AVCodecContext::get_format)
    static enum AVPixelFormat get_hw_format(AVCodecContext *ctx,
                                            const enum AVPixelFormat *pix_fmts);
    // Set up the hardware device context for the given device type
    static int hw_decoder_init(AVCodecContext *ctx, const enum AVHWDeviceType type);

signals:
    /// Emitted with a decoded frame buffer ready for display
    void signal_processVideoFrame(uint8_t *outbuffer,int videoW,int videoH);
    /// Emitted with a decoded frame for RTMP push
    void signal_processVideoRtmpSend(AVFrame *pFrame);

protected:
    // Thread body: demux/decode loop (implemented in the .cpp)
    virtual void run();

private:
    bool m_isExit;                       // whether the thread should exit
    bool m_isPlaying;                    // whether playback has started

    AVFormatContext *pAVFormatContext;   // demuxer context
    AVCodecContext  *pAVCodecContext;    // decoder context
    const AVCodec   *pAVCodec;           // selected decoder
    AVPacket        *pAVpacket;          // compressed packet read from the input
    AVFrame         *pyuvFrame;          // decoded YUV frame
    AVFrame         *nv12Frame;          // NV12 frame — presumably the hw-decode output; confirm in .cpp
    AVFrame         *pAVFrameRgb;        // frame converted for display
    struct SwsContext *pSwsContext;      // pixel-format/scaling conversion context
    AVPixelFormat   mpix_fmt;            // hw pixel format chosen by get_hw_format — confirm in .cpp
    AVBufferRef     *m_hw_device_ctx;    // hardware device context reference
    bool            m_isHarddecoding;     // whether hardware decoding is in use
    bool            m_isRtmpSend;         // whether to push the stream via RTMP
    bool            m_isDisplayFrame;     // whether to display decoded frames

    int videoIndex;          // index of the video stream in the input
    int numBytes;            // size of the decoded frame buffer in bytes
    uint8_t * m_outbuffer;   // buffer holding the decoded/converted frame
    QString filename;        // source URL/filename
    QSize m_VideoSize;       // video width/height
};

/// Encodes decoded frames to H.264 and pushes them to an RTMP target.
class CVideoCaptureRtmpSend : public QObject ,public NedAllocatedObject
{
    Q_OBJECT

public:
    explicit CVideoCaptureRtmpSend(QObject *parent=nullptr);
    ~CVideoCaptureRtmpSend();

    /// Open the given network camera and push its stream to the target address
    bool openUrl(QString srcurl,QString decurl);
    /// Push an already-opened capture to the target address
    bool openUrl2(CVideoCapture *pVideoCapture,QString decurl);

    /// Start working
    void start(void);
    /// Stop working
    void stop(void);

private:
    // Open the RTMP network IO and send the container (mux) header
    bool SendMuxHead(QString url);
    /// Initialize the encoder
    bool InitVideoCodec(int videoW,int videoH);
    /// Encode one video frame
    AVPacket* EncodeVideo(AVFrame* frame);
    /// Push one packet over RTMP
    bool SendFrame(AVPacket* pack);
    /// Close the stream push
    void CloseVideoSend(void);

private slots:
    /// RTMP push slot — receives decoded frames (from CVideoCapture's signal)
    void processVideoRtmpSend(AVFrame *pFrame);

private:
    // RTMP FLV muxer
    AVFormatContext* avFormatContext;
    // Encoder context, YUV -> H264
    AVCodecContext* InvideoCodecContext;
    SwsContext* swsContext; // pixel-format conversion context
    AVPacket outVideoPacket; // encoded output data
    // Newly created video stream
    AVStream *OutvideoStream;
    int fps;       // output frame rate
    int videoPts;  // running presentation timestamp counter
    int vindex;    // output video stream index
    CVideoCapture *m_VideoCapture;         // video capture/decoder
    bool m_isDeleteVideoCapture;           // whether this object owns m_VideoCapture — presumably differs between openUrl and openUrl2; confirm in .cpp
};

/// OpenGL widget that displays frames from its embedded CVideoCapture,
/// rendering YUV data through Y/U/V textures and a shader program; can
/// also forward the stream to an RTMP target.
class CVideoCaptureWidget : public QOpenGLWidget ,public NedAllocatedObject,
        protected QOpenGLFunctions
{
    Q_OBJECT

public:
    explicit CVideoCaptureWidget(QWidget *parent = nullptr);
    ~CVideoCaptureWidget();

    /// Open a network camera at the given URL (hardware decoding by default)
    bool openUrl(QString url,bool isHarddecoding=true);
    /// Enable RTMP push to the target address
    bool enableRtmp(QString decurl);

    /// Start working
    void open(void);
    /// Stop working
    void close(void);
    /// Start video playback
    void play(void);
    /// Stop video playback
    void stop(void);

    /// Set the widget ID
    inline void setID(int pid) { m_Id = pid; }
    /// Get the widget ID
    inline int getID(void) { return m_Id; }
    /// Set whether the view is maximized
    inline void setChangeBig(bool big) { m_ChangeBig = big; }
    /// Query whether the view is maximized
    inline bool isChangeBig(void) { return m_ChangeBig; }

signals:
    /// Emitted on double-click, carrying this widget's ID
    void signalsMouseDoubleClickEvent(int pId);

private slots:
    /// Slot receiving a decoded frame buffer for rendering
    void processvideoframe(uint8_t *outbuffer,int videoW,int videoH);

protected:
    virtual void initializeGL() Q_DECL_OVERRIDE;
    virtual void resizeGL(int w, int h) Q_DECL_OVERRIDE;
    virtual void paintGL() Q_DECL_OVERRIDE;
    virtual void mouseDoubleClickEvent(QMouseEvent *event);

private:
    int m_Id;               // widget ID
    bool m_ChangeBig;       // whether the view is maximized
    GLuint textureUniformY; // Y-plane texture uniform location
    GLuint textureUniformU; // U-plane texture uniform location
    GLuint textureUniformV; // V-plane texture uniform location
    GLuint id_y; // Y texture object ID
    GLuint id_u; // U texture object ID
    GLuint id_v; // V texture object ID
    QOpenGLTexture* m_pTextureY;  // Y texture object
    QOpenGLTexture* m_pTextureU;  // U texture object
    QOpenGLTexture* m_pTextureV;  // V texture object
    QOpenGLShader *m_pVSHader;  // vertex shader object
    QOpenGLShader *m_pFSHader;  // fragment shader object
    QOpenGLShaderProgram *m_pShaderProgram; // shader program container

    int m_nVideoW; // video width
    int m_nVideoH; // video height
    uint8_t * m_outbuffer;  // latest decoded frame buffer

    CVideoCapture m_VideoCapture;         // capture/decoding thread

    CVideoCaptureRtmpSend m_VideoCaptureRtmpSend;   // RTMP push
    QString m_decurl;          // target push address
    bool m_RtmpSendInitSuccessed;        // whether RTMP push initialized successfully
};

/// Singleton manager for all camera widgets and RTMP pushers; when darknet
/// is enabled it also runs a YOLO detector over captured frames.
class CVideoCapManager : public QObject,public Singleton<CVideoCapManager>
{
    Q_OBJECT

public:
    explicit CVideoCapManager(QObject *parent = nullptr);
    ~CVideoCapManager();

    /// Add a network camera widget under the given parent
    CVideoCaptureWidget *addVideoCapture(QWidget *parwidget,QString capUrl,bool isHarddecoding=true);
    /// Remove one camera widget
    bool delVideoCapture(CVideoCaptureWidget *parwidget);
    /// Remove all camera widgets
    void deleteAllVideoCaptures(void);
    /// Look up a camera widget by ID
    CVideoCaptureWidget *getVideoCapture(int pId);
    /// Show or hide the camera widget with the given ID
    /// (isMyself semantics are defined in the .cpp — confirm there)
    void showVideoCapture(int pId,bool isShow,bool isMyself);

    /// Add an RTMP pusher for a network camera
    CVideoCaptureRtmpSend* addVideoCaptureRtmp(QString capUrl,QString rtmpUrl);
    /// Remove one RTMP pusher
    bool delVideoCaptureRtmp(CVideoCaptureRtmpSend *caprtmp);
    /// Remove all RTMP pushers
    void deleteAllVideoCapRtmp(void);

    /// Enable the darknet Detector
    /// (DetectorMaxTime: detection interval in ms; default is once a minute)
    bool enableDetector(QString cfg_filename, QString weight_filename,
                        QString names_filename,
                        int gpu_id = 0,
                        int DetectorMaxTime=60000);
    /// Shut down the Detector
    void cleanDetector(void);
    /// Run detection on one captured video frame
    void DetectorprocessVideoFrame(CVideoCapture *videocapture,unsigned char *data,int videoW,int videoH);

signals:
    /// Re-emitted widget double-click, carrying the widget ID
    void signalsMouseDoubleClickEvent(int pId);

private slots:
    /// Receives double-clicks from the managed widgets
    void slotsMouseDoubleClickEvent(int pId);

private:
    QVector<CVideoCaptureWidget*> m_VideoCapManager;            // managed camera widgets
    QVector<CVideoCaptureRtmpSend*> m_VideoCapRtmpSendManager;  // managed RTMP pushers

#ifdef REMOTECAMERA_ENABLE_DARKNET
    bool m_isLoadDetector;                                      // whether darknet has been loaded
    int m_DetectorMaxTime;                                      // detection interval in ms; initially once per minute
    Detector *m_Detector;                                       // darknet YOLO detector
    QMutex m_DetectorMutex;                                     // guards detector state — presumably against concurrent capture threads; confirm in .cpp
    std::vector<std::string> objects_names;                     // class names loaded from names_filename
    QHash<CVideoCapture*,tagVideoCaptureBbox> m_videocapturebboxs; // per-capture detection timing/results
#endif
};

#endif // ENABLE_REMOTECAMERA

#endif // C_REMOTECAMERA_H
