﻿#include "videocapturer.h"
#include "timeutil.h"
#include "avpublishtime.h"
#include <QDesktopServices>
#include <QDebug>
#include <QCameraInfo>
#include <QtGlobal>
namespace LQF
{

//Show Dshow Device
static void show_dshow_device()
{
    AVFormatContext *pFormatCtx = avformat_alloc_context();
    AVDictionary* options = nullptr;
    av_dict_set(&options, "list_devices", "true", 0);
    AVInputFormat *iformat = (AVInputFormat *)av_find_input_format("dshow");
    LOG(INFO) << "========Device Info=============";
    avformat_open_input(&pFormatCtx, "video=dummy", iformat, &options);
    LOG(INFO) << "================================";
}

//Show Dshow Device Option
static void show_dshow_device_option()
{
    AVFormatContext *pFormatCtx = avformat_alloc_context();
    AVDictionary* options = nullptr;
    av_dict_set(&options, "list_options", "true", 0);
    AVInputFormat *iformat = (AVInputFormat *)av_find_input_format("dshow");
    LOG(INFO) << "========Device Option Info======";
    avformat_open_input(&pFormatCtx, "video=Integrated Camera", iformat, &options);
    LOG(INFO) << "================================";
}

// Members are initialized by their in-class/default initializers; all real
// setup happens in Init().
VideoCapturer::VideoCapturer() = default;

// Stops the capture loop, then releases every FFmpeg resource acquired in
// Init()/Loop(). The av_*_free helpers null out the pointers they are given.
VideoCapturer::~VideoCapturer()
{
    // Make sure the worker thread is no longer touching the members below.
    Stop();

    if (video_frame_ != nullptr)
        av_frame_free(&video_frame_);
    if (yuv_frame_ != nullptr)
        av_frame_free(&yuv_frame_);
    if (codec_ctx_ != nullptr)
        avcodec_free_context(&codec_ctx_);
    if (ifmt_ctx_ != nullptr)
        avformat_close_input(&ifmt_ctx_);
}

// Open the capture device described by `properties`, locate its video stream
// and prepare a decoder for it.
//
// Recognized properties:
//   video_device_name  - device URL (e.g. "video=Integrated Camera" on Windows)
//   video_enc_width/height, video_enc_pix_fmt - target encode geometry/format
//   fps                - requested capture frame rate (default 25)
//   x, y               - capture origin (used by screen grab paths)
//
// Returns RET_OK on success, RET_FAIL on any validation or FFmpeg error.
RET_CODE VideoCapturer::Init(const Properties &properties)
{
    // Log the cameras Qt can see, so the operator can pick a valid device name.
    QList<QCameraInfo> cameras = QCameraInfo::availableCameras();
    for (int i = 0; i < cameras.size(); i++) {
        LOG(ERROR) << "VideoDeviceManager::VideoDeviceManager: input device " <<  i << ":" <<  cameras[i].deviceName().toStdString();
    }
    foreach (const QCameraInfo &cameraInfo, cameras) {
        QString str = cameraInfo.description();
        LOG(ERROR) << "video input device name:" << str.toStdString(); // camera friendly name
    }

    device_name_ = properties.GetProperty("video_device_name","");
    video_enc_width_ = properties.GetProperty("video_enc_width",0);
    video_enc_height_ = properties.GetProperty("video_enc_height",0);
    video_enc_pix_fmt_ = properties.GetProperty("video_enc_pix_fmt",-1);
    capture_fps_ = properties.GetProperty("fps",25);
    if(video_enc_width_ == 0
            || video_enc_height_ == 0
            || video_enc_pix_fmt_ == -1
            || capture_fps_ == 0){
        LOG(ERROR) << "VideoCapturer init properties have null parameter, please check it";
        return RET_FAIL;
    }
    if(device_name_.empty())
    {
        LOG(ERROR) << "VideoCapturer init failed device_name is null";
        return RET_FAIL;
    }
    x_ = properties.GetProperty("x",0);
    y_ = properties.GetProperty("y",0);

    AVCodec* codec = nullptr;
    ifmt_ctx_ = avformat_alloc_context();
#ifdef Q_OS_WIN
    // Log available DirectShow devices and their options for diagnostics.
    show_dshow_device();
    show_dshow_device_option();
    int ret = 0;
    AVInputFormat *ifmt = av_find_input_format("dshow"); // capture via dshow
    AVDictionary *param = NULL;
    char framerate[32] = {0};
    snprintf(framerate, sizeof(framerate), "%d", capture_fps_);
    av_dict_set(&param, "video_size", "1920x1080", 0);
    av_dict_set(&param, "pixel_format", "yuv420p", 0);
    // FIX: honor the requested fps instead of the hard-coded "25".
    av_dict_set(&param, "framerate", framerate, 0);
    av_dict_set(&param, "rtbufsize", "30412800", 0);
    ret = avformat_open_input(&ifmt_ctx_, device_name_.c_str(), ifmt, &param);
    // FIX: free the dictionary on BOTH paths (it leaked on the error path).
    av_dict_free(&param);
    if(ret != 0) {
        char err[1024] = {0};
        av_strerror(ret, err, 1023);
        LOG(ERROR) << "Couldn't open input stream video: " << device_name_ << " err: " << err;
        return RET_FAIL;
    }
#endif
#ifdef Q_OS_LINUX
    AVInputFormat *ifmt = (AVInputFormat *)av_find_input_format("x11grab"); // X11 screen grab
    AVDictionary *param = NULL;
    char framerate[32] = {0};
    snprintf(framerate, sizeof(framerate), "%d", capture_fps_);
    av_dict_set(&param, "framerate", framerate, 0);
    // FIX: actually pass the options (the original passed nullptr, silently
    // dropping the framerate), and free the dictionary afterwards.
    int ret = avformat_open_input(&ifmt_ctx_, ":0.0+0,0", ifmt, &param);
    av_dict_free(&param);
    if(ret != 0) {
        LOG(ERROR) << "Couldn't open input stream video: " << ":0.0+0,0";
        return RET_FAIL;
    }
#endif

    video_stream_ = -1;
    codec_ctx_ = nullptr;
    if(avformat_find_stream_info(ifmt_ctx_, NULL) != 0){
        // Non-fatal: some devices still expose codecpar without full probing.
        LOG(ERROR) << "avformat_find_stream_info failed";
    }
    // Pick the first video stream.
    for(unsigned int i = 0; i < ifmt_ctx_->nb_streams; i++){
        if(ifmt_ctx_->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO){
            video_stream_ = static_cast<int>(i);
            break;
        }
    }
    if(video_stream_ == -1){
        LOG(ERROR) << "Dot not find a video stream";
        return RET_FAIL;
    }

    // Build a decoder context from the stream parameters and open it.
    codec_ctx_ = avcodec_alloc_context3(NULL);
    // FIX: check the copy instead of ignoring its return value.
    if(avcodec_parameters_to_context(codec_ctx_, ifmt_ctx_->streams[video_stream_]->codecpar) < 0){
        LOG(ERROR) << "avcodec_parameters_to_context failed";
        return RET_FAIL;
    }
    codec = avcodec_find_decoder(codec_ctx_->codec_id);
    if(codec == nullptr) {
        LOG(ERROR) << "video Codec not found";
        return RET_FAIL;
    }
    if(avcodec_open2(codec_ctx_, codec, nullptr) < 0) {
        LOG(ERROR) << "Could not open video codec";
        return RET_FAIL;
    }
    LOG(ERROR) << device_name_.c_str();
    LOG(ERROR) << codec_ctx_->codec_id << " " << codec_ctx_->coded_width << " " << codec_ctx_->coded_height;
    LOG(ERROR) << "video_cap_inf: width: " << codec_ctx_->width << " height: " << codec_ctx_->height << " fps "
              << codec_ctx_->framerate.den << " num " << codec_ctx_->framerate.num;

    video_frame_ = av_frame_alloc();
    yuv_frame_ = av_frame_alloc();
    return RET_OK;
}

// Allocate an AVFrame plus a data buffer for the given format and size.
// Returns NULL on failure (both the frame and its buffer are owned by the
// caller via av_frame_free on success).
static AVFrame* alloc_picture(enum AVPixelFormat pix_fmt, int width, int height)
{
    AVFrame* picture = av_frame_alloc();
    if (!picture) {
        LOG(ERROR) << "Could not allocate frame data";
        return NULL;
    }
    picture->format = pix_fmt;
    picture->width = width;
    picture->height = height;
    if (av_frame_get_buffer(picture, 0) < 0) {
        LOG(ERROR) << "Could not av_frame_get_buffer";
        // FIX: the frame itself leaked here before.
        av_frame_free(&picture);
        return NULL;
    }
    return picture;
}

void VideoCapturer::Loop()
{
     LOG(INFO) << "into loop";
     struct SwsContext* img_convert_ctx = nullptr;
     int64_t pts = 0;
     int64_t pre_pts = 0;
     int ret = 0;
     if(codec_ctx_){
         // 将数据转换为YUV420P
         img_convert_ctx = sws_getContext(codec_ctx_->width,codec_ctx_->height,(enum AVPixelFormat)codec_ctx_->pix_fmt,
                                          video_enc_width_,video_enc_height_,(enum AVPixelFormat)video_enc_pix_fmt_,
                                          SWS_BICUBIC,0,0,0);
         yuv_frame_ = alloc_picture(AV_PIX_FMT_YUV420P,video_enc_width_,video_enc_height_);
     }
     AVPacket packet;
     av_init_packet(&packet);
     int64_t firsttime = TimesUtil::GetTimeMillisecond();
     LOG(INFO) << "init loop while";
     while(true){
         if(request_exit_){
             break;
         }
         if(av_read_frame(ifmt_ctx_,&packet) < 0){
             LOG(ERROR) << "read failed";
             std::this_thread::sleep_for(std::chrono::milliseconds(2));
             continue;
         }
         if(!is_first_frame_){
             is_first_frame_ = true;
             LOG(INFO) <<  AVPublishTime::GetInstance()->getVInTag() << ":t"  << AVPublishTime::GetInstance()->getCurrenTime();
         }
         if(packet.stream_index == video_stream_) {
             if(avcodec_send_packet(codec_ctx_,&packet) != 0){
                 LOG(ERROR) << "input AVPacket to decoder failed!";
                 av_packet_unref(&packet);
                 continue;
             }
             while (0 == (ret = avcodec_receive_frame(codec_ctx_,video_frame_))) {
                 /// 转换成YUV420
                 /// 由于解码后的数据不一定是yuv420p，比如硬件解码后会是yuv420sp，因此这里统一转成yuv420p
                sws_scale(img_convert_ctx, (const uint8_t* const*)video_frame_->data, video_frame_->linesize, 0,
                          codec_ctx_->height, yuv_frame_->data, yuv_frame_->linesize);
//                yuv_frame_->width = video_frame_->width;
//                yuv_frame_->height = video_frame_->height;
//                yuv_frame_->format = AV_PIX_FMT_YUV420P;
                pts = AVPublishTime::GetInstance()->get_video_pts();
                if(pts >= 0){
                    if(pre_pts >= pts){
                        yuv_frame_->pts = pre_pts;
                         LOG(WARNING) << "video pts failed";
                    }else{
                        yuv_frame_->pts = pts;
                        pre_pts = pts;
                    }
                    callback_get_frame_(yuv_frame_);
                }else {

                }
             }
             if(ret != 0){
                 char errbuf[1024] = {0};
                 av_strerror(ret,errbuf,1023);
                // LOG(ERROR) << "raw video errror " << errbuf;
             }

         }else{
               LOG(WARNING) << "other stream_index: " <<  packet.stream_index;
         }
         av_packet_unref(&packet);
         std::this_thread::sleep_for(std::chrono::milliseconds(2));
     }
}

}




























