#include "decodevideo.h"

// Default inter-frame delay in milliseconds (~25 fps playback).
const int DEFAULT_SPEED = 40;

/**
 * Constructs the decoder worker thread in its idle state.
 *
 * video_input == -1 means "no video stream selected yet"; an empty
 * filepath selects the camera capture path in decoding()/filePrepare().
 *
 * FIX: the original left x / isone / st / isStop uninitialized even though
 * decoding() reads all of them; they are now set to the same "start" values
 * that SetXToBegin() and stopDecode() establish.
 * NOTE(review): if the header already gives these members in-class
 * initializers, these assignments are redundant but harmless — confirm.
 */
DecodeVideo::DecodeVideo() : QThread()
{
    speed = DEFAULT_SPEED;
    this->prevSpeed = speed;
    video_input = -1;
    filepath = "";
    x = 0;          // frame counter, see SetXToBegin()
    isone = true;   // arm the first-frame snapshot
    st = false;     // stopDecode() sets this to leave the decode loop
    isStop = false; // pause flag; false == actively decoding
}

void DecodeVideo::decoding()
{
    filePrepare(filepath);


     //**********************AVPacket准备工作完成**************************
    //***********************AVFrame RGB准备工作********************
    FILE*YUVfp=fopen("../fileout/test.YUV","wb+");

    qDebug()<<isStop;
    qDebug()<<!av_read_frame(formatContex,pkt);


    if(filepath.isEmpty())
    {
        while(!av_read_frame(formatContex,pkt))
            {
                if(isStop==false)
                {

                  if(pkt->stream_index==video_input)
                  {

                    int get_picture_ptr=-1;
                    avcodec_decode_video2(codecContext,pictrue,&get_picture_ptr,pkt);
                    if(get_picture_ptr!=0)
                    {
                        sws_scale(SwsContextRGB,pictrue->data,pictrue->linesize,0,pictrue->height,
                                  pictureRGB->data,pictureRGB->linesize);
                        sws_scale(SwsContextYUV,pictrue->data,pictrue->linesize,0,pictrue->height,
                                  pictureYUV->data,pictureYUV->linesize);
                        fwrite(pictureYUV->data[0],this->size,1,YUVfp);
                        fwrite(pictureYUV->data[1],this->size/4,1,YUVfp);
                        fwrite(pictureYUV->data[2],this->size/4,1,YUVfp);
                        this->img=QImage((uchar*)bufferRGB,codecContext->width,codecContext->height,
                                         QImage::Format_RGB32);

                        emit sendImage(this->img,x);
                        emit sendYUV(pictureYUV);
                        //如果是第一帧并且是摄像头，那么就保存起来
                        if(isone&&filepath.size()==0)
                        {
                            QDateTime currentDateTime = QDateTime::currentDateTime();
                            QString timeStr = currentDateTime.toString("yyyy-MM-dd-hh-mm-ss");
                            QString fullFileName ="../video_image/"+timeStr+".png";
                            this->video_img_path=fullFileName;
                            img.save(fullFileName.toStdString().c_str());
                            this->isone=false;
                        }
                        x++;
                        qDebug()<<x;
                        msleep(speed);
                    }
                  }
                  av_packet_unref(pkt);
                  if(st) break;
                }
            }
    }
    else
    {
        while (true) {
                if (isStop==false) {

                    if (av_read_frame(formatContex, pkt) < 0) {
                        break;
                    }

                    if (pkt->stream_index == video_input)
                    {
                        int get_picture_ptr=-1;
                        avcodec_decode_video2(codecContext,pictrue,&get_picture_ptr,pkt);
                        if(get_picture_ptr!=0)
                        {
                            sws_scale(SwsContextRGB,pictrue->data,pictrue->linesize,0,pictrue->height,
                                      pictureRGB->data,pictureRGB->linesize);
                            sws_scale(SwsContextYUV,pictrue->data,pictrue->linesize,0,pictrue->height,
                                      pictureYUV->data,pictureYUV->linesize);
                            fwrite(pictureYUV->data[0],this->size,1,YUVfp);
                            fwrite(pictureYUV->data[1],this->size/4,1,YUVfp);
                            fwrite(pictureYUV->data[2],this->size/4,1,YUVfp);
                            this->img=QImage((uchar*)bufferRGB,codecContext->width,codecContext->height,
                                             QImage::Format_RGB32);

                            emit sendImage(this->img,x);
                            emit sendYUV(pictureYUV);
                            //如果是第一帧并且是摄像头，那么就保存起来
                            if(isone&&filepath.size()==0)
                            {
                                QDateTime currentDateTime = QDateTime::currentDateTime();
                                QString timeStr = currentDateTime.toString("yyyy-MM-dd-hh-mm-ss");

//                                this->model=this->con->getModel("device_info");
//                                Any ret=this->model->sqlSelect("select *from device_info;");
//                                Device device=ret.cast_<Device>();

                                //这个先把它放入这个文件，看看后面有没有要求
                                QString fullFileName ="../video_image/"+timeStr+".png";
                                this->video_img_path=fullFileName;
                                img.save(fullFileName.toStdString().c_str());
                                this->isone=false;
                            }
                            x++;
                           // qDebug()<<x;
                            msleep(speed);
                        }
                    }

                }
                 if(st) break;
                 av_packet_unref(pkt);
            }
    }





    fclose(YUVfp);
    //fclose(H264fp);

    emit sendclose();
    //释放解码器
    avcodec_close(codecContext);
    //关闭文件
    avformat_close_input(&formatContex);

}

// QThread entry point: the whole decode pipeline runs on this worker
// thread, so the signals emitted by decoding() cross threads (queued).
void DecodeVideo::run()
{
    decoding();
}

int DecodeVideo::getTotalx() const
{
    return totalx;
}

void DecodeVideo::setTotalx(int value)
{
    totalx = value;
}

int DecodeVideo::getHight() const
{
    return hight;
}

void DecodeVideo::setHight(int value)
{
    hight = value;
}

int DecodeVideo::getWight() const
{
    return wight;
}

void DecodeVideo::setWight(int value)
{
    wight = value;
}

bool DecodeVideo::getCalculateFrames() const
{
    return calculateFrames;
}

void DecodeVideo::setCalculateFrames(bool value)
{
    calculateFrames = value;
}

int DecodeVideo::getTotalFrames() const
{
    return totalFrames;
}

void DecodeVideo::setTotalFrames(int value)
{
    totalFrames = value;
}

int DecodeVideo::getPrevSpeed() const
{
    return prevSpeed;
}

void DecodeVideo::setPrevSpeed(int value)
{
    prevSpeed = value;
}

int DecodeVideo::getX() const
{
    return x;
}

void DecodeVideo::setX(int value)
{
    x = value;
}


QString DecodeVideo::getVideo_img_path() const
{
    return video_img_path;
}

void DecodeVideo::setVideo_img_path(const QString &value)
{
    video_img_path = value;
}

bool DecodeVideo::getIsStop() const
{
    return isStop;
}

void DecodeVideo::setIsStop(bool value)
{
    isStop = value;
}

QImage DecodeVideo::getImg() const
{
    return img;
}

void DecodeVideo::setImg(const QImage &value)
{
    img = value;
}

bool DecodeVideo::getSt() const
{
    return st;
}

void DecodeVideo::setSt(bool value)
{
    st = value;
}

void DecodeVideo::stopDecode()
{
    st=true;
}

void DecodeVideo::SetXToBegin()
{
    this->x=0;
    this->isone=true;
}

QString DecodeVideo::getFilepath() const
{
    return filepath;
}

void DecodeVideo::setFilepath(const QString &value)
{
    filepath = value;
}

int DecodeVideo::getSpeed() const
{
    return speed;
}

void DecodeVideo::setSpeed(int value)
{
    speed = value;
}

/**
 * Opens the input (camera when `str` is empty, otherwise this->filepath),
 * locates the first video stream, opens its decoder and allocates the
 * packet, the RGB32 destination frame/buffer and its sws converter,
 * then delegates the YUV420P side to initYUY().
 *
 * FIXes vs. the original:
 *  - an empty camera list crashed on camers[0]; now logged and aborted;
 *  - a local `QString str` shadowed the parameter; renamed;
 *  - when no video stream was found the code fell through and indexed
 *    streams[-1]; now returns early;
 *  - the stream loop counter matches the unsigned nb_streams;
 *  - pictureRGB->format now says RGB32, matching how the buffer is
 *    actually filled (it previously carried the source pix_fmt).
 *
 * NOTE(review): on the early-return failure paths the decode members stay
 * unset, exactly as in the original failure behaviour — decoding() does not
 * re-check them.  calculateFrames_ presumably defaults in the header, since
 * decoding() calls filePrepare(filepath) with one argument.
 */
void DecodeVideo::filePrepare(QString str, bool calculateFrames_)
{
    int res = 0;
    if (str.size() == 0)
    {
        // Camera capture path: register capture devices and components.
        avdevice_register_all();
        av_register_all();

        formatContex = avformat_alloc_context();
        // DirectShow capture input (Windows).
        AVInputFormat* iformat = av_find_input_format("dshow");

        QList<QCameraInfo> camers = QCameraInfo::availableCameras();
        if (camers.isEmpty())
        {
            // FIX: the original indexed camers[0] unconditionally.
            qDebug() << "no camera available";
            return;
        }
        QString cameraDesc = camers[0].description();  // FIX: no longer shadows `str`

        QString deviceName = "video=" + cameraDesc;
        res = avformat_open_input(&formatContex, deviceName.toStdString().c_str(), iformat, nullptr);
    }
    else
    {
        // File path.
        av_register_all();
        this->formatContex = avformat_alloc_context();
        res = avformat_open_input(&formatContex, this->filepath.toStdString().c_str(), nullptr, nullptr);
    }

    if (res)
    {
        qDebug() << "打开失败";
    }
    else {
        qDebug() << "打开成功";
    }

    // Probe stream information.
    res = avformat_find_stream_info(formatContex, nullptr);
    if (res < 0)
    {
        qDebug() << "avformat_find_stream_info 失败";
    }
    else {
        qDebug() << "avformat_find_stream_info 成功";
    }

    // Find the first video stream.
    for (unsigned int i = 0; i < formatContex->nb_streams; i++)
    {
        if (formatContex->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            video_input = i;
            break;
        }
    }

    if (video_input == -1)
    {
        qDebug() << "video not find";
        // FIX: the original fell through and indexed streams[-1] below.
        return;
    }
    qDebug() << formatContex->streams[video_input]->nb_frames;  // total frame count
    qDebug() << "video_input=" << video_input;

    this->calculateFrames = calculateFrames_;

    // Find and open the decoder for the video stream.
    codecContext = formatContex->streams[video_input]->codec;
    decoder = avcodec_find_decoder(codecContext->codec_id);
    if (decoder == nullptr)
    {
        qDebug() << "avcodec_find_decoder fail";
    }
    else {
        qDebug() << "avcodec_find_decoder success";
    }

    res = avcodec_open2(codecContext, decoder, nullptr);
    if (res != 0)
    {
        qDebug() << "avcodec_open2 fail";
    }
    else {
        qDebug() << "avcodec_open2 success";
    }

    // Allocate the shared packet.  av_new_packet() initializes the packet
    // fields and allocates a `size`-byte payload buffer.
    pkt = (AVPacket*)malloc(sizeof(AVPacket));
    this->size = codecContext->width * codecContext->height;  // Y-plane size

    this->wight = codecContext->width;
    this->hight = codecContext->height;
    res = av_new_packet(pkt, size);
    if (res != 0)
    {
        qDebug() << "av_new_packet fail";
    }
    else {
        qDebug() << "av_new_packet success";
    }

    pictrue = av_frame_alloc();     // decoded (source-format) frame
    pictureRGB = av_frame_alloc();  // RGB32 destination frame

    // Describe the RGB destination frame with the source dimensions.
    pictureRGB->width = codecContext->width;
    pictureRGB->height = codecContext->height;
    // FIX: the buffer is filled as RGB32 below; the original tagged the
    // frame with the source pix_fmt instead.
    pictureRGB->format = AV_PIX_FMT_RGB32;

    // Allocate and attach the RGB32 buffer.
    int imgByteRGB = avpicture_get_size(AV_PIX_FMT_RGB32, codecContext->width, codecContext->height);
    bufferRGB = (uint8_t*)av_malloc(imgByteRGB * sizeof(uint8_t));
    avpicture_fill((AVPicture*)pictureRGB, bufferRGB, AV_PIX_FMT_RGB32, codecContext->width, codecContext->height);

    // Converter: source pix_fmt -> RGB32, same dimensions.
    SwsContextRGB = sws_getContext(codecContext->width, codecContext->height, codecContext->pix_fmt,
                                   codecContext->width, codecContext->height, AV_PIX_FMT_RGB32,
                                   SWS_BICUBIC, nullptr, nullptr, nullptr);
    //************************* AVFrame setup complete ************************
    qDebug() << "文件准备成功";

    initYUY();
}



/**
 * Allocates the YUV420P destination frame, its pixel buffer, and the
 * sws converter (source pix_fmt -> YUV420P) used by decoding().
 *
 * FIX: the frame holds YUV420P data after sws_scale, but the original
 * tagged it with the *source* pixel format, so receivers of
 * sendYUV(pictureYUV) saw an inconsistent `format` field.
 */
void DecodeVideo::initYUY()
{
    pictureYUV = av_frame_alloc();
    pictureYUV->width = codecContext->width;
    pictureYUV->height = codecContext->height;
    pictureYUV->format = AV_PIX_FMT_YUV420P;

    // Buffer sized for planar YUV420P at the source dimensions.
    this->imgBYteYUV = avpicture_get_size(AV_PIX_FMT_YUV420P, codecContext->width, codecContext->height);
    bufferYUV = (uint8_t*)av_malloc(imgBYteYUV * sizeof(uint8_t));
    avpicture_fill((AVPicture*)pictureYUV, bufferYUV, AV_PIX_FMT_YUV420P, codecContext->width, codecContext->height);
    SwsContextYUV = sws_getContext(codecContext->width, codecContext->height, codecContext->pix_fmt,
                                   codecContext->width, codecContext->height, AV_PIX_FMT_YUV420P,
                                   SWS_BICUBIC, nullptr, nullptr, nullptr);
}

























