#include "opencv.h"
#include <QDebug>

// Default-construct the wrapper. No video is opened here; call
// openVideo() before using any frame accessors.
opencv::opencv(QObject *parent)
    : QObject{parent}
{
    /* usage template:
    QImage img;
    QList<QImage> imgList;
    openVideo("F:/videoTest.mp4");
    test01();

    img = getOneFrame(89);
    img.save("imgimg.png");

    imgList = getAllFrame();
    for(int i = 0; i < getFrameNum(); i++)
    {
        imgList.at(i).save(QString("img%1.png").arg(i));
    }
    */
}
// Release the capture on destruction so the underlying file/stream
// handle is closed together with this wrapper.
opencv::~opencv()
{
    m_cap.release();
}
// Replace the wrapped VideoCapture. NOTE(review): the cached stream
// properties (frame count, fps, size) are NOT refreshed here — only
// openVideo() populates them.
void opencv::setVideoCapture(VideoCapture cap)
{
    m_cap = cap;
}
// Return the wrapped VideoCapture (by value, as declared in the header).
VideoCapture opencv::getVideoCapture()
{
    return m_cap;
}
// Open the video at the given path and cache its basic stream
// properties. Returns false (after a debug message) when the file
// cannot be opened.
bool opencv::openVideo(QString path)
{
    // open() returns the same result isOpened() would afterwards, so a
    // single guard clause covers both steps of the original code.
    if(!m_cap.open(path.toStdString()))
    {
        qDebug() << "video open fail";
        return false;
    }

    // Cache stream metadata for later queries. get() yields double; the
    // setters' int parameters truncate it.
    setFrameNum(m_cap.get(CAP_PROP_FRAME_COUNT));
    setFrameRate(m_cap.get(CAP_PROP_FPS));
    setFrameWidth(m_cap.get(CAP_PROP_FRAME_WIDTH));
    setFrameHeight(m_cap.get(CAP_PROP_FRAME_HEIGHT));
    return true;
}
// 设置总帧数
void opencv::setFrameNum(int frameNum)
{
    this->m_frameNum = frameNum;
}
// 获取总帧数
int opencv::getFrameNum()
{
    return this->m_frameNum;
}
// 设置帧率
void opencv::setFrameRate(int frameRate)
{
    this->m_frameRate = frameRate;
}
// 获取帧率
int opencv::getFrameRate()
{
    return this->m_frameRate;
}
// 设置宽度
void opencv::setFrameWidth(int frameWidth)
{
    this->m_frameWidth = frameWidth;
}
// 获取宽度
int opencv::getFrameWidth()
{
    return this->m_frameWidth;
}
// 设置高度
void opencv::setFrameHeight(int frameHeight)
{
    this->m_frameHeight = frameHeight;
}
// 获取高度
int opencv::getFrameHeight()
{
    return this->m_frameHeight;
}
// Fetch the frame at position posNum (0-based) and convert it to QImage.
// Returns a null QImage when no video is open, posNum is out of range,
// or decoding fails.
//
// BUGFIX: the previous code called grab() (which decodes and consumes
// frame posNum) and then `m_cap >> frame` (read() = grab + retrieve),
// so it actually returned frame posNum+1. A single read() both advances
// and retrieves exactly the frame at the seek position.
QImage opencv::getOneFrame(int posNum)
{
    QImage img;
    Mat frame;

    if(!this->m_cap.isOpened())
    {
        qDebug() << "video open fail";
        return img;
    }

    // Reject negative indices as well as past-the-end ones.
    if(posNum < 0 || posNum >= getFrameNum())
    {
        qDebug() << "video frame position num overflow";
        return img;
    }

    this->m_cap.set(CAP_PROP_POS_FRAMES, posNum);
    if(!this->m_cap.read(frame) || frame.empty())
    {
        qDebug() << "read frame in opencv failed";
        return img;
    }
    img = MatToQImage(frame);

    return img;
}
// Decode every frame of the open video into a list of QImages.
//
// Improvement: the previous code called getOneFrame(i) per frame, which
// performs a codec seek (CAP_PROP_POS_FRAMES) for every single frame —
// extremely slow on inter-frame-compressed video. One rewind followed by
// sequential read() decodes each frame exactly once.
QList<QImage> opencv::getAllFrame()
{
    QList<QImage> imgList;
    Mat frame;

    if(!this->m_cap.isOpened())
    {
        qDebug() << "video open fail";
        return imgList;
    }

    // Rewind once, then decode sequentially.
    this->m_cap.set(CAP_PROP_POS_FRAMES, 0);
    for(int i = 0; i < getFrameNum(); i++)
    {
        // Container metadata can overestimate the decodable frame count;
        // stop at the first failed read instead of appending null images.
        if(!this->m_cap.read(frame) || frame.empty())
        {
            break;
        }
        imgList.append(MatToQImage(frame));
    }

    return imgList;
}
// Convert a cv::Mat to a deep-copied QImage.
// Supports CV_8UC1 (grayscale), CV_8UC3 (assumed BGR — TODO confirm with
// callers; channels are swapped to RGB), and CV_8UC4 (mapped to ARGB32).
// Any other type yields a null QImage. The returned image always owns
// its pixels, so the source Mat may be freed afterwards.
QImage opencv::MatToQImage(const cv::Mat& mat)
{
    // 8-bits unsigned, NO. OF CHANNELS = 1
    if (mat.type() == CV_8UC1)
    {
        QImage image(mat.cols, mat.rows, QImage::Format_Indexed8);
        // Set the color table (used to translate colour indexes to qRgb values)
        image.setColorCount(256);
        for (int i = 0; i < 256; i++)
        {
            image.setColor(i, qRgb(i, i, i));
        }
        // Copy input Mat row by row: mat.step (source stride, may include
        // padding) and image.scanLine() (dest stride, 32-bit aligned) can
        // differ, so a single flat memcpy would be wrong.
        uchar *pSrc = mat.data;
        for (int row = 0; row < mat.rows; row++)
        {
            uchar *pDest = image.scanLine(row);
            memcpy(pDest, pSrc, mat.cols);
            pSrc += mat.step;
        }
        return image;
    }
    // 8-bits unsigned, NO. OF CHANNELS = 3
    else if (mat.type() == CV_8UC3)
    {
        // Copy input Mat
        const uchar *pSrc = (const uchar*)mat.data;
        // Wrap the Mat buffer without copying, honoring its row stride.
        QImage image(pSrc, mat.cols, mat.rows, mat.step, QImage::Format_RGB888);
        // rgbSwapped() returns a new image (a deep copy) with R and B
        // exchanged, detaching the result from the Mat's memory.
        return image.rgbSwapped();
    }
    else if (mat.type() == CV_8UC4)
    {
        // Copy input Mat
        const uchar *pSrc = (const uchar*)mat.data;
        // Wrap the Mat buffer without copying, honoring its row stride.
        QImage image(pSrc, mat.cols, mat.rows, mat.step, QImage::Format_ARGB32);
        // copy() detaches from the Mat's memory so the result stays valid
        // after the Mat is released.
        return image.copy();
    }
    else
    {
        // Unsupported Mat type: return a null image.
        return QImage();
    }
}

// Debug helper: dump the cached stream properties to the Qt debug log
// on one line, as "<width> <height> <rate> <frameCount>".
void opencv::test01()
{
    const int w = getFrameWidth();
    const int h = getFrameHeight();
    const int rate = getFrameRate();
    const int frames = getFrameNum();
    qDebug() << w << h << rate << frames;
}
