#include "videosurface.h"

// Clamp an int expression to the [0, 255] byte range.  The argument is
// parenthesized so the macro stays correct for any expression passed in
// (e.g. ternaries or other low-precedence operators).
#define CLAMP(n) ((n) > 255 ? 255 : ((n) < 0 ? 0 : (n)))

// Expand one (U, V) chroma sample pair into the fixed-point mixing terms
// consumed by qYUVToARGB32 (integer BT.601 YUV->RGB approximation with
// 8 fractional bits; the "+ 128" pre-rounds the later ">> 8").
// Deliberately declares uu/vv/rv/guv/bu in the caller's scope, so it cannot
// be wrapped in the usual do { } while (0).
#define EXPAND_UV(u, v) \
    int uu = (u) - 128; \
    int vv = (v) - 128; \
    int rv = 409 * vv + 128; \
    int guv = 100 * uu + 208 * vv + 128; \
    int bu = 516 * uu + 128;

// Pull the plane pointers, strides and geometry of a bi-planar video frame
// (plane 0 = Y, plane 1 = interleaved chroma) into local variables.
#define FETCH_INFO_BIPLANAR(frame) \
    const uchar *plane1 = (frame).bits(0); \
    const uchar *plane2 = (frame).bits(1); \
    int plane1Stride = (frame).bytesPerLine(0); \
    int plane2Stride = (frame).bytesPerLine(1); \
    int width = (frame).width(); \
    int height = (frame).height();

// Construct the surface; the conversion target image is created lazily on
// the first presented NV21 frame, so nothing is allocated here.
videoSurface::videoSurface(QObject *parent)
    : QAbstractVideoSurface(parent)
{
}

// Advertise the pixel formats this surface accepts, independent of the
// buffer handle type.  NOTE(review): present() only converts NV21; ARGB32
// frames are accepted by the pipeline but left untouched — confirm that is
// intentional.
QList<QVideoFrame::PixelFormat>  videoSurface::supportedPixelFormats(QAbstractVideoBuffer::HandleType handleType)const
{
    Q_UNUSED(handleType);

    // A default-constructed QList is already empty, so no clear() is needed.
    QList<QVideoFrame::PixelFormat> list;
    list.append(QVideoFrame::Format_ARGB32);
    list.append(QVideoFrame::Format_NV21);
    return list;
}

// Combine one luma sample with pre-expanded chroma terms (from EXPAND_UV)
// into a packed 0xAARRGGBB pixel.  Fixed-point math with 8 fractional bits.
inline quint32 videoSurface::qYUVToARGB32(int y, int rv, int guv, int bu, int a)
{
    // Scale the luma once (subtract the 16 offset, apply the 298/256 gain),
    // then mix in each chroma term, drop the fractional bits and clamp.
    const int yy = (y - 16) * 298;
    const int r = CLAMP((yy + rv) >> 8);
    const int g = CLAMP((yy - guv) >> 8);
    const int b = CLAMP((yy + bu) >> 8);
    return (a << 24) | (r << 16) | (g << 8) | b;
}

inline void videoSurface::planarYUV420_to_ARGB32(const uchar *y, int yStride,
                                          const uchar *u, int uStride,
                                          const uchar *v, int vStride,
                                          int uvPixelStride,
                                          quint32 *rgb,
                                          int width, int height)
{
    quint32 *rgb0 = rgb;
    quint32 *rgb1 = rgb + width;

    for (int j = 0; j < height; j += 2) {
        const uchar *lineY0 = y;
        const uchar *lineY1 = y + yStride;
        const uchar *lineU = u;
        const uchar *lineV = v;

        for (int i = 0; i < width; i += 2) {
            EXPAND_UV(*lineU, *lineV);
            lineU += uvPixelStride;
            lineV += uvPixelStride;

            *rgb0++ = qYUVToARGB32(*lineY0++, rv, guv, bu);
            *rgb0++ = qYUVToARGB32(*lineY0++, rv, guv, bu);
            *rgb1++ = qYUVToARGB32(*lineY1++, rv, guv, bu);
            *rgb1++ = qYUVToARGB32(*lineY1++, rv, guv, bu);
        }

        y += yStride << 1; // stride * 2
        u += uStride;
        v += vStride;
        rgb0 += width;
        rgb1 += width;
    }
}

// Convert an NV21 frame (plane 0 = Y, plane 1 = interleaved VU chroma) into
// the caller-supplied ARGB32 buffer.  `output` must hold
// frame.width() * frame.height() 32-bit pixels.
void QT_FASTCALL videoSurface::qt_convert_NV21_to_ARGB32(const QVideoFrame &frame, uchar *output)
{
    FETCH_INFO_BIPLANAR(frame)
    // NV21 stores chroma as V first, U second within each interleaved pair.
    const uchar *uPlane = plane2 + 1;
    const uchar *vPlane = plane2;
    planarYUV420_to_ARGB32(plane1, plane1Stride,
                           uPlane, plane2Stride,
                           vPlane, plane2Stride,
                           2, // interleaved chroma: 2 bytes between samples
                           reinterpret_cast<quint32*>(output),
                           width, height);
}

// Receive a frame from the video pipeline.  NV21 frames are converted into
// the member QImage `img`; other formats are accepted but ignored.
// Returns false when an NV21 frame's pixel data could not be mapped — the
// original silently reported success in that case, hiding the failure from
// the pipeline.
bool videoSurface::present(const QVideoFrame &frame)
{
    // QVideoFrame is shallow-shared; a local copy is needed because map()
    // is non-const and `frame` arrives as a const reference.
    QVideoFrame cloneFrame(frame);

    if (frame.pixelFormat() == QVideoFrame::Format_NV21)
    {
        if (!cloneFrame.map(QAbstractVideoBuffer::ReadOnly))
            return false; // pixel data unavailable — report the failure

        // (Re)allocate the target image only when the geometry or format of
        // the incoming frames changes.
        if ((img.width() != cloneFrame.width()) || (img.height() != cloneFrame.height()) || (img.format() != QImage::Format_ARGB32))
        {
            img = QImage(cloneFrame.width(), cloneFrame.height(), QImage::Format_ARGB32);
        }

        qt_convert_NV21_to_ARGB32(cloneFrame, img.bits());
        cloneFrame.unmap();
    }

    return true;
}

// Accessor for the most recently converted frame.  Returns a non-owning
// pointer to the member image; its contents are overwritten each time an
// NV21 frame is successfully presented, so callers needing a stable copy
// should take one.
QImage* videoSurface::getQImage()
{
    return &img;
}

// Nothing to release manually: `img` is a value member and cleans up itself.
videoSurface::~videoSurface()
{

}
