#include "avhandler.h"
#include <QtGui/QImage>
#include <QtGui/QColor>
#include <QtCore/QDebug>

/*
 * Sets up the libav structures needed for demuxing/decoding.  A custom
 * AVIOContext is created so data can be pulled from a QIODevice (see
 * readFn/seekFn) instead of a plain file on disk.
 */
AVHandler::AVHandler() :
    isReadable(false),
    formatContext(avformat_alloc_context()),
    options(0),
    frame(avcodec_alloc_frame())
{
    const int bufferSize = 4096;  // I/O chunk size handed to libav
    quint8 *ioBuffer = reinterpret_cast<quint8*>(av_malloc(bufferSize));
    // opaque (4th arg) stays null here; canRead() points it at the device.
    ioContext = avio_alloc_context(ioBuffer, bufferSize, 0, 0,
                                   &readFn, NULL, &seekFn);
}

AVHandler::~AVHandler()
{
    // NOTE(review): ioContext->buffer is deliberately not freed here —
    // libav may have replaced or already released the buffer allocated in
    // the constructor, so ownership is unclear; confirm against the
    // avio_alloc_context documentation before re-enabling this.
    //av_free(ioContext->buffer);
    av_free(ioContext);
    av_free(frame);
    av_dict_free(&options);
    // avformat_close_input() both closes and frees the context, which is
    // why the plain avformat_free_context() call is disabled.
    //avformat_free_context(formatContext);
    avformat_close_input(&formatContext);
}

// Remembers the file name that canRead() later passes to
// avformat_open_input(); no validation happens here.
void AVHandler::setFileName(const QString &fileName)
{
    m_fileName = fileName;
}

/*
 * Returns the value for the given image-reader option, or an invalid
 * QVariant for options this handler does not provide.
 */
QVariant AVHandler::option(ImageOption option) const
{
    if (option == Size) {
        // Frame dimensions are only known once the stream has been opened.
        if (canRead())
            return QSize(codecContext->width, codecContext->height);
        return QVariant();
    }
    if (option == IncrementalReading || option == Animation)
        return true;
    if (option == ImageFormat)
        return QImage::Format_RGB32;
    return QVariant();  // everything else is unsupported
}

/*
 * Reports which ImageOption values option() can answer: the stream size,
 * incremental (frame-by-frame) reading, animation and the image format.
 */
bool AVHandler::supportsOption(ImageOption option) const
{
    return option == Size
        || option == IncrementalReading
        || option == Animation
        || option == ImageFormat;
}

/*
 * Returns the delay in milliseconds until the next frame should be shown,
 * derived from the stream's average frame rate, or 0 when unknown.
 */
int AVHandler::nextImageDelay() const {
    // Guard BOTH numerator and denominator: the old code only checked den,
    // so a zero numerator caused a division by zero.  Computing
    // 1000*den/num in one step also avoids the precision loss of
    // 1000/(num/den) for non-integral rates such as 30000/1001 fps.
    if (canRead() && stream->avg_frame_rate.num && stream->avg_frame_rate.den)
        return 1000 * stream->avg_frame_rate.den / stream->avg_frame_rate.num;
    return 0;
}

/*
 * Lazily opens the input and the best video stream.  Returns true once the
 * codec has been opened successfully; subsequent calls are cheap because
 * the result is cached in isReadable.
 */
bool AVHandler::canRead() const
{
    if (!isReadable) {
        AVInputFormat *fmt = av_find_input_format(format());
        if (fmt && !(fmt->flags & AVFMT_NOFILE)) {
            // Route libav's I/O through the QIODevice (see readFn/seekFn).
            ioContext->opaque = device();
            formatContext->pb = ioContext;
        }
        if (avformat_open_input(&formatContext, m_fileName.toLatin1(), fmt, &options) == 0) {
            int streamIndex = av_find_best_stream(formatContext, AVMEDIA_TYPE_VIDEO, -1, -1, &codec, 0);
            // BUG FIX: av_find_best_stream() returns a negative error code
            // when no video stream exists; indexing streams[] with it was
            // out-of-bounds/undefined behavior.
            if (streamIndex >= 0) {
                stream = formatContext->streams[streamIndex];
                codecContext = stream->codec;
                if (avcodec_open2(codecContext, codec, 0) == 0) {
                    av_seek_frame(formatContext, streamIndex, 0, AVSEEK_FLAG_ANY);
                    isReadable = true;
                }
            } else {
                qWarning() << "No video stream found.";
            }
        } else {
            qWarning() << "Could not open input.";
        }
    }
    return isReadable;
}

/*
 * Decodes the next video frame into *image.  Returns false when no more
 * frames are available or decoding fails.
 *
 * Known pixel formats are converted to RGB32 by hand; anything else is
 * wrapped as ARGB32 straight from the decoder's frame buffer.
 *
 * BUG FIX: the old code relied on QImage::detach() to deep-copy the pixel
 * data, but detach() is a no-op on a freshly constructed image with
 * reference count 1 whose data pointer is non-const (the default branch),
 * so *image kept pointing into frame->data[0], which libav overwrites on
 * the next decode.  QImage::copy() always produces an owned deep copy, so
 * it is used on every branch instead.
 */
bool AVHandler::read(QImage *image)
{
    if (!canRead())
        return false;

    AVPacket packet;
    if(av_read_frame(formatContext, &packet) != 0) {
        qWarning("Libav image handler: frame not found.");
        av_free_packet(&packet);
        return false;
    }

    int success;
    avcodec_decode_video2(codecContext, frame, &success, &packet);
    if (success) {
        QByteArray bytes;
        // Convert colors if needed...  bytes only lives until this function
        // returns, hence the copy() below.
        switch (codecContext->pix_fmt) {
        case PIX_FMT_YUV420P:
            bytes.resize(frame->width * frame->height * 4);
            yuv420_rgb(frame, bytes.data(), QRect(0, 0, frame->width, frame->height));
            *image = QImage(reinterpret_cast<const uchar*>(bytes.constData()), frame->width, frame->height, QImage::Format_RGB32).copy();
            break;
        case PIX_FMT_BGR24:
            bytes.resize(frame->width * frame->height * 4);
            bgr24_rgb(frame, bytes.data(), QRect(0, 0, frame->width, frame->height));
            *image = QImage(reinterpret_cast<const uchar*>(bytes.constData()), frame->width, frame->height, QImage::Format_RGB32).copy();
            break;
        case PIX_FMT_YUVJ422P:
            bytes.resize(frame->width * frame->height * 4);
            yuv422_rgb(frame, bytes.data(), QRect(0, 0, frame->width, frame->height));
            *image = QImage(reinterpret_cast<const uchar*>(bytes.constData()), frame->width, frame->height, QImage::Format_RGB32).copy();
            break;
        default:
            *image = QImage(frame->data[0], frame->width, frame->height, frame->linesize[0], QImage::Format_ARGB32).copy();
            break;
        }
        av_free_packet(&packet);
        return true;
    }
    qWarning("AVHandler: unable to decode frame.");
    av_free_packet(&packet);
    return false;
}

int AVHandler::readFn(void *opaque, quint8 *buf, int buf_size)
{
    QIODevice* device = reinterpret_cast<QIODevice*>(opaque);
    return device->read(reinterpret_cast<char*>(buf), buf_size);
}

/*
 * AVIOContext seek callback for the QIODevice in 'opaque'.  Supports
 * AVSEEK_SIZE (report stream size) and SEEK_SET (absolute seek); relative
 * and end-relative seeks are not implemented and return -1.
 */
qint64 AVHandler::seekFn(void *opaque, qint64 offset, int whence)
{
    QIODevice* device = reinterpret_cast<QIODevice*>(opaque);
    switch(whence) {
    case AVSEEK_SIZE:
        return device->size();
    case SEEK_SET:
        if(device->seek(offset))
            return device->pos();
        // BUG FIX: the old code fell through into SEEK_CUR here, so a
        // failed absolute seek emitted a misleading "Seek current" warning.
        qWarning() << "AVHandler: Seek set failed.";
        return -1;
    case SEEK_CUR:
        qWarning() << "AVHandler: Seek current called, not implemented.";
        return -1;
    case SEEK_END:
        qWarning() << "AVHandler: Seek end called, not implemented.";
        return -1;
    default:
        qWarning() << "AVHandler: Unknown seek function called, not implemented.";
        return -1;
    }
}

/*
 * Converts a single video-range YUV sample to an opaque RGB pixel using
 * integer-scaled (x256) coefficients; each channel is clamped to 0..255.
 */
QRgb AVHandler::fromYuv(qint16 y, qint16 u, qint16 v)
{
    const int luma = y - 16;
    const int cb = u - 128;
    const int cr = v - 128;
    // +128 rounds before the >>8 that undoes the x256 coefficient scaling.
    const int red   = (298 * luma            + 409 * cr + 128) >> 8;
    const int green = (298 * luma - 100 * cb - 208 * cr + 128) >> 8;
    const int blue  = (298 * luma + 516 * cb            + 128) >> 8;
    QColor pixel;
    pixel.setRed(qBound(0, red, 255));
    pixel.setGreen(qBound(0, green, 255));
    pixel.setBlue(qBound(0, blue, 255));
    return pixel.rgb();
}

/*
 * Converts planar YUV 4:2:0 frame data into 32-bit RGB pixels in 'buffer'.
 * Chroma planes carry half the luma resolution in both directions, so each
 * U/V sample is shared by a 2x2 block of pixels.
 */
void AVHandler::yuv420_rgb(AVFrame *frame, char *buffer, const QRect &roi)
{
    QRgb *out = reinterpret_cast<QRgb*>(buffer);
    const int lumaStride = frame->linesize[0];
    const int uStride = frame->linesize[1];
    const int vStride = frame->linesize[2];
    const int width = roi.width();
    const int height = roi.height();
    for (int row = 0; row < height; ++row) {
        QRgb *line = out + row * width;
        const quint8 *yRow = frame->data[0] + row * lumaStride;
        const quint8 *uRow = frame->data[1] + (row / 2) * uStride;
        const quint8 *vRow = frame->data[2] + (row / 2) * vStride;
        for (int col = 0; col < width; ++col)
            line[col] = fromYuv(yRow[col], uRow[col / 2], vRow[col / 2]);
    }
}

/*
 * Converts planar YUV 4:2:2 frame data into 32-bit RGB pixels in 'buffer'.
 * Chroma is halved horizontally only: one U/V pair is shared by each pair
 * of horizontally adjacent pixels (the width is assumed even, as before).
 */
void AVHandler::yuv422_rgb(AVFrame *frame, char *buffer, const QRect &roi)
{
    QRgb *out = reinterpret_cast<QRgb*>(buffer);
    const int yStride = frame->linesize[0];
    const int uStride = frame->linesize[1];
    const int vStride = frame->linesize[2];
    const int width = roi.width();
    const int height = roi.height();
    for (int row = 0; row < height; ++row) {
        QRgb *line = out + row * width;
        const quint8 *yRow = frame->data[0] + row * yStride;
        const quint8 *uRow = frame->data[1] + row * uStride;
        const quint8 *vRow = frame->data[2] + row * vStride;
        for (int col = 0; col < width; col += 2) {
            const qint16 u = uRow[col / 2];
            const qint16 v = vRow[col / 2];
            line[col] = fromYuv(yRow[col], u, v);
            line[col + 1] = fromYuv(yRow[col + 1], u, v);
        }
    }
}

/*
 * Converts packed BGR24 frame data into 32-bit RGB pixels in 'buffer' by
 * swapping the byte order of each 3-byte source pixel.
 */
void AVHandler::bgr24_rgb(AVFrame *frame, char *buffer, const QRect &roi)
{
    QRgb *out = reinterpret_cast<QRgb*>(buffer);
    const int srcStride = frame->linesize[0];
    const int width = roi.width();
    const int height = roi.height();
    QColor pixel;
    for (int row = 0; row < height; ++row) {
        const quint8 *src = frame->data[0] + row * srcStride;
        QRgb *line = out + row * width;
        for (int col = 0; col < width; ++col) {
            const quint8 *bgr = src + col * 3;  // 3 bytes per source pixel
            pixel.setBlue(bgr[0]);
            pixel.setGreen(bgr[1]);
            pixel.setRed(bgr[2]);
            line[col] = pixel.rgb();
        }
    }
}
