#include "qnativecamera.h"
#include "videosurface.h"

#include <QPainter>
#include <QTimer>
#include <QDateTime>
#include <QDebug>

/*!
  Constructor. Initializes all camera-related pointers to null and the
  frame counters to zero; actual camera objects are created in start().
*/
QNativeCamera::QNativeCamera(QDeclarativeItem *parent)
    : QDeclarativeItem(parent),
      m_camera(0),
      m_frontCamera(false),
      m_cameraImageCapture(0),
      m_mediaRecorder(0),
      m_videoSurface(0),
      m_processedFrameCounter(0),
      m_incomingFrameCounter(0),
      m_preview(false)
{
    // QDeclarativeItem sets ItemHasNoContents by default; clear it so that
    // our paint() override is actually invoked.
    setFlag(QGraphicsItem::ItemHasNoContents, false);
}

/*!
  Destructor. Stops the camera and releases all camera resources via
  destroyResources().
*/
QNativeCamera::~QNativeCamera()
{
    destroyResources();
}

/*!
  Releases and destroyes all camera resources.
*/
void QNativeCamera::destroyResources()
{
    if (m_camera) {
        m_camera->stop();
    }

    m_imageFrame = QImage();

    if (m_cameraImageCapture) {
        delete m_cameraImageCapture;
        m_cameraImageCapture = 0;
    }

    if (m_mediaRecorder) {
        delete m_mediaRecorder;
        m_mediaRecorder = 0;
    }

    delete m_camera;
    m_camera = 0;

    m_incomingFrameCounter = 0;
    m_processedFrameCounter = 0;

    m_currentDevice = "";
    emit currentDeviceChanged();
}

/*!
  Returns the list of available camera devices as strings, e.g. the primary
  (back) camera and the front camera.
*/
QStringList QNativeCamera::availableDevices() const
{
    QStringList names;
    const QList<QByteArray> devices = QCamera::availableDevices();

    for (int i = 0; i < devices.count(); ++i) {
        names << QString(devices.at(i));
    }

    return names;
}

/*!
  Returns the current camera state, or UnloadedState when no camera
  object exists yet.
*/
QNativeCamera::State QNativeCamera::cameraState() const
{
    return m_camera ? State(m_camera->state()) : UnloadedState;
}


/*!
  Starts the camera with the given device. The available devices
  can be queried with the availableDevices method. Starting will release
  and destroy all earlier camera resources before creating new ones.
*/
void QNativeCamera::start(const QString &device)
{
    qDebug() << "Opening device: " << device;

    destroyResources();

    m_camera = new QCamera(device.toLatin1(), this);

    // Make sure the camera is in loaded state.
    //m_camera->load();

    m_videoSurface = new VideoSurface(this, m_camera);    
    m_camera->setViewfinder(m_videoSurface);

    // Set the image capturing objects.
    m_cameraImageCapture = new QCameraImageCapture(m_camera);
    connect(m_cameraImageCapture,SIGNAL(imageAvailable(int,QVideoFrame)), this, SLOT(imageAvailable(int,QVideoFrame)));
    connect(m_cameraImageCapture,SIGNAL(imageCaptured(int,QImage)), this, SLOT(imageCaptured(int,QImage)));
    connect(m_cameraImageCapture,SIGNAL(imageExposed(int)), this, SLOT(imageExposed(int)));


    qDebug() << "QVideoFrame::Format_Jpeg: "<<QString::number(QVideoFrame::Format_Jpeg);
    qDebug() << "code buffer format used: "<<QString::number(m_cameraImageCapture->bufferFormat());
    //m_cameraImageCapture->setBufferFormat(QVideoFrame::Format_RGB24); // This is equivalent to QImage::Format_RGB888
    qDebug() << "code buffer format setted: "<<QString::number(m_cameraImageCapture->bufferFormat());
    //m_cameraImageCapture->setCaptureDestination( QCameraImageCapture::CaptureToFile);

    if( m_cameraImageCapture->isCaptureDestinationSupported(QCameraImageCapture::CaptureToBuffer) )
        qDebug() << "QCameraImageCapture::CaptureToBuffer supported";
    else
        qDebug() << "QCameraImageCapture::CaptureToBuffer NOT supported";


    m_cameraImageCapture->setCaptureDestination( QCameraImageCapture::CaptureToBuffer);

    // The following code finds a 16:9 resolution and sets it as capture
    // resolution. The view finder image should also change to corresponding
    // aspect ratio. On some phones the view finder image does not change
    // immediately but after the first image is captured. On some other
    // phones the view finder image is correct right a way.

    // This code was commented out to prevent sudden change of aspect ratio
    // in viewfinder on some phones.


    // Find resolution that matches to device's full screen, 16:9
    QImageEncoderSettings imageSettings;
    imageSettings.setCodec("image/jpeg");

    QList<QSize> resolutions = m_cameraImageCapture->supportedResolutions();
    //QSize resolution;

    qDebug() << "supported resolutions";

    foreach (const QSize size, resolutions) {
        qDebug() << size.width() << "x" << size.height();
        /*float a = size.width() * 1.0f / (size.height() * 1.0f);
        float b = 640.0f / 360.0f;

        if (qAbs(a - b) <= 0.1f * qMin(qAbs(a), qAbs(b))) {
            resolution = size;
            break;
        }*/
    }

    QList<QVideoFrame::PixelFormat> bformats = m_cameraImageCapture->supportedBufferFormats();
    qDebug() << "QImage::Format_Invalid code: "<<QString::number(QImage::Format_Invalid);
    qDebug() << "founded "<<QString::number(bformats.count()) << "buffer formats";
    foreach (const QVideoFrame::PixelFormat format, bformats) {
        qDebug() << "PixelFormat: " << QString::number(format);
    }

    if (!m_resolution.isNull()) {
        imageSettings.setResolution(m_resolution);

        m_cameraImageCapture->setEncodingSettings(imageSettings);

    }


    // Set the video recording objects.
    m_mediaRecorder = new QMediaRecorder(m_camera);

    m_camera->load();

    // Camera API
    connect(m_camera, SIGNAL(locked()), this, SIGNAL(locked()));
    connect(m_camera, SIGNAL(lockFailed()), this, SIGNAL(lockFailed()));

    //connect(m_camera->focus(), SIGNAL(digitalZoomChanged(qreal)), this, SIGNAL(digitalZoomChanged()));
    //connect(m_camera->focus(), SIGNAL(maximumDigitalZoomChanged(qreal)),this, SIGNAL(maximumDigitalZoomChanged()));

    connect(m_camera->exposure(), SIGNAL(exposureCompensationChanged(qreal)), this, SIGNAL(exposureCompensationChanged()));
    connect(m_camera->exposure(), SIGNAL(isoSensitivityChanged(int)), this, SIGNAL(isoValueChanged()));

    connect(m_camera, SIGNAL(stateChanged(QCamera::State)), this, SLOT(cameraStateChanged(QCamera::State)));
    connect(m_camera, SIGNAL(stateChanged(QCamera::State)), this, SIGNAL(cameraStateChanged()));

    // Image capture API
    connect(m_cameraImageCapture, SIGNAL(imageSaved(int, QString)), this, SLOT(handleImageSaved(int, QString)));

    // Video recording API
    //connect(m_mediaRecorder, SIGNAL(stateChanged(QMediaRecorder::State)), this, SLOT(videoStateChanged(QMediaRecorder::State)));
    //connect(m_mediaRecorder, SIGNAL(error(QMediaRecorder::Error)), this, SLOT(handleVideoError(QMediaRecorder::Error)));

    // Set the initial capture mode to image capturing.
    m_camera->setCaptureMode(QCamera::CaptureStillImage);

    // Begin the receiving of view finder frames.
    m_camera->start();

    m_currentDevice = device;
    emit currentDeviceChanged();

    if (m_currentDevice == availableDevices()[0]) {
        m_frontCamera = false;
    }
    else {
        m_frontCamera = true;
    }

    emit frontCameraChanged();
}


/*!
  Stops the camera and releases all resources. Safe to call even when the
  camera was never started (destroyResources() handles null pointers).
*/
void QNativeCamera::stop()
{
    destroyResources();
}

void QNativeCamera::imageAvailable(int id ,QVideoFrame buffer) {
    qDebug() << "QNativeCamera captured id:" << QString::number(id);
}

void QNativeCamera::imageCaptured ( int id, const QImage & preview ) {
    qDebug() << "QHdrCamera::imageCaptured id:" << QString::number(id);    
}

/*!
  Slot for QCameraImageCapture::imageExposed(). Logs the capture id.
*/
void QNativeCamera::imageExposed(int id)
{
    // Fixed copy/paste log prefix: this class is QNativeCamera, not QHdrCamera.
    qDebug() << "QNativeCamera::imageExposed id" << QString::number(id);
}

/*!
  Returns the name of the camera device currently in use, or an empty
  string when no camera is started.
*/
QString QNativeCamera::currentDevice() const
{
    return m_currentDevice;
}


/*!
  Returns true when the secondary (front) camera is in use, false for the
  primary camera.
*/
bool QNativeCamera::frontCamera() const
{
    return m_frontCamera;
}

/*!
  Returns the capture resolution set via setResolution(); a null QSize
  means no explicit resolution has been requested.
*/
QSize QNativeCamera::resolution()
{
    return m_resolution;
}

/*!
  Stores the requested capture resolution. It is applied to the image
  encoder settings the next time start() is called.
*/
void QNativeCamera::setResolution(QSize &value)
{
    qDebug() << "Setting resolution to "
             << QString::number(value.width()) << "x"
             << QString::number(value.height());
    m_resolution = value;
}

/*!
  Paints the camera view finder frame.

  Draws the most recent frame (m_imageFrame) centered within the item's
  bounding rect, filling the margins with black. When no frame has arrived
  yet, the whole item is painted black.
*/
void QNativeCamera::paint(QPainter *painter,
                         const QStyleOptionGraphicsItem *option,
                         QWidget *widget)
{

    Q_UNUSED(option);
    Q_UNUSED(widget);

    if (!m_imageFrame.isNull()) {

        // A frame not yet seen by paint() would be analyzed here; marking it
        // processed lets updateFrame() accept the next incoming frame.
        if (m_processedFrameCounter != m_incomingFrameCounter) {
            /*if (m_imageAnalyzer) {
                m_imageAnalyzer->analyze((unsigned int*)m_imageFrame.bits(),
                                         m_imageFrame.width(),
                                         m_imageFrame.height(),
                                         m_imageFrame.bytesPerLine() / 4,
                                         false);
            }*/

            m_processedFrameCounter = m_incomingFrameCounter;
        }

        // Top-left corner that centers the frame inside the item.
        QPointF upperLeft = boundingRect().center() -
                QPointF(m_imageFrame.width() / 2,
                        m_imageFrame.height() / 2);

        // Draw the black borders on each side of the centered frame.
        // NOTE(review): the second fillRect passes boundingRect().right() as
        // the *width*, so the fill extends beyond the item's right edge —
        // presumably harmless if painting is clipped to the item, but verify.
        painter->fillRect(0, 0, upperLeft.x(), boundingRect().height(),
                          Qt::black);
        painter->fillRect(upperLeft.x() + m_imageFrame.width(), 0,
                          boundingRect().right(), boundingRect().bottom(),
                          Qt::black);

        painter->drawImage(QRect(upperLeft.x(), upperLeft.y(),
                                 m_imageFrame.width(),
                                 m_imageFrame.height()), m_imageFrame);
    }
    else {
        // No frame received yet: black screen.
        painter->fillRect(boundingRect(), Qt::black);
        qDebug() << "frame null:" << QString::number(m_incomingFrameCounter);
    }

}

/*!
  Stores the frame as member to allow it to be processed on paint.
  Returns false when there is error, otherwise returns true.
*/
bool QNativeCamera::updateFrame(const QVideoFrame &frame)
{
    if( m_preview ) {
        if (m_processedFrameCounter != m_incomingFrameCounter) {
            // Discard frame.
            return true;
        }
        m_incomingFrameCounter++;
        m_imageFrame = i_preview.scaled(frame.width(), frame.height()).convertToFormat(QImage::Format_RGB32);
        //m_imageFrame = i_preview.convertToFormat(QImage::Format_RGB32);


        update();
        //qDebug() << "preview:" << QString::number(m_incomingFrameCounter);
    } else {
        if (!frame.isValid()) {
            qDebug() << "CustomCameras::updateFrame: Invalid frame";
            return false;
        }

        if (m_processedFrameCounter != m_incomingFrameCounter) {
            // Discard frame.
            return true;
        }

        m_incomingFrameCounter++;

        QVideoFrame f = frame;

        if (f.map(QAbstractVideoBuffer::ReadOnly)) {
            if (m_imageFrame.isNull() || m_imageFrame.width() != f.width() ||
                    m_imageFrame.height() != f.height()) {
                m_imageFrame = QImage(f.width(), f.height(), QImage::Format_RGB32);
            }

            memcpy(m_imageFrame.bits(), f.bits(), f.mappedBytes());

            f.unmap();

            update();
        }
    }

    return true;
}

/*!
  Capture image with the currently set attributes.
*/
void QNativeCamera::captureImage()
{
    qDebug() << "QNativeCamera capture";
    if (!m_cameraImageCapture) {
        return;
    }

    QString fileName =
            QDateTime::currentDateTime().toString("yyyy-MM-dd-hh-mm-ss.jpg");

    m_cameraImageCapture->capture(fileName);    
}

/*!
  Handles the QCameraImageCapture::imageSaved signal. The imageSaved signal
  of this class is emitted via a zero-timeout timer rather than directly:
  if it were connected straight through to QML and the camera were stopped
  from there, the application would crash because stop() destroys the
  QCameraImageCapture instance that is still emitting.
*/
void QNativeCamera::handleImageSaved(int id, const QString &fileName)
{
    Q_UNUSED(id);

    // Cache the saved image for preview mode and remember its path.
    i_preview.load(fileName);
    m_galleryImage = fileName;

    // Defer the signal emission to the event loop (see note above).
    QTimer::singleShot(0, this, SLOT(emitImageSavedSignal()));
}


/*!
  Emits the imageSaved signal with the path of the most recently saved
  image. Invoked via a zero-timeout timer from handleImageSaved().
*/
void QNativeCamera::emitImageSavedSignal()
{
    emit imageSaved(m_galleryImage);
}


/*!
  Reacts to camera state changes. Once the camera becomes active, all
  camera-property change signals are re-emitted so the QML side refreshes
  its bindings against the freshly loaded camera.
*/
void QNativeCamera::cameraStateChanged(QCamera::State state)
{
    if (state != QCamera::ActiveState)
        return;

    emit digitalZoomChanged();
    emit maximumDigitalZoomChanged();

    emit exposureCompensationChanged();
    emit exposureModeChanged();
    emit supportedExposureModesChanged();

    emit flashModeChanged();
    emit supportedFlashModesChanged();

    emit isoValueChanged();
    emit supportedIsoValuesChanged();

    emit whiteBalanceModeChanged();
    emit supportedWhiteBalanceModesChanged();
}

/*!
  Returns the exposure compensation value last set through
  setExposureCompensation().
*/
qreal QNativeCamera::exposureCompensation()
{
    return m_exposureCompensation;
}

/*!
  Stores the exposure compensation value, applies it to the camera when
  one exists, and emits exposureCompensationChanged().
*/
void QNativeCamera::setExposureCompensation(qreal value)
{
    m_exposureCompensation = value;

    // Bug fix: guard against a null camera — this slot crashed when called
    // before start() (or after stop()).
    if (m_camera) {
        m_camera->exposure()->setExposureCompensation(m_exposureCompensation);
        qDebug() << "QNativeCamera: exposure compensation passed:" << QString::number(value);
        // Bug fix: log the actual exposure compensation; the original logged
        // aperture() while labelling it "exposure compensation".
        qDebug() << "QNativeCamera: exposure compensation:"
                 << QString::number(m_camera->exposure()->exposureCompensation());
    }

    emit exposureCompensationChanged();
}

/*!
  Returns true when preview mode is enabled, i.e. the last captured still
  image is shown instead of the live view finder frames.
*/
bool QNativeCamera::preview()
{
    return m_preview;
}

/*!
  Enables or disables preview mode (see preview()).
*/
void QNativeCamera::setPreview(bool value)
{
    m_preview = value;
}
