#include "camera.h"


// Index of the next NAL unit (within the encoder's current batch) to hand to
// the sink.  NOTE(review): declared static, so it is shared by ALL instances
// of CameraFramedSource — fine for a single camera source, but confirm no
// second instance is ever created concurrently.
int CameraFramedSource::nalIndex = 0;

/**************************************************************/
/* video output */

/**
 * Allocate an AVFrame with data buffers for the given pixel format and size.
 *
 * @param pix_fmt  desired pixel format (e.g. AV_PIX_FMT_YUV420P)
 * @param width    frame width in pixels
 * @param height   frame height in pixels
 * @return a newly allocated frame, or NULL on any allocation failure.
 *         The caller owns the frame and must release it with av_frame_free().
 */
static AVFrame *alloc_picture(enum AVPixelFormat pix_fmt, int width, int height)
{
    AVFrame *picture = av_frame_alloc();
    if (!picture)
        return NULL;

    picture->format = pix_fmt;
    picture->width  = width;
    picture->height = height;

    /* allocate the buffers for the frame data */
    int ret = av_frame_get_buffer(picture, 0);
    if (ret < 0) {
        fprintf(stderr, "Could not allocate frame data.\n");
        /* Was exit(1): that leaked the frame and killed the whole process.
         * Fail the same way the av_frame_alloc() path does — return NULL
         * and let the caller decide. */
        av_frame_free(&picture);
        return NULL;
    }

    return picture;
}


/**
 * Build the capture pipeline: open the camera device, allocate the YUV420P
 * staging frame, start streaming, and initialize the x264 encoder.
 */
CameraFramedSource::CameraFramedSource(UsageEnvironment &env) : FramedSource(env)
{
    // 2-frame ring buffer: if the consumer falls behind, only the newest
    // 2 captured frames are kept.
    camera = new Camera(2);
    encoder = new H264Encoder();
    // BUG FIX: height was previously passed as CAMERA_WIDTH, configuring the
    // device with a square (width x width) resolution.
    camera->initDev(CAMERA_DEV_NAME, CAMERA_WIDTH, CAMERA_HEIGHT);

    picture = alloc_picture(AV_PIX_FMT_YUV420P, camera->getWidth(), camera->getHeight());
    // alloc_picture() returns NULL on allocation failure; without this check
    // x264Init() would be handed a NULL frame.
    if (!picture) {
        fprintf(stderr, "CameraFramedSource: could not allocate YUV420P frame\n");
        exit(1);
    }

    camera->startStream();
    encoder->x264Init(picture, camera->getWidth(), camera->getHeight());
}

/**
 * Upper bound, in bytes, on a single frame (NAL unit) this source delivers.
 * live555 uses this to size the sink-side output buffer.
 */
unsigned CameraFramedSource::maxFrameSize() const
{
    const unsigned kMaxNalBytes = 40 * 1024;  // 40 KiB per NAL unit
    return kMaxNalBytes;
}

void CameraFramedSource::doGetNextFrame()
{
    if(nalIndex == encoder->nnal)
    {
        camera->readFrame(picture, AV_PIX_FMT_YUV420P, camera->getWidth(),
				camera->getHeight());
        encoder->x264Encode();
        nalIndex = 0;
        gettimeofday(&fPresentationTime, NULL);
    }
    memmove(fTo, encoder->nals[nalIndex].p_payload,
			encoder->nals[nalIndex].i_payload);
    fFrameSize = encoder->nals[nalIndex].i_payload;
    nalIndex++;
    afterGetting(this);
}
/**
 * Tear down the pipeline: release the camera and encoder, then free the
 * staging frame.  NOTE(review): the frame was handed to the encoder in
 * x264Init(), so it is freed only after the encoder is destroyed — keep
 * this order unless the encoder is confirmed not to reference it.
 */
CameraFramedSource::~CameraFramedSource()
{
    delete camera;
    delete encoder;
    // avpicture_free(&picture);  // legacy FFmpeg API, superseded below
    av_frame_free(&picture);
}
// Intentionally empty placeholder.  TODO(review): appears to be dead code —
// confirm there are no callers and remove it (the declaration in camera.h
// would need to go too).
void CameraFramedSource::getNextFrame1()
{
    
}
