//#include "stdafx.h"
#include "recorder_fun.h"

/* settings (written by the recorder's driver code) */
int qmax = 2;            /* max quantizer handed to the encoder in add_video_stream (quality knob) */
int sc_width = 500;      /* capture/encode width in pixels */
int sc_height = 500;     /* capture/encode height in pixels */
int recording = 0;       /* cleared by SetError() to signal the capture loop to stop */
clock_t start = 0;       /* recording start time; pts base in write_video_frame */

/* constants */
static const int sws_flags = SWS_BICUBIC;  /* swscale interpolation mode */

/* video output — module-private state shared by open/write/close_video */
static AVFrame *picture;                   /* frame fed to the encoder */
static uint8_t *video_outbuf;              /* encoded-packet scratch buffer */
static char error_message[200];            /* last error text, exposed via GetError() */
static int frame_count, video_outbuf_size;
static struct SwsContext *img_convert_ctx; /* cached BGR24 -> codec-fmt scaler */

/* Captures a width*height region of the screen at (x,y), draws the mouse
 * cursor on top, and stores 24-bit BGR pixel rows into 'frame', flipping the
 * bottom-up DIB in place so the buffer ends up top-down.
 * 'frame' must hold at least height * lineSizeOfFrame(width) bytes.
 * Returns 1 on success, 0 on failure. */
int getScreenshotWithCursor(void* frame, int x, int y, int width, int height)
{
	int successful = 0;
	HDC screen, bitmapDC;
	HBITMAP screen_bitmap;

	screen = GetDC(NULL);
	bitmapDC = CreateCompatibleDC(screen);
	screen_bitmap = CreateCompatibleBitmap(screen, width, height);
	SelectObject(bitmapDC, screen_bitmap);
	if (BitBlt(bitmapDC, 0, 0, width, height, screen, x, y, SRCCOPY))
	{
		CURSORINFO cursor_info;
		cursor_info.cbSize = sizeof(CURSORINFO);
		if (GetCursorInfo(&cursor_info))
		{
			/* CURSOR_SHOWING is a flag bit: test it with '&' instead of
			   comparing the whole flags word, which may carry other bits. */
			if (cursor_info.flags & CURSOR_SHOWING)
			{
				HICON hcur = CopyIcon(cursor_info.hCursor);
				ICONINFO icon_info;
				if (hcur && GetIconInfo(hcur, &icon_info))
				{
					int pos_x = cursor_info.ptScreenPos.x - x - icon_info.xHotspot;
					int pos_y = cursor_info.ptScreenPos.y - y - icon_info.yHotspot;
					DrawIcon(bitmapDC, pos_x, pos_y, hcur);
					/* GetIconInfo hands us copies of the bitmaps; free them. */
					if (icon_info.hbmColor) DeleteObject(icon_info.hbmColor);
					if (icon_info.hbmMask) DeleteObject(icon_info.hbmMask);
				}
				/* CopyIcon result was leaked before; it must be destroyed. */
				if (hcur) DestroyIcon(hcur);
			}
		}
		int header_size = sizeof(BITMAPINFOHEADER) + 256*sizeof(RGBQUAD);
		size_t line_size = lineSizeOfFrame(width);
		PBITMAPINFO lpbi = (PBITMAPINFO) malloc(header_size);
		if (lpbi)
		{
			/* biSize describes the header structure only, not the trailing
			   palette space (the old code stored header_size here). */
			lpbi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
			lpbi->bmiHeader.biWidth = width;
			lpbi->bmiHeader.biHeight = height;
			lpbi->bmiHeader.biPlanes = 1;
			lpbi->bmiHeader.biBitCount = 24;
			lpbi->bmiHeader.biCompression = BI_RGB;
			lpbi->bmiHeader.biSizeImage = height*line_size;
			lpbi->bmiHeader.biXPelsPerMeter = 0;
			lpbi->bmiHeader.biYPelsPerMeter = 0;
			lpbi->bmiHeader.biClrUsed = 0;
			lpbi->bmiHeader.biClrImportant = 0;
			if (GetDIBits(bitmapDC, screen_bitmap, 0, height, (LPVOID)frame, lpbi, DIB_RGB_COLORS))
			{
				/* GetDIBits delivers rows bottom-up; swap rows in place so the
				   buffer is top-down, as the encoder path expects.
				   unsigned char* avoids arithmetic on void* (a GCC extension). */
				unsigned char *row_top = (unsigned char *)frame;
				unsigned char *row_bot = row_top + line_size*(height - 1);
				unsigned char *temp = (unsigned char *) malloc(line_size);
				if (temp)
				{
					int i;
					for (i = 0; i < height / 2; ++i)
					{
						memcpy(temp, row_top, line_size);
						memcpy(row_top, row_bot, line_size);
						memcpy(row_bot, temp, line_size);
						row_top += line_size;
						row_bot -= line_size;
					}
					free(temp);
					successful = 1;
				}
			}
			free(lpbi);
		}
	}
	DeleteObject(screen_bitmap);
	DeleteDC(bitmapDC);
	ReleaseDC(NULL, screen);
	return successful;
}

/* Bytes per pixel row of a 24 bpp DIB: each scanline is padded up to a
 * multiple of 4 bytes (DWORD alignment), per the Windows bitmap format. */
int lineSizeOfFrame(int width)
{
    return (width*24 + 31)/32 * 4;
}

/* Resets the module-level error message to the empty string. */
void ClearError()
{
    /* sizeof the buffer instead of the magic constant 200, so the two
       cannot drift apart if the buffer is ever resized. */
    memset(error_message, 0, sizeof error_message);
}

void SetError( const char* error)
{
    strcpy(error_message, error);
    printf(error_message);
    recording = 0;
}

/* Returns a pointer to the module's static error-message buffer
 * (empty string when no error has been recorded since ClearError). */
char * GetError()
{
    return error_message;
}

/* Initializes mpeg */
/* Registers exactly the ffmpeg components this recorder needs:
 * the MPEG-4 encoder, the AVI (de)muxer and the file protocol.
 * Safe to call repeatedly; only the first call does any work. */
void register_mpeg()
{
    static int initialized = 0;

    if (!initialized) {
        initialized = 1;

        /* video codec */
        REGISTER_ENCODER  (MPEG4, mpeg4);

        /* container (de)muxer */
        REGISTER_MUXDEMUX (AVI, avi);

        /* I/O protocol */
        REGISTER_PROTOCOL (FILE, file);
    }
}

/* add a video output stream */
AVStream *add_video_stream(AVFormatContext *oc, enum CodecID codec_id)
{
    AVCodecContext *c;
    AVStream *st;

    st = av_new_stream(oc, 0);
    if (!st) {
        SetError("Could not alloc stream.");
        return NULL;
    }

    c = st->codec;
    c->codec_id = codec_id;
    c->codec_type = CODEC_TYPE_VIDEO;

    /* resolution must be a multiple of two */
    c->width = sc_width;
    c->height = sc_height;
    /* time base: this is the fundamental unit of time (in seconds) in terms
       of which frame timestamps are represented. for fixed-fps content,
       timebase should be 1/framerate and timestamp increments should be
       identically 1. */
    c->time_base.den = STREAM_FRAME_RATE;
    c->time_base.num = 1;
    c->gop_size = 12; /* emit one intra frame every twelve frames at most */
    c->pix_fmt = STREAM_PIX_FMT;
    c->qmax = qmax;
    if (c->codec_id == CODEC_ID_MPEG2VIDEO) {
        /* just for testing, we also add B frames */
        c->max_b_frames = 2;
    }
    if (c->codec_id == CODEC_ID_MPEG1VIDEO){
        /* Needed to avoid using macroblocks in which some coeffs overflow.
           This does not happen with normal video, it just happens here as
           the motion of the chroma plane does not match the luma plane. */
        c->mb_decision=2;
    }
    // some formats want stream headers to be separate
    if(oc->oformat->flags & AVFMT_GLOBALHEADER)
        c->flags |= CODEC_FLAG_GLOBAL_HEADER;

    return st;
}

AVFrame *alloc_picture(enum PixelFormat pix_fmt, int width, int height)
{
    AVFrame *picture;
    uint8_t *picture_buf;
    int size;

    picture = avcodec_alloc_frame();
    if (!picture)
        return NULL;
    size = avpicture_get_size(pix_fmt, width, height);
    picture_buf = (uint8_t*) av_malloc(size);
    if (!picture_buf) {
        av_free(picture);
        return NULL;
    }
    avpicture_fill((AVPicture *)picture, picture_buf,
                   pix_fmt, width, height);
    return picture;
}

/* Opens the stream's encoder and allocates the module's encode buffers:
 * video_outbuf (for non-raw output formats) and the file-static 'picture'
 * fed to the encoder. On any failure the module error is set via SetError
 * (which also stops recording) and the function returns early. */
void open_video(AVFormatContext *oc, AVStream *st)
{
    AVCodecContext *c = st->codec;

    /* find the video encoder */
    AVCodec *codec = avcodec_find_encoder(c->codec_id);
    if (!codec) {
        SetError("codec not found");
        return;
    }

    /* open the codec */
    if (avcodec_open(c, codec) < 0) {
        SetError("could not open codec");
        return;
    }

    video_outbuf = NULL;
    if (!(oc->oformat->flags & AVFMT_RAWPICTURE)) {
        /* allocate the encoded-output buffer; buffers passed into lav* can
           be allocated any way you prefer, as long as they're aligned enough
           for the architecture and freed appropriately (av_free for buffers
           allocated with av_malloc). */
        video_outbuf_size = avpicture_get_size(c->pix_fmt, sc_width, sc_height);
        if (video_outbuf_size < FF_MIN_BUFFER_SIZE)
            video_outbuf_size = FF_MIN_BUFFER_SIZE;
        video_outbuf = (uint8_t*) av_malloc(video_outbuf_size);
        if (!video_outbuf) {  /* previously unchecked allocation */
            SetError("Could not allocate video output buffer");
            return;
        }
    }

    /* allocate the raw picture handed to the encoder */
    picture = alloc_picture(c->pix_fmt, c->width, c->height);
    if (!picture) {
        SetError("Could not allocate picture");
        return;
    }
}

/* Converts (when needed) and encodes one captured BGR24 frame, then writes
 * the resulting packet into the output file. 'frame' points at a top-down
 * BGR24 buffer of sc_width x sc_height rows (see getScreenshotWithCursor).
 * On a write failure the module error is set via SetError. */
void write_video_frame(AVFormatContext *oc, AVStream *st, uint8_t *frame)
{
    int out_size, ret;
    AVCodecContext *c;

    c = st->codec;

    if (c->pix_fmt != PIX_FMT_BGR24) {
        /* the capture is BGR24; convert it to the codec pixel format.
           The scaler context is built lazily and cached in the file-static
           img_convert_ctx (freed in close_video). */
        if (img_convert_ctx == NULL) {
            img_convert_ctx = sws_getContext(c->width, c->height,
                                             PIX_FMT_BGR24,
                                             c->width, c->height,
                                             c->pix_fmt,
                                             sws_flags, NULL, NULL, NULL);
            if (img_convert_ctx == NULL) {
                SetError("Cannot initialize the conversion context");
                return;
            }
        }
        /* packed BGR24 input: one plane, one stride */
        uint8_t* temp[] = {frame, NULL, NULL};
        int lin[] = {lineSizeOfFrame(sc_width), 0, 0};
        sws_scale(img_convert_ctx, temp, lin,
                  0, c->height, picture->data, picture->linesize);
    }
    else
    {
        /* codec consumes BGR24 directly: point the encode frame at the
           caller's buffer. NOTE(review): this discards the buffer that
           alloc_picture put in data[0]; close_video later frees data[0] —
           verify ownership on this path (possible leak / wrong free). */
        picture->data[0] = frame;
        picture->linesize[0] = lineSizeOfFrame(sc_width);
    }

    if (oc->oformat->flags & AVFMT_RAWPICTURE) {
        /* raw video case: the AVPicture struct itself is the packet payload.
           The API will change slightly in the near future for that. */
        AVPacket pkt;
        av_init_packet(&pkt);

        pkt.flags |= PKT_FLAG_KEY;
        pkt.stream_index= st->index;
        pkt.data= (uint8_t *)picture;
        pkt.size= sizeof(AVPicture);

        ret = av_interleaved_write_frame(oc, &pkt);
    } else {
        /* encode the image */
        out_size = avcodec_encode_video(c, video_outbuf, video_outbuf_size, picture);
        /* if zero size, it means the image was buffered */
        if (out_size > 0) {
            AVPacket pkt;
            av_init_packet(&pkt);

            /* wall-clock pts: elapsed time since 'start' rescaled into the
               stream time base (real-time capture, not fixed fps).
               NOTE(review): the guard compares pts against 0, not
               AV_NOPTS_VALUE as the disabled text suggests — confirm this
               is intentional. */
            if (c->coded_frame->pts != 0)//AV_NOPTS_VALUE)
                pkt.pts = ((clock()- start)*c->time_base.den) / (CLOCKS_PER_SEC*c->time_base.num);

            if(c->coded_frame->key_frame)
                pkt.flags |= PKT_FLAG_KEY;
            pkt.stream_index= st->index;
            pkt.data= video_outbuf;
            pkt.size= out_size;

            /* write the compressed frame in the media file */
            ret = av_interleaved_write_frame(oc, &pkt);
        } else {
            ret = 0;
        }
    }
    if (ret != 0) {
        if (ret == AVERROR(EIO))
            SetError("I/O error occurred\n"
                "Usually that means that input file is truncated and/or corrupted.");
        else
            SetError("Error while writing video frame");
        return;
    }
    frame_count++;
}

/* Releases everything open_video / write_video_frame set up: the cached
 * scaler context, the codec, the encoder frame and the output buffer. */
void close_video(AVFormatContext *oc, AVStream *st)
{
    if (img_convert_ctx != NULL)
    {
        sws_freeContext(img_convert_ctx);
        img_convert_ctx = NULL;
    }
    avcodec_close(st->codec);
    /* NOTE(review): on the direct-BGR24 path write_video_frame repoints
       picture->data[0] at the caller's capture buffer; freeing data[0] here
       would then free that buffer and leak the one from alloc_picture —
       verify which path is taken before relying on this free. */
    av_free(picture->data[0]);
    av_free(picture);
    av_free(video_outbuf);
}

/*void SaveBitmapToFile( BYTE* pBitmapBits, LONG lWidth, LONG lHeight, WORD wBitsPerPixel, LPCTSTR lpszFileName )
{
    BITMAPINFOHEADER bmpInfoHeader = {0};
    // Set the size
    bmpInfoHeader.biSize = sizeof(BITMAPINFOHEADER);
    // Bit count
    bmpInfoHeader.biBitCount = wBitsPerPixel;
    // Use all colors
    bmpInfoHeader.biClrImportant = 0;
    // Use as many colors according to bits per pixel
    bmpInfoHeader.biClrUsed = 0;
    // Store as un Compressed
    bmpInfoHeader.biCompression = BI_RGB;
    // Set the height in pixels
    bmpInfoHeader.biHeight = lHeight;
    // Width of the Image in pixels
    bmpInfoHeader.biWidth = lWidth;
    // Default number of planes
    bmpInfoHeader.biPlanes = 1;
    // Calculate the image size in bytes
    bmpInfoHeader.biSizeImage = lWidth* lHeight * (wBitsPerPixel/8);

    BITMAPFILEHEADER bfh = {0};
    // This value should be the 'BM' letters, i.e. 0x4D42
    // 0x4D = 'M', 0x42 = 'B'; stored in reverse order to match endianness
    bfh.bfType=0x4D42;
    // Offset to the RGBQUAD
    bfh.bfOffBits = sizeof(BITMAPINFOHEADER) + sizeof(BITMAPFILEHEADER);
    // Total size of image including size of headers
    bfh.bfSize = bfh.bfOffBits + bmpInfoHeader.biSizeImage;
    // Create the file in disk to write
    HANDLE hFile = CreateFile( lpszFileName,GENERIC_WRITE, 0,NULL,

                               CREATE_ALWAYS, FILE_ATTRIBUTE_NORMAL,NULL);

    if( !hFile ) // return if error opening file
    {
        return;
    }

    DWORD dwWritten = 0;
    // Write the File header
    WriteFile( hFile, &bfh, sizeof(bfh), &dwWritten , NULL );
    // Write the bitmap info header
    WriteFile( hFile, &bmpInfoHeader, sizeof(bmpInfoHeader), &dwWritten, NULL );
    // Write the RGB Data
    WriteFile( hFile, pBitmapBits, bmpInfoHeader.biSizeImage, &dwWritten, NULL );
    // Close the file handle
    CloseHandle( hFile );
}//*/
