#pragma once
#include "VideoRecoder-i.h"


/* Default encoder parameters; layout mirrors FFmpeg's doc/examples/muxing.c. */
#define STREAM_FRAME_RATE 25/* default output frame rate, images/s */
#define STREAM_PIX_FMT    AV_PIX_FMT_YUV420P /* default encoder pixel format */
#define SCALE_FLAGS		SWS_BICUBIC /* swscale algorithm used when rescaling frames */

/* Capture backend selector (consumed by the .cpp, presumably via #if):
 * 1 = DirectShow device capture, 0 = GDI screen grab (gdigrab). */
#define USE_DSHOW 0

/**
 * Screen/audio recorder combining Windows waveIn audio capture with FFmpeg
 * muxing/encoding. Declaration only — all method bodies live in the .cpp.
 *
 * NOTE(review): the base interface is spelled "IVideoRecoder" (missing 'r');
 * it matches the included "VideoRecoder-i.h", so fixing the typo would require
 * renaming the interface header and every client — flagged, not changed here.
 */
class CVideoRecorder : public IVideoRecoder
{
public:
	CVideoRecorder();
	~CVideoRecorder();

	// Begins microphone capture (presumably opens hWaveIn and queues the
	// _header buffers — body not visible here; confirm in the .cpp).
	void startMicrophone();

	// The Set* methods below presumably store jparam into the matching
	// m_* member (bodies not visible) — TODO confirm against the .cpp.
	void SetAudioInput(int jparam);
	void SetAudioVolume(int jparam);
	void SetAudioFrequency(int jparam);
	void SetAudioBites(int jparam);   // NOTE(review): "Bites" is a typo for "Bits"; part of the public interface, so left as-is
	void SetAudioType(int jparam);
	void SetAudioFormat(int jparam);
	void SetAudioIS(int jparam);
	void SetVideoWidth(int jparam);
	void SetVideoHeight(int jparam);
	void SetVideoFrameRates(int jparam);
	void SetVideoCodec(int jparam);

private:
	// Event handles used to coordinate the capture/playback threads.
	HANDLE hEvent_BufferReady;
	HANDLE hEvent_FinishedPlaying;

	int				_iBuf;      // index into _header[] (presumably the buffer currently in flight)
	int				_iplaying;
	unsigned long	result;     // NOTE(review): appears to hold an MMRESULT/status; verify in the .cpp

	// Windows waveform-audio input device and its capture format.
	HWAVEIN hWaveIn;
	WAVEFORMATEX pFormat;

	// Triple-buffered waveIn capture headers.
	enum { NUM_BUF = 3 };
	WAVEHDR _header[NUM_BUF];

	int		g_captureStopFlag = 0;  // if >0, stop
	int		g_capturePauseFlag = 0;  // if >0, pause

	// Recording settings (defaults shown); set via the public Set* methods.
	int m_audioinput = 0;//0 = MIC (other values not documented here — see .cpp)
	int m_audiovolume = 10;//volume level, default 10
	int m_audiofrequency = 44100;//sample rate in Hz
	int m_audiobites = 16;//bits per sample
	int m_audiotype = 0;//channel layout selector: Stereo / Mono (mapping unverified)
	int m_audioformat = 0;//container selector: AVI / MP4 (mapping unverified)
	int m_audioIS = 0;//NOTE(review): purpose unclear from this header — document in the .cpp
	int m_videowith = 1440;//capture width in px; NOTE(review): name is a typo for m_videowidth, but renaming would break the .cpp
	int m_videoheight = 900;//capture height in px
	int m_videoframerates = 25;//frames per second
	int m_videoCodec = 0;//codec selector: WMV / MP4 (mapping unverified)

	// FFmpeg demux/decode state for the capture input.
	AVFormatContext	*pFormatCtx;
	int				videoindex;      // index of the video stream within pFormatCtx
	AVCodecContext	*pCodecCtx;
	AVCodec			*pCodec;
	AVPacket		*packet;
	AVFrame			*pFrame;

	// Pixel-format/scaling converter for captured frames.
	SwsContext		*img_convert_ctx;

	HANDLE hThread;      // recording worker thread handle
	HANDLE hThreadPlay;  // playback worker thread handle

	// a wrapper around a single output AVStream
	// (structure and helper methods below follow FFmpeg's muxing.c example)
	typedef struct OutputStream {
		AVStream *st;
		AVCodecContext *enc;

		/* pts of the next frame that will be generated */
		int64_t next_pts;
		int samples_count;

		AVFrame *frame;
		AVFrame *tmp_frame;

		float t, tincr, tincr2;

		struct SwsContext *sws_ctx;
		struct SwrContext *swr_ctx;
	} OutputStream;


	// Rescales pkt's timestamps from time_base to st's time base and writes it
	// to the muxer; returns an FFmpeg error code (0 on success — verify in .cpp).
	int write_frame(AVFormatContext *fmt_ctx, const AVRational *time_base, AVStream *st, AVPacket *pkt);

	/* Add an output stream. */
	void add_stream(OutputStream *ost, AVFormatContext *oc,
		AVCodec **codec, enum AVCodecID codec_id);

	/**************************************************************/
	/* audio output */

	// Allocates an AVFrame with the given sample format, layout, rate and size.
	AVFrame *alloc_audio_frame(enum AVSampleFormat sample_fmt,
		uint64_t channel_layout,
		int sample_rate, int nb_samples);

	// Opens the audio encoder and allocates the frames in ost.
	void open_audio(AVFormatContext *oc, AVCodec *codec, OutputStream *ost, AVDictionary *opt_arg);

	/* Prepare a 16 bit dummy audio frame of 'frame_size' samples and
	* 'nb_channels' channels. */
	AVFrame *get_audio_frame(OutputStream *ost);

	/*
	* encode one audio frame and send it to the muxer
	* return 1 when encoding is finished, 0 otherwise
	*/
	int write_audio_frame(AVFormatContext *oc, OutputStream *ost);

	/**************************************************************/
	/* video output */

	// Allocates a video AVFrame with the given pixel format and dimensions.
	AVFrame *alloc_picture(enum AVPixelFormat pix_fmt, int width, int height);

	// Opens the video encoder and allocates the frames in ost.
	void open_video(AVFormatContext *oc, AVCodec *codec, OutputStream *ost, AVDictionary *opt_arg);

	/* Prepare a dummy image. */
	void fill_yuv_image(AVFrame *pict, int frame_index, int width, int height);

	// Returns the next video frame to encode, or NULL when done (per muxing.c convention — confirm in .cpp).
	AVFrame *get_video_frame(OutputStream *ost);

	/*
	* encode one video frame and send it to the muxer
	* return 1 when encoding is finished, 0 otherwise
	*/
	int write_video_frame(AVFormatContext *oc, OutputStream *ost);

	// Releases the encoder context, frames and converters owned by ost.
	void close_stream(AVFormatContext *oc, OutputStream *ost);

	// Instance-side body of the recording thread (called via the static trampoline below).
	DWORD _RecordingWaitingThread();

	// Static thread entry; pData presumably carries the CVideoRecorder* — confirm in .cpp.
	static DWORD CALLBACK RecordingWaitingThread(LPVOID pData);

	static DWORD CALLBACK PlayingWaitingThread(LPVOID pData);

	// waveInOpen callback: signals buffer-ready/finished events for the worker threads
	// (exact behavior in the .cpp).
	static void CALLBACK myWaveInProc(HWAVEIN hwi, UINT uMsg, DWORD_PTR dwInstance, DWORD dwParam1, DWORD dwParam2);

	// Capture lifecycle transitions; int return presumably signals success/failure — confirm in .cpp.
	int OnStartCapturing();

	int OnStopCapturing();

	int OnPauseResumeCapturing();
};

