#ifndef SCREENCASTING_TEACHING_SYSTEM___CLIENT___MEDIA___VIDEO_PLAYER_H_
#define SCREENCASTING_TEACHING_SYSTEM___CLIENT___MEDIA___VIDEO_PLAYER_H_

//#include "video_state.h"
#include <SDL.h>

#include <stdint.h>   // uint8_t / int64_t / uint64_t used below

#include <vector>

#include "../../common/comm/conditionvariable.h"
#include "../../common/comm/lock.h"
#include "../../common/comm/thread.h"
#include "packet_queue.h"

#define VIDEO_PICTURE_QUEUE_SIZE 1
#define DEFAULT_AV_SYNC_TYPE AV_SYNC_VIDEO_MASTER
#define AV_SYNC_THRESHOLD 0.05
#define AV_NOSYNC_THRESHOLD 10.0
#define AUDIO_DIFF_AVG_NB 20
#define SAMPLE_CORRECTION_PERCENT_MAX 10
#define FF_REFRESH_EVENT (SDL_USEREVENT + 1)
#define FF_ALLOC_EVENT   (SDL_USEREVENT)
#define FF_QUIT_EVENT (SDL_USEREVENT + 2)
#define SDL_AUDIO_BUFFER_SIZE 1024
#define MAX_AUDIOQ_SIZE (5 * 16 * 1024)
#define MAX_VIDEOQ_SIZE (5 * 256 * 1024)

/**
 * Threaded video/audio player built on SDL 1.2 (YUV overlays) and FFmpeg.
 *
 * Encoded packets are pushed in from outside via enqueueVideoPacket() /
 * enqueueAudioPacket(); the player thread (run()) decodes and presents
 * them, keeping audio and video in sync through the clock bookkeeping in
 * the nested VideoState struct.
 *
 * NOTE(review): this header uses FFmpeg types and macros (AVCodecContext,
 * AVCodec, AVPacket, AVFrame, SwsContext, AVCODEC_MAX_AUDIO_FRAME_SIZE)
 * without including any FFmpeg header itself — presumably the includer
 * (or the commented-out "video_state.h") provides them; confirm, and
 * consider making this header self-contained.
 */
class VideoPlayer : public Thread {
public:
	/// @param surface Target SDL surface frames are displayed on; not
	///        owned by this class — the caller keeps ownership.
	VideoPlayer(SDL_Surface *surface);
	virtual ~VideoPlayer();

	/// Queue one encoded video packet (raw bytes + presentation
	/// timestamp) for the decoding loop.
	void enqueueVideoPacket(const std::vector<char> &data, int pts);
	/// Queue one encoded audio packet (raw bytes + presentation
	/// timestamp) for the audio callback path.
	void enqueueAudioPacket(const std::vector<char> &data, int pts);

	/// Thread entry point (Thread override): runs the playback loop.
	virtual void run();
	/// Request shutdown of the playback loop.
	void stop();

private:
	/**
	 * One decoded frame queued for display: an SDL YUV overlay plus the
	 * metadata needed to present it. Owns the overlay and releases it
	 * in the destructor (RAII).
	 */
	struct VideoPicture {
		SDL_Overlay *bmp;    // YUV overlay holding the frame pixels (owned)
		int width, height;   // dimensions the overlay was allocated with
		int allocated;       // non-zero once bmp has been allocated
		double pts;          // presentation timestamp of this frame

		VideoPicture()
			: bmp(0), width(0), height(0), allocated(0), pts(0) {}
		// Free the overlay if one was ever allocated.
		~VideoPicture() { if (bmp) SDL_FreeYUVOverlay(bmp); }
	};

	/**
	 * All decode/synchronisation state for the stream being played.
	 * The layout (clocks, audio buffer, picture queue) closely follows
	 * the classic ffplay-style player structure.
	 */
	struct VideoState {
		VideoState();
		void reset();
		int quit_;   // set non-zero to make the decode/display loops exit

		// enums
		// Which clock drives A/V sync; see getMasterClock().
		enum { AV_SYNC_AUDIO_MASTER, AV_SYNC_VIDEO_MASTER, AV_SYNC_EXTERNAL_MASTER };

		// codecs
		// NOTE(review): raw pointers; ownership/cleanup presumably lives
		// in the .cpp (reset()/cleanUpFFMPEG()) — verify no leak there.
		AVCodecContext *audioDecoderContext_;
		AVCodec		   *audioDecoder_;
		AVCodecContext *videoDecoderContext_;
		AVCodec		   *videoDecoder_;

		// clocks
		int     avSyncType_;        // one of the AV_SYNC_* values above
		double  externalClock_; /// external clock base
		int64_t externalClockTime_; // reference time the external clock was set
		double  audioClock_;        // pts of the most recently decoded audio
		double  getAudioClock();
		double  getVideoClock();
		double  getExternalClock();
		// Returns whichever clock avSyncType_ selects as master.
		double  getMasterClock();

		// audio
		PacketQueue  audioQueue_;        // encoded audio packets awaiting decode
		AVPacket     audioPacket_;       // packet currently being decoded
		uint8_t      *audioPackageData_; // read cursor into audioPacket_'s data
		int          audioPacketSize_;   // bytes remaining at audioPackageData_
		// Decoded-PCM staging buffer drained by the SDL audio callback.
		uint8_t      audioBuffer_[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 2];
		// NOTE(review): name has a typo ("Bufer") — kept as-is to match
		// the out-of-file definition/uses.
		unsigned int audioBuferSize_;
		unsigned int audioBufferIndex_;  // consumed bytes within audioBuffer_
		int          audioHwBufferSize_; // hardware audio buffer size (bytes)
		double       audioDiffCum_; /// used for AV difference average computation
		double       audioDiffAvgCoef_;   // decay coefficient for the average
		double       audioDiffThreshold_; // min divergence before correcting
		int          audioDiffAvgCount_;  // samples accumulated so far
		int          audioDecodeFrame(uint8_t *audio_buf, int buf_size, double *pts_ptr);
		// Add/drop samples to pull audio toward the master clock.
		int          synchronizeAudio(short *samples, int samples_size, double pts);

		// video
		double       frameTimer_;     // time the next frame should be shown
		double       frameLastPts_;   // pts of the last displayed frame
		double       frameLastDelay_; // last inter-frame delay used
		double       videoClock_; ///<pts of last decoded frame / predicted pts of next decoded frame
		double       videoCurrentPts_; ///<current displayed pts (different from video_clock if frame fifos are used)
		int64_t      videoCurrentPtsTime_; ///<time (av_gettime) at which we updated video_current_pts - used to have running video pts
		PacketQueue  videoQueue_;     // encoded video packets awaiting decode
		struct SwsContext *imageConvertContext_; // pixel-format/scale converter
		// Compute/repair the pts for a decoded frame and advance videoClock_.
		double       synchronizeVideo(AVFrame *src_frame, double pts);

		// Fixed-size ring of decoded frames handed to the display thread.
		// NOTE(review): "PictureQueue_" breaks the lowerCamel member
		// convention used elsewhere — kept to match the .cpp.
		VideoPicture      PictureQueue_[VIDEO_PICTURE_QUEUE_SIZE];
		int               pictureQueueSize_;   // frames currently queued
		int               pictureQueueRIndex_; // ring read index (display side)
		int               pictureQueueWIndex_; // ring write index (decode side)
		Mutex		      pictureQueueMutex_;     // guards the ring state above
		ConditionVariable pictureQueueCondition_; // signals space/data changes
		int               queuePicture(AVFrame *pFrame, double pts);
		// Wake any waiter so a blocked queuePicture() can observe quit_.
		void              queuePictureStop();
	};

	/**
	 * Helper thread that consumes the picture queue and displays frames.
	 * Holds a back-reference to the owning player (must outlive it).
	 */
	class DisplayVideo : public Thread {
	public:
		DisplayVideo(VideoPlayer &outer) : outer_(outer) {}
		virtual ~DisplayVideo() {}
		virtual void run();

	private:
		VideoPlayer &outer_;   // owning player; not owned here
	};

	DisplayVideo displayThread_;

	/**
	 * Static trampoline functions handed to the SDL/FFmpeg C APIs
	 * (timer, decode-interrupt and audio callbacks).
	 */
	class VideoPlayerCallbacks {
	public:
		static Uint32 sdlRefreshTimerCallback(Uint32 interval, void *data);
		static int decodeInterruptCallback();
		static void audioCallback(void *userdata, Uint8 *stream, int len);
	};

	// NOTE(review): duplicates VideoState's sync enum above;
	// DEFAULT_AV_SYNC_TYPE resolves against whichever is in scope at the
	// point of use — consider consolidating into a single definition.
	enum { AV_SYNC_AUDIO_MASTER, AV_SYNC_VIDEO_MASTER, AV_SYNC_EXTERNAL_MASTER };

	SDL_Surface *surface_;  // display target passed to the constructor (not owned)
	SDL_Event   event_;     // scratch event used by the playback loop
	VideoState  videoState; // all decode/sync state (note: no trailing underscore)

	void play();
	void initializeFFMPEG();
	void cleanUpFFMPEG();
	void allocPicture();   // (re)allocate the overlay for the next VideoPicture
	void videoDisplay();   // blit the current picture to surface_

	// dispatchers
	void scheduleRefresh(int delay);        // arm an SDL timer for FF_REFRESH_EVENT
	void videoRefreshTimer(void *userdata); // handle FF_REFRESH_EVENT: show next frame

	// callback data
	// Static because the C callback entry points above cannot carry a
	// `this` pointer — limits usage to one active player instance.
	static VideoState *global_video_state;
	static uint64_t    global_video_pkt_pts;

	/** These are called whenever we allocate a frame
	 *  buffer. We use this to store the global_pts in
	 *  a frame at the time it is allocated.
	 */
	static int our_get_buffer(struct AVCodecContext *c, AVFrame *pic);
	static void our_release_buffer(struct AVCodecContext *c, AVFrame *pic);
};

#endif /* SCREENCASTING_TEACHING_SYSTEM___CLIENT___MEDIA___VIDEO_PLAYER_H_ */
