#pragma once

#include "Audio.h"
//#include "Counter.h"
#include "Mem2D.h"
//#include <vector>


//+==================================================================+
//+==================================================================+
//	MediaFile — streams a media file through FFmpeg and feeds the
//	decoded PCM to an XAudio2-style voice (BufferedVoice base class),
//	while buffering decoded video frames for the caller to fetch via
//	getNextFrame().
//	NOTE(review): the inline methods below use `buffers`, `channels`
//	and `bitsPerSample`, which are presumably inherited from
//	BufferedVoice (declared in Audio.h) — confirm there.
class MediaFile : public BufferedVoice
{
//+------------------------------------------------------------------+
//	DISALLOW_COPY_AND_ASSIGN
//	Copy ctor / assignment declared private and left undefined:
//	the class owns FFmpeg contexts and a CRITICAL_SECTION, which
//	must not be duplicated.
	MediaFile		(const MediaFile&);
	void operator=	(const MediaFile&);

//+------------------------------------------------------------------+
//	FFmpeg demuxer/decoder wrapper; owns the AVFormatContext etc.
//	(see FFmpegMediaFile — declared outside this header's view).
	FFmpegMediaFile	file;
//+------------------------------------------------------------------+
// audio frame buffer(s) and pcm data
//	Mem2D<int64_t>		pts;
	// Stream indices inside the container, and the matching AVStream
	// pointers (either may be absent — see the NULL checks in the
	// getters below, which treat video_stream as nullable).
	int					audio_stream_index,
							video_stream_index;
	const AVStream		*audio_stream,
							*video_stream;
	Mem2D<uint8_t>		audio_buffer;		// PCM samples
	Mem2D<AVFrame>		video_buffer;		// 1 * RGB frame + numBuffers * YUV frames
//	int					video_src_frame,	// the index of the next to be read frame.
//							video_dst_frame;	// the index of the next to be written frame.
	SwsContext			*video_to_rgb;		// context for YUV to RGB conversion
	int					picture_number,
							audio_frame_size;
	bool					eof;					// end of file reached?

	CRITICAL_SECTION	file_access;		// file reading must be synchronized

//+------------------------------------------------------------------+
//	Buffers the audio and video packets and decodes the audio packets.
//	Overrides the BufferedVoice buffer-refill hook; `i` is the index
//	of the PCM buffer to fill. Presumably called from the audio
//	callback thread, hence the file_access critical section — confirm
//	against the .cpp.
	virtual bool fillBuffer(unsigned i);

//+------------------------------------------------------------------+
//	unsigned				currBuffer;
//	int64_t				currTimestamp;
//	bool					eof;

public:
//////////////////////////////////////////////////////////////////////
//	If audio == NULL, only video streams will be used.
// If numBuffers is set to 1, audio data is decoded right away.
//	Video decoding needs numBuffers >= 2.
	MediaFile(Audio *audio, DWORD numBuffers = 10);
	MediaFile(Audio *audio, const std::wstring &filename, DWORD numBuffers = 10, float volume = 1.0f, bool blockBuffers = false);
	// Constructs a voice directly from raw PCM data instead of a file.
	MediaFile(Audio *audio, const uint8_t *pcmData, size_t totalBytes, WORD channels = 2, WORD bitsPerSample = 16, DWORD sampleRate = 44100);
	~MediaFile();

//+------------------------------------------------------------------+
//	Opens the given media file for streaming; close() releases it.
	void open(const std::wstring &filename);
	void close();

//	const	XAUDIO2_BUFFER *getBuffers() const	{	return &buffers[0];	}
//			XAUDIO2_BUFFER *getBuffers()			{	return &buffers[0];	}

//+------------------------------------------------------------------+
//	Size of one PCM frame in samples.
//	Derived from the first buffer's byte count; returns 0 when no
//	buffers have been created yet (e.g. before open()).
	unsigned audioFrameSize() const
	{
//		return audio_frame_size;
		if(buffers.size())	return buffers[0].AudioBytes / (channels * bitsPerSample / 8);
		else						return 0;
	}
//+------------------------------------------------------------------+
//	Size of one PCM buffer in samples.
//	NOTE(review): AVCODEC_MAX_AUDIO_FRAME_SIZE was removed from newer
//	FFmpeg releases — this header targets an older FFmpeg; confirm the
//	pinned version before upgrading.
	unsigned audioBufferSize() const
	{
		return AVCODEC_MAX_AUDIO_FRAME_SIZE / (channels * bitsPerSample / 8);
	}

//+------------------------------------------------------------------+
//	Start of the PCM buffers.
//	Reinterprets the byte buffer as 16-bit samples; valid for the
//	default bitsPerSample == 16 configuration.
	const	int16_t *audioBuffers() const	{	return (const int16_t*) &audio_buffer[0];	}
			int16_t *audioBuffers()			{	return		 (int16_t*) &audio_buffer[0];	}

//+------------------------------------------------------------------+
//	Get index into the PCM buffer of the currently playing sample,
//	unsigned getCurrentSample() const;

//+------------------------------------------------------------------+
//	Seeks file to the time in ms. Only useful for streaming files.
//	`seekSafe` semantics are defined in the .cpp — presumably a
//	slower, keyframe-accurate seek; confirm there.
	void seek(int64_t time_in_ms, bool seekSafe = false);

//+------------------------------------------------------------------+
//	True once the demuxer has reached the end of the file.
	bool endOfFile() const					{	return eof;	}
/*
//+------------------------------------------------------------------+
	void WINAPI OnBufferStart(void *pBufferContext)
	{
		int currBuffer = (int)pBufferContext;
		endReached = buffers[currBuffer].Flags == XAUDIO2_END_OF_STREAM;
	//----
	//	Some codecs, like ogg don't seem to report every frametime.
//		if(pts[currBuffer])
//			currTimestamp = pts[currBuffer];
	}
*/

//+------------------------------------------------------------------+
//	Video properties; all return 0 when the file has no video stream.
//	NOTE(review): AVStream::codec and r_frame_rate are deprecated in
//	modern FFmpeg (replaced by codecpar / avg_frame_rate) — kept as-is
//	for the FFmpeg version this project builds against.
	int		getWidth()		const			{	return video_stream ? video_stream->codec->width  : 0;	}
	int		getHeight()		const			{	return video_stream ? video_stream->codec->height : 0;	}
	double	getFrameRate()	const			{	return video_stream ? double(video_stream->r_frame_rate.num) / double(video_stream->r_frame_rate.den) : 0;	}
	double	getDuration()	const			{	return file.getDuration();	}

//+------------------------------------------------------------------+
//	Decodes one frame and stores it in the buffer.
//	Returns false if buffer is already full.
	bool bufferFrame();
	// Number of decoded frames currently waiting in video_buffer.
	int bufferedFrames() const;

//+------------------------------------------------------------------+
//	Will return the next frame, or NULL if not ready yet, or eof.
	AVFrame *getNextFrame();
};

