#include "StreamedWaveSound.h"
#include "SoundManager.h"
#include "Sound.h"

#include <cmath>
#include <cstring>
#include <iostream>
#include <string>
#include <vector>

namespace UniverseSound
{
	/// Constructs an unopened streamed WAV clip; open() must be called
	/// before any playback can occur.
	StreamedWaveSound::StreamedWaveSound(const Ogre::String &name) : SoundClip(name), mFormatData(0), mStreamEOF(false)
	{
		// No OpenAL buffers exist yet - open() generates them
		for (int buf = 0; buf < NUM_BUFFERS; ++buf)
			mBuffers[buf] = 0;

		// NOTE(review): mStream = false looks odd for a *streamed* clip -
		// confirm the semantics of this flag in SoundClip
		mStream = false;
	}

	/// Detaches from any source, frees the OpenAL buffers and releases the
	/// parsed WAVE format data.
	StreamedWaveSound::~StreamedWaveSound()
	{
		// Drops the source attachment and deletes the AL buffer objects
		_release();

		// Clear the (now-deleted) buffer handles
		for (int buf = 0; buf < NUM_BUFFERS; ++buf)
			mBuffers[buf] = 0;

		// delete on a null pointer is a no-op, so no guard is required
		delete mFormatData;
	}

	/// Parses the RIFF/WAVE header out of fileStream, locates the "data"
	/// chunk, then generates the OpenAL streaming buffers and computes the
	/// clip length. On any parse failure an error is logged and the format
	/// data is left incomplete (mPlayTime then stays unset).
	/// NOTE(review): multi-byte fields are read raw, which assumes a
	/// little-endian host matching the on-disk layout - confirm before
	/// porting to big-endian targets.
	void StreamedWaveSound::open(Ogre::DataStreamPtr &fileStream)
	{
		char			id[5]={0};		// chunk id: 4 chars + NUL so strcmp() is safe
		unsigned short	format_tag = 0;	// WAVE format tag (0x0001 == PCM)
		unsigned long	size = 0;		// RIFF chunk size (read but unused)

		// Store stream pointer
		mAudioStream = fileStream;

		// Read in "RIFF" chunk descriptor (4 bytes)
		mAudioStream->read(id, 4);

		// Valid RIFF?
		if (!strcmp(id, "RIFF"))
		{
			// Read in chunk size (4 bytes)
			mAudioStream->read(&size, 4);

			// Read in "WAVE" format descriptor (4 bytes)
			mAudioStream->read(id, 4);

			// Valid wav?
			if (!strcmp(id,"WAVE"))
			{
				// Create format struct (reused if open() is called again)
				if (!mFormatData) mFormatData = new WavFormatData;

				// Read in "fmt" id ( 4 bytes )
				mAudioStream->read(id, 4);

				// Read in "fmt" chunk size ( 4 bytes )
				mAudioStream->read(&mFormatData->mFormatChunkSize, 4);

				// Should be 16 unless compressed ( compressed NOT supported )
				if ( mFormatData->mFormatChunkSize>=16 )
				{
					// Bytes in the fmt chunk beyond the basic 16-byte header
					unsigned short extraBytes = mFormatData->mFormatChunkSize-16;

					// Read in audio format  ( 2 bytes )
					mAudioStream->read(&format_tag, 2);

					// PCM == 0x0001, WAVEFORMATEXTENSIBLE == 0xFFFE
					if (format_tag==0x0001 || format_tag==0xFFFE)
					{
						// Read in num channels ( 2 bytes )
						mAudioStream->read(&mFormatData->mNumChannels, 2);

						// Read in sample rate ( 4 bytes )
						mAudioStream->read(&mFormatData->mSampleRate, 4);

						// Read in byte rate ( 4 bytes )
						mAudioStream->read(&mFormatData->mAvgBytesPerSecond, 4);

						// Read in byte align ( 2 bytes )
						mAudioStream->read(&mFormatData->mBlockAlign, 2);

						// Read in bits per sample ( 2 bytes )
						mAudioStream->read(&mFormatData->mBitsPerSample, 2);

						// If WAVEFORMATEXTENSIBLE...
						if (format_tag==0xFFFE)
						{
							// NOTE(review): these field labels don't match the
							// WAVEFORMATEXTENSIBLE spec (cbSize, wValidBitsPerSample,
							// 4-byte dwChannelMask), but the 8 bytes read before
							// SubFormat keep mSubFormat at the correct offset -
							// verify the intermediate fields if they are ever used.
							unsigned short sigBitsPerSample;
							unsigned short extraInfoSize;

							mAudioStream->read(&sigBitsPerSample, 2);
							mAudioStream->read(&extraInfoSize, 2);
							mAudioStream->read(&mFormatData->mSamples, 2);
							mAudioStream->read(&mFormatData->mChannelMask, 2);

							// Read in sub format ( 16 bytes )
							mAudioStream->read(&mFormatData->mSubFormat, sizeof(char[16]));
						}
						// Skip any extra (non-extensible) fmt bytes; skip()
						// avoids the old temporary heap allocation
						else if (extraBytes)
						{
							mAudioStream->skip(extraBytes);
						}

						// Read in chunk id ( 4 bytes )
						mAudioStream->read(id, 4);

						if ( !strcmp(id, "data") )
						{
							// Read in size of audio data ( 4 bytes )
							mAudioStream->read(&mFormatData->mDataSize, 4);

							// Store byte offset of start of audio data
							mFormatData->mAudioOffset = static_cast<unsigned long>(mAudioStream->tell());
						}
						else
						{
							// Scan forward chunk-by-chunk for the "data" chunk
							try
							{
								do
								{
									unsigned long chunkSize = 0;

									// Read in size of chunk data ( 4 bytes )
									mAudioStream->read(&chunkSize, 4);

									// Skip chunk
									mAudioStream->skip(chunkSize);

									// Read next chunk id
									mAudioStream->read(id, 4);
								}
								// BUGFIX: was 'strcmp(...) || eof()', which kept
								// looping AT end-of-stream; stop when the id
								// matches OR the data runs out
								while ( strcmp(id, "data") && !mAudioStream->eof() );

								// Validity check
								if (!mAudioStream->eof())
								{
									// Read in size of audio data ( 4 bytes )
									mAudioStream->read(&mFormatData->mDataSize, 4);

									// Store byte offset of start of audio data
									mFormatData->mAudioOffset = static_cast<unsigned long>(mAudioStream->tell());
								}
								else
								{
									Ogre::LogManager::getSingleton().logMessage("UniverseSound::StreamedWaveSound::open() - No WAVE data available in specified file");
								}
							}
							catch(...)
							{
								Ogre::LogManager::getSingleton().logMessage("UniverseSound::StreamedWaveSound::open() - The WAVE file is incomplete");
							}
						}
					}
					else
					{
						// BUGFIX: this branch handles an unsupported format tag;
						// the old message wrongly complained about a missing
						// 'data' chunk
						Ogre::LogManager::getSingleton().logMessage("UniverseSound::StreamedWaveSound::open() - Only PCM WAVE files are supported by UniverseSound");
					}
				}
				else
				{
					Ogre::LogManager::getSingleton().logMessage("UniverseSound::StreamedWaveSound::open() - Compressed WAVE files are not supported by UniverseSound");
				}
			}
			else
			{
				Ogre::LogManager::getSingleton().logMessage("UniverseSound::StreamedWaveSound::open() - The file specified is not a valid WAVE file");
			}
		}
		else
		{
			Ogre::LogManager::getSingleton().logMessage("UniverseSound::StreamedWaveSound::open() - The file specified is not a valid RIFF file");
		}

		// Generate audio buffers
		alGenBuffers(NUM_BUFFERS, mBuffers);

		// Calculate length in seconds.
		// BUGFIX: guard against a failed parse (null mFormatData) and
		// zero-valued fields - the old code dereferenced and divided blindly,
		// crashing on any invalid file.
		if (mFormatData && mFormatData->mNumChannels &&
			mFormatData->mSampleRate && mFormatData->mBitsPerSample >= 8)
		{
			mPlayTime = (mFormatData->mDataSize / ((mFormatData->mBitsPerSample/8) * mFormatData->mSampleRate)) / mFormatData->mNumChannels;
		}

		// Use XRAM if available
		if (SoundManager::getSingleton().isXRAMSupported())
			SoundManager::getSingleton().setXRAMBuffer(NUM_BUFFERS, mBuffers);
		
		// Check format support
		if (!_queryBufferInfo())
		{
			Ogre::LogManager::getSingleton().logMessage("UniverseSound::StreamedWaveSound::open() - The format is not supported");
		}

		// Flag the file ready so deferred play/pause/stop requests can proceed
		mFileOpened = true;
	}

	/// Chooses the OpenAL buffer format from the parsed WAVE header and sizes
	/// mBufferSize to roughly 250ms of audio, rounded down to a whole number
	/// of block-aligned sample frames.
	/// @return false when no format data exists or the multi-channel format
	///         enum is unavailable on this implementation.
	bool StreamedWaveSound::_queryBufferInfo()
	{
		if (!mFormatData)
		{
			// BUGFIX: message previously referred to StaticSound and "vorbis"
			// although this is the streamed WAV implementation
			Ogre::LogManager::getSingleton().logMessage("UniverseSound::StreamedWaveSound::_queryBufferInfo() - No WAVE format information available!", Ogre::LML_NORMAL);
			return false;
		}

		switch (mFormatData->mNumChannels)
		{
		case 1:
			{
				if ( mFormatData->mBitsPerSample==8 )
				{
					// 8-bit mono: one byte per frame, so SampleRate/4 bytes is
					// 250ms and is inherently block-aligned
					mFormat = AL_FORMAT_MONO8;
					mBufferSize = mFormatData->mSampleRate/4;
				}
				else
				{
					// 16-bit mono: queue 250ms of audio data
					mFormat = AL_FORMAT_MONO16;
					mBufferSize = mFormatData->mAvgBytesPerSecond >> 2;

					// Buffer size must be an exact multiple of the block alignment
					mBufferSize -= (mBufferSize % mFormatData->mBlockAlign);
				}
			}
			break;
		case 2:
			{
				if ( mFormatData->mBitsPerSample==8 )
				{
					// 8-bit stereo: SampleRate*2 bytes/sec, /4 for 250ms
					mFormat = AL_FORMAT_STEREO8;
					mBufferSize = mFormatData->mSampleRate >> 1;

					// Align to the 2-byte frame size
					mBufferSize -= (mBufferSize % 2);
				}
				else
				{
					// 16-bit stereo: queue 250ms of audio data
					mFormat = AL_FORMAT_STEREO16;
					mBufferSize = mFormatData->mAvgBytesPerSecond >> 2;

					// Buffer size must be an exact multiple of the block alignment
					mBufferSize -= (mBufferSize % mFormatData->mBlockAlign);
				}
			}
			break;
		case 4:
			{
				// Quadraphonic 16-bit (vendor extension enum)
				mFormat = alGetEnumValue("AL_FORMAT_QUAD16");
				if (!mFormat)
					return false;
				mBufferSize = mFormatData->mAvgBytesPerSecond >> 2;
				mBufferSize -= (mBufferSize % mFormatData->mBlockAlign);
			}
			break;
		case 6:
			{
				// 5.1 16-bit (vendor extension enum)
				mFormat = alGetEnumValue("AL_FORMAT_51CHN16");
				if (!mFormat)
					return false;
				mBufferSize = mFormatData->mAvgBytesPerSecond >> 2;
				mBufferSize -= (mBufferSize % mFormatData->mBlockAlign);
			}
			break;
		case 7:
			{
				// 6.1 16-bit (vendor extension enum)
				mFormat = alGetEnumValue("AL_FORMAT_61CHN16");
				if (!mFormat)
					return false;
				mBufferSize = mFormatData->mAvgBytesPerSecond >> 2;

				// BUGFIX: was 'mBufferSize = (mBufferSize % BlockAlign)', which
				// kept only the remainder; round DOWN to a multiple instead
				mBufferSize -= (mBufferSize % mFormatData->mBlockAlign);
			}
			break;
		case 8:
			{
				// 7.1 16-bit (vendor extension enum)
				mFormat = alGetEnumValue("AL_FORMAT_71CHN16");
				if (!mFormat)
					return false;
				mBufferSize = mFormatData->mAvgBytesPerSecond >> 2;

				// BUGFIX: same remainder bug as the 6.1 case above
				mBufferSize -= (mBufferSize % mFormatData->mBlockAlign);
			}
			break;
		default:
			{
				// Unknown channel count: fall back to stereo 16-bit
				Ogre::LogManager::getSingleton().logMessage("UniverseSound - Unable to determine buffer format", Ogre::LML_NORMAL);
				Ogre::LogManager::getSingleton().logMessage("UniverseSound - Trying to set buffer format to default stereo ...", Ogre::LML_NORMAL);
				mFormat = AL_FORMAT_STEREO16;
				mBufferSize = mFormatData->mAvgBytesPerSecond >> 2;
				mBufferSize -= (mBufferSize % mFormatData->mBlockAlign);
			}
			break;
		}
		return true;
	}

	void StreamedWaveSound::_release()
	{
		if (mSource != AL_NONE)
		{
			ALuint src = AL_NONE;
			setSource(src);
		}
		alDeleteBuffers(NUM_BUFFERS, mBuffers);
		mPlayPosChanged = false;
		mPlayPos = 0.f;
	}

	/// Fills and queues as many of our buffers on the attached source as the
	/// stream can supply; stops early once _stream() reports no more data.
	void StreamedWaveSound::_prebuffer()
	{
		// Nothing to queue onto without a source
		if (mSource == AL_NONE)
			return;

		for (int idx = 0; idx < NUM_BUFFERS; ++idx)
		{
			// Stop queueing at the first buffer that couldn't be filled
			if (!_stream(mBuffers[idx]))
				break;
			alSourceQueueBuffers(mSource, 1, &mBuffers[idx]);
		}
	}

	/// Attaches this clip to an OpenAL source (queueing audio and applying
	/// source state) or detaches it when source == AL_NONE.
	void StreamedWaveSound::setSource(ALuint &source)
	{
		if (source == AL_NONE)
		{
			// Detaching: pull our buffers off the old source first
			_dequeue();
			mSource = source;
			return;
		}

		// Attaching: remember the source, then fill and queue audio data
		mSource = source;
		_prebuffer();

		// Apply position/gain/etc. state to the new source
		_initSource();
	}

	void StreamedWaveSound::_updateAudioBuffers()
	{
		// Automatically play if previously delayed
		if (mPlayDelayed)
			play();

		if(mSource == AL_NONE || !mPlay) 
			return;

		ALenum state;
		alGetSourcei(mSource, AL_SOURCE_STATE, &state);

		if (state == AL_STOPPED)
		{
			if(mStreamEOF)
			{
				stop();
				// Finished callback
				if (mFinishedCB && mFinCBEnabled)
					mFinishedCB->execute(static_cast<SoundClip*>(this));
				return;
			}
			else
			{
				alSourcePlay(mSource);
			}
		}

		int processed;

		alGetSourcei(mSource, AL_BUFFERS_PROCESSED, &processed);

		while(processed--)
		{
			ALuint buffer;

			alSourceUnqueueBuffers(mSource, 1, &buffer);
			if (_stream(buffer)) 
				alSourceQueueBuffers(mSource, 1, &buffer);
		}

		// Handle play position change
		if ( mPlayPosChanged )
		{
			_updatePlayPosition();
			mPlayPosChanged = false;
		}
	}

	/// Fills the given OpenAL buffer with up to mBufferSize bytes of audio
	/// read from the stream, wrapping to the start when looping.
	/// @return false when no data at all could be read (true end of stream).
	bool StreamedWaveSound::_stream(ALuint buffer)
	{
		std::vector<char> audioData;
		audioData.reserve(mBufferSize);

		// BUGFIX: RAII read buffer - the old raw new[]/delete[] leaked if an
		// Ogre stream read threw
		std::vector<char> chunk(mBufferSize);

		int result = 0;

		// Read only what was asked for
		while (static_cast<int>(audioData.size()) < mBufferSize)
		{
			// BUGFIX: request only the bytes still needed; the old code always
			// asked for a full mBufferSize, so a refill after a short read
			// could overshoot the 250ms/block-aligned buffer contract
			int wanted = mBufferSize - static_cast<int>(audioData.size());
			int bytes = static_cast<int>(mAudioStream->read(&chunk[0], wanted));

			// EOF check
			if (mAudioStream->eof())
			{
				if (mLoop)
				{
					// Wrap to the start of the audio data and keep filling
					mAudioStream->seek(mFormatData->mAudioOffset);
					if (mLoopCB && mLoopCBEnabled)
						mLoopCB->execute(static_cast<SoundClip*>(this));
				}
				else
				{
					mStreamEOF = true;
					// EOF - finish.
					if (bytes == 0)
						break;
				}
			}
			else if (bytes == 0)
			{
				// Defensive: no progress and no EOF - bail instead of spinning
				break;
			}

			// Append to end of buffer
			audioData.insert(audioData.end(), chunk.begin(), chunk.begin() + bytes);
			result += bytes;
		}

		// Nothing read at all - signal end of stream
		if (result == 0)
			return false;

		alGetError();
		// Copy buffer data into the OpenAL buffer
		alBufferData(buffer, mFormat, &audioData[0], static_cast<ALsizei>(audioData.size()), mFormatData->mSampleRate);

		return true;
	}

	void StreamedWaveSound::_dequeue()
	{
		if(mSource == AL_NONE)
			return;

		int queued = 0;

		alGetError();

		// Stop source to allow unqueuing
		alSourceStop(mSource);

		// Get number of buffers queued on source
		alGetSourcei(mSource, AL_BUFFERS_PROCESSED, &queued);

		// Remove number of buffers from source
		while (queued--)
		{
			ALuint buffer;
			alSourceUnqueueBuffers(mSource, 1, &buffer);

			// Any problems?
			if (alGetError()) 
				Ogre::LogManager::getSingleton().logMessage("UniverseSound::StreamedSound::_dequeue() - Unable to dequeue buffers");
		}
	}

	/// Pauses playback. If the file is still loading in the background the
	/// pause is queued and applied once loading completes.
	void StreamedWaveSound::pause()
	{
		boost::recursive_mutex::scoped_lock lock(mMutex);

		// Still loading: defer the pause until open() finishes
		if (!mFileOpened)
		{
			if (!mPauseDelayed)
			{
				mPauseDelayed = true;
				SoundManager::getSingletonPtr()->queueThreadedSound(this, ONLOAD_PAUSE);
			}
			return;
		}

		// Nothing to pause without a source
		if (mSource == AL_NONE)
			return;

		alSourcePause(mSource);

		mPauseDelayed = false;
	}

	/// Starts (or resumes) playback, acquiring a source from the manager if
	/// needed. If the file is still loading, the play is queued for later.
	void StreamedWaveSound::play()
	{
		boost::recursive_mutex::scoped_lock lock(mMutex);

		// Still loading: defer the play until open() finishes
		if (!mFileOpened)
		{
			if (!mPlayDelayed)
			{
				mPlayDelayed = true;
				SoundManager::getSingletonPtr()->queueThreadedSound(this, ONLOAD_PLAY);
			}
			return;
		}

		// Already playing - nothing to do
		if (isPlaying())
			return;

		// Acquire a source if we don't already hold one
		if (mSource == AL_NONE && !SoundManager::getSingleton().requestSoundSource(this))
			return;

		alSourcePlay(mSource);
		if (alGetError())
		{
			Ogre::LogManager::getSingleton().logMessage("UniverseSound::StreamedWaveSound::play() - Unable to play sound");
			return;
		}

		// Mark as playing
		mPlay = true;
		mPlayDelayed = false;
	}

	/// Requests a seek to 'seconds' into the clip; the actual stream seek is
	/// applied on the update path via _updatePlayPosition().
	/// @param seconds desired position; negative values are ignored and
	///        positions past the end wrap around the clip length.
	void StreamedWaveSound::setPlayPosition(Ogre::Real seconds)
	{
		if (seconds < 0)
			return;

		// Wrap positions beyond the clip length.
		// BUGFIX: the old subtraction loop never terminated when
		// mPlayTime == 0 (e.g. after a failed open); guard and use fmod.
		if (seconds > mPlayTime)
			seconds = (mPlayTime > 0) ? std::fmod(seconds, mPlayTime) : 0;

		// Store play position
		mPlayPos = seconds;

		// Flag the pending seek for the update thread
		mPlayPosChanged = true;
	}

	/// Applies a pending seek: repositions the file stream at the byte offset
	/// corresponding to mPlayPos, requeues audio, and restores the previous
	/// play/pause state.
	void StreamedWaveSound::_updatePlayPosition()
	{
		if (mSource == AL_NONE)
			return;

		// Remember state so it can be restored after the reshuffle
		bool playing = isPlaying();
		bool paused = isPaused();

		// Stop playback while we rebuild the queue
		pause();

		// mBufferSize holds 1/4 second of audio, so 4*mBufferSize bytes/sec
		size_t dataOffset = static_cast<size_t>(mPlayPos * mBufferSize * 4);

		// BUGFIX: round the offset down to a whole sample frame - the float
		// multiply can land mid-frame, desynchronising 16-bit/multi-channel
		// audio from the seek point onwards
		if (mFormatData->mBlockAlign)
			dataOffset -= dataOffset % mFormatData->mBlockAlign;

		mAudioStream->seek(mFormatData->mAudioOffset + dataOffset);

		// Unqueue stale audio
		_dequeue();

		// Fill buffers from the new position
		_prebuffer();

		// Restore previous state
		if (playing)
			play();
		else if (paused)
			pause();
	}

	/// Stops playback, rewinds the stream to the start of the audio data and
	/// re-primes the buffers so a later play() starts instantly. Queued for
	/// later if the file is still loading.
	void StreamedWaveSound::stop()
	{
		boost::recursive_mutex::scoped_lock lock(mMutex);

		// Still loading: defer the stop until open() finishes
		if (!mFileOpened)
		{
			if (!mStopDelayed)
			{
				mStopDelayed = true;
				SoundManager::getSingletonPtr()->queueThreadedSound(this, ONLOAD_STOP);
			}
			return;
		}

		// Nothing to stop without a source
		if (mSource == AL_NONE)
			return;

		// Strip queued audio off the source
		_dequeue();

		// Clear playback flags
		mPlay = false;
		mStopDelayed = false;

		// Rewind to the start of the audio data
		mAudioStream->seek(mFormatData->mAudioOffset);

		// Refill buffers so the next play() starts immediately
		_prebuffer();

		// Hand the source back if this clip is configured to give it up
		if (mGiveUpSource)
			SoundManager::getSingleton().releaseSoundSource(this);
	}
}