/*
 *  soundPlay.c
 *  TwitterPod
 *
 *  Created by acco on 08/02/27.
 *  Copyright 2008 __MyCompanyName__. All rights reserved.
 *
 */

#include "MyMelody.h"
#include <CoreServices/CoreServices.h>
#include <AudioToolbox/AudioQueue.h>

// Playback tuning constants.
static int maxScoreSize = 50;        // hard cap on how many notes of a score get played
static float frameEndPoint = 0.5f;   // fraction of a note's packets passed as the frame-end parameter when enqueueing
static const int kNumberBuffers = 3;                              // number of round-robin AudioQueue output buffers

// Per-sample state: one preloaded AudioQueue buffer holding the packets read
// from a single AIFF resource, plus the bookkeeping used while loading it.
typedef struct _AQMFPlayerFileState {
    AudioQueueBufferRef           mBuffers;       // preloaded buffer with this sample's audio data
    AudioFileID                   mAudioFile;                     // source file; closed after loading completes
    UInt32                        bufferByteSize;                 // buffer capacity computed by DeriveBufferSize
    SInt64                        mCurrentPacket;                 // next packet index to read (load-time cursor)
    UInt32                        mNumPacketsToRead;              // packet budget computed by DeriveBufferSize
    AudioStreamPacketDescription  *mPacketDescs;  // set to NULL at load time (constant-bitrate AIFF data)
	UInt32 numPackets;  // number of packets actually read into mBuffers
} AQMFPlayerFileState;

// Whole-player state shared between the MyMelody class methods and the
// HandleOutputBuffer2 output callback.
typedef struct _AQMFPlayerState {
    AudioStreamBasicDescription   mDataFormat;                    // stream format taken from the first loaded sample
    AudioQueueRef mQueue;                         // the single shared output queue
	AudioQueueBufferRef           mQueueBuffers[3];       // round-robin queue buffers; NOTE(review): should stay in sync with kNumberBuffers (= 3)
	AQMFPlayerFileState fstate[8];                // preloaded samples; only keysnum (5) slots are filled
    bool                          mIsRunning;                     // playback-in-progress flag; cleared by the callback
	CFArrayRef score;     // current score: CFArray of dictionaries carrying an @"score" sample index
	int scoreIndex;       // index of the next score entry to enqueue
}AQMFPlayerState;

// AudioQueue output callback: when the queue finishes with `inBuffer`, refill
// it with the next note of the current score and re-enqueue it; stop the
// queue once the score is exhausted.  Signature matches AudioQueueOutputCallback.
static void HandleOutputBuffer2 (
								void                *aqData,
								AudioQueueRef       inAQ,
								AudioQueueBufferRef inBuffer
) 
{

    AQMFPlayerState *pAqData = (AQMFPlayerState *) aqData;        // shared player state
    if (pAqData->mIsRunning == false) return;   // already stopped: ignore late callbacks

	// Cap the playable score length at maxScoreSize entries.
	int ssize = CFArrayGetCount(pAqData->score);
	ssize = ssize < maxScoreSize ?  ssize : maxScoreSize;
	// scoreIndex was pre-advanced past the primed buffers by PlayMelody:, so
	// it is > 0 while a score is active; < ssize means notes remain.
    if (pAqData->scoreIndex > 0 && pAqData->scoreIndex < ssize) {
		// Each score entry is a dictionary whose @"score" value is the index
		// into fstate[] of the preloaded note (NSString key works against the
		// CFDictionary via toll-free bridging).
		CFNumberRef num = (CFNumberRef)CFDictionaryGetValue((CFDictionaryRef)CFArrayGetValueAtIndex( pAqData->score, pAqData->scoreIndex ),
						@"score");
		int idx = 0;
		CFNumberGetValue(num, kCFNumberSInt32Type, &idx);
		NSLog(@"[%d] %d: %d ",pAqData->scoreIndex, idx, pAqData->fstate[idx].numPackets);
		// enqueueing an audio queue buffer 
		// Copy no more than the destination buffer's capacity.
		int buffersize = pAqData->fstate[idx].mBuffers->mAudioDataByteSize < inBuffer->mAudioDataBytesCapacity ?
		pAqData->fstate[idx].mBuffers->mAudioDataByteSize : inBuffer->mAudioDataBytesCapacity;

		memcpy(inBuffer->mAudioData, pAqData->fstate[idx].mBuffers->mAudioData, buffersize );
		// NOTE(review): the byte size below is the SOURCE buffer's size even
		// when the memcpy above was clamped to capacity — confirm the sample
		// buffers are never larger than the queue buffers.
		inBuffer->mAudioDataByteSize = pAqData->fstate[idx].mBuffers->mAudioDataByteSize;
		inBuffer->mUserData =  pAqData->fstate[idx].mBuffers->mUserData;

		// Re-enqueue with frame-end trimming: only numPackets*frameEndPoint
		// packets are played before the queue moves on (shortens each note).
		AudioQueueEnqueueBufferWithParameters(
								 pAqData->mQueue,                           // 2
								 inBuffer,                                  // 3
								 (pAqData->fstate[idx].mPacketDescs ? pAqData->fstate[idx].numPackets : 0),  // 4
								 pAqData->fstate[idx].mPacketDescs, 0,
								 pAqData->fstate[idx].numPackets*frameEndPoint, 0, NULL, NULL, NULL);
		pAqData->scoreIndex += 1;
		// NOTE(review): flushing after every note forces buffered data out;
		// presumably intentional to separate notes — verify it doesn't gap.
		AudioQueueFlush(pAqData->mQueue);		
//		pAqData->fstate[idx].mCurrentPacket += pAqData->fstate[idx].numPackets;                // 7 
    } else {
		// Score finished (or empty): drain what's queued and stop asynchronously.
		AudioQueueFlush(pAqData->mQueue);
		AudioQueueStop (
						pAqData->mQueue,
						false
						);
		
		pAqData->mIsRunning = false; 
    }
}

// Computes a playback buffer size (and the number of packets that fit in it)
// for roughly `seconds` of audio, clamped to the window [0x4000, 0x50000]
// bytes but never smaller than one worst-case packet.  Mirrors the canonical
// routine from Apple's Audio Queue Services Programming Guide.
//
// ASBDesc              stream format of the file being played (C++ reference)
// maxPacketSize        upper bound on a single packet's byte size
// seconds              desired buffer duration
// outBufferSize        receives the chosen buffer size in bytes
// outNumPacketsToRead  receives how many packets fit in that buffer
void DeriveBufferSize (
					   AudioStreamBasicDescription &ASBDesc,                            // 1
					   UInt32                      maxPacketSize,                       // 2
					   Float64                     seconds,                             // 3
					   UInt32                      *outBufferSize,                      // 4
					   UInt32                      *outNumPacketsToRead                 // 5
) {
    static const int maxBufferSize = 0x50000;   // upper clamp: 320 KiB
    static const int minBufferSize = 0x4000;    // lower clamp: 16 KiB

    UInt32 size;
    if (ASBDesc.mFramesPerPacket == 0) {
        // Variable frames-per-packet: use the larger of the cap and one
        // maximal packet so at least one packet always fits.
        size = (maxPacketSize > maxBufferSize) ? maxPacketSize : maxBufferSize;
    } else {
        // Fixed frames-per-packet: packets needed for `seconds` of audio,
        // each costed at the worst-case packet size.
        Float64 packetsForDuration = ASBDesc.mSampleRate / ASBDesc.mFramesPerPacket * seconds;
        size = packetsForDuration * maxPacketSize;
    }

    // Clamp into the allowed window, but never below a single packet.
    if (size > maxBufferSize && size > maxPacketSize) {
        size = maxBufferSize;
    } else if (size < minBufferSize) {
        size = minBufferSize;
    }

    *outBufferSize = size;
    *outNumPacketsToRead = size / maxPacketSize;
}

@implementation MyMelody

// Resource names (without the ".aif" extension) of the bundled pitch samples.
// aqData.fstate[i] holds the preloaded audio for keys[i].
//static NSString *keys[] = { @"c4", @"c4s", @"d4", @"a4", @"e4", @"e4f", @"f4", 
//@"f4s", @"g4", @"c5", @"b4", @"a4f", @"b4f"};
//static NSString *keys[] = { @"c4", @"d4", @"f4", @"a4", @"c5"};
static NSString *keys[] = { @"c5", @"e5", @"g5", @"d5", @"f5"};
static int keysnum = 5;           // number of entries in keys[] (keep in sync)
static AQMFPlayerState aqData;    // single shared player state for the class

// Releases the per-sample buffers allocated in LoadAudioData:.  The queue's
// round-robin buffers (mQueueBuffers) are released with the queue itself.
+ (void) UnloadAudioData
{
	int i = 0;
	for(i = 0; i < keysnum; i++) {
		//free (aqData.fstate[i].mPacketDescs);
		AudioQueueFreeBuffer(aqData.mQueue,
							 aqData.fstate[i].mBuffers);
	}
}

// Opens each bundled AIFF sample, creates the shared output queue from the
// first file's stream format, and preloads every file into its own queue
// buffer.  All samples are assumed to share the first file's format.
+ (void) LoadAudioData:(id)param
{
	// get bundle for resource files (aiff)
	CFBundleRef bundle = CFBundleGetMainBundle();
	int i = 0;
	for(i = 0; i < keysnum; i++) {
		CFURLRef audioFileURL = CFBundleCopyResourceURL(bundle,
														(CFStringRef)keys[i], 
														CFSTR("aif"), 
														NULL);
		if(!audioFileURL) return;   // resource missing: abort loading
		
		// open audio file (read-only)
		OSStatus result = AudioFileOpenURL (                                  // 2
						  audioFileURL,                                   // 3
						  fsRdPerm,                                       // 4
						  0,                                              // 5
						  &aqData.fstate[i].mAudioFile                              // 6
						  );
		CFRelease (audioFileURL);                               // 7
		if (result != noErr) {
			// FIX: the original ignored this error (`if(result) {}`) and went
			// on to read from an invalid AudioFileID; bail out instead, the
			// same way the missing-resource case above does.
			NSLog(@"LoadAudioData: cannot open %@.aif (error %d)", keys[i], (int)result);
			return;
		}
	
		if( i == 0 ) {
			// The first file defines the shared stream format and drives
			// creation of the one output queue.
			UInt32 dataFormatSize = sizeof (aqData.mDataFormat);    // 1
			AudioFileGetProperty (                                  // 2
								  aqData.fstate[i].mAudioFile,                                  // 3
								  kAudioFilePropertyDataFormat,                       // 4
								  &dataFormatSize,                                    // 5
								  &aqData.mDataFormat                                 // 6
								  );
		
		
			AudioQueueNewOutput (                                // 1
								 &aqData.mDataFormat,                             // 2
								 HandleOutputBuffer2,                              // refill callback
								 &aqData,                                         // callback user data
								 CFRunLoopGetCurrent (),                          // 5
								 kCFRunLoopCommonModes,                           // 6
								 0,                                               // 7
								 &aqData.mQueue                                   // 8
								 );
		}

		// Size a buffer large enough for roughly 0.5 s of this file's audio.
		UInt32 maxPacketSize;
		UInt32 propertySize = sizeof (maxPacketSize);
		AudioFileGetProperty (                               // 1
							  aqData.fstate[i].mAudioFile,                               // 2
							  kAudioFilePropertyPacketSizeUpperBound,          // 3
							  &propertySize,                                   // 4
							  &maxPacketSize                                   // 5
		);
		
		DeriveBufferSize (                                   // 6
						  aqData.mDataFormat,                              // 7
						  maxPacketSize,                                   // 8
						  0.5,                                             // 9
						  &aqData.fstate[i].bufferByteSize,                          // 10
						  &aqData.fstate[i].mNumPacketsToRead                        // 11
		);
		
		Float32 gain = 0.5;                                       // 1
		// Optionally, allow user to override gain setting here
		AudioQueueSetParameter (                                  // 2
								aqData.mQueue,                                        // 3
								kAudioQueueParam_Volume,                              // 4
								gain                                                  // 5
								);
		
		
		// Preload the sample (up to one buffer's worth) into fstate[i].
		aqData.fstate[i].mCurrentPacket = 0;                                // 1
		aqData.fstate[i].mPacketDescs = NULL;   // CBR data (AIFF) needs no packet descriptions
		AudioQueueAllocateBuffer (                            // 3
								  aqData.mQueue,                                    // 4
								  aqData.fstate[i].bufferByteSize,                 // 5
								  &aqData.fstate[i].mBuffers                              // 6
								  );

	    UInt32 numBytesReadFromFile;
		aqData.fstate[i].numPackets = aqData.fstate[i].mNumPacketsToRead;
		AudioFileReadPackets ( 
							  aqData.fstate[i].mAudioFile,
							  false,
							  &numBytesReadFromFile,
							  aqData.fstate[i].mPacketDescs,
							  aqData.fstate[i].mCurrentPacket,
							  &aqData.fstate[i].numPackets,
							  aqData.fstate[i].mBuffers->mAudioData 
							  );
		aqData.fstate[i].mBuffers->mAudioDataByteSize = numBytesReadFromFile;  // 6
	}
	
	// The queue cycles through these round-robin buffers; the per-sample
	// buffers above serve only as memcpy sources.  All share fstate[0]'s size.
	for(int i = 0; i < kNumberBuffers; i++){
		AudioQueueAllocateBuffer (                            // 3
								  aqData.mQueue,                                    // 4
								  aqData.fstate[0].bufferByteSize,                 // 5
								  &aqData.mQueueBuffers[i]                              // 6
								  );
	}
	// Everything is in memory now; the source files can be closed.
	for(i = 0; i < keysnum; i++) {
		AudioFileClose (aqData.fstate[i].mAudioFile);            // 4
	}
}


// Plays `score`, an NSArray of dictionaries whose @"score" value indexes the
// preloaded samples.  Blocks — spinning the current run loop — until the
// melody finishes.
+ (void) PlayMelody:(id)score
{
	if(aqData.mIsRunning == true) {
		// A previous melody is still sounding: stop it synchronously first.
		AudioQueueStop ( aqData.mQueue, true );		
	}

	aqData.scoreIndex = 0;	
	aqData.score = (CFArrayRef)score;   // toll-free bridged NSArray
	// Prime up to kNumberBuffers notes; HandleOutputBuffer2 supplies the rest.
	int numofscore = [score count] < kNumberBuffers ? [score count] : kNumberBuffers ;
	for(int i = 0; i < numofscore; i++) {
		aqData.scoreIndex++;			
		int idx = [[[score objectAtIndex:i] valueForKey:@"score"] intValue];
		// enqueueing an audio queue buffer 
		int buffersize = aqData.fstate[idx].mBuffers->mAudioDataByteSize < aqData.mQueueBuffers[i]->mAudioDataBytesCapacity ?
						 aqData.fstate[idx].mBuffers->mAudioDataByteSize : aqData.mQueueBuffers[i]->mAudioDataBytesCapacity;
		memcpy(aqData.mQueueBuffers[i]->mAudioData, aqData.fstate[idx].mBuffers->mAudioData, buffersize );
		aqData.mQueueBuffers[i]->mAudioDataByteSize = aqData.fstate[idx].mBuffers->mAudioDataByteSize;
		aqData.mQueueBuffers[i]->mUserData =  aqData.fstate[idx].mBuffers->mUserData;
		
		AudioQueueEnqueueBufferWithParameters(
								 aqData.mQueue,                           // 2
								  aqData.mQueueBuffers[i],                                  // 3
								 (aqData.fstate[idx].mPacketDescs ? aqData.fstate[idx].numPackets : 0),  // 4
								 aqData.fstate[idx].mPacketDescs,
								 0, frameEndPoint*aqData.fstate[idx].numPackets, 0, NULL, NULL, NULL );
		// FIX: was fstate[i] — advance the packet cursor of the sample that
		// was actually enqueued (idx), not of the i-th slot.
		aqData.fstate[idx].mCurrentPacket += aqData.fstate[idx].numPackets;                // 7 
	}
	aqData.mIsRunning = true;                          // 1
	
	AudioQueueStart (                              // 2
					 aqData.mQueue,                                 // 3
					 NULL                                           // 4
					 );
	
	// Spin the run loop so the output callback keeps firing until it clears
	// mIsRunning at the end of the score.
	do {                                               // 5
		CFRunLoopRunInMode (                           // 6
							kCFRunLoopDefaultMode,                     // 7
							0.25,                                      // 8
							false                                      // 9
							);
	} while (aqData.mIsRunning);
	
	// Give the queue a short grace period to drain after the flag flips.
	CFRunLoopRunInMode (                               // 10
						kCFRunLoopDefaultMode,
						0.5,
						false
						);
	AudioQueueFlush(aqData.mQueue);
}

// Loads all samples the first time the class is messaged.
+ (void)initialize
{
	[self LoadAudioData:nil];
}

// GC finalizer: drain, release the sample buffers, and dispose of the queue.
+ (void)finalize
{
	AudioQueueFlush(aqData.mQueue);
	NSLog(@"unloading...");
	[self UnloadAudioData];	
	AudioQueueReset ( aqData.mQueue );						
	AudioQueueDispose ( aqData.mQueue, true );
}

@end

