//
//  AudioManager.m
//  ImageToSound
//
//  Created by jvmilazz on 9/28/11.
//  Copyright 2011 ASU. All rights reserved.
//



/*
 sadfjlaksjflk;s
 
 Okay, so here's what we do.
 
 ViewController keeps track of what pixel I am on
 Every RenderTone call, I move to a new pixel
 I use the BLUE to determine what key on a piano I will play (wiki piano_key_frequencies)
 I use the ToneGenerator Adam dug up to generate that pretty tone
 I add the RED AND GREEN as sinusoids to this tone
 Then it gets played
 when we are past the last pixel, set the amplitude to 0!
 
 at least, until we figure out how to stop it properly
 */
#import "AudioManager.h"
#import <AudioToolbox/AudioQueue.h>
#import <AudioToolbox/AudioFile.h>
#import <OpenAL/al.h>
#import "ImageToSoundViewController.h"
#import <math.h>

/*
 ALuint dataToRecord[(int)(imageSize.width*imageSize.height)];
 
 float PI2 = 3.14*2;
 float freq1 = 10;
 float freq2 = 10;
 int imageWidth = (int)imageSize.width;
 int imageHeight = (int)imageSize.height;
 
 for(int i = 0; i < imageWidth; i++) {
 for (int j = 0; j < imageHeight; j++)
 {
 // populate the "dataToRecord" array with audio data. //
 dataToRecord[(imageWidth*j + i)] = (sin((imageWidth*j + i)*(PI2*(8000/freq1))) + sin((imageWidth*j + i)*(PI2*(8000/freq2)))) * 16383;
 }*/

/// Audio-unit render callback: synthesizes one buffer of a sine tone whose
/// pitch is derived from the current image pixel (piano-key mapping, see
/// wiki piano_key_frequencies), then advances to the next pixel. Once every
/// pixel has been consumed it emits one buffer of silence and rewinds.
///
/// inRefCon is the AudioManager instance (set in createToneUnit); curPixel
/// and theta are accessed as ivars directly since this is a plain C function.
OSStatus RenderTone(
                    void *inRefCon, 
                    AudioUnitRenderActionFlags *ioActionFlags, 
                    const AudioTimeStamp *inTimeStamp, 
                    UInt32 inBusNumber, 
                    UInt32 inNumberFrames, 
                    AudioBufferList *ioData)

{
    // Fixed amplitude is good enough for our purposes
    const double amplitude = 0.25;
    
    AudioManager *man = (AudioManager *)inRefCon;
    RGBPixel * imageData = man.imageData;
    
    // This is a mono tone generator so we only need the first buffer
    const int channel = 0;
    Float32 *buffer = (Float32 *)ioData->mBuffers[channel].mData;
    
    if(man->curPixel < man.imageSize.height * man.imageSize.width)
    {
        // Fold each 0-255 color component onto the 88 piano keys.
        int redKey = imageData[man->curPixel].red % 88;
        int greenKey = imageData[man->curPixel].green % 88;
        int blueKey = imageData[man->curPixel].blue % 88;
        
        // Red is weighted twice as heavily as green/blue.
        double key = 0.5 * redKey + 0.25 * greenKey + 0.25 * blueKey;
        
        // Standard piano-key formula: key 49 is A440.
        // NOTE: must agree with the 44100 Hz set in createToneUnit.
        double freq = 440 * pow(2, (key-49)/12);
        double sampleRate = 44100.0;
        
        double theta = man->theta;
        double theta_increment = 2.0 * M_PI * freq / sampleRate;
        
        // Generate the samples, keeping theta wrapped into [0, 2*pi) so it
        // carries over smoothly (no click) into the next render callback.
        for (UInt32 frame = 0; frame < inNumberFrames; frame++) 
        {            
            buffer[frame] = sin(theta) * amplitude;
            
            theta += theta_increment;
            if (theta > 2.0 * M_PI)
            {
                theta -= 2.0 * M_PI;
            }
        }
        
        man->theta = theta;

        // One pixel per render buffer.
        man->curPixel += 1;
    }
    else
    {
        // Past the last pixel: write actual silence. Previously this branch
        // left the buffer untouched, so whatever stale samples it held were
        // played back (the "set the amplitude to 0" TODO at the top of the
        // file). Also tell the audio unit the output is silent.
        for (UInt32 frame = 0; frame < inNumberFrames; frame++)
        {
            buffer[frame] = 0;
        }
        *ioActionFlags |= kAudioUnitRenderAction_OutputIsSilence;
        
        // Rewind so the next render pass restarts from the first pixel
        // (existing behavior: the tone loops until stopPlayback is called).
        man->curPixel = 0;
    }
    
    return noErr;
}

@implementation AudioManager 

// Backing storage for the properties declared in AudioManager.h.
@synthesize fileURL;       // CFURLRef of <Documents>/mysound.wav, created in onLoad
@synthesize recordState;   // AudioQueue recording state (queue, buffers, file)
@synthesize playState;     // AudioQueue playback state (mostly unused; tone unit path is active)
@synthesize statusLabel;   // UI: "Idle" / "Recording" / "Playing" / failure text
@synthesize playButton;    // UI: toggles "Play" / "Stop"
@synthesize recordButton;  // UI: toggles "Record" / "Stop"
@synthesize imageSize;     // dimensions of the source image, set in createWAVFile:withImageSize:
@synthesize imageData;     // RGBPixel array read by RenderTone, set in createWAVFile:withImageSize:

/// One-time setup: resolves the audio file URL and clears the
/// play/record state flags.
- (void) onLoad
{
    // Get audio file path (<Documents>/mysound.wav).
    char path[256];
    if ([self getFilename:path maxLenth:sizeof path])
    {
        fileURL = CFURLCreateFromFileSystemRepresentation(NULL, (UInt8*)path, strlen(path), false);
    }
    else
    {
        // Conversion failed (path too long / encoding error). Previously the
        // return value was ignored and strlen() ran over an uninitialized
        // stack buffer; leave fileURL NULL instead (dealloc guards for this).
        fileURL = NULL;
    }
    
    // Init state variables
    playState.playing = false;
    recordState.recording = false;
}


/// Releases the Core Foundation URL (manual reference counting, pre-ARC).
- (void)dealloc
{
    // fileURL can be NULL if onLoad's filename lookup failed;
    // CFRelease(NULL) crashes, so guard it.
    if (fileURL)
    {
        CFRelease(fileURL);
    }
    [super dealloc];
}


/// AudioQueue input callback: appends each filled capture buffer to the
/// AIFF file and re-enqueues the buffer for further recording.
/// inUserData is the RecordState set up in startRecording.
void AudioInputCallback(
                        void *inUserData, 
                        AudioQueueRef inAQ, 
                        AudioQueueBufferRef inBuffer, 
                        const AudioTimeStamp *inStartTime, 
                        UInt32 inNumberPacketDescriptions, 
                        const AudioStreamPacketDescription *inPacketDescs)
{
    RecordState* recordState = (RecordState*)inUserData;
    if(!recordState->recording)
    {
        printf("Not recording, returning\n");
        // BUG FIX: the return was missing, so packets kept being written to
        // the (possibly closed) file and the buffer re-enqueued on a queue
        // being torn down. Mirror the early-return in AudioOutputCallback.
        return;
    }
    
    // Constant-bitrate formats (like our linear PCM) report 0 packet
    // descriptions; derive the packet count from the byte size instead.
    if(inNumberPacketDescriptions == 0 && recordState->dataFormat.mBytesPerPacket != 0)
    {
        inNumberPacketDescriptions = inBuffer->mAudioDataByteSize / recordState->dataFormat.mBytesPerPacket;
    }
    
    printf("Writing buffer %lld\n", recordState->currentPacket);
    OSStatus status = AudioFileWritePackets(recordState->audioFile,
                                            false,
                                            inBuffer->mAudioDataByteSize,
                                            inPacketDescs,
                                            recordState->currentPacket,
                                            &inNumberPacketDescriptions,
                                            inBuffer->mAudioData);
    if(status == 0)
    {
        // Advance the write cursor only on success.
        recordState->currentPacket += inNumberPacketDescriptions;
    }
    
    // Hand the buffer back to the queue to be refilled.
    AudioQueueEnqueueBuffer(recordState->queue, inBuffer, 0, NULL);
}

/// AudioQueue output callback: refills outBuffer from the audio file and
/// re-enqueues it; at end-of-file it stops the queue, closes the file and
/// frees the buffer. inUserData is the PlayState (see the commented-out
/// AudioQueue path in startPlayback, which also primes buffers via this).
void AudioOutputCallback(
                         void* inUserData,
                         AudioQueueRef outAQ,
                         AudioQueueBufferRef outBuffer)
{
    PlayState* playState = (PlayState*)inUserData;	
    if(!playState->playing)
    {
        printf("Not playing, returning\n");
        return;
    }
    
    printf("nananananananaiiiiii");
    
    printf("Queuing buffer %lld for playback\n", playState->currentPacket);
    
    // BUG FIX: this pointer was uninitialized and then passed to
    // AudioFileReadPackets / AudioQueueEnqueueBuffer — undefined behavior.
    // NULL is correct for constant-bitrate linear PCM (no per-packet
    // descriptions are needed).
    AudioStreamPacketDescription* packetDescs = NULL;
    
    UInt32 bytesRead;
    UInt32 numPackets = 8000;
    OSStatus status;
    status = AudioFileReadPackets(
                                  playState->audioFile,
                                  false,
                                  &bytesRead,
                                  packetDescs,
                                  playState->currentPacket,
                                  &numPackets,
                                  outBuffer->mAudioData);
    
    if(numPackets)
    {
        // Got data: enqueue it and advance the read cursor.
        outBuffer->mAudioDataByteSize = bytesRead;
        status = AudioQueueEnqueueBuffer(
                                         playState->queue,
                                         outBuffer,
                                         0,
                                         packetDescs);
        
        playState->currentPacket += numPackets;
    }
    else
    {
        // End of file: stop the queue asynchronously and release resources.
        if(playState->playing)
        {
            AudioQueueStop(playState->queue, false);
            AudioFileClose(playState->audioFile);
            playState->playing = false;
        }
        
        AudioQueueFreeBuffer(playState->queue, outBuffer);
    }
    
}

/// Fills in the stream description used for both recording and AudioQueue
/// playback: 44.1 kHz, mono, 16-bit big-endian signed-integer linear PCM
/// (one frame per packet, so bytes-per-packet equals bytes-per-frame).
- (void)setupAudioFormat:(AudioStreamBasicDescription*)format 
{
    const int bytesPerSample = 2;
    
    format->mFormatID = kAudioFormatLinearPCM;
    format->mFormatFlags = kLinearPCMFormatFlagIsBigEndian
        | kLinearPCMFormatFlagIsSignedInteger
        | kLinearPCMFormatFlagIsPacked;
    format->mSampleRate = 44100;
    format->mChannelsPerFrame = 1;
    format->mBitsPerChannel = bytesPerSample * 8;
    format->mBytesPerFrame = bytesPerSample;
    format->mFramesPerPacket = 1;
    format->mBytesPerPacket = bytesPerSample;
    format->mReserved = 0;
}

/// Record button: toggles recording, but never while playback is active.
- (IBAction)recordPressed:(id)sender
{
    // Guard: recording and playing are mutually exclusive.
    if(playState.playing)
    {
        printf("Can't start recording, currently playing\n");
        return;
    }
    
    if(recordState.recording)
    {
        printf("Stopping recording\n");
        [self stopRecording];
    }
    else
    {
        printf("Starting recording\n");
        [self startRecording];
    }
}

// Play button pressed
- (IBAction)playPressed:(id)sender
{
    if(!playState.playing)
    {
        printf("Starting playback\n");
        [self startPlayback];
    }
    else
    {
        printf("Stopping playback\n");
        [self stopPlayback];
    }
}

/// Creates an input AudioQueue feeding AudioInputCallback, allocates and
/// primes NUM_BUFFERS capture buffers, creates/erases the AIFF output file
/// and starts recording. On failure the status label reads "Record Failed".
- (void)startRecording
{
    [self setupAudioFormat:&recordState.dataFormat];
    
    recordState.currentPacket = 0;
    
    OSStatus status;
    status = AudioQueueNewInput(&recordState.dataFormat,
                                AudioInputCallback,
                                &recordState,
                                CFRunLoopGetCurrent(),
                                kCFRunLoopCommonModes,
                                0,
                                &recordState.queue);
    if(status != 0)
    {
        // No queue was created, so there is nothing to tear down.
        // BUG FIX: previously this fell through to stopRecording, which
        // called AudioQueueStop/Dispose on the never-created queue.
        statusLabel.text = @"Record Failed";
        return;
    }
    
    // Allocate and enqueue the capture buffers (16000 bytes each).
    for(int i = 0; i < NUM_BUFFERS; i++)
    {
        AudioQueueAllocateBuffer(recordState.queue,
                                 16000, &recordState.buffers[i]);
        AudioQueueEnqueueBuffer(recordState.queue,
                                recordState.buffers[i], 0, NULL);
    }
    
    status = AudioFileCreateWithURL(fileURL,
                                    kAudioFileAIFFType,
                                    &recordState.dataFormat,
                                    kAudioFileFlags_EraseFile,
                                    &recordState.audioFile);
    if(status == 0)
    {
        recordState.recording = true;   
        [recordButton setTitle:@"Stop" forState:UIControlStateNormal];
        status = AudioQueueStart(recordState.queue, NULL);
        if(status == 0)
        {
            statusLabel.text = @"Recording";
        }
    }
    
    if(status != 0)
    {
        // The queue exists at this point, so stopRecording can clean it up.
        // NOTE(review): if AudioFileCreateWithURL was the failure,
        // stopRecording still calls AudioFileClose on an unopened file —
        // confirm whether audioFile should be cleared first.
        [self stopRecording];
        statusLabel.text = @"Record Failed";
    }
}

/// Stops recording: flags the callback to stop writing, restores the UI,
/// then synchronously stops and disposes of the queue, frees its buffers
/// and closes the output file.
- (void)stopRecording
{
    recordState.recording = false;
    [recordButton setTitle:@"Record" forState:UIControlStateNormal];
    
    // Synchronous stop: no callbacks fire after this returns.
    AudioQueueStop(recordState.queue, true);
    
    int bufferIndex = 0;
    while(bufferIndex < NUM_BUFFERS)
    {
        AudioQueueFreeBuffer(recordState.queue,
                             recordState.buffers[bufferIndex]);
        bufferIndex++;
    }
    
    AudioQueueDispose(recordState.queue, true);
    AudioFileClose(recordState.audioFile);
    statusLabel.text = @"Idle";
}

// Problem here, if the user plays the entire duration of the recording, 
// play button's text does not get set back to 'play' instead it is still 'stop'.
// Also, the statusLabel does not get reset back to 'idle'
/// Starts tone-unit playback (the unit itself is created in createWAVFile).
/// The original AudioQueue file-playback path is kept below, commented out.
- (void)startPlayback
{
    playState.playing = true;
    
    // Stop changing parameters on the unit.
    // BUG FIX: OSErr is a 16-bit integer; passing it to a %ld varargs slot
    // reads garbage in the assertion message — cast to long explicitly.
    OSErr err = AudioUnitInitialize(toneUnit);
    NSAssert1(err == noErr, @"Error initializing unit: %ld", (long)err);
    
    // Start pulling RenderTone for output.
    err = AudioOutputUnitStart(toneUnit);
    NSAssert1(err == noErr, @"Error starting unit: %ld", (long)err);
    
    [playButton setTitle:@"Stop" forState:UIControlStateNormal];
    statusLabel.text = @"Playing";
    
    /*playState.currentPacket = 0;
    
    [self setupAudioFormat:&playState.dataFormat];
    
    OSStatus status;
    //status = AudioFileOpenURL(fileURL, fsRdPerm, kAudioFileAIFFType, &playState.audioFile);
    status = AudioFileOpenURL(fileURL, kAudioFileReadWritePermission, kAudioFileAIFFType, &playState.audioFile);
    if(status == 0)
    {
        status = AudioQueueNewOutput(&playState.dataFormat,
                                     AudioOutputCallback,
                                     &playState,
                                     CFRunLoopGetCurrent(),
                                     kCFRunLoopCommonModes,
                                     0,
                                     &playState.queue);
        
        if(status == 0)
        {
            playState.playing = true;
            [playButton setTitle:@"Stop" forState:UIControlStateNormal];
            for(int i = 0; i < NUM_BUFFERS && playState.playing; i++)
            {
                if(playState.playing)
                {
                    AudioQueueAllocateBuffer(playState.queue, 16000, &playState.buffers[i]);
                    AudioOutputCallback(&playState, playState.queue, playState.buffers[i]);
                }
            }
            
            if(playState.playing)
            {
                status = AudioQueueStart(playState.queue, NULL);
                if(status == 0)
                {
                    statusLabel.text = @"Playing";
                }
            }
        }        
    }
    
    if(status != 0)
    {
        [self stopPlayback];
        statusLabel.text = @"Play failed";
    }*/
}

/// Stops tone-unit playback and restores the UI to the idle state.
- (void)stopPlayback
{
    playState.playing = false;
    
    // NOTE(review): re-initializing the unit on the STOP path looks copied
    // from startPlayback; AudioOutputUnitStop alone should suffice — confirm.
    // BUG FIX: OSErr is 16-bit; cast to long to match the %ld format.
    OSErr err = AudioUnitInitialize(toneUnit);
    NSAssert1(err == noErr, @"Error initializing unit: %ld", (long)err);
    
    // Stop playback (the original comment here said "Start playback").
    err = AudioOutputUnitStop(toneUnit);
    NSAssert1(err == noErr, @"Error stopping unit: %ld", (long)err);
    
    // Change the text of play button
    [playButton setTitle:@"Play" forState:UIControlStateNormal];
    statusLabel.text = @"Idle";
}    

/// Copies the UTF-8 path of <Documents>/mysound.wav into the caller's
/// buffer. Returns NO when the path does not fit in maxBufferLength.
/// (Selector keeps its historical "maxLenth" spelling for existing callers.)
- (BOOL)getFilename:(char*)buffer maxLenth:(int)maxBufferLength
{
    NSArray *searchPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, 
                                                               NSUserDomainMask, YES); 
    NSString *documentsDir = [searchPaths objectAtIndex:0];
    NSString *soundPath = [NSString stringWithFormat:@"%@/mysound.wav", documentsDir];
    return [soundPath getCString:buffer maxLength:maxBufferLength encoding:NSUTF8StringEncoding];
}

// http://www.iphonedevsdk.com/forum/iphone-sdk-development/45613-creating-audio-programmatically.html
- (void)createWAVFile:(RGBPixel*)iD withImageSize:(CGSize)iS
{
    //NSString *path = [[NSBundle mainBundle] pathForResource:@"WavHeader" ofType:@"wav"];
    
    //NSMutableData *audioData = [[NSMutableData alloc] initWithContentsOfFile:path];
    
    //int samples = 22050;
    /* 
     ALuint dataToRecord[(int)(imageSize.width*imageSize.height)];
     
     float PI2 = 3.14*2;
     float freq1 = 10;
     float freq2 = 10;
     int imageWidth = (int)imageSize.width;
     int imageHeight = (int)imageSize.height;
     
     for(int i = 0; i < imageWidth; i++) {
     for (int j = 0; j < imageHeight; j++)
     {
     // populate the "dataToRecord" array with audio data. //
     dataToRecord[(imageWidth*j + i)] = (sin((imageWidth*j + i)*(PI2*(8000/freq1))) + sin((imageWidth*j + i)*(PI2*(8000/freq2)))) * 16383;
     }
     }*/
    
    imageData = iD;
    imageSize = iS;
    
    if (toneUnit)
	{
		AudioOutputUnitStop(toneUnit);
		AudioUnitUninitialize(toneUnit);
		AudioComponentInstanceDispose(toneUnit);
		toneUnit = nil;
	}
	else
	{
		[self createToneUnit];
		
		// Stop changing parameters on the unit
		//OSErr err = AudioUnitInitialize(toneUnit);
		//NSAssert1(err == noErr, @"Error initializing unit: %ld", err);
		
		// Start playback
		//err = AudioOutputUnitStart(toneUnit);
		//NSAssert1(err == noErr, @"Error starting unit: %ld", err);
	}
    
}

/// Creates the RemoteIO output unit, installs RenderTone as its render
/// callback (with self as refCon) and sets the stream format to 32-bit
/// mono floating-point linear PCM at 44.1 kHz (must match the sampleRate
/// constant inside RenderTone).
- (void)createToneUnit
{
	// Configure the search parameters to find the default playback output unit
	// (called the kAudioUnitSubType_RemoteIO on iOS but
	// kAudioUnitSubType_DefaultOutput on Mac OS X)
	AudioComponentDescription defaultOutputDescription;
	defaultOutputDescription.componentType = kAudioUnitType_Output;
	defaultOutputDescription.componentSubType = kAudioUnitSubType_RemoteIO;
	defaultOutputDescription.componentManufacturer = kAudioUnitManufacturer_Apple;
	defaultOutputDescription.componentFlags = 0;
	defaultOutputDescription.componentFlagsMask = 0;
	
	// Get the default playback output unit
	AudioComponent defaultOutput = AudioComponentFindNext(NULL, &defaultOutputDescription);
	NSAssert(defaultOutput, @"Can't find default output");
	
	// Create a new unit based on this that we'll use for output.
	// BUG FIX: OSErr is 16-bit; cast to long to match the %ld format
	// specifier in the assertion messages below.
	OSErr err = AudioComponentInstanceNew(defaultOutput, &toneUnit);
	NSAssert1(toneUnit, @"Error creating unit: %ld", (long)err);
	
	// Set our tone rendering function on the unit
	AURenderCallbackStruct input;
	input.inputProc = RenderTone;
	input.inputProcRefCon = self;
	err = AudioUnitSetProperty(toneUnit, 
                               kAudioUnitProperty_SetRenderCallback, 
                               kAudioUnitScope_Input,
                               0, 
                               &input, 
                               sizeof(input));
	NSAssert1(err == noErr, @"Error setting callback: %ld", (long)err);
	
	// Set the format to 32 bit, single channel, floating point, linear PCM
	const int four_bytes_per_float = 4;
	const int eight_bits_per_byte = 8;
	AudioStreamBasicDescription streamFormat;
	streamFormat.mSampleRate = 44100;
	streamFormat.mFormatID = kAudioFormatLinearPCM;
	streamFormat.mFormatFlags =
    kAudioFormatFlagsNativeFloatPacked | kAudioFormatFlagIsNonInterleaved;
	streamFormat.mBytesPerPacket = four_bytes_per_float;
	streamFormat.mFramesPerPacket = 1;	
	streamFormat.mBytesPerFrame = four_bytes_per_float;		
	streamFormat.mChannelsPerFrame = 1;	
	streamFormat.mBitsPerChannel = four_bytes_per_float * eight_bits_per_byte;
	err = AudioUnitSetProperty (toneUnit,
                                kAudioUnitProperty_StreamFormat,
                                kAudioUnitScope_Input,
                                0,
                                &streamFormat,
                                sizeof(AudioStreamBasicDescription));
	NSAssert1(err == noErr, @"Error setting stream format: %ld", (long)err);
}



@end
