//
//  AudioManager.m
//  ImageToSound
//
//  Created by jvmilazz on 9/28/11.
//  Copyright 2011 ASU. All rights reserved.
//

#import "AudioManager.h"
#import <AudioToolbox/AudioQueue.h>
#import <AudioToolbox/AudioFile.h>


@implementation AudioManager 

@synthesize fileURL;
@synthesize recordState;
@synthesize playState;
@synthesize statusLabel;
@synthesize playButton;
@synthesize recordButton;
    
// Prepares the manager: resolves the on-disk audio file URL and clears the
// record/play state flags. Safe to call more than once.
- (void) onLoad
{
    // Neither playing nor recording until the user asks.
    playState.playing = false;
    recordState.recording = false;

    // Build the path of the audio file in the Documents directory.
    char path[256];
    if (![self getFilename:path maxLenth:sizeof path])
    {
        // Path did not fit / conversion failed; leave fileURL untouched.
        printf("Failed to build audio file path\n");
        return;
    }

    // Release any URL from an earlier call so repeated loads don't leak
    // (fileURL is a CF object we own; dealloc balances with CFRelease).
    if (fileURL != NULL)
    {
        CFRelease(fileURL);
    }
    fileURL = CFURLCreateFromFileSystemRepresentation(NULL, (UInt8*)path, strlen(path), false);
}
    
    
// Releases the owned CFURL. Guarded because CFRelease(NULL) crashes and
// fileURL is only set once onLoad (or generateWAVFile:) has run.
- (void)dealloc
{
    if (fileURL != NULL)
    {
        CFRelease(fileURL);
    }
    [super dealloc];
}


// Record-side queue callback: a buffer of captured audio is ready. Writes
// its packets to the output file and re-enqueues the buffer for reuse.
// inUserData is the RecordState* passed to AudioQueueNewInput.
void AudioInputCallback(
                        void *inUserData, 
                        AudioQueueRef inAQ, 
                        AudioQueueBufferRef inBuffer, 
                        const AudioTimeStamp *inStartTime, 
                        UInt32 inNumberPacketDescriptions, 
                        const AudioStreamPacketDescription *inPacketDescs)
{
    RecordState* recordState = (RecordState*)inUserData;
    if(!recordState->recording)
    {
        // BUG FIX: the original printed "returning" but fell through and
        // kept writing to the (possibly closed) file. Actually return.
        printf("Not recording, returning\n");
        return;
    }
    
    // For constant-bytes-per-packet formats (like our PCM) the queue passes
    // no packet descriptions; derive the packet count from the byte size.
    if(inNumberPacketDescriptions == 0 && recordState->dataFormat.mBytesPerPacket != 0)
    {
        inNumberPacketDescriptions = inBuffer->mAudioDataByteSize / recordState->dataFormat.mBytesPerPacket;
    }
    
    printf("Writing buffer %lld\n", recordState->currentPacket);
    OSStatus status = AudioFileWritePackets(recordState->audioFile,
                                            false,
                                            inBuffer->mAudioDataByteSize,
                                            inPacketDescs,
                                            recordState->currentPacket,
                                            &inNumberPacketDescriptions,
                                            inBuffer->mAudioData);
    if(status == 0)
    {
        // Advance by the number of packets actually written.
        recordState->currentPacket += inNumberPacketDescriptions;
    }
    
    // Hand the buffer back to the queue to be filled again.
    AudioQueueEnqueueBuffer(recordState->queue, inBuffer, 0, NULL);
}

// Play-side queue callback: outBuffer has been consumed and can be refilled.
// Reads the next packets from the file into the buffer and re-enqueues it;
// when the file is exhausted, stops the queue and closes the file.
// inUserData is the PlayState* passed to AudioQueueNewOutput.
void AudioOutputCallback(
                         void* inUserData,
                         AudioQueueRef outAQ,
                         AudioQueueBufferRef outBuffer)
{
	PlayState* playState = (PlayState*)inUserData;	
    if(!playState->playing)
    {
        printf("Not playing, returning\n");
        return;
    }
    
	printf("Queuing buffer %lld for playback\n", playState->currentPacket);
    
    // BUG FIX: the original passed an *uninitialized* packetDescs pointer to
    // AudioFileReadPackets and AudioQueueEnqueueBuffer — undefined behavior.
    // Our format is constant-size PCM, so no packet descriptions are needed:
    // NULL is the documented value.
    AudioStreamPacketDescription* packetDescs = NULL;
    
    UInt32 bytesRead;
    UInt32 numPackets = 8000;
    OSStatus status;
    status = AudioFileReadPackets(
                                  playState->audioFile,
                                  false,
                                  &bytesRead,
                                  packetDescs,
                                  playState->currentPacket,
                                  &numPackets,
                                  outBuffer->mAudioData);
    
    if(numPackets)
    {
        outBuffer->mAudioDataByteSize = bytesRead;
        status = AudioQueueEnqueueBuffer(
                                         playState->queue,
                                         outBuffer,
                                         0,
                                         packetDescs);
        
        playState->currentPacket += numPackets;
    }
    else
    {
        // End of file: stop asynchronously (immediate = false) so already
        // queued buffers finish playing, then release the file.
        if(playState->playing)
        {
            AudioQueueStop(playState->queue, false);
            AudioFileClose(playState->audioFile);
            playState->playing = false;
        }
        
        AudioQueueFreeBuffer(playState->queue, outBuffer);
    }
    
}

// Fills *format with the stream description used for both recording and
// playback: 16-bit big-endian signed mono PCM at 8 kHz (the sample layout
// AIFF files store natively).
- (void)setupAudioFormat:(AudioStreamBasicDescription*)format 
{
    const int bytesPerSample = 2;

    format->mSampleRate       = 8000.0;
    format->mFormatID         = kAudioFormatLinearPCM;
    format->mFormatFlags      = kLinearPCMFormatFlagIsBigEndian
                              | kLinearPCMFormatFlagIsSignedInteger
                              | kLinearPCMFormatFlagIsPacked;
    format->mChannelsPerFrame = 1;
    format->mBitsPerChannel   = 8 * bytesPerSample;
    // One frame per packet, one channel per frame => packet == frame == sample.
    format->mFramesPerPacket  = 1;
    format->mBytesPerFrame    = bytesPerSample;
    format->mBytesPerPacket   = bytesPerSample;
    format->mReserved         = 0;
}

// Record button handler: toggles recording, unless playback is in progress
// (recording and playback are mutually exclusive).
- (IBAction)recordPressed:(id)sender
{
    if(playState.playing)
    {
        printf("Can't start recording, currently playing\n");
        return;
    }

    if(recordState.recording)
    {
        printf("Stopping recording\n");
        [self stopRecording];
    }
    else
    {
        printf("Starting recording\n");
        [self startRecording];
    }
}

// Play button handler: toggles playback, unless a recording is in progress.
// CONSISTENCY FIX: recordPressed: logs when it refuses to act; the original
// of this method silently did nothing — now it logs the mirror message.
- (IBAction)playPressed:(id)sender
{
    if(recordState.recording)
    {
        printf("Can't start playback, currently recording\n");
        return;
    }

    if(playState.playing)
    {
        printf("Stopping playback\n");
        [self stopPlayback];
    }
    else
    {
        printf("Starting playback\n");
        [self startPlayback];
    }
}

// Creates an input queue, primes its buffers, creates the destination audio
// file, and starts capturing. Any failure falls through to the cleanup at
// the bottom, which tears down whatever was successfully created.
- (void)startRecording
{
    [self setupAudioFormat:&recordState.dataFormat];
    
    recordState.currentPacket = 0;
	
    OSStatus status;
    // Callbacks are delivered on the current run loop, so UIKit calls made
    // from AudioInputCallback would be safe.
    status = AudioQueueNewInput(&recordState.dataFormat,
                                AudioInputCallback,
                                &recordState,
                                CFRunLoopGetCurrent(),
                                kCFRunLoopCommonModes,
                                0,
                                &recordState.queue);
    
    if(status == 0)
    {
        // Prime the queue: each buffer must be enqueued before starting.
        // 16000 bytes = 1 second of 8 kHz, 2-byte mono samples.
        for(int i = 0; i < NUM_BUFFERS; i++)
        {
            AudioQueueAllocateBuffer(recordState.queue,
                                     16000, &recordState.buffers[i]);
            AudioQueueEnqueueBuffer(recordState.queue,
                                    recordState.buffers[i], 0, NULL);
        }
        
        // NOTE(review): the file is created as AIFF although getFilename:
        // gives it a .wav name — confirm which container is intended.
        status = AudioFileCreateWithURL(fileURL,
                                        kAudioFileAIFFType,
                                        &recordState.dataFormat,
                                        kAudioFileFlags_EraseFile,
                                        &recordState.audioFile);
        if(status == 0)
        {
            // Set the flag before starting so the first callback records.
            recordState.recording = true;   
            [recordButton setTitle:@"Stop" forState:UIControlStateNormal];
            status = AudioQueueStart(recordState.queue, NULL);
            if(status == 0)
            {
                statusLabel.text = @"Recording";
            }
        }
    }
    
    if(status != 0)
    {
        // Reached on any nonzero status above; stopRecording releases the
        // queue/buffers/file that were created before the failure.
        [self stopRecording];
        statusLabel.text = @"Record Failed";
    }
}

// Stops capture, releases the queue and its buffers, closes the output
// file, and restores the idle UI. Also used as the error-path cleanup for
// startRecording.
- (void)stopRecording
{
    // Clear the flag first so a late callback refuses to write.
    recordState.recording = false;
    [recordButton setTitle:@"Record" forState:UIControlStateNormal];
    
    // Stop synchronously (immediate = true) so no callback fires while the
    // buffers below are being freed.
    AudioQueueStop(recordState.queue, true);

    int bufferIndex = 0;
    while(bufferIndex < NUM_BUFFERS)
    {
        AudioQueueFreeBuffer(recordState.queue, recordState.buffers[bufferIndex]);
        bufferIndex++;
    }
    
    AudioQueueDispose(recordState.queue, true);
    AudioFileClose(recordState.audioFile);
    statusLabel.text = @"Idle";
}

// Known bug: if playback runs all the way to the end of the recording on its
// own, the play button's title stays "Stop" instead of reverting to "Play",
// and statusLabel is never reset to "Idle". (The end-of-file path runs in
// AudioOutputCallback, which has no reference back to this controller.)
// Opens the recorded file, creates an output queue, pre-fills its buffers
// by invoking the output callback directly, and starts playback.
- (void)startPlayback
{
    playState.currentPacket = 0;
    
    [self setupAudioFormat:&playState.dataFormat];
    
    OSStatus status;
    // FIX: playback only reads the file, so open it with read permission.
    // The original requested kAudioFileReadWritePermission (the commented-out
    // fsRdPerm line showed the read-only intent), which over-claims access
    // and can fail where read access would succeed.
    status = AudioFileOpenURL(fileURL, kAudioFileReadPermission, kAudioFileAIFFType, &playState.audioFile);
    if(status == 0)
    {
        status = AudioQueueNewOutput(&playState.dataFormat,
                                     AudioOutputCallback,
                                     &playState,
                                     CFRunLoopGetCurrent(),
                                     kCFRunLoopCommonModes,
                                     0,
                                     &playState.queue);
        
        if(status == 0)
        {
            // Flag must be set before priming: the callback returns early
            // when playing is false.
            playState.playing = true;
            // TODO(review): the title/status are never reset when playback
            // reaches end-of-file on its own — needs an is-running listener.
            [playButton setTitle:@"Stop" forState:UIControlStateNormal];
            for(int i = 0; i < NUM_BUFFERS && playState.playing; i++)
            {
                // The callback itself may clear playState.playing when the
                // file is shorter than the buffers, hence the re-check.
                if(playState.playing)
                {
                    AudioQueueAllocateBuffer(playState.queue, 16000, &playState.buffers[i]);
                    AudioOutputCallback(&playState, playState.queue, playState.buffers[i]);
                }
            }
            
            if(playState.playing)
            {
                status = AudioQueueStart(playState.queue, NULL);
                if(status == 0)
                {
                    statusLabel.text = @"Playing";
                }
            }
        }        
    }
    
    if(status != 0)
    {
        // Any failure above: tear down whatever was created.
        [self stopPlayback];
        statusLabel.text = @"Play failed";
    }
}

// Halts playback, releases the queue's buffers and the queue itself, closes
// the file, and restores the idle UI. Also the error-path cleanup for
// startPlayback.
- (void)stopPlayback
{
    // Clear the flag first so a late callback returns without enqueuing.
    playState.playing = false;
    [playButton setTitle:@"Play" forState:UIControlStateNormal];
    
    int bufferIndex = 0;
    while(bufferIndex < NUM_BUFFERS)
    {
        AudioQueueFreeBuffer(playState.queue, playState.buffers[bufferIndex]);
        bufferIndex++;
    }
    
    AudioQueueDispose(playState.queue, true);
    AudioFileClose(playState.audioFile);
    statusLabel.text = @"Idle";
}    

// Copies the full Documents-directory path of the sound file into buffer.
// Returns YES on success, NO if the path did not fit in maxBufferLength
// bytes (per -getCString:maxLength:encoding:).
// NOTE(review): startRecording writes AIFF data under this .wav name —
// confirm which extension/container is intended.
- (BOOL)getFilename:(char*)buffer maxLenth:(int)maxBufferLength
{
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, 
                                                         NSUserDomainMask, YES); 
    NSString* docDir = [paths objectAtIndex:0];
    // stringByAppendingPathComponent handles the separator correctly instead
    // of hard-coding "/"; the resulting path is identical.
    NSString* file = [docDir stringByAppendingPathComponent:@"mysound.wav"];
    return [file getCString:buffer maxLength:maxBufferLength encoding:NSUTF8StringEncoding];
}


// http://www.iphonedevsdk.com/forum/iphone-sdk-development/45613-creating-audio-programmatically.html
// Builds a sound file from generated sample data: copies the canned WAV
// header from the bundle, appends sample bytes, writes the result into the
// Documents directory, and points fileURL at the new file.
// NOTE(review): imageData is not consulted yet — presumably the pixel values
// should modulate the generated tones; confirm the intended mapping.
- (void)generateWAVFile:(RGBPixel*)imageData withImageSize:(CGSize) imageSize;
{
    NSString *path = [[NSBundle mainBundle] pathForResource:@"WavHeader" ofType:@"wav"];
    
    NSMutableData *audioData = [[NSMutableData alloc] initWithContentsOfFile:path];
    
    int samples = 22050;
    
    int imageWidth = (int)imageSize.width;
    int imageHeight = (int)imageSize.height;
    int pixelCount = imageWidth * imageHeight;
    
    uint8_t dataToRecord[pixelCount];
    
    float PI2 = 3.14*2;
    float freq1 = 10;
    float freq2 = 10;
    
    // Populate the sample buffer from the two sine generators.
    for(int i = 0; i < imageWidth; i++) {
        for (int j = 0; j < imageHeight; j++)
        {
            dataToRecord[(imageWidth*j + i)] = (sin((imageWidth*j + i)*(PI2*(8000/freq1))) + sin((imageWidth*j + i)*(PI2*(8000/freq2)))) * 16383;
        }
    }
    
    // FIX: never append more bytes than the buffer holds. The original
    // appended `samples` (22050) bytes unconditionally, reading past the end
    // of dataToRecord whenever the image had fewer than 22050 pixels.
    int bytesToAppend = (samples < pixelCount) ? samples : pixelCount;
    [audioData appendBytes:(const void *)dataToRecord length:bytesToAppend];
    
    // FIX: write into the app's Documents directory. The original wrote to
    // the root path "/mysound.wav", which always fails in the iOS sandbox.
    char pathc[256];
    if ([self getFilename:pathc maxLenth:sizeof pathc])
    {
        NSString *outputPath = [NSString stringWithUTF8String:pathc];
        [audioData writeToFile:outputPath atomically:YES];
        
        // FIX: point fileURL at the file just written. The original assigned
        // an *autoreleased* NSURL for the bundled header instead — the wrong
        // file, and dealloc's CFRelease would over-release it. Create a CF
        // URL we own, releasing any previous one first.
        if (fileURL != NULL)
        {
            CFRelease(fileURL);
        }
        fileURL = CFURLCreateFromFileSystemRepresentation(NULL, (UInt8*)pathc, strlen(pathc), false);
    }
    
    // FIX: balance alloc/init under manual reference counting.
    [audioData release];
}

@end
