//
//  AudioPlayer.m
//  RNRecorder
//
//  Created by GJS on 2018/8/22.
//  Copyright © 2018年 Facebook. All rights reserved.
//

#import "AudioPlayer.h"

#import <AudioUnit/AudioUnit.h>
#import <AVFoundation/AVFoundation.h>
#import <assert.h>

// Utility file includes
#import "CAXException.h"
#import "CAStreamBasicDescription.h"

const uint32_t CONST_BUFFER_SIZE = 0x10000;

#define INPUT_BUS (1)
#define OUTPUT_BUS (0)
#define NO_MORE_DATA (-12306)

// Visualizer display mode (mirrors Apple's aurioTouch sample).
typedef enum aurioTouchDisplayMode {
    aurioTouchDisplayModeOscilloscopeWaveform, // time-domain waveform view
    aurioTouchDisplayModeOscilloscopeFFT,      // FFT trace (the default, set in setupIOUnit)
    aurioTouchDisplayModeSpectrum              // full spectrum view
} aurioTouchDisplayMode;

// Shared state handed to the realtime render callback (PlayCallback).
// Raw pointers are used because the render thread must not retain/release
// Objective-C objects.
struct CallbackData {
    AudioUnit               rioUnit;                        // the remote I/O unit
    BufferManager*          bufferManager;                  // draw/FFT buffer owner
    DCRejectionFilter*      dcRejectionFilter;              // DC-blocking filter
    BOOL*                   muteAudio;                      // points at the player's _muteAudio flag
    BOOL*                   audioChainIsBeingReconstructed; // set while the chain is torn down

    // FIX: dcRejectionFilter was missing from the init list and therefore
    // left uninitialized by the original constructor.
    CallbackData(): rioUnit(NULL), bufferManager(NULL), dcRejectionFilter(NULL), muteAudio(NULL), audioChainIsBeingReconstructed(NULL) {}
} playerCD;

@implementation AudioPlayer
{
    NSURL *_url;        // source file URL supplied at init time
    BOOL _finished;     // YES once playback has ended (or before the first play)
    BOOL _isPlaying;    // YES while the output unit is running
    //
    AudioFileID audioFileID;                         // opened audio file handle
    AudioStreamBasicDescription audioFileFormat;     // the source file's data format
    AudioStreamPacketDescription *audioPacketFormat; // packet descriptions handed to the converter
    
    SInt64 readedPacket; // number of packets already read from the file
    UInt64 packetNums; // total number of packets in the file
    UInt64 packetNumsInBuffer; // max number of packets held in the buffer (NOTE(review): appears unused)
    
    AudioUnit audioUnit;       // NOTE(review): appears unused; _rioUnit is the active unit
    AudioBufferList *buffList; // PCM scratch buffer filled by the converter
    Byte *convertBuffer;       // raw (file-format) data buffer fed to the converter
    
    AudioConverterRef audioConverter; // file format -> linear PCM converter
}

// AudioConverter input callback: supplies raw packets read from the source
// file each time the converter needs more input.
// Returns noErr while data remains, or the private NO_MORE_DATA status once
// the file is exhausted, which makes AudioConverterFillComplexBuffer stop.
OSStatus ComplexInputDataProc(AudioConverterRef inAudioConverter, UInt32 *ioNumberDataPackets, AudioBufferList *ioData, AudioStreamPacketDescription **outDataPacketDescription, void *inUserData)
{
    AudioPlayer *player = (__bridge AudioPlayer *)(inUserData);
    
    // Read up to CONST_BUFFER_SIZE bytes starting at the current packet cursor.
    UInt32 byteSize = CONST_BUFFER_SIZE;
    OSStatus status = AudioFileReadPacketData(player->audioFileID, NO, &byteSize, player->audioPacketFormat, player->readedPacket, ioNumberDataPackets, player->convertBuffer);
    
    // The packet descriptions must be handed back to the converter, otherwise
    // conversion of packetized formats (e.g. MP3/AAC) fails.
    if (outDataPacketDescription) {
        *outDataPacketDescription = player->audioPacketFormat;
    }
    
    if(status) {
        NSLog(@"读取文件失败");
    }
    
    // FIX: the original tested `(NSInteger)ioNumberDataPackets > 0`, which
    // casts the POINTER (always non-NULL here) instead of dereferencing the
    // packet count it points to.
    if (!status && *ioNumberDataPackets > 0 && byteSize > 0) {
        ioData->mBuffers[0].mDataByteSize = byteSize;
        ioData->mBuffers[0].mData = player->convertBuffer;
        player->readedPacket += *ioNumberDataPackets; // advance the read cursor
        return noErr;
    }
    else {
        return NO_MORE_DATA;
    }
}

// Render callback for the remote I/O unit's output bus.
// Pulls converted PCM from the AudioConverter (which in turn reads the file
// via ComplexInputDataProc), copies it into the unit's output buffer, feeds
// the visualizer buffers, and schedules -onPlayEnd when no data remains.
OSStatus PlayCallback(void *inRefCon,
                      AudioUnitRenderActionFlags *ioActionFlags,
                      const AudioTimeStamp *inTimeStamp,
                      UInt32 inBusNumber,
                      UInt32 inNumberFrames,
                      AudioBufferList *ioData) {
    OSStatus err = noErr;
    
    // Do nothing while -handleMediaServerReset is rebuilding the chain.
    if (*playerCD.audioChainIsBeingReconstructed == NO)
    {
        AudioPlayer *player = (__bridge AudioPlayer *)inRefCon;
        
        // Ask the converter to fill the scratch buffer with up to
        // inNumberFrames of output-format PCM.
        player->buffList->mBuffers[0].mDataByteSize = CONST_BUFFER_SIZE;
        OSStatus status = AudioConverterFillComplexBuffer(player->audioConverter, ComplexInputDataProc, inRefCon, &inNumberFrames, player->buffList, NULL);
        if (status) {
            NSLog(@"转换格式失败 %d", status);
        }
        
        // NOTE(review): NSLog and the fwrite below execute on the realtime
        // render thread; both can block and cause audio glitches. Consider
        // moving them off this thread.
        NSLog(@"out size: %d", player->buffList->mBuffers[0].mDataByteSize);
        //[player printBuffer:player->buffList->mBuffers[0].mData byteSize:player->buffList->mBuffers[0].mDataByteSize];
        // Hand the converted PCM to the output unit.
        memcpy(ioData->mBuffers[0].mData, player->buffList->mBuffers[0].mData, player->buffList->mBuffers[0].mDataByteSize);
        ioData->mBuffers[0].mDataByteSize = player->buffList->mBuffers[0].mDataByteSize;
        
        // Debug dump of the decoded PCM stream to <tmp>/test.pcm.
        fwrite(player->buffList->mBuffers[0].mData, player->buffList->mBuffers[0].mDataByteSize, 1, [player pcmFile]);
        
        if (player->buffList->mBuffers[0].mDataByteSize <= 0) {
            // Converter produced nothing: the file is exhausted; finish on main.
            NSLog(@"player->buffList->mBuffers[0].mDataByteSize <= 0: %d", player->buffList->mBuffers[0].mDataByteSize);
            dispatch_async(dispatch_get_main_queue(), ^{
                [player onPlayEnd];
            });
        } else {
            // based on the current display mode, copy the required data to the buffer manager
            if (playerCD.bufferManager->GetDisplayMode() == aurioTouchDisplayModeOscilloscopeWaveform)
            {
                // NOTE(review): the buffer holds 16-bit integer PCM (see
                // -defaultOutputFormat) but is passed as Float32* — confirm
                // BufferManager's expected sample format.
                playerCD.bufferManager->CopyAudioDataToDrawBuffer((Float32*)player->buffList->mBuffers[0].mData, inNumberFrames);
            }
            
            else if ((playerCD.bufferManager->GetDisplayMode() == aurioTouchDisplayModeSpectrum) || (playerCD.bufferManager->GetDisplayMode() == aurioTouchDisplayModeOscilloscopeFFT))
            {
                if (playerCD.bufferManager->NeedsNewFFTData()) {
                    playerCD.bufferManager->CopyAudioDataToFFTInputBuffer((Float32*)player->buffList->mBuffers[0].mData, inNumberFrames);
                    [player handlePCM];
                }
                
            }
        }
        
        // mute audio if needed
        if (*playerCD.muteAudio)
        {
            for (UInt32 i=0; i<ioData->mNumberBuffers; ++i)
                memset(ioData->mBuffers[i].mData, 0, ioData->mBuffers[i].mDataByteSize);
        }
    }
    
    return err;
}

/// Convenience initializer: a player with no URL.
/// NOTE(review): -initWithUrl: then calls -open with a nil URL, which fails
/// quietly; callers must use -initWithUrl: for a playable instance.
- (instancetype)init {
    return [self initWithUrl:nil];
}

/// Creates a player for the given file URL, builds the audio-unit chain,
/// and opens the file so it is ready for -play.
- (instancetype)initWithUrl:(NSURL *)url {
    self = [super init];
    if (self == nil) {
        return nil;
    }

    _url = url;
    _bufferManager = NULL;
    _dcRejectionFilter = NULL;
    _muteAudio = YES;
    _finished = YES;
    _isPlaying = NO;

    [self setupAudioChain];
    [self open];

    return self;
}

/// AVAudioSessionInterruptionNotification handler: pauses when an
/// interruption begins and, if playback was not finished, reactivates the
/// session and restarts the I/O unit when the interruption ends.
- (void)handleInterruption:(NSNotification *)notification
{
    try {
        UInt8 theInterruptionType = [[notification.userInfo valueForKey:AVAudioSessionInterruptionTypeKey] intValue];
        NSLog(@"Session interrupted > --- %s ---\n", theInterruptionType == AVAudioSessionInterruptionTypeBegan ? "Begin Interruption" : "End Interruption");
        
        if (theInterruptionType == AVAudioSessionInterruptionTypeBegan) {
            [self pause];
        }
        
        if (theInterruptionType == AVAudioSessionInterruptionTypeEnded) {
            if (!_finished) {
                // make sure to activate the session
                [self setupAudioSession];
                
                [self startIOUnit];
            }
        }
    } catch (CAXException &e) { // FIX: catch by reference (was by value), matching the other catch sites
        char buf[256];
        fprintf(stderr, "Error: %s (%s)\n", e.mOperation, e.FormatError(buf));
    }
}


/// AVAudioSessionRouteChangeNotification handler: logs the change reason
/// plus the previous and current routes (diagnostics only).
- (void)handleRouteChange:(NSNotification *)notification
{
    UInt8 reasonValue = [[notification.userInfo valueForKey:AVAudioSessionRouteChangeReasonKey] intValue];
    AVAudioSessionRouteDescription *routeDescription = [notification.userInfo valueForKey:AVAudioSessionRouteChangePreviousRouteKey];
    
    NSLog(@"Route change:");
    
    // Map the reason code to its log label, then emit it in one place.
    NSString *reasonName;
    switch (reasonValue) {
        case AVAudioSessionRouteChangeReasonNewDeviceAvailable:
            reasonName = @"     NewDeviceAvailable";
            break;
        case AVAudioSessionRouteChangeReasonOldDeviceUnavailable:
            reasonName = @"     OldDeviceUnavailable";
            break;
        case AVAudioSessionRouteChangeReasonCategoryChange:
            reasonName = @"     CategoryChange";
            break;
        case AVAudioSessionRouteChangeReasonOverride:
            reasonName = @"     Override";
            break;
        case AVAudioSessionRouteChangeReasonWakeFromSleep:
            reasonName = @"     WakeFromSleep";
            break;
        case AVAudioSessionRouteChangeReasonNoSuitableRouteForCategory:
            reasonName = @"     NoSuitableRouteForCategory";
            break;
        default:
            reasonName = @"     ReasonUnknown";
            break;
    }
    NSLog(@"%@", reasonName);
    if (reasonValue == AVAudioSessionRouteChangeReasonCategoryChange) {
        NSLog(@" New Category: %@", [[AVAudioSession sharedInstance] category]);
    }
    
    NSLog(@"Previous route:\n");
    NSLog(@"%@\n", routeDescription);
    NSLog(@"Current route:\n");
    NSLog(@"%@\n", [AVAudioSession sharedInstance].currentRoute);
}

/// AVAudioSessionMediaServicesWereResetNotification handler: tears down and
/// rebuilds the whole audio chain after the media server restarts.
- (void)handleMediaServerReset:(NSNotification *)notification
{
    NSLog(@"Media server has reset");
    // The render callback checks this flag (via playerCD) and becomes a
    // no-op while the chain is being rebuilt.
    _audioChainIsBeingReconstructed = YES;
    
    usleep(25000); //wait here for some time to ensure that we don't delete these objects while they are being accessed elsewhere
    
    // rebuild the audio chain
    delete _bufferManager;      _bufferManager = NULL;
    delete _dcRejectionFilter;  _dcRejectionFilter = NULL;
    
    [self setupAudioChain];
    if (!_finished) {
        // Playback was in progress: reactivate the session and resume.
        [self setupAudioSession];
        [self startIOUnit];
    }
    
    _audioChainIsBeingReconstructed = NO;
}

/// Canonical output format used by both the converter and the remote I/O
/// unit: 44.1 kHz, mono, 16-bit signed-integer linear PCM
/// (2 bytes per frame, 1 frame per packet).
- (AudioStreamBasicDescription)defaultOutputFormat {
    
    static AudioStreamBasicDescription outputFormat;
    static dispatch_once_t onceToken;
    
    dispatch_once(&onceToken, ^{
        AudioStreamBasicDescription fmt;
        memset(&fmt, 0, sizeof(fmt));
        fmt.mSampleRate       = 44100;
        fmt.mFormatID         = kAudioFormatLinearPCM;
        fmt.mFormatFlags      = kLinearPCMFormatFlagIsSignedInteger;
        fmt.mBitsPerChannel   = 16;
        fmt.mChannelsPerFrame = 1;
        fmt.mBytesPerFrame    = 2;
        fmt.mFramesPerPacket  = 1;
        fmt.mBytesPerPacket   = 2;
        outputFormat = fmt;
    });
    
    return outputFormat;
}

/// Alternative output format: 44.1 kHz, stereo, 16-bit signed packed PCM.
/// NOTE(review): not referenced anywhere in this file.
- (AudioStreamBasicDescription)defaultOutputFormat1
{
    static AudioStreamBasicDescription stereoFormat;
    static dispatch_once_t once;
    
    dispatch_once(&once, ^{
        stereoFormat.mFormatID         = kAudioFormatLinearPCM;
        stereoFormat.mFormatFlags      = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
        stereoFormat.mSampleRate       = 44100;
        stereoFormat.mChannelsPerFrame = 2;
        stereoFormat.mBitsPerChannel   = 16;
        stereoFormat.mFramesPerPacket  = 1;
        // 2 channels * 2 bytes per 16-bit sample = 4 bytes per frame,
        // and one frame per packet means 4 bytes per packet as well.
        stereoFormat.mBytesPerFrame    = stereoFormat.mChannelsPerFrame * (stereoFormat.mBitsPerChannel / 8);
        stereoFormat.mBytesPerPacket   = stereoFormat.mFramesPerPacket * stereoFormat.mBytesPerFrame;
    });
    
    return stereoFormat;
}

/// Configures the shared AVAudioSession for playback (playback category,
/// 5 ms I/O buffer, 44.1 kHz preferred rate), registers for interruption /
/// route-change / media-reset notifications, and activates the session.
/// NOTE(review): failures are detected by inspecting error.code after each
/// call; Cocoa convention is to check the method's BOOL return instead —
/// the NSError is not guaranteed untouched on success. Confirm before
/// relying on these throws.
- (void)setupAudioSession
{
    // Drop any observers from a previous call so handlers are not doubled.
    [[NSNotificationCenter defaultCenter] removeObserver:self];
    
    try {
        // Configure the audio session
        AVAudioSession *sessionInstance = [AVAudioSession sharedInstance];
        
        // we are going to play and record so we pick that category
        NSError *error = nil;
        //[sessionInstance setCategory:AVAudioSessionCategoryPlayAndRecord error:&error];
        [sessionInstance setCategory:AVAudioSessionCategoryPlayback error:&error]; // playback only
        XThrowIfError((OSStatus)error.code, "couldn't set session's audio category");
        
        // set the buffer duration to 5 ms
        NSTimeInterval bufferDuration = .005;
        [sessionInstance setPreferredIOBufferDuration:bufferDuration error:&error];
        XThrowIfError((OSStatus)error.code, "couldn't set session's I/O buffer duration");
        
        // set the session's sample rate
        [sessionInstance setPreferredSampleRate:44100 error:&error];
        XThrowIfError((OSStatus)error.code, "couldn't set session's preferred sample rate");
        
        // add interruption handler
        [[NSNotificationCenter defaultCenter] addObserver:self
                                                 selector:@selector(handleInterruption:)
                                                     name:AVAudioSessionInterruptionNotification
                                                   object:sessionInstance];
        
        // we don't do anything special in the route change notification
        [[NSNotificationCenter defaultCenter] addObserver:self
                                                 selector:@selector(handleRouteChange:)
                                                     name:AVAudioSessionRouteChangeNotification
                                                   object:sessionInstance];
        
        // if media services are reset, we need to rebuild our audio chain
        [[NSNotificationCenter defaultCenter]    addObserver:    self
                                                    selector:    @selector(handleMediaServerReset:)
                                                        name:    AVAudioSessionMediaServicesWereResetNotification
                                                      object:    sessionInstance];
        
        // activate the audio session
        [[AVAudioSession sharedInstance] setActive:YES error:&error];
        XThrowIfError((OSStatus)error.code, "couldn't set session active");
    }
    
    catch (CAXException &e) {
        NSLog(@"Error returned from setupAudioSession: %d: %s", (int)e.mError, e.mOperation);
    }
    catch (...) {
        NSLog(@"Unknown error returned from setupAudioSession");
    }
    
    return;
}

/// Lazily opens the file at _url. Returns YES when a file is already open
/// or the open succeeds; on failure the handle is cleared so a later call
/// can retry.
- (BOOL)open
{
    if (audioFileID != NULL) {
        return YES;
    }
    
    BOOL opened = [self openWithUrl:_url];
    if (!opened) {
        audioFileID = NULL;
    }
    return opened;
}

/// Opens the audio file at `url`, reads its data format and packet count,
/// allocates the packet-description and raw-data buffers, and creates the
/// AudioConverter that converts the file's format into -defaultOutputFormat.
/// Returns YES on success.
- (BOOL)openWithUrl:(NSURL *)url {
    OSStatus status = AudioFileOpenURL((__bridge CFURLRef)url, kAudioFileReadPermission, 0, &audioFileID);
    if (status) {
        NSLog(@"打开文件失败 %@", url);
        return NO;
    }
    
    // Read the file's data format.
    UInt32 size = sizeof(AudioStreamBasicDescription);
    status = AudioFileGetProperty(audioFileID, kAudioFilePropertyDataFormat, &size, &audioFileFormat);
    if (status != noErr) {
        NSLog(@"%@", [NSString stringWithFormat:@"error status %d", (int)status]);
        return NO;
    }
    
    // Read the total packet count (used for progress in -getCurrentTime).
    // FIX: the original only checked this status indirectly, after an
    // unrelated malloc, and not at all on the VBR path.
    size = sizeof(packetNums);
    status = AudioFileGetProperty(audioFileID,
                                  kAudioFilePropertyAudioDataPacketCount,
                                  &size,
                                  &packetNums);
    if (status != noErr) {
        NSLog(@"%@", [NSString stringWithFormat:@"error status %d", (int)status]);
        return NO;
    }
    readedPacket = 0;
    
    // Determine the (maximum) packet size in BYTES so we can size the
    // packet-description array for a CONST_BUFFER_SIZE read.
    // FIX: the original seeded this with mFramesPerPacket (a frame count),
    // not mBytesPerPacket (a byte count).
    UInt32 sizePerPacket = audioFileFormat.mBytesPerPacket;
    if (sizePerPacket == 0) {
        // VBR formats report 0 bytes per packet; ask the file for its maximum.
        size = sizeof(sizePerPacket);
        status = AudioFileGetProperty(audioFileID, kAudioFilePropertyMaximumPacketSize, &size, &sizePerPacket);
        if (!(status == noErr && sizePerPacket != 0)) {
            NSLog(@"AudioFileGetProperty error or sizePerPacket = 0");
            return NO;
        }
    }
    
    // FIX: release any packet-description array from a previous open before
    // allocating a new one (the original leaked it on re-open).
    if (audioPacketFormat != NULL) {
        free(audioPacketFormat);
    }
    audioPacketFormat = (AudioStreamPacketDescription *)malloc(sizeof(AudioStreamPacketDescription) * (CONST_BUFFER_SIZE / sizePerPacket + 1));
    
    // Reset the converter and (re)allocate its raw input buffer.
    [self uninitializeAudioConverter];
    convertBuffer = (Byte *)malloc(CONST_BUFFER_SIZE);
    
    AudioStreamBasicDescription outputFormat = [self defaultOutputFormat];
    
    [self printAudioStreamBasicDescription:audioFileFormat];
    [self printAudioStreamBasicDescription:outputFormat];
    status = AudioConverterNew(&audioFileFormat, &outputFormat, &audioConverter);
    if (status) {
        NSLog(@"AudioConverterNew error with status:%d", (int)status); // FIX: log typo "eror"
    }
    
    return status == noErr;
}

/// Creates and configures the RemoteIO audio unit used for playback:
/// allocates the PCM scratch buffer, sets the output stream format, wires
/// PlayCallback as the render callback, populates the playerCD struct the
/// callback reads, and initializes the unit.
- (void)setupIOUnit
{
    try {
        // Create a new instance of AURemoteIO
        
        AudioComponentDescription audioDesc;
        audioDesc.componentType = kAudioUnitType_Output;
        audioDesc.componentSubType = kAudioUnitSubType_RemoteIO;
        audioDesc.componentManufacturer = kAudioUnitManufacturer_Apple;
        audioDesc.componentFlags = 0;
        audioDesc.componentFlagsMask = 0;
        
        AudioComponent inputComponent = AudioComponentFindNext(NULL, &audioDesc);
        // NOTE(review): the OSStatus result here is not checked; _rioUnit may
        // be invalid if component creation fails.
        AudioComponentInstanceNew(inputComponent, &_rioUnit);
        
        // BUFFER: single mono buffer the converter fills each render cycle.
        buffList = (AudioBufferList *)malloc(sizeof(AudioBufferList));
        buffList->mNumberBuffers = 1;
        buffList->mBuffers[0].mNumberChannels = 1;
        buffList->mBuffers[0].mDataByteSize = CONST_BUFFER_SIZE;
        buffList->mBuffers[0].mData = malloc(CONST_BUFFER_SIZE);
        
        
        //initAudioProperty: enable output on the output element unless disabled.
        UInt32 flag = 1;
        OSStatus status;
        if (!self.disableOutput) {
            status = AudioUnitSetProperty(_rioUnit,
                                          kAudioOutputUnitProperty_EnableIO,
                                          kAudioUnitScope_Output,
                                          OUTPUT_BUS,
                                          &flag,
                                          sizeof(flag));
            if (status) {
                NSLog(@"AudioUnitSetProperty error with status:%d", status);
            }
        }
        
        
        //initFormat: what we feed the unit on the input scope of the output bus.
        AudioStreamBasicDescription outputFormat = [self defaultOutputFormat];
        
        status = AudioUnitSetProperty(_rioUnit,
                                      kAudioUnitProperty_StreamFormat,
                                      kAudioUnitScope_Input,
                                      OUTPUT_BUS,
                                      &outputFormat,
                                      sizeof(outputFormat));
        if (status) {
            NSLog(@"AudioUnitSetProperty eror with status:%d", status);
        }
        
        
        // Set the MaximumFramesPerSlice property. This property is used to describe to an audio unit the maximum number
        // of samples it will be asked to produce on any single given call to AudioUnitRender
        // UInt32 maxFramesPerSlice = 4096;
        UInt32 maxFramesPerSlice = 1024;
        XThrowIfError(AudioUnitSetProperty(_rioUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &maxFramesPerSlice, sizeof(UInt32)), "couldn't set max frames per slice on AURemoteIO");
        
        // Get the property value back from AURemoteIO. We are going to use this value to allocate buffers accordingly
        UInt32 propSize = sizeof(UInt32);
        XThrowIfError(AudioUnitGetProperty(_rioUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &maxFramesPerSlice, &propSize), "couldn't get max frames per slice on AURemoteIO");
        
        _bufferManager = new BufferManager(maxFramesPerSlice);
        _dcRejectionFilter = new DCRejectionFilter;
        
        // We need references to certain data in the render callback
        // This simple struct is used to hold that information
        
        playerCD.rioUnit = _rioUnit;
        playerCD.bufferManager = _bufferManager;
        playerCD.dcRejectionFilter = _dcRejectionFilter;
        playerCD.muteAudio = &_muteAudio;
        playerCD.audioChainIsBeingReconstructed = &_audioChainIsBeingReconstructed;
        playerCD.bufferManager->SetDisplayMode(aurioTouchDisplayModeOscilloscopeFFT); // default display: FFT spectrum
        
        // Wire PlayCallback as the render source for the output bus; the
        // refCon is this player instance (bridged back inside the callback).
        AURenderCallbackStruct playCallback;
        playCallback.inputProc = PlayCallback;
        playCallback.inputProcRefCon = (__bridge void *)self;
        status = AudioUnitSetProperty(_rioUnit,
                                      kAudioUnitProperty_SetRenderCallback,
                                      kAudioUnitScope_Input,
                                      OUTPUT_BUS,
                                      &playCallback,
                                      sizeof(playCallback));
        if (status) {
            NSLog(@"AudioUnitSetProperty eror with status:%d", status);
        }
        
        
        OSStatus result = AudioUnitInitialize(_rioUnit);
        NSLog(@"result %d", result);
    }
    
    catch (CAXException &e) {
        NSLog(@"Error returned from setupIOUnit: %d: %s", (int)e.mError, e.mOperation);
    }
    catch (...) {
        NSLog(@"Unknown error returned from setupIOUnit");
    }
    
    return;
}

/// Alternative RemoteIO setup (Float32, input+output enabled), kept from the
/// aurioTouch sample. NOTE(review): appears unused — -setupAudioChain calls
/// -setupIOUnit, not this method.
- (void)setupIOUnit1
{
    try {
        // Create a new instance of AURemoteIO
        
        AudioComponentDescription desc;
        desc.componentType = kAudioUnitType_Output;
        desc.componentSubType = kAudioUnitSubType_RemoteIO;
        desc.componentManufacturer = kAudioUnitManufacturer_Apple;
        desc.componentFlags = 0;
        desc.componentFlagsMask = 0;
        
        AudioComponent comp = AudioComponentFindNext(NULL, &desc);
        XThrowIfError(AudioComponentInstanceNew(comp, &_rioUnit), "couldn't create a new instance of AURemoteIO");
        
        //  Enable input and output on AURemoteIO
        //  Input is enabled on the input scope of the input element
        //  Output is enabled on the output scope of the output element
        
        UInt32 one = 1;
        XThrowIfError(AudioUnitSetProperty(_rioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, 1, &one, sizeof(one)), "could not enable input on AURemoteIO");
        XThrowIfError(AudioUnitSetProperty(_rioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Output, 0, &one, sizeof(one)), "could not enable output on AURemoteIO");
        
        // Explicitly set the input and output client formats
        // sample rate = 44100, num channels = 1, format = 32 bit floating point
        
        CAStreamBasicDescription ioFormat = CAStreamBasicDescription(44100, 1, CAStreamBasicDescription::kPCMFormatFloat32, false);
        XThrowIfError(AudioUnitSetProperty(_rioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &ioFormat, sizeof(ioFormat)), "couldn't set the input client format on AURemoteIO");
        XThrowIfError(AudioUnitSetProperty(_rioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &ioFormat, sizeof(ioFormat)), "couldn't set the output client format on AURemoteIO");
        
        // Set the MaximumFramesPerSlice property. This property is used to describe to an audio unit the maximum number
        // of samples it will be asked to produce on any single given call to AudioUnitRender
        UInt32 maxFramesPerSlice = 4096;
        XThrowIfError(AudioUnitSetProperty(_rioUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &maxFramesPerSlice, sizeof(UInt32)), "couldn't set max frames per slice on AURemoteIO");
        
        // Get the property value back from AURemoteIO. We are going to use this value to allocate buffers accordingly
        UInt32 propSize = sizeof(UInt32);
        XThrowIfError(AudioUnitGetProperty(_rioUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &maxFramesPerSlice, &propSize), "couldn't get max frames per slice on AURemoteIO");
        
        _bufferManager = new BufferManager(maxFramesPerSlice);
        _dcRejectionFilter = new DCRejectionFilter;
        
        // We need references to certain data in the render callback
        // This simple struct is used to hold that information
        
        playerCD.rioUnit = _rioUnit;
        playerCD.bufferManager = _bufferManager;
        playerCD.dcRejectionFilter = _dcRejectionFilter;
        playerCD.muteAudio = &_muteAudio;
        playerCD.audioChainIsBeingReconstructed = &_audioChainIsBeingReconstructed;
        
        // Set the render callback on AURemoteIO
        AURenderCallbackStruct renderCallback;
        renderCallback.inputProc = PlayCallback;
        // NOTE(review): PlayCallback bridges inRefCon to an AudioPlayer and
        // dereferences it; a NULL refCon here would break the callback. If
        // this setup is ever used, pass (__bridge void *)self as in setupIOUnit.
        renderCallback.inputProcRefCon = NULL;
        XThrowIfError(AudioUnitSetProperty(_rioUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, 0, &renderCallback, sizeof(renderCallback)), "couldn't set render callback on AURemoteIO");
        
        // Initialize the AURemoteIO instance
        XThrowIfError(AudioUnitInitialize(_rioUnit), "couldn't initialize AURemoteIO instance");
    }
    
    catch (CAXException &e) {
        NSLog(@"Error returned from setupIOUnit: %d: %s", (int)e.mError, e.mOperation);
    }
    catch (...) {
        NSLog(@"Unknown error returned from setupIOUnit");
    }
    
    return;
}

/// Builds the playback chain (currently just the remote I/O unit setup).
- (void)setupAudioChain
{
    [self setupIOUnit];
}

/// Starts the remote I/O unit; returns the AudioOutputUnitStart status
/// (noErr on success).
- (OSStatus)startIOUnit
{
    OSStatus status = AudioOutputUnitStart(_rioUnit);
    if (status) {
        NSLog(@"couldn't start AURemoteIO: %d", (int)status);
    }
    return status;
}

/// Stops the remote I/O unit; returns the AudioOutputUnitStop status
/// (noErr on success).
- (OSStatus)stopIOUnit
{
    OSStatus status = AudioOutputUnitStop(_rioUnit);
    if (status) {
        NSLog(@"couldn't stop AURemoteIO: %d", (int)status);
    }
    return status;
}

/// The current hardware sample rate of the shared audio session.
- (double)sessionSampleRate
{
    return [AVAudioSession sharedInstance].sampleRate;
}

/// Exposes the C++ BufferManager used by the visualizer.
/// May be NULL before setupIOUnit or during a media-server reset.
- (BufferManager*)getBufferManagerInstance
{
    return _bufferManager;
}

/// YES while -handleMediaServerReset: is tearing down / rebuilding the
/// chain; the render callback checks this (via playerCD) and idles meanwhile.
- (BOOL)audioChainIsBeingReconstructed
{
    return _audioChainIsBeingReconstructed;
}

/// Tears down the C++ helpers and releases the audio resources this
/// instance owns.
- (void)dealloc
{
    // FIX: also release the audio unit, scratch buffers, converter, packet
    // descriptions and file handle, all of which the original dealloc leaked.
    [self uninitializeAudioUnit];
    if (audioFileID != NULL) {
        AudioFileClose(audioFileID);
        audioFileID = NULL;
    }
    if (audioPacketFormat != NULL) {
        free(audioPacketFormat);
        audioPacketFormat = NULL;
    }
    
    delete _bufferManager;      _bufferManager = NULL;
    delete _dcRejectionFilter;  _dcRejectionFilter = NULL;
    [[NSNotificationCenter defaultCenter] removeObserver:self];
}

/// Activates the audio session and starts the I/O unit.
/// Does nothing if no file is open.
- (void)play {
    if (audioFileID == NULL) {
        return;
    }
    [self setupAudioSession];
    if ([self startIOUnit] == noErr) {
        _finished = NO;
        _isPlaying = YES;
    }
}

/// Stops the I/O unit but keeps the read position so -play resumes.
/// Does nothing if no file is open.
- (void)pause {
    if (audioFileID == NULL) {
        return;
    }
    if ([self stopIOUnit] == noErr) {
        _isPlaying = NO;
    }
}

/// Stops playback (if a file is open) and rewinds the read cursor to the
/// beginning of the file.
- (void)stop {
    if (audioFileID != NULL) {
        [self stopIOUnit];
    }
    readedPacket = 0;
    _isPlaying = NO;
    _finished = YES;
}

/// Returns playback progress as a fraction in [0, 1]
/// (packets read / total packets) — despite the name, not seconds.
- (double)getCurrentTime {
    // FIX: guard against division by zero — packetNums is 0 until
    // -openWithUrl: has successfully read the packet count.
    if (packetNums == 0) {
        return 0.0;
    }
    return (readedPacket * 1.0) / packetNums;
}

// Shared between -pcmFile and -closePcmFile so that closing actually clears
// the cached handle. FIX: the original kept the FILE* in a function-local
// static inside -pcmFile; -closePcmFile shadowed it with a local, closed the
// file, and nulled only the local — leaving the static dangling, so the next
// -pcmFile (and the render callback's fwrite) used a freed FILE*.
static FILE *gPcmFile = NULL;

/// Lazily opens (and truncates) <tmp>/test.pcm, the debug PCM dump written
/// by the render callback. Returns the cached handle on later calls.
- (FILE *)pcmFile {
    if (!gPcmFile) {
        NSString *filePath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"test.pcm"];
        NSFileManager *fm = [NSFileManager defaultManager];
        if ([fm fileExistsAtPath:filePath]) {
            [fm removeItemAtPath:filePath error:nil];
        }
        gPcmFile = fopen(filePath.UTF8String, "w");
    }
    return gPcmFile;
}

/// Closes the debug PCM dump file (no-op if it was never opened).
- (void)closePcmFile {
    if (gPcmFile) {
        fclose(gPcmFile);
        gPcmFile = NULL;
    }
}

/// Notifies the delegate that fresh FFT input data is available.
/// (respondsToSelector: on a nil delegate returns NO, so no separate
/// nil check is needed.)
- (void)handlePCM {
    if ([self.delegate respondsToSelector:@selector(onHandlePCM:)]) {
        [self.delegate onHandlePCM:self];
    }
}

/// Releases the AudioConverter and its raw input buffer.
/// Safe to call repeatedly or before anything was created.
- (void)uninitializeAudioConverter {
    free(convertBuffer); // free(NULL) is a defined no-op
    convertBuffer = NULL;
    
    if (audioConverter != NULL) {
        AudioConverterDispose(audioConverter);
        audioConverter = NULL;
    }
}

/// Stops and disposes the remote I/O unit, frees the PCM scratch buffer,
/// then tears down the converter. Safe to call when already uninitialized.
- (void)uninitializeAudioUnit {
    if (_rioUnit != NULL) {
        // Stop before uninitializing/disposing so the render callback halts.
        AudioOutputUnitStop(_rioUnit);
        AudioUnitUninitialize(_rioUnit);
        AudioComponentInstanceDispose(_rioUnit);
        _rioUnit = NULL;
    }
    if (buffList != NULL) {
        if (buffList->mBuffers[0].mData) {
            free(buffList->mBuffers[0].mData);
            buffList->mBuffers[0].mData = NULL;
        }
        
        free(buffList);
        buffList = NULL;
    }
    // Also releases convertBuffer, which the converter path owns.
    [self uninitializeAudioConverter];
}

/// Invoked (on the main queue, from the render callback) when the converter
/// runs out of data: tears down the audio unit, closes the PCM dump file,
/// and tells the delegate playback finished.
- (void)onPlayEnd {
    [self uninitializeAudioUnit];
    [self closePcmFile];
    
    if ([self.delegate respondsToSelector:@selector(audioPlayDidFinished:)]) {
        [self.delegate audioPlayDidFinished:self];
    }
}

/// Dumps the fields of an AudioStreamBasicDescription to stdout, rendering
/// mFormatID as its four-character code (byte-swapped to display order).
- (void)printAudioStreamBasicDescription:(AudioStreamBasicDescription)asbd {
    char formatID[5];
    UInt32 mFormatID = CFSwapInt32HostToBig(asbd.mFormatID);
    memcpy(formatID, &mFormatID, 4); // FIX: memcpy over deprecated bcopy (note swapped src/dst order)
    formatID[4] = '\0';
    printf("Sample Rate:         %10.0f\n",  asbd.mSampleRate);
    printf("Format ID:           %10s\n",    formatID);
    printf("Format Flags:        %10X\n",    (unsigned int)asbd.mFormatFlags);
    printf("Bytes per Packet:    %10d\n",    (unsigned int)asbd.mBytesPerPacket);
    printf("Frames per Packet:   %10d\n",    (unsigned int)asbd.mFramesPerPacket);
    printf("Bytes per Frame:     %10d\n",    (unsigned int)asbd.mBytesPerFrame);
    printf("Channels per Frame:  %10d\n",    (unsigned int)asbd.mChannelsPerFrame);
    printf("Bits per Channel:    %10d\n",    (unsigned int)asbd.mBitsPerChannel);
    printf("\n");
}

/// Debug helper: prints `bufferSize` bytes of `buffer` as unsigned decimal
/// values. (Referenced only from a commented-out line in PlayCallback.)
- (void)printBuffer:(void *)buffer byteSize:(NSInteger)bufferSize {
    // FIX: guard the buffer actually being printed; the original tested the
    // unrelated buffList ivar and would have dereferenced a NULL `buffer`.
    if (buffer == NULL || bufferSize <= 0) {
        return;
    }
    printf("[");
    for (NSInteger i = 0; i < bufferSize; i++) {
        printf("%10.0hhu ", ((Byte *)buffer)[i]);
    }
    printf("]\n");
}

@end

