//
//  ASRPcmRecorder.m
//  testASR
//
//  Created by YangShuai on 16/9/22.
//  Copyright © 2016年 WanKang. All rights reserved.
//

#import "ASRPcmRecorder.h"
#import "ASRDebugLog.h"

// Forward declarations for the C callbacks defined later in this file.
static void interruptionListener(void *inClientData, UInt32 inInterruptionState);
static void HandleInputBuffer(void *aqData,
                              AudioQueueRef inAQ,
                              AudioQueueBufferRef inBuffer,
                              const AudioTimeStamp *inStartTime,
                              UInt32 inNumPackets,
                              const AudioStreamPacketDescription *inPacketDesc);

// Class extension (idiomatic for private API, instead of a named
// "(private)" category). All methods declared here are implemented in
// the main @implementation below.
@interface ASRPcmRecorder ()

/// Fills *format with a linear-PCM description built from the recorder's
/// current mSampleRate / mBits / mChannels ivars.
- (void)setupAudioFormat:(AudioStreamBasicDescription*)format;

@end

@implementation ASRPcmRecorder

@synthesize delegate = _delegate;

void DeriveBufferSize (
                       AudioQueueRef                audioQueue,
                       AudioStreamBasicDescription  ASBDescription,
                       Float64                      seconds,
                       UInt32                       *outBufferSize
                       );

#pragma system
/// Designated initializer. Configures the default capture format
/// (8 kHz, 16-bit, mono PCM), resets the capture state, and activates
/// the audio session.
- (instancetype)init
{
    if (self = [super init]) {
        state.recording = NO;
        state.recorder = self;      // back-pointer used by the C input callback

        _refreshHz = 1. / 30.;      // level-meter refresh interval (~30 Hz)
        mSampleRate = 8000;
        mBits = 16;
        mChannels = 1;
        [self setupAudioFormat:&state.dataFormat];
        state.currentPacket = 0;
        self.isTimerRuning = NO;

        [self initAudioSession];
    }
    return self;
}

/// ARC dealloc: non-memory cleanup only (no [super dealloc] under ARC).
/// Invalidates the meter timer, disposes a still-live audio queue, and
/// frees the C-allocated level-meter buffer so it cannot leak.
- (void)dealloc
{
    if (_updateTimer) {
        [_updateTimer invalidate];
    }
    if (state.recording) {
        // -stop was never called; release the queue synchronously.
        AudioQueueDispose(state.queue, true);
        state.recording = NO;
    }
    if (state.audioLevels) {
        free(state.audioLevels);
        state.audioLevels = NULL;
    }
}

#pragma developer
/// Creates the input audio queue, primes its buffers, and starts capture.
/// Enables level metering and allocates per-channel meter storage.
/// Returns NO on any Audio Queue failure — and, unlike before, disposes
/// the partially-built queue on those paths so it does not leak.
- (BOOL)start
{
    NSDate *startDate = [NSDate date];
    OSStatus error = AudioQueueNewInput(&state.dataFormat,
                                        HandleInputBuffer,
                                        &state,
                                        CFRunLoopGetCurrent(),
                                        kCFRunLoopCommonModes,
                                        0,
                                        &state.queue);
    if (error) {
        [ASRDebugLog showLog:@"%s|AudioQueueNewInput error",__func__];
        return NO;
    }

    // Size each buffer for ~0.15 s of audio, then allocate and enqueue them.
    DeriveBufferSize(state.queue, state.dataFormat, 0.15, &state.bufferByteSize);
    for (int i = 0; i < NUM_BUFFERS; i++) {
        error = AudioQueueAllocateBuffer(state.queue,
                                         state.bufferByteSize,
                                         &state.buffers[i]);
        if (error) {
            [ASRDebugLog showLog:@"%s|AudioQueueAllocateBuffer error",__func__];
            AudioQueueDispose(state.queue, true);   // don't leak the queue on failure
            return NO;
        }
        error = AudioQueueEnqueueBuffer(state.queue, state.buffers[i], 0, NULL);
        if (error) {
            [ASRDebugLog showLog:@"%s|AudioQueueEnqueueBuffer error",__func__];
            AudioQueueDispose(state.queue, true);   // don't leak the queue on failure
            return NO;
        }
    }

    error = AudioQueueStart(state.queue, NULL);
    NSTimeInterval startupInterval = [[NSDate date] timeIntervalSinceDate:startDate];
    [ASRDebugLog showLog:@"ISRRecorder|n1:%f",startupInterval];
    if (error != 0) {
        [ASRDebugLog showLog:@"%s|AudioQueueStart error",__func__];
        AudioQueueStop(state.queue, YES);
        AudioQueueDispose(state.queue, true);       // release the queue as well
        return NO;
    }

    // Allocate one level-meter slot per channel and enable metering.
    state.audioLevels = (AudioQueueLevelMeterState *)calloc(mChannels, sizeof(AudioQueueLevelMeterState));

    UInt32 trueValue = true;
    AudioQueueSetProperty(state.queue, kAudioQueueProperty_EnableLevelMetering, &trueValue, sizeof(UInt32));
    state.currentPacket = 0;
    state.recording = YES;

    return YES;
}

/// Stops recording: tears down the level-meter timer, flushes and stops
/// the audio queue, disposes it, frees the meter storage, and deactivates
/// the audio session. Safe to call when not recording.
- (void)stop
{
    if (_updateTimer && self.isTimerRuning) {
        [_updateTimer invalidate];
        _updateTimer = nil;         // an invalidated timer can never be reused
        self.isTimerRuning = NO;
    }
    _delegate = nil;
    if (state.recording) {          // BOOL: test truthiness, never compare == YES
        OSStatus error;
        error = AudioQueueFlush(state.queue);
        if (error) {
            [ASRDebugLog showLog:@"%s|AudioQueueFlush error", __func__];
        }
        error = AudioQueueStop(state.queue, true);
        if (error) {
            [ASRDebugLog showLog:@"%s|AudioQueueStop error", __func__];
        }

        state.recording = NO;
        error = AudioQueueDispose(state.queue, true);
        if (error) {
            [ASRDebugLog showLog:@"%s|AudioQueueDispose error", __func__];
        }
        if (state.audioLevels) {
            free(state.audioLevels);
            state.audioLevels = NULL;
        }

        error = AudioSessionSetActive(false);
        if (error) {
            [ASRDebugLog showLog:@"%s|AudioSessionSetActive error", __func__];
        }
    }
}

/// Overrides the capture sample rate; takes effect the next time the
/// format is rebuilt via -setupAudioFormat:.
/// NOTE(review): the value is cast to Float32 here, while the Extended
/// category assigns a Float64 straight into mSampleRate — the ivar is
/// declared in the header (not visible here); confirm its type and drop
/// the narrowing cast if it is Float64.
- (void) setSample:(int) rate
{
    mSampleRate = (Float32)rate;
}

#pragma mark private

/// Populates *format with an interleaved, packed, signed-integer linear-PCM
/// description derived from mSampleRate / mBits / mChannels.
/// Byte counts are computed rather than hard-coded to 2, so that
/// -setAudioFormat:theBitsPerChannel:theChannelsPerFrame: also works for
/// formats other than 16-bit mono (the previous fixed value of 2 was only
/// correct for that one configuration).
- (void)setupAudioFormat:(AudioStreamBasicDescription*)format
{
    format->mSampleRate = mSampleRate;
    format->mFormatID = kAudioFormatLinearPCM;
    format->mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;

    format->mChannelsPerFrame = mChannels;
    format->mBitsPerChannel = mBits;
    format->mFramesPerPacket = 1;

    // Packed linear PCM: bytes per frame = channels * (bits / 8);
    // with one frame per packet the packet size is identical.
    UInt32 bytesPerFrame = mChannels * (mBits / 8);
    format->mBytesPerFrame = bytesPerFrame;
    format->mBytesPerPacket = bytesPerFrame;
    format->mReserved = 0;
}

/// Audio queue input callback: forwards each captured buffer to the
/// recorder's delegate and, while recording is active, hands the buffer
/// back to the queue for reuse.
static void HandleInputBuffer(void *aqData,
                              AudioQueueRef inAQ,
                              AudioQueueBufferRef inBuffer,
                              const AudioTimeStamp *inStartTime,
                              UInt32 inNumPackets,
                              const AudioStreamPacketDescription *inPacketDesc)
{
    RecordState *recordState = (RecordState *)aqData;
    ASRPcmRecorder *owner = recordState->recorder;

    // For constant-bitrate formats the queue may report 0 packets; derive
    // the count from the buffer's byte size instead.
    UInt32 bytesPerPacket = recordState->dataFormat.mBytesPerPacket;
    if (inNumPackets == 0 && bytesPerPacket != 0) {
        inNumPackets = inBuffer->mAudioDataByteSize / bytesPerPacket;
    }

    if (owner.delegate) {
        [owner.delegate onReturnBuffer:inBuffer->mAudioData
                            bufferSize:inBuffer->mAudioDataByteSize];
    }
    recordState->currentPacket += inNumPackets;

    // After recording stops, the buffer is released with the queue rather
    // than re-enqueued.
    if (recordState->recording == 0) {
        return;
    }
    AudioQueueEnqueueBuffer(recordState->queue, inBuffer, 0, NULL);
}

/// One-time audio session setup via the (deprecated) C AudioSession API:
/// PlayAndRecord category, default-to-speaker output override, Bluetooth
/// input, then activation. Returns NO only if activation itself fails;
/// earlier errors are logged and setup continues best-effort.
/// Fixes: OSStatus is SInt32, so every %ld argument is cast to (long) to
/// match the specifier on 64-bit; the two override *values* were
/// previously set to the property-ID constants (nonzero fourCCs, so they
/// happened to behave as "true") — they are now an explicit TRUE.
-(BOOL)initAudioSession
{
    OSStatus error = AudioSessionInitialize(NULL, NULL, interruptionListener, (__bridge void *)(self));

    if (error) {
        [ASRDebugLog showLog:@"%s|AudioSessionInitialize error:%ld",__func__,(long)error];
        //        return NO;
    }

    // Simultaneous record + playback.
    UInt32 category = kAudioSessionCategory_PlayAndRecord;
    UInt32 size;
    error = AudioSessionGetPropertySize(kAudioSessionProperty_AudioCategory, &size);
    if (error) {
        [ASRDebugLog showLog:@"%s|AudioSessionGetPropertySize error:%ld",__func__,(long)error];
    }

    error = AudioSessionSetProperty(kAudioSessionProperty_AudioCategory, size, &category);
    if (error) {
        [ASRDebugLog showLog:@"%s|AudioSessionSetProperty error:%ld", __func__,(long)error];
        //        return NO;
    }

    // Route output to the speaker instead of the receiver.
    UInt32 audioRouteOverride = TRUE;   // boolean value, not the property ID
    error = AudioSessionGetPropertySize(kAudioSessionProperty_OverrideCategoryDefaultToSpeaker, &size);
    if (error) {
        [ASRDebugLog showLog:@"%s|AudioSessionGetPropertySize kAudioSessionProperty_OverrideCategoryDefaultToSpeaker error", __func__];
    }
    error = AudioSessionSetProperty(kAudioSessionProperty_OverrideCategoryDefaultToSpeaker,size,&audioRouteOverride);
    if (error) {
        [ASRDebugLog showLog:@"%s|AudioSessionSetProperty kAudioSessionProperty_OverrideCategoryDefaultToSpeaker error:%ld", __func__,(long)error];
    }

    // Accept input from paired Bluetooth hands-free devices.
    UInt32 allowBluetoothInput = TRUE;  // boolean value, not the property ID
    error = AudioSessionGetPropertySize(kAudioSessionProperty_OverrideCategoryEnableBluetoothInput, &size);
    if (error) {
        [ASRDebugLog showLog:@"%s|AudioSessionGetPropertySize kAudioSessionProperty_OverrideCategoryEnableBluetoothInput error:%ld", __func__,(long)error];

    }
    error =  AudioSessionSetProperty (kAudioSessionProperty_OverrideCategoryEnableBluetoothInput,
                                      size,
                                      &allowBluetoothInput
                                      );
    if (error) {
        [ASRDebugLog showLog:@"%s|AudioSessionSetProperty kAudioSessionProperty_OverrideCategoryEnableBluetoothInput error:%ld", __func__,(long)error];

    }

    error = AudioSessionSetActive(true);
    if (error) {
        [ASRDebugLog showLog:@"%s|AudioSessionSetActive error:%ld", __func__,(long)error];
        return NO;
    }
    return YES;
}


/// Computes a capture-buffer size (in bytes) large enough to hold
/// `seconds` of audio in the given format, capped at 0x50000 bytes.
/// For variable-bitrate formats (mBytesPerPacket == 0) the queue is asked
/// for its maximum output packet size.
void DeriveBufferSize (
                       AudioQueueRef                audioQueue,
                       AudioStreamBasicDescription  ASBDescription,
                       Float64                      seconds,
                       UInt32                       *outBufferSize
                       )
{
    enum { kMaxBufferSize = 0x50000 };

    int packetSize = ASBDescription.mBytesPerPacket;
    if (packetSize == 0) {
        // VBR: query the queue for the largest packet it can produce.
        UInt32 propertySize = sizeof(packetSize);
        AudioQueueGetProperty(audioQueue,
                              kAudioConverterPropertyMaximumOutputPacketSize,
                              &packetSize,
                              &propertySize);
    }

    Float64 requestedBytes = ASBDescription.mSampleRate * packetSize * seconds;
    *outBufferSize = (UInt32)(requestedBytes < kMaxBufferSize ? requestedBytes : kMaxBufferSize);
}


//
/// AudioSession interruption callback. Currently log-only: the recording
/// is NOT paused or resumed here.
/// NOTE(review): kAudioSessionBeginInterruption should probably stop the
/// queue and kAudioSessionEndInterruption restart it — confirm desired
/// behavior with the product owner.
static void interruptionListener(void * inClientData, UInt32 inInterruptionState)
{
    // Include the state so begin/end interruptions are distinguishable in logs.
    [ASRDebugLog showLog:@"%s|interruptionListener state:%u",__func__,(unsigned int)inInterruptionState];
}

/// Starts (or restarts) the repeating timer that samples the queue's level
/// meters every _refreshHz seconds and reports power via -_refresh.
/// NOTE(review): a repeating NSTimer retains its target, so this object
/// stays alive until -stop invalidates the timer — intentional? confirm.
- (void)getAudioPower
{
    if (_updateTimer && self.isTimerRuning) {
        [_updateTimer invalidate];
        _updateTimer = nil;     // an invalidated timer can never fire again
        self.isTimerRuning = NO;
    }

    _updateTimer = [NSTimer scheduledTimerWithTimeInterval:_refreshHz
                                                    target:self
                                                  selector:@selector(_refresh)
                                                  userInfo:nil
                                                   repeats:YES];
    [_updateTimer fire];        // deliver one reading immediately
    self.isTimerRuning = YES;
}

/// Timer callback: reads the current level-meter values from the audio
/// queue and forwards the average power of channel 0 to the delegate.
/// Fixes: previously state.audioLevels could be NULL when passed to
/// AudioQueueGetProperty (timer running before -start / after -stop), and
/// stale values were reported to the delegate even when the read failed.
- (void)_refresh
{
    // Meter storage only exists while recording; never hand the property
    // call a NULL destination buffer.
    if (state.audioLevels == NULL) {
        return;
    }

    UInt32 dataSize = sizeof(AudioQueueLevelMeterState) * mChannels;
    OSStatus status = AudioQueueGetProperty(state.queue,
                                            kAudioQueueProperty_CurrentLevelMeterDB,
                                            state.audioLevels,
                                            &dataSize);
    if (status != noErr) {
        return;     // don't report stale/uninitialized levels
    }
    if (self.delegate != nil) {
        [self.delegate onRecorderPower:(state.audioLevels->mAveragePower)];
    }
}
@end


@implementation ASRPcmRecorder (Extended)

/// Replaces the recorder's capture-format parameters. Call before -start;
/// the values are folded into the AudioStreamBasicDescription the next
/// time -setupAudioFormat: runs.
- (void)setAudioFormat:(Float64)newSampleRate theBitsPerChannel:(UInt32)newBits theChannelsPerFrame:(UInt32)newChannels
{
    mChannels = newChannels;
    mBits = newBits;
    mSampleRate = newSampleRate;
}

@end


