//
//  IFlyAudioRecorder.m
//  AIKITDemo
//
//  Created by xyzhang on 2023/3/1.
//

#import "IFlyAudioRecorder.h"
#import "IFlyDebugLog.h"
#import <UIKit/UIKit.h>

#define NUM_BUFFERS 10
#define RECORD_CYCLE   0.003    //录音音量回调时间间隔

// Recorder state machine.
typedef NS_ENUM(NSInteger,IFlyRECState) {
    RECEnd          = 0x00,             // recording fully finished (resources released)
    RECIng          = 0x01,             // actively recording
    RECPause        = 0x02,             // paused (audio session interruption)
    RECCancel       = 0x03,             // stop requested; waiting for the queue to stop
};

// Internal recording unit: all state shared between the Objective-C recorder
// object and the plain-C AudioQueue callbacks (which receive it as userData).
typedef struct {
    AudioFileID                 audioFile;      // NOTE(review): never used in this file — audio is dumped via stdio instead; confirm before removing
    AudioStreamBasicDescription dataFormat;     // PCM stream format, built by -setupAudioFormat:
    AudioQueueRef               queue;          // the input (recording) queue
    AudioQueueLevelMeterState  *audioLevels;    // per-channel level-meter buffer, calloc'd in -start
    AudioQueueBufferRef         buffers[NUM_BUFFERS]; // capture buffers enqueued on the queue
    UInt32                      bufferByteSize; // size of each buffer, from DeriveBufferSize()
    SInt64                      currentPacket;  // running packet counter
    IFlyRECState                RECState;       // current state-machine state
    IFlyAudioRecorder          *recorder;       // back-pointer so C callbacks can reach the object
    
} IFlyRecordState;

// Private class extension.
@interface IFlyAudioRecorder ()
{
    IFlyRecordState state;   // C-side state shared with the AudioQueue callbacks
}

// YES while the kAudioQueueProperty_IsRunning listener is registered.
@property (nonatomic, assign) BOOL isRegisterRunningCB;

@property (nonatomic, assign) Float64  mSampleRate;    // sample rate (Hz)
@property (nonatomic, assign) UInt32   mBits;            // bits per sample
@property (nonatomic, assign) UInt32   mChannels;        // channel count
@property (nonatomic, strong) NSTimer *mGetPowerTimer; // volume-polling timer
@property (nonatomic, copy) NSString *mSaveAudioPath; // path of the raw-PCM dump file
@property (nonatomic, assign) FILE*    mSaveFile;       // dump file handle

@property(nonatomic,assign)float    mPowerGetCycle;  // volume-polling interval (seconds)

@end

// Empty category — declares no methods. Presumably reserved for
// state-related additions; TODO(review): confirm it is needed before removing.
@interface IFlyAudioRecorder (State)

@end

@implementation IFlyAudioRecorder

// Explicit @synthesize gives the ivars the same names as the properties
// (no leading underscore). The C AudioQueue callbacks rely on this: they
// access e.g. recorder->mGetPowerTimer and recorder->mSaveFile directly.
@synthesize delegate = _delegate;
@synthesize mSampleRate;
@synthesize mBits;
@synthesize mChannels;
@synthesize mGetPowerTimer;
@synthesize mSaveAudioPath;
@synthesize mSaveFile;
@synthesize mPowerGetCycle;

#pragma mark - system

// Process-wide shared recorder, created exactly once.
+ (IFlyAudioRecorder *)sharedInstance {
    static IFlyAudioRecorder *sharedRecorder = nil;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        sharedRecorder = [[IFlyAudioRecorder alloc] init];
    });
    return sharedRecorder;
}

// Designated initializer: 16 kHz / 16-bit / mono defaults, idle state.
- (instancetype) init{
    self = [super init];
    if (self) {
        // Default PCM capture format.
        mSampleRate = 16000.0;
        mBits = 16;
        mChannels = 1;

        // Initialize the C-side state shared with the AudioQueue callbacks.
        state.RECState = RECEnd;
        state.recorder = self;
        state.currentPacket = 0;
        [self setupAudioFormat:&state.dataFormat];

        // No dump file configured yet.
        mSaveAudioPath = nil;
        mSaveFile = NULL;
        mPowerGetCycle = RECORD_CYCLE;   // default volume-callback interval

        _isRegisterRunningCB = NO;
    }
    return self;
}

// Non-memory cleanup only (ARC releases the object graph).
- (void) dealloc{
    // FIX: unregister from NSNotificationCenter — -start adds an observer
    // for AVAudioSessionInterruptionNotification that was never removed here.
    [[NSNotificationCenter defaultCenter] removeObserver:self];

    self.delegate = nil;
    [self setGetPowerTimerInvalidate];

    // FIX: close the dump file if it is still open (FILE* leak when the
    // recorder is deallocated without -stop having run).
    if (mSaveFile) {
        fclose(mSaveFile);
        mSaveFile = NULL;
    }

    // FIX: free the level-meter buffer allocated in -start (same leak path).
    if (state.audioLevels) {
        free(state.audioLevels);
        state.audioLevels = NULL;
    }

    if(mSaveAudioPath)
    {
        mSaveAudioPath = nil;
    }

}

#pragma mark - system call back

// kAudioQueueProperty_IsRunning listener. AudioQueueStop completes
// asynchronously; this callback detects the moment the queue actually stops
// and drives the state machine to RECEnd.
void AQRecordRecordListenBack(void *inUserData, AudioQueueRef inAQ, AudioQueuePropertyID inID) {
    IFlyRecordState *state = inUserData;
    UInt32 running;
    UInt32 size;
    OSStatus err ;
    
    [IFlyDebugLog showLog:@"%s[IN],state=%d", __func__, state->RECState];
    
    // Already fully ended — nothing to do.
    if (state->RECState == RECEnd){
        return;
    }

    AudioQueueGetPropertySize(inAQ, kAudioQueueProperty_IsRunning, &size);
    err = AudioQueueGetProperty(inAQ, kAudioQueueProperty_IsRunning, &running, &size);
    if (err){
        [IFlyDebugLog showLog: @"get kAudioQueueProperty_IsRunning error:%d", err];
        return;
    }
    if (!running){
        [IFlyDebugLog showLog:@"stop recording success"];
        
        // Finish on a global queue: setRecorderState:RECEnd ends up calling
        // AudioQueueDispose, which must not run inside this queue callback.
        dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^{
            [state->recorder setRecorderState:RECEnd];
        });
    }
    
    [IFlyDebugLog showLog:@"%s[OUT]",__func__];
}



// AudioQueue input callback: invoked on the queue's internal capture thread
// every time a buffer of recorded audio is ready. Dumps the PCM to file,
// forwards it to the delegate, and re-enqueues the buffer.
void HandleInputBuffer (void *aqData,AudioQueueRef inAQ,AudioQueueBufferRef inBuffer,const AudioTimeStamp *inStartTime,UInt32 inNumPackets,const AudioStreamPacketDescription   *inPacketDesc){
    IFlyRecordState *pAqData = (IFlyRecordState *) aqData;
    IFlyAudioRecorder *recorder = pAqData->recorder;
    
//    [IFlyDebugLog showLog:@"%s[IN], state=%d", __func__, pAqData->RECState];
    
    // Drop buffers once the recorder has ended or been cancelled.
    if(pAqData->RECState == RECEnd || pAqData->RECState == RECCancel){
        return;
    }
    
    // Start the volume-polling timer lazily on the FIRST audio buffer, so the
    // volume callback can never fire before capture has actually begun.
    if(!recorder->mGetPowerTimer && recorder->_delegate){
        recorder->mGetPowerTimer = [NSTimer timerWithTimeInterval:recorder->mPowerGetCycle target:recorder selector:@selector(getPower) userInfo:nil repeats:YES];//RECORD_CYCLE
        
        [[NSRunLoop mainRunLoop] addTimer:recorder->mGetPowerTimer forMode:NSRunLoopCommonModes];
        
        // NOTE(review): -fire runs getPower synchronously once on this audio
        // thread; subsequent ticks run on the main run loop.
        [recorder->mGetPowerTimer fire];
    }
    
    // For CBR formats the packet count may be reported as 0 — derive it.
    if (inNumPackets == 0 && pAqData->dataFormat.mBytesPerPacket != 0){
        inNumPackets = inBuffer->mAudioDataByteSize / pAqData->dataFormat.mBytesPerPacket;
    }
    
    if (recorder.delegate && pAqData->RECState != RECEnd && pAqData->RECState != RECCancel){
        // Append the raw PCM to the dump file, if one is open.
        if(recorder->mSaveFile != NULL){
            fseek(recorder->mSaveFile, 0, SEEK_END);
            fwrite(inBuffer->mAudioData, inBuffer->mAudioDataByteSize, 1, recorder->mSaveFile);
        }
        
        [recorder.delegate onIFlyRecorderBuffer:inBuffer->mAudioData bufferSize:inBuffer->mAudioDataByteSize];
    }
    
    pAqData->currentPacket += inNumPackets;
    
    // Hand the buffer back to the queue only while actively recording.
    if (pAqData->RECState == RECIng){
        AudioQueueEnqueueBuffer (pAqData->queue,inBuffer,0,NULL);
    }
}

// Computes an AudioQueue buffer size large enough to hold `seconds` of audio
// in the given format, capped at 0x50000 bytes (Apple's canonical helper).
void DeriveBufferSize (AudioQueueRef audioQueue,AudioStreamBasicDescription ASBDescription,Float64 seconds,UInt32 *outBufferSize){
    static const int maxBufferSize = 0x50000;   // hard upper bound (320 KiB)

    int packetSize = ASBDescription.mBytesPerPacket;
    if (packetSize == 0) {
        // VBR format: ask the queue for its maximum output packet size.
        UInt32 propertySize = sizeof(packetSize);
        AudioQueueGetProperty(audioQueue,
                              kAudioConverterPropertyMaximumOutputPacketSize,
                              &packetSize,
                              &propertySize);
    }

    Float64 requestedBytes = ASBDescription.mSampleRate * packetSize * seconds;
    *outBufferSize = (UInt32)(requestedBytes < maxBufferSize ? requestedBytes : maxBufferSize);
}


#pragma mark - funcs

/**
 *  Start recording.
 *  IFlyAudioSession +(BOOL)initRecordingAudioSession may be called beforehand
 *  to initialize the audio session.
 *
 *  @return YES if recording was started successfully, otherwise NO.
 */
- (BOOL)start {
    
    @synchronized(self) {
        
        [IFlyDebugLog showLog:@"%s,[IN]",__func__];
        
        // Refuse to start unless the previous recording has fully ended.
        if(state.RECState != RECEnd){
            [IFlyDebugLog showLog:@"%s,[OUT],_state=%d", __func__, state.RECState];
            return NO;
        }
        
        // Dispose of any audio queue left over from a previous session.
        if(state.queue != NULL){
            // The IsRunning listener must be removed before disposing.
            if(_isRegisterRunningCB){
                AudioQueueRemovePropertyListener(state.queue, kAudioQueueProperty_IsRunning, AQRecordRecordListenBack, &state);
                _isRegisterRunningCB = NO;
            }
            
            OSStatus disposeErr = AudioQueueDispose(state.queue, true);
            if (disposeErr){
                [IFlyDebugLog showLog:@"%s|AudioQueueDispose error:%d", __func__, disposeErr];
            }
            [IFlyDebugLog showLog:@"%s|AudioQueueDispose", __func__];
            
            state.queue = NULL;
        }
        
        OSStatus error = 0;
        NSError *avError;
        
        if(![self isMicrophonePermissionGranted]){
            [IFlyDebugLog showLog:@"%s System Recorder no permission",__func__];
            return NO;
        }
        
        BOOL success = [[AVAudioSession sharedInstance] setActive:YES error:&avError];
        if (!success){
            // FIX: format specifier was "@%" (printed garbage); must be "%@".
            [IFlyDebugLog showLog:@"%s| avSession setActive YES error:%@",__func__,avError];
        }
        
        // Create the input queue; captured buffers go to HandleInputBuffer.
        error= AudioQueueNewInput(&state.dataFormat,HandleInputBuffer,&state,NULL,NULL,0,&state.queue);
        if (error){
            [IFlyDebugLog showLog:@"%s|AudioQueueNewInput error:%d",__func__,error];
            // Stop the volume-polling timer.
            [self setGetPowerTimerInvalidate];
            return NO;
        }
        
        // Size each capture buffer for ~0.15 s of audio.
        DeriveBufferSize(state.queue, state.dataFormat, 0.15, &state.bufferByteSize);
        
        for(int i = 0; i < NUM_BUFFERS; i++){
            error = AudioQueueAllocateBuffer(state.queue,state.bufferByteSize,&state.buffers[i]);
            if (error){
                [IFlyDebugLog showLog:@"%s|AudioQueueAllocateBuffer error:%d",__func__,error];
                [self setGetPowerTimerInvalidate];
                return NO;
            }
            
            error = AudioQueueEnqueueBuffer(state.queue, state.buffers[i], 0, NULL);
            if (error){
                [IFlyDebugLog showLog:@"%s|AudioQueueEnqueueBuffer error:%d",__func__,error];
                [self setGetPowerTimerInvalidate];
                return NO;
            }
        }
        
        // Listen for the queue's running state so the asynchronous completion
        // of AudioQueueStop can be detected (see AQRecordRecordListenBack).
        error = AudioQueueAddPropertyListener(state.queue, kAudioQueueProperty_IsRunning, AQRecordRecordListenBack, &state);
        if (error){
            [IFlyDebugLog showLog:@"%s| AudioQueueAddPropertyListener error:%d",__func__,error];
            [self setGetPowerTimerInvalidate];
            return NO;
        }

        _isRegisterRunningCB = YES;
        
        error = AudioQueueStart(state.queue, NULL);
        if (error != 0) {
            [IFlyDebugLog showLog:@"%s|AudioQueueStart error:%d",__func__,error];
            OSStatus err = AudioQueueFlush(state.queue);
            // FIX: previously tested `error` (always non-zero here) instead of `err`.
            if (err){
                [IFlyDebugLog showLog:@"%s|AudioQueueFlush error:%d", __func__, err];
            }
            [IFlyDebugLog showLog:@"%s|AudioQueueFlush", __func__];
//            AudioQueueStop(state.queue, true);
            [self setGetPowerTimerInvalidate];
            return NO;
        }

        [self setRecorderState:RECIng];
        
        // Allocate per-channel level-meter storage and enable metering.
        // FIX: calloc takes (count, size); the arguments were swapped
        // (harmless — same product — but corrected for clarity).
        state.audioLevels = (AudioQueueLevelMeterState *) calloc (mChannels, sizeof (AudioQueueLevelMeterState));
        UInt32 trueValue = true;
        AudioQueueSetProperty (state.queue,kAudioQueueProperty_EnableLevelMetering,&trueValue,sizeof (UInt32));
        state.currentPacket = 0;
        
        // Observe audio session interruptions (incoming calls, etc.).
        [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(interruption:) name:AVAudioSessionInterruptionNotification object:nil];

        // Open the raw-PCM dump file, replacing any previous recording.
        // FIX: guard against a nil/empty save path — fopen(NULL, ...) is
        // undefined behavior. remove() is a no-op if the file is absent,
        // replacing the old open-to-probe/close/remove dance.
        if(mSaveFile == NULL && mSaveAudioPath.length > 0){
            remove([mSaveAudioPath UTF8String]);
            mSaveFile = fopen([mSaveAudioPath UTF8String], "wb+");
        }

        [IFlyDebugLog showLog:@"%s,[OUT],ret =%d",__func__,error];
        
        return YES;

    }
}

// Stop recording, close the dump file and release per-session buffers.
// The queue itself is disposed later, in setRecorderState:RECEnd, once the
// IsRunning listener reports that AudioQueueStop has actually completed.
- (void)stop {
    
    @synchronized(self) {
        
        [IFlyDebugLog showLog:@"%s[IN],state=%d",__func__,state.RECState];
        
        // Nothing to stop if no queue was ever created.
        if(!state.queue){
            return;
        }
        
        if (state.RECState == RECEnd || state.RECState == RECCancel){
            return;
        }
        
        [self setRecorderState:RECCancel];
        
        // Stop the volume-polling timer.
        [self setGetPowerTimerInvalidate];
        
        // Close the raw-PCM dump file.
        if (self.mSaveFile) {
            fclose(self.mSaveFile);
            self.mSaveFile = NULL;
        }
        
        // Release the level-metering buffer allocated in -start.
        if (state.audioLevels) {
            free(state.audioLevels);
            state.audioLevels = NULL;
        }
        
        self.delegate = nil;
        
        OSStatus error = AudioQueueFlush(state.queue);
        if (error){
            [IFlyDebugLog showLog:@"%s|AudioQueueFlush error:%d", __func__, error];
        }
        [IFlyDebugLog showLog:@"%s|AudioQueueFlush", __func__];
        
        OSStatus err = AudioQueueStop(state.queue, true);
        // FIX: previously tested `error` (the flush status) instead of `err`,
        // so a failed AudioQueueStop was never reported.
        if (err){
            [IFlyDebugLog showLog:@"%s|AudioQueueStop error:%d", __func__, err];
        }
        [IFlyDebugLog showLog:@"%s|AudioQueueStop", __func__];
        
        [IFlyDebugLog showLog:@"%s[OUT]",__func__];
    }
}


/*
 * Set the sample rate (e.g. @"16000") and rebuild the stream format.
 */
- (void)setSample:(NSString *)rate {
    [IFlyDebugLog showLog:@"%s,rate=%@",__func__,rate];
    // FIX: mSampleRate is Float64 — use doubleValue instead of floatValue
    // to avoid a needless precision loss through a float intermediate.
    mSampleRate = [rate doubleValue];
    [self setupAudioFormat:&state.dataFormat];
}

/*
 * Set the interval (in seconds) between volume callbacks.
 */
- (void)setPowerCycle:(float)cycle {
    [IFlyDebugLog showLog:@"%s",__func__];
    // Takes effect the next time the polling timer is (re)created.
    mPowerGetCycle = cycle;
}


/*
 * Set the path of the raw-PCM dump file. A nil or empty path clears it.
 */
- (void)setSaveAudioPath:(NSString *)savePath {
    // Drop any previously configured path, then keep an immutable copy.
    mSaveAudioPath = nil;
    if (savePath.length > 0) {
        mSaveAudioPath = [savePath copy];
    }
}

// YES once the recorder has fully returned to the idle (RECEnd) state.
- (BOOL)isCompleted {
    return state.RECState == RECEnd;
}

#pragma mark - private

// Tear down the audio queue: unregister the IsRunning listener, dispose the
// queue, and stop observing audio session interruption notifications.
- (void)freeRecorderRes {
    
    if(state.queue != NULL){
        // Remove the listener BEFORE disposing the queue.
        if(_isRegisterRunningCB){
            AudioQueueRemovePropertyListener(state.queue, kAudioQueueProperty_IsRunning, AQRecordRecordListenBack, &state);
            _isRegisterRunningCB = NO;
        }
        
        OSStatus status = AudioQueueDispose(state.queue, true);
        if (status){
            [IFlyDebugLog showLog:@"%s|AudioQueueDispose error", __func__];
        }
        [IFlyDebugLog showLog:@"%s|AudioQueueDispose", __func__];
        
        state.queue = NULL;
    }
    
    // Balance the addObserver: done in -start.
    [[NSNotificationCenter defaultCenter] removeObserver:state.recorder  name:AVAudioSessionInterruptionNotification object:nil];
}

/*
 * Transition the recorder state machine to `recState`.
 * No-op when the state is unchanged; only the transition to RECEnd has
 * side effects (release the queue, deactivate the audio session).
 */
- (void)setRecorderState:(IFlyRECState)recState {
    @synchronized(self) {
        
        if(state.RECState == recState){
            return;     // already in the requested state
        }
        state.RECState = recState;
        
        // RECIng / RECPause / RECCancel carry no extra work here.
        if (recState == RECEnd) {
            [IFlyDebugLog showLog:@"%s,state=%d",__func__,RECEnd];
            
            [self freeRecorderRes];
            
            [[AVAudioSession sharedInstance] setActive:NO withOptions:AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation error:NULL];
        }
    }
}


// Stop and release the volume-polling timer, if any.
// (Messaging nil is a no-op, so no explicit guard is needed.)
- (void)setGetPowerTimerInvalidate {
    [mGetPowerTimer invalidate];
    mGetPowerTimer = nil;
}

/// Checks — and, if undetermined, synchronously requests — microphone
/// permission. Blocks the calling thread until the permission callback fires.
/// NOTE(review): if this runs on the main thread while permission is still
/// undetermined, the semaphore wait may freeze the thread the system
/// permission alert needs — confirm callers invoke this off-main or only
/// after permission has been decided.
- (BOOL)isMicrophonePermissionGranted {
    // respondsToSelector + performSelector keeps this compiling/running on
    // SDKs/OS versions where requestRecordPermission: is unavailable.
    if ([[AVAudioSession sharedInstance] respondsToSelector:@selector(requestRecordPermission:)]) {
        dispatch_semaphore_t sema = dispatch_semaphore_create(0);
        __block BOOL isGranted=YES;
        [[AVAudioSession sharedInstance] performSelector:@selector(requestRecordPermission:) withObject:^(BOOL granted) {
            isGranted=granted;
            dispatch_semaphore_signal(sema);
        }];
        dispatch_semaphore_wait(sema, DISPATCH_TIME_FOREVER);
        return isGranted;
    }
    else{
        // API not available on this OS version: assume permission is granted.
        return YES;
    }
}

// Fills `format` with a packed, signed-integer linear-PCM description built
// from the configured mSampleRate / mBits / mChannels.
- (void)setupAudioFormat:(AudioStreamBasicDescription*)format{
    format->mSampleRate = mSampleRate;
    format->mFormatID = kAudioFormatLinearPCM;
    format->mFormatFlags = kLinearPCMFormatFlagIsSignedInteger| kLinearPCMFormatFlagIsPacked;
    
    format->mChannelsPerFrame = mChannels;
    format->mBitsPerChannel = mBits;
    format->mFramesPerPacket = 1;
    
    // FIX: derive packet/frame sizes from the bit depth and channel count
    // instead of hard-coding 2, which was only correct for 16-bit mono.
    // (For packed LPCM: bytesPerFrame = bits/8 * channels; with one frame
    // per packet, bytesPerPacket equals bytesPerFrame. Values are unchanged
    // for the current 16-bit/1-channel configuration.)
    UInt32 bytesPerFrame = (mBits / 8) * mChannels;
    format->mBytesPerPacket = bytesPerFrame;
    
    format->mBytesPerFrame = bytesPerFrame;
    format->mReserved = 0;
}

/*
 * Timer callback: read the current peak level and report a 0-30 volume
 * value (or -1 on error) to the delegate.
 */
- (void)getPower {
    
    @synchronized(self) {
        
        if(state.RECState != RECIng){
            return;
        }
        
        // FIX: state.audioLevels can be NULL when an interruption (e.g. an
        // incoming call) arrives right after recording starts; previously the
        // NULL pointer was passed straight into AudioQueueGetProperty before
        // being checked. Bail out early instead.
        if(state.audioLevels == NULL){
            return;
        }
        
        UInt32 propertySize = mChannels * sizeof (AudioQueueLevelMeterState);
        OSStatus error=AudioQueueGetProperty (state.queue,(AudioQueuePropertyID) kAudioQueueProperty_CurrentLevelMeter,state.audioLevels,&propertySize);
        if(error){
            [IFlyDebugLog showLog:@"%s|getPower error", __func__];
            if (_delegate && [_delegate respondsToSelector:@selector(onIFlyRecorderVolumeChanged:)]){
                // -1 signals "volume read failed"; callers can treat it specially.
                int volume = -1;
                [_delegate onIFlyRecorderVolumeChanged:volume];
            }
            return;
        }
        
        if (_delegate && [_delegate respondsToSelector:@selector(onIFlyRecorderVolumeChanged:)]){
            // Scale peak power to 0-30 and clamp. Human speech rarely spans
            // the meter's full range, so a narrower scale keeps the displayed
            // waveform visible.
            int volume = state.audioLevels[0].mPeakPower * 30;
            if(volume > 30){
                volume = 30;
            }

            [_delegate onIFlyRecorderVolumeChanged:volume];
        }
    }
}

// Audio session interruption began (e.g. incoming call): pause the queue
// and stop volume polling until the interruption ends.
-(void)beginInterruption{
    
    NSLog(@"%s", __func__);
    
    @synchronized(self) {
        
        [IFlyDebugLog showLog:@"%s,_state=%d",__func__,state.RECState];
        
        if(state.RECState == RECEnd || state.RECState == RECCancel){
            return;
        }
        
        if(state.RECState != RECPause){
            
            // Stop the volume-polling timer while paused.
            [self setGetPowerTimerInvalidate];
            
            OSStatus error = AudioQueuePause(state.queue);
            if (error){
                // FIX: corrected log-message typo ("puase" -> "pause").
                [IFlyDebugLog showLog:@"pause Recorder error:%d",error];
            }
            [self setRecorderState:RECPause];
        }
    }
}

// Audio session interruption ended: resume the paused queue and restart
// the volume-polling timer.
-(void)endInterruption{
    
    NSLog(@"%s", __func__);
    
    @synchronized(self) {
        
        [IFlyDebugLog showLog:@"%s,_state=%d",__func__,state.RECState];
        
        if(state.RECState == RECEnd || state.RECState == RECCancel){
            return;
        }
        
        if(state.RECState == RECPause){
            
            [self setRecorderState:RECIng];
            
            OSStatus error = AudioQueueStart(state.queue, NULL);
            if (error){
                [IFlyDebugLog showLog:@"resume Recorder error:%d",error];
                
                OSStatus err = AudioQueueFlush(state.queue);
                // FIX: previously tested `error` (always non-zero here)
                // instead of `err`, so flush failures were mis-reported.
                if (err){
                    [IFlyDebugLog showLog:@"%s|AudioQueueFlush error:%d", __func__, err];
                }
                [IFlyDebugLog showLog:@"%s|AudioQueueFlush", __func__];
                
                [self setRecorderState:RECEnd];
            }else{
                // FIX: schedule on the MAIN run loop, matching the timer setup
                // in HandleInputBuffer. The interruption notification may be
                // delivered on a thread whose run loop is not running (the
                // timer would then never fire), and the old scheduledTimer +
                // addTimer combination registered the timer twice (default
                // mode, then common modes).
                mGetPowerTimer = [NSTimer timerWithTimeInterval:mPowerGetCycle target:self selector:@selector(getPower) userInfo:nil repeats:YES];
                [[NSRunLoop mainRunLoop] addTimer:mGetPowerTimer forMode:NSRunLoopCommonModes];
                [mGetPowerTimer fire];
            }
        }
    }
}

// Audio session interruption notification handler: routes Began/Ended
// events to -beginInterruption / -endInterruption.
-(void) interruption:(NSNotification*) aNotification{
    
    NSLog(@"%s", __func__);
    
    NSNumber *typeValue = aNotification.userInfo[AVAudioSessionInterruptionTypeKey];
    switch ((AVAudioSessionInterruptionType)[typeValue unsignedIntegerValue]) {
        case AVAudioSessionInterruptionTypeBegan:
            [self beginInterruption];
            break;
        case AVAudioSessionInterruptionTypeEnded:
            [self endInterruption];
            break;
        default:
            break;
    }
}

@end
