
#import "RNRecorder.h"

#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>

#import <AudioUnit/AudioUnit.h>
#import <AVFoundation/AVFoundation.h>
#import <assert.h>

#import "AudioManagerAdapter.h"
#import "EZAudioManager.h"
#import "VoiceConverter.h"

#import "UtilForFileCache.h"
#import "AudioController.h"
#import "BufferManager.h"

#import "AudioPlayer.h"

#ifndef CLAMP
#define CLAMP(min,x,max) (x < min ? min : (x > max ? max : x))
#endif

#define IOS_VERSION_8_OR_ABOVE (([[[UIDevice currentDevice] systemVersion] floatValue] >= 8.0)? (YES):(NO))
#define kPresentedController  [UIApplication sharedApplication].keyWindow.rootViewController.presentedViewController?:[UIApplication sharedApplication].keyWindow.rootViewController

#define kMaxRecorderDuration 6
#define kAudioManagerID @"RNRecorder"

NSString *const RecordingProgress = @"recordingProgress";
NSString *const PlayingProgress = @"playingProgress";
NSString *const AmplitudeProgress = @"amplitudeProgress";
NSString *const SpectrumProgress = @"spectrumProgress";

typedef void (^StartRecordingBlock)(id result);

/// Small value object pairing a React Native promise (resolve/reject block pair)
/// with an integer tag, so pending JS calls can be cached and settled later.
@interface RNPromiseResponseForRecorder : NSObject

/// Designated initializer; `type` is a caller-defined tag (see currentType).
- (instancetype)initWithType:(int)type;

/// Caller-defined tag set at init time (0 when created via plain -init).
@property (nonatomic, assign, readonly) int currentType;

/// Promise callbacks captured from the JS bridge; `copy` for block semantics.
@property (nonatomic, copy) RCTPromiseResolveBlock successBlock;
@property (nonatomic, copy) RCTPromiseRejectBlock  errorBlock;

@end

@implementation RNPromiseResponseForRecorder

// Back the readonly `currentType` property with the `_type` ivar.
@synthesize currentType = _type;

/// Designated initializer; stores the caller-supplied tag.
- (instancetype)initWithType:(int)type {
    if ((self = [super init])) {
        _type = type;
    }
    return self;
}

@end

/// Private state for RNRecorder, which bridges recording (EZAudioManager) and
/// playback (ezAudioManager / AudioPlayer for FFT) to JS via promises + events.
@interface RNRecorder () <AudioRecordDelegate, AudioPlayDelegate>
{
    double _recorderVoiceMeter;    // last reported recording level (averagePower)
    NSString *_voicePath;          // target .amr path for the current recording
    NSTimeInterval _playLength;    // playback progress accumulator (see timerHandler:)
    NSTimer *playTimer;            // 1s repeating playback-progress timer

    Float32 *l_fftData;            // scratch buffer for FFT magnitudes (lazily calloc'd)

    AudioController * audioController;

    // AudioFile Service
    AudioFileID audioFileID;
    AudioStreamBasicDescription audioFileFormat;
    AudioStreamPacketDescription *audioPacketFormat;

    SInt64 readedPacket; // number of packets already read
    UInt64 packetNums; // total number of packets
    UInt64 packetNumsInBuffer; // maximum number of packets held in the buffer

    AudioUnit audioUnit;
    AudioBufferList *buffList;
    Byte *convertBuffer;

    AudioConverterRef audioConverter;
}

// Output-disabled player used only for spectrum (FFT) analysis.
@property (nonatomic, strong) AudioPlayer *audioUnitPlayer;

@property (nonatomic) NSTimeInterval maxRecorderDuration;
@property (nonatomic, strong) AudioManager *audioManager;
@property (nonatomic, strong) EZAudioManager *ezAudioManager;

@property (nonatomic, strong) NSMutableDictionary<NSNumber *,RNPromiseResponseForRecorder *> *responseCache;

// Mutable payload repeatedly sent to JS as "recordingProgress" events.
@property (nonatomic, strong) NSMutableDictionary *audioRecordMessage;

// Pending promises for stopRecording / startRecording, settled from delegate callbacks.
@property (nonatomic, strong) RNPromiseResponseForRecorder *endAudioRecordResponse;
@property (nonatomic, strong) RNPromiseResponseForRecorder *beginAudioRecordResponse;
@property (nonatomic, copy) StartRecordingBlock startRecordingBlock; // Recording and playback apparently cannot run simultaneously — presumably one instance switching modes; record mode silences other apps' audio.

@end

@implementation RNRecorder

@synthesize maxRecorderDuration = _maxRecorderDuration;
@synthesize audioManager = _audioManager;

RCT_EXPORT_MODULE()

// Run all exported methods on the main queue: the module touches UIKit
// (alerts, proximity monitoring) and schedules NSTimers on the main run loop.
- (dispatch_queue_t)methodQueue
{
    return dispatch_get_main_queue();
}

/// Plain initializer; all collaborators are created lazily by their accessors.
- (instancetype)init
{
    if ((self = [super init])) {
        // Nothing to set up eagerly.
    }
    return self;
}

#if __has_include(<React/RCTEventEmitter.h>)
/**
 * These methods will be called when the first observer is added and when the
 * last observer is removed (or when dealloc is called), respectively. These
 * should be overridden in your subclass in order to start/stop sending events.
 */
- (void)startObserving
{
    // No push-style source to start; events are emitted on demand.
}

- (void)stopObserving
{
    // Nothing to tear down.
}

/// Event names this module may emit to JS (see postMessgeNotif:messageType:).
- (NSArray<NSString *> *)supportedEvents{
    return @[
             RecordingProgress,
             PlayingProgress,
             AmplitudeProgress,
             SpectrumProgress,
             ];
}
#endif

/// Forwards `data` to JS under event name `type` — via RCTEventEmitter when the
/// header is available, otherwise via the legacy event dispatcher. Silently a
/// no-op once the bridge has been torn down.
- (void)postMessgeNotif:(NSDictionary *)data messageType:(NSString *)type{

#if __has_include(<React/RCTEventEmitter.h>)
    if (self.bridge) {
        [self sendEventWithName:type body:data];
    }
#else
    if (self.bridge) {
        [self.bridge.eventDispatcher sendDeviceEventWithName:type
                                                        body:data];
    }
#endif

}

#pragma mark -- properites

/// Lazily resolves the shared AudioManager registered under kAudioManagerID.
- (AudioManager *)audioManager {
    if (_audioManager == nil) {
        _audioManager = [[AudioManagerAdapter shareInstance] audioManagerWithIdentifier:kAudioManagerID];
    }
    return _audioManager;
}

/// Lazily resolves the EZAudioManager variant for the same identifier.
- (EZAudioManager *)ezAudioManager {
    if (_ezAudioManager == nil) {
        Class managerClass = NSClassFromString(@"EZAudioManager");
        _ezAudioManager = (EZAudioManager *)[[AudioManagerAdapter shareInstance] audioManagerWithIdentifier:kAudioManagerID withClass:managerClass];
    }
    return _ezAudioManager;
}

/// Lazily-created cache of pending promise pairs, keyed by request type.
- (NSMutableDictionary<NSNumber *,RNPromiseResponseForRecorder *> *)responseCache{
    if (_responseCache == nil) {
        _responseCache = [NSMutableDictionary dictionary];
    }
    return _responseCache;
}

/// Never report less than the kMaxRecorderDuration floor, even if unset.
- (NSTimeInterval)maxRecorderDuration {
    return (_maxRecorderDuration > kMaxRecorderDuration) ? _maxRecorderDuration : kMaxRecorderDuration;
}

/// Clamp the requested duration up to the kMaxRecorderDuration floor.
- (void)setMaxRecorderDuration:(NSTimeInterval)second {
    _maxRecorderDuration = (second < kMaxRecorderDuration) ? kMaxRecorderDuration : second;
}

/// Lazy AudioPlayer used only for FFT/spectrum analysis (audio output disabled).
- (AudioPlayer *)audioUnitPlayer {
    if (!_audioUnitPlayer) {
        _audioUnitPlayer = [[AudioPlayer alloc] init];
        _audioUnitPlayer.disableOutput = YES;
        _audioUnitPlayer.delegate = self;
        // Scratch buffer sized to the player's FFT output length.
        // NOTE(review): calloc'd once and never freed — no dealloc is visible in
        // this file; confirm this module lives for the app's lifetime.
        l_fftData = (Float32*) calloc([_audioUnitPlayer getBufferManagerInstance]->GetFFTOutputBufferLength(), sizeof(Float32));
    }
    return _audioUnitPlayer;
}

#pragma mark -- export method to js
// Start recording. `options` may be a dict ({maxRecordDuration, ...}) or a bare
// number (max duration in seconds). Resolves @YES once recording starts or
// resumes; rejects if a previous recording session is still active.
RCT_EXPORT_METHOD(startRecording:(id)options resolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject){
    NSDictionary *params = nil;
    if ([options isKindOfClass:[NSDictionary class]]) {
        params = [options copy];
    }
    if ([options isKindOfClass:[NSNumber class]]) {
        params = @{@"maxRecordDuration": (NSNumber *)options};
    }

    // nil/missing value converts to 0 and is clamped up to the 6s floor by the setter.
    self.maxRecorderDuration = [RCTConvert double:params[@"maxRecordDuration"]];
    [self.ezAudioManager stopPlaying];
    _recorderVoiceMeter = 0;

    // Paused mid-recording: just resume and resolve immediately.
    if (self.ezAudioManager.isRecording && self.ezAudioManager.isRecordingPaused) {
        [self.ezAudioManager resumeRecording];
        if (resolve) {
            resolve(@YES);
        }
        return;
    }

    if (!self.ezAudioManager.isRecording){
        // Delegates are more of a headache than blocks here...
        __weak typeof (self) weakSelf = self;
        self.startRecordingBlock = ^(id result) { // park the start-recording task
            [weakSelf.ezAudioManager startRecordingWithDelegate:weakSelf];
        };
        /*
        // Play the "recording started" sound effect
        NSString *path = [[[NSBundle ChatBundle] resourcePath] stringByAppendingPathComponent:@"record_start.mp3"];
        if(path){
            [self.ezAudioManager startPlayingWithPath:path delegate:self userinfo:nil continuePlaying:NO];
        }
         */
        if (self.ezAudioManager.isPlaying) {
            [self.ezAudioManager stopPlaying];
        }

        // Execute the parked task now (historically deferred until any start
        // sound effect finished), then clear it.
        if (self.startRecordingBlock) {
            self.startRecordingBlock(@(YES));
            self.startRecordingBlock = nil;
        }

        // Promise is settled later from audioRecordDidStartRecordingWithError:.
        self.audioRecordMessage = params ? [params mutableCopy] : [NSMutableDictionary new];
        RNPromiseResponseForRecorder *pRes = [[RNPromiseResponseForRecorder alloc] init];
        pRes.successBlock = resolve;
        pRes.errorBlock = reject;
        self.beginAudioRecordResponse = pRes;
    } else {
        // Stop the previous recording first; reaching here usually means the
        // last session was not cleaned up properly.
        [self.ezAudioManager stopRecording];
        if (reject) {
            NSError *error = [NSError errorWithDomain:@"startRecording" code:1 userInfo:@{
                                                                                          NSLocalizedDescriptionKey: @"is recording" }];
            reject(@"1", @"is recording", error);
        }
    }
}

// Stop recording. The promise is settled later from
// audioRecordDidFinishSuccessed:duration: with the final recording payload.
RCT_REMAP_METHOD(stopRecording, stopRecordingResolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject){
    // FIX: park the promise BEFORE triggering the stop. If the delegate's
    // finish callback fires synchronously from stopRecording, it reads
    // endAudioRecordResponse — storing it afterwards (as before) would leave
    // the JS promise unsettled forever in that case.
    RNPromiseResponseForRecorder *pRes = [[RNPromiseResponseForRecorder alloc] init];
    pRes.successBlock = resolve;
    pRes.errorBlock = reject;
    self.endAudioRecordResponse = pRes;

    [self.ezAudioManager stopRecording];
    /*
    // Play the "recording stopped" sound effect
    NSString *path = [[[NSBundle ChatBundle] resourcePath] stringByAppendingPathComponent:@"record_end.mp3"];
    if(path){
        [[AudioManagerAdapter shareInstance].defaultImAudioManager startPlayingWithPath:path delegate:nil userinfo:nil continuePlaying:NO];
    }
     */
}

// Pause the in-progress recording; resolves immediately.
RCT_REMAP_METHOD(pauseRecording, pauseRecordingResolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject){
    [self.ezAudioManager pauseRecording];
    if (resolve != nil) {
        resolve(@YES);
    }
}

// Two-argument convenience form: wraps the arguments and delegates to startPlay:.
RCT_EXPORT_METHOD(startPlaying:(NSString *)url position:(NSUInteger)position){
    NSDictionary *params = @{@"url": url ?: @"", @"position": @(position)};
    [self startPlay:params];
}

// Start playback. `options` may be a dict ({url, position}), a bare url string,
// or anything else (falls back to the most recent recording at _voicePath).
RCT_EXPORT_METHOD(startPlay:(id)options){
    NSMutableDictionary *params = nil;
    if ([options isKindOfClass:[NSDictionary class]]) {
        params = [options mutableCopy];
    } else if ([options isKindOfClass:[NSString class]]) {
        params = [@{@"url": (NSString *)options} mutableCopy];
    } else {
        params = [@{@"url": _voicePath ?: @""} mutableCopy];
    }

    // Resolve the effective url: trimmed param, else last recording, else bail.
    NSString *url = params[@"url"];
    url = url ? [url stringByTrimmingCharactersInSet:[NSCharacterSet whitespaceAndNewlineCharacterSet]] : @"";
    if (url.length == 0) {
        url = _voicePath ?: @"";
    }
    if (url.length == 0) {
        return; // nothing to play
    }

    // Local AMR files can't be played directly — convert to a temp WAV first.
    if ([[NSFileManager defaultManager] fileExistsAtPath:url] && [VoiceConverter isAMRFile:url]) {
        NSString *wav_path = [NSString stringWithFormat:@"%@.wav", [NSUUID UUID].UUIDString];
        NSString *temp = [[UtilForFileCache instance].cachePath stringByAppendingPathComponent:wav_path];
        if ([VoiceConverter convertAMR:url toWAV:temp]) {
            url = [[UtilForFileCache instance].cachePath stringByAppendingPathComponent:wav_path];
        }
    }

    [self handleAudioNotifications:NO];
    [self setProximity];
    // Restart the 1s progress timer from scratch.
    if (playTimer) {
        [playTimer invalidate];
        playTimer = nil;
    }

    playTimer = [NSTimer timerWithTimeInterval:1 target:self selector:@selector(timerHandler:) userInfo:nil repeats:YES];
    [[NSRunLoop mainRunLoop] addTimer:playTimer forMode:NSRunLoopCommonModes];
    _playLength = 0;

    NSDictionary *userInfo = @{@"position": params[@"position"] ?: @(0)};
    [self.ezAudioManager startPlayingWithPath:url delegate:self userinfo:userInfo continuePlaying:NO];
    [self handleAudioNotifications:YES];
}

// Resume playback (comment previously mislabeled this "start playing").
RCT_EXPORT_METHOD(resumePlaying){
    [self handleAudioNotifications:YES];
    // FIX: check isValid as well — stopPlaying invalidates the timer without
    // nil-ing it, and re-arming an invalidated NSTimer has no effect, which
    // left resumed playback with no progress events.
    if (playTimer && playTimer.isValid) {
        [playTimer setFireDate:[NSDate date]];
    } else {
        playTimer = [NSTimer timerWithTimeInterval:1 target:self selector:@selector(timerHandler:) userInfo:nil repeats:YES];
        [[NSRunLoop mainRunLoop] addTimer:playTimer forMode:NSRunLoopCommonModes];
    }

    [self.ezAudioManager resumeCurrentPlaying];
}

// Stop playback and reset progress.
RCT_EXPORT_METHOD(stopPlaying){
    _playLength = 0;
    [playTimer invalidate];
    // FIX: drop the reference — an invalidated NSTimer cannot be re-armed, and
    // leaving it non-nil made resumePlaying's `if (playTimer)` branch dead-end.
    playTimer = nil;
    [self handleAudioNotifications:NO];
    [self.ezAudioManager stopCurrentPlaying];
}

// Pause playback. The progress timer is parked far in the future (not
// invalidated) so resumePlaying can simply re-arm it.
RCT_EXPORT_METHOD(pausePlaying){
    playTimer.fireDate = [NSDate distantFuture];
    [self handleAudioNotifications:NO];
    [self.ezAudioManager pauseCurrentPlaying];
}

// Resolve whether the current audio route includes the built-in speaker.
RCT_REMAP_METHOD(isSpeakerphoneOn, isSpeakerphoneOnResolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject){
    if (resolve) {
        BOOL isSpeakerphoneOn = NO;
        AVAudioSessionRouteDescription *route = [[AVAudioSession sharedInstance] currentRoute];
        for (AVAudioSessionPortDescription *desc in [route outputs]) {
            if ([[desc portType] isEqualToString:AVAudioSessionPortBuiltInSpeaker]) {
                isSpeakerphoneOn = YES;
                // BUG FIX: this was `return;`, which exited the method before
                // calling resolve() — the JS promise never settled whenever the
                // speaker WAS active.
                break;
            }
        }
        resolve(@(isSpeakerphoneOn));
    }
}

// Switch between earpiece and speaker output. `options` may be a dict
// ({isSpeakerphoneOn}) or a bare boolean/number; defaults to speaker.
// Resolves {isSuccess} reflecting whether ALL session calls succeeded.
RCT_EXPORT_METHOD(setSpeakerphoneOn:(id)options resolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject){
    NSDictionary *params = nil;
    if ([options isKindOfClass:[NSDictionary class]]) {
        params = [options copy];
    } else if ([options isKindOfClass:[NSNumber class]]) {
        params = @{@"isSpeakerphoneOn": (NSNumber *)options};
    }

    BOOL isSpeakerphoneOn = params ? [params[@"isSpeakerphoneOn"] boolValue] : YES;
    AVAudioSession *session = [AVAudioSession sharedInstance];
    BOOL isSuccessed = NO;
    if (isSpeakerphoneOn)
    {
        // Route to the built-in speaker.
        // FIX: the second assignment previously discarded the first call's
        // result; AND the two so a failed setCategory: is not reported as success.
        isSuccessed = [session setCategory:AVAudioSessionCategoryPlayAndRecord withOptions:AVAudioSessionCategoryOptionDefaultToSpeaker error:nil];
        isSuccessed = [session overrideOutputAudioPort:AVAudioSessionPortOverrideSpeaker error:nil] && isSuccessed;
    }
    else
    {
        // Plain PlayAndRecord routes output to the earpiece.
        isSuccessed = [session setCategory:AVAudioSessionCategoryPlayAndRecord error:nil];
    }
    if (resolve) {
        resolve(@{@"isSuccess": @(isSuccessed)});
    }
}

#pragma mark -- handlers
/// Fired once per second by playTimer while audio plays; pushes a
/// "playingProgress" event to JS.
- (void)timerHandler:(NSTimer *)timer {
    // NOTE(review): the timer ticks every 1 second but adds 1000 per tick, so
    // _playLength is effectively in milliseconds. Confirm getAudioDuration
    // reports the same unit — otherwise playingRestTime below is wrong.
    _playLength += 1000;

    NSMutableDictionary *mDict = [NSMutableDictionary new];
    mDict[@"isPlay"] = @true;
    mDict[@"playingRealTime"] = @(_playLength);
    mDict[@"playingRestTime"] = @(MAX(0, self.ezAudioManager.getAudioDuration - _playLength));
    mDict[@"duration"] = @(self.ezAudioManager.getAudioDuration);

    // Notify JS.
    [self postMessgeNotif:mDict messageType:PlayingProgress];
}

/// Pulls the latest FFT frame from the player's buffer manager, resamples it to
/// the draw-buffer length via linear interpolation, normalises each bin to
/// [0, 1], and emits the result to JS as a "spectrumProgress" event.
- (void)fftHandler {
    BufferManager* bufferManager = [self.audioUnitPlayer getBufferManagerInstance];
    Float32** drawBuffers = bufferManager->GetDrawBuffers();
    NSMutableArray* arrayBuffers = [NSMutableArray array];
    if (bufferManager->HasNewFFTData())
    {
        bufferManager->GetFFTOutput(l_fftData);

        int y, maxY;
        maxY = bufferManager->GetCurrentDrawBufferLength();
        int fftLength = bufferManager->GetFFTOutputBufferLength();
        // Map each of the maxY output slots onto a fractional FFT index.
        for (y=0; y<maxY; y++)
        {
            CGFloat yFract = (CGFloat)y / (CGFloat)(maxY - 1);
            CGFloat fftIdx = yFract * ((CGFloat)fftLength - 1);

            double fftIdx_i, fftIdx_f;
            fftIdx_f = modf(fftIdx, &fftIdx_i); // split into integer + fractional parts

            CGFloat fft_l_fl, fft_r_fl;
            CGFloat interpVal;

            int lowerIndex = (int) fftIdx_i;
            int upperIndex = (int) fftIdx_i + 1;
            upperIndex = (upperIndex == fftLength) ? fftLength - 1 : upperIndex;

            // FFT values are in dB (negative); "+80 then /64" shifts the noise
            // floor and rescales — NOTE(review): constants appear inherited from
            // Apple's aurioTouch sample; confirm against the FFT's dB range.
            fft_l_fl = (CGFloat)(l_fftData[lowerIndex] + 80) / 64.;
            fft_r_fl = (CGFloat)(l_fftData[upperIndex] + 80) / 64.;
            interpVal = fft_l_fl * (1. - fftIdx_f) + fft_r_fl * fftIdx_f;

            drawBuffers[0][y] = CLAMP(0., interpVal, 1.);

            if (isnan(interpVal) || isinf(interpVal)) {
                // Guard against bad FFT samples.
                [arrayBuffers addObject:@(0)];
            } else {
                // BUG FIX: was abs() — the *integer* absolute value — which
                // truncates the CGFloat to int (almost always 0) before clamping.
                // fabs() keeps the fractional magnitude.
                [arrayBuffers addObject:@(CLAMP(0., fabs(interpVal), 1.))];
            }
        }

        // Notify JS.
        [self postMessgeNotif:@{@"spectrum":arrayBuffers} messageType:SpectrumProgress];
    }
}

#pragma mark -- proximity-sensor / audio-route setup

/// Configures the audio session for speaker playback before starting playback.
/// Mixes the deprecated C AudioSession API with AVAudioSession — kept as-is
/// because the call order is load-bearing on older iOS versions.
- (void)setProximity{

    AVAudioSession *session = [AVAudioSession sharedInstance];

    NSError *sessionError = nil;

    UInt32 audioRouteOverride = kAudioSessionOverrideAudioRoute_Speaker;

#pragma clang diagnostic push

#pragma clang diagnostic ignored "-Wdeprecated-declarations"

    AudioSessionSetProperty (

                             kAudioSessionProperty_OverrideAudioRoute,

                             sizeof (audioRouteOverride),

                             &audioRouteOverride

                             );

    UInt32 sessionCategory = kAudioSessionCategory_MediaPlayback;

    AudioSessionSetProperty(kAudioSessionProperty_AudioCategory,

                            sizeof(sessionCategory),

                            &sessionCategory);

    // Speaker playback by default.
    [session setCategory:AVAudioSessionCategoryPlayAndRecord withOptions:AVAudioSessionCategoryOptionDefaultToSpeaker error:&sessionError];

    [session overrideOutputAudioPort:AVAudioSessionPortOverrideSpeaker error:&sessionError];

    if(!session) {
        NSLog(@"Error creating session: %@", [sessionError description]);
    }
    else {
        [session setActive:YES error:nil];
    }
}

// proximityState is YES when the user holds the phone close to their face (the
// screen also turns off, without sleeping).

/// UIDeviceProximityStateDidChangeNotification handler: route audio to the
/// earpiece when the phone is at the user's ear, back to the speaker otherwise.
/// FIX: parameter was mistyped as NSNotificationCenter * — the notification
/// center delivers an NSNotification here (the selector itself is unchanged).
- (void)sensorStateChange:(NSNotification *)notification{

    AVAudioSession *session = [AVAudioSession sharedInstance];

    if ([[UIDevice currentDevice] proximityState])
    {
        // Device is close to the user — plain PlayAndRecord routes to the earpiece.
        [session setCategory:AVAudioSessionCategoryPlayAndRecord error:nil];
    }
    else
    {
        // Device moved away — force output back to the built-in speaker.
        [session setCategory:AVAudioSessionCategoryPlayAndRecord withOptions:AVAudioSessionCategoryOptionDefaultToSpeaker error:nil];
        [session overrideOutputAudioPort:AVAudioSessionPortOverrideSpeaker error:nil];
    }

}

#pragma mark - earpiece/speaker monitoring
/// Toggles proximity monitoring plus the route-change and proximity observers.
/// Enable before playback starts and disable when it ends — proximity
/// monitoring powers the IR sensor, so it should not stay on.
- (void)handleAudioNotifications:(BOOL)state
{
    [[UIDevice currentDevice] setProximityMonitoringEnabled:state];

    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
    if (state)
    {
        // Observe: pause when headphones are unplugged; react to the proximity sensor.
        [center addObserver:self
                   selector:@selector(routeChange:)
                       name:AVAudioSessionRouteChangeNotification
                     object:nil];
        [center addObserver:self
                   selector:@selector(sensorStateChange:)
                       name:UIDeviceProximityStateDidChangeNotification
                     object:nil];
    }
    else
    {
        [center removeObserver:self name:AVAudioSessionRouteChangeNotification object:nil];
        [center removeObserver:self name:UIDeviceProximityStateDidChangeNotification object:nil];
    }
}

/**
 *  Called whenever the audio output route changes.
 *
 *  @param notification the route-change notification
 */
-(void)routeChange:(NSNotification *)notification{
    NSDictionary *dic = notification.userInfo;
    NSInteger changeReason = [dic[AVAudioSessionRouteChangeReasonKey] integerValue];
    // OldDeviceUnavailable means the previous output device went away.
    if (changeReason == AVAudioSessionRouteChangeReasonOldDeviceUnavailable) {
        AVAudioSessionRouteDescription *routeDescription = dic[AVAudioSessionRouteChangePreviousRouteKey];
        AVAudioSessionPortDescription *portDescription = [routeDescription.outputs firstObject];
        // Pause if the old route was wired headphones.
        // FIX: use the AVAudioSessionPortHeadphones constant instead of the
        // hard-coded @"Headphones" literal (same value, no typo risk).
        if ([portDescription.portType isEqualToString:AVAudioSessionPortHeadphones]) {
            [self pausePlaying];
        }
    }
}

#pragma mark -- audio utils
/// Shows an alert (Chinese UI copy) directing the user to Settings to grant
/// microphone access; called when recording fails with a permission error.
- (void)requestMicroPhonePermission {
    NSString *appName = [[[NSBundle mainBundle] infoDictionary] objectForKey:@"CFBundleDisplayName"];
    if (!appName)appName = [[NSBundle mainBundle].infoDictionary valueForKey:@"CFBundleName"];
    UIAlertController *alertController = [UIAlertController alertControllerWithTitle:@"提示" message:[NSString stringWithFormat:@"请在iPhone的设置-隐私->允许%@访问你的麦克风",appName] preferredStyle:UIAlertControllerStyleAlert];
    UIAlertAction *sureAction = [UIAlertAction actionWithTitle:@"确定" style:UIAlertActionStyleDefault handler:^(UIAlertAction * _Nonnull action) {
        if (IOS_VERSION_8_OR_ABOVE) {
            // NOTE(review): openURL: is deprecated since iOS 10 — consider
            // openURL:options:completionHandler: when the deployment target allows.
            [[UIApplication sharedApplication] openURL:[NSURL URLWithString:UIApplicationOpenSettingsURLString]];
        } else {
            // Private API — using it would get the app rejected in review.
            //[[UIApplication sharedApplication] openURL:[NSURL URLWithString:@"prefs:root=Privacy&path=MICROPHONE"]];
        }
    }];
    UIAlertAction *cancelAction = [UIAlertAction actionWithTitle:@"取消" style:UIAlertActionStyleCancel handler:nil];
    [alertController addAction:sureAction];
    [alertController addAction:cancelAction];
    [kPresentedController presentViewController:alertController animated:YES completion:nil];
}

#pragma mark -- IMAudioRecordDelegate
// Recording started (or failed to start) — a recording animation can begin now.
// On success: computes the target .amr path and resolves the startRecording
// promise. On failure: rejects the promise and, for permission errors, prompts
// the user toward Settings.
- (void)audioRecordDidStartRecordingWithError:(NSError *)error {

    if(self.beginAudioRecordResponse){
        if(!error){
            // Pick a unique .amr destination for this session; the WAV→AMR
            // conversion in audioRecordDidFinishSuccessed: writes here.
            NSString *uuid = [NSUUID UUID].UUIDString;
            NSString *amr_path = [NSString stringWithFormat:@"%@.amr", uuid];
            _voicePath = [[UtilForFileCache instance].cachePath stringByAppendingPathComponent:amr_path];
            if (self.beginAudioRecordResponse.successBlock) {
                //NSString *messageID = self.audioRecordMessage[@"messageID"];
                self.beginAudioRecordResponse.successBlock(@YES);
            }
        } else {
            if (self.beginAudioRecordResponse.errorBlock) {
                NSDictionary *userInfo = @{ NSLocalizedDescriptionKey:@"Start Recording Error" };

                NSError *rError = [NSError errorWithDomain:error.domain code:error.code userInfo:userInfo];

                self.beginAudioRecordResponse.errorBlock(@(rError.code).stringValue, @"audioRecordDidStartRecordingWithError", rError);
            }


            /*
             switch (error.code) {
             case kLLErrorRecordTypeAuthorizationDenied: {
             //无法录音
             break;
             }
             case kLLErrorRecordTypeInitFailed: {
             //无法正常访问您的麦克风
             break;
             }
             case kLLErrorRecordTypeMultiRequest:
             //无法正常访问您的麦克风
             break;
             case kLLErrorRecordTypeCreateAudioFileFailed:
             //创建录音文件出错
             break;
             case kLLErrorRecordTypeRecordError:
             //无法正常访问您的麦克风
             break;
             default:
             break;

             }

             return;
             */

            switch (error.code) {
                case kLLErrorRecordTypeAuthorizationDenied: {
                    // Cannot record: microphone permission denied.
                    [self requestMicroPhonePermission];

                    break;
                }
                default:
                    break;

            }
        }

        // One-shot promise: clear it whichever way it settled.
        self.beginAudioRecordResponse = nil;
    }

}

/// Microphone permission was granted. Intentionally empty — nothing to do here.
- (void)audioRecordAuthorizationDidGranted {

}

/*
 * averagePower: current recording level.
 */
/// Caches the latest metering value and forwards it to JS as an
/// "amplitudeProgress" event.
- (void)audioRecordDidUpdateVoiceMeter:(double)averagePower {
    _recorderVoiceMeter = averagePower;
    NSDictionary *payload = @{@"amplitude": @(_recorderVoiceMeter)};
    [self postMessgeNotif:payload messageType:AmplitudeProgress];
}

// Recording duration changed (seconds). Refreshes the shared progress payload
// and pushes it to JS as a "recordingProgress" event.
- (void)audioRecordDurationDidChanged:(NSTimeInterval)duration {

    NSMutableDictionary *message = self.audioRecordMessage;
    NSTimeInterval remaining = self.maxRecorderDuration - duration;

    message[@"filePath"] = _voicePath;
    message[@"isRecord"] = @true;
    message[@"recordingRealTime"] = @(round(duration));
    message[@"recordingRestTime"] = @(round(remaining > 0 ? remaining : 0));
    message[@"amplitude"] = @(_recorderVoiceMeter);
    message[@"msg"] = @"正在录音...";

    [self postMessgeNotif:message messageType:RecordingProgress];
}

// Recording finished. Converts the WAV to AMR at the path announced when
// recording started, pushes a final "recordingProgress" event, and settles the
// pending stopRecording promise.
- (void)audioRecordDidFinishSuccessed:(NSString *)voiceFilePath duration:(CFTimeInterval)duration {
    //NSData *data = [NSData dataWithContentsOfFile:voiceFilePath];
    //BOOL stored = [[UtilForFileCache instance] storeFile:data toPath:_voicePath];

    // Persist as AMR.
    BOOL stored = [VoiceConverter convertWAV:voiceFilePath toAMR:_voicePath];
    if (!stored) {
        // FIX: `stored` was previously computed and silently ignored. A failed
        // conversion means the advertised filePath does not exist — make the
        // failure visible at minimum.
        NSLog(@"[RNRecorder] WAV->AMR conversion failed: %@ -> %@", voiceFilePath, _voicePath);
    }

    _recorderVoiceMeter = 0;
    self.audioRecordMessage[@"filePath"] = _voicePath;
    self.audioRecordMessage[@"isRecord"] = @false;
    self.audioRecordMessage[@"recordingRealTime"] = @(round(duration));
    self.audioRecordMessage[@"recordingRestTime"] = @(round(MAX(0, self.maxRecorderDuration - duration)));
    self.audioRecordMessage[@"amplitude"] = @(_recorderVoiceMeter);
    self.audioRecordMessage[@"msg"] = duration >= self.maxRecorderDuration ? @"已达到最大录音时长" : @"录音结束";

    // Notify JS.
    [self postMessgeNotif:self.audioRecordMessage messageType:RecordingProgress];

    // Settle the pending stopRecording promise with the final payload.
    if(self.endAudioRecordResponse && self.endAudioRecordResponse.successBlock){
        self.endAudioRecordResponse.successBlock(self.audioRecordMessage);
        self.endAudioRecordResponse = nil; // one-shot
    }
}

/// Recording failed outright: reset the level meter and reject the pending
/// startRecording promise (if any).
- (void)audioRecordDidFailed {
    _recorderVoiceMeter = 0;

    RNPromiseResponseForRecorder *response = self.beginAudioRecordResponse;
    if (response == nil || response.errorBlock == nil) {
        return;
    }

    NSError *error = [NSError errorWithDomain:@"audioRecordDidFailed"
                                         code:1
                                     userInfo:@{ NSLocalizedDescriptionKey:@"audioRecordDidFailed" }];
    response.errorBlock(@(error.code).stringValue, @"audioRecordDidFailed", error);
    self.beginAudioRecordResponse = nil;
}

/// Recording was cancelled; just reset the cached level meter.
- (void)audioRecordDidCancelled {
    _recorderVoiceMeter = 0;
}

/// Recording was shorter than the manager's minimum. Intentionally a no-op.
- (void)audioRecordDurationTooShort {

}

// Dispatched when the configured max duration is reached. The manager does NOT
// stop recording by itself — the delegate decides (so it could, e.g., run a
// countdown first). Here we simply stop immediately.
- (void)audioRecordDurationTooLong{
    [self.ezAudioManager stopRecording];
}

/// Asked by the recorder for the auto-stop threshold, in seconds.
- (NSTimeInterval)audioRecordMaxRecordTime {
    return [self maxRecorderDuration];
}

#pragma mark -- IMAudioPlayDelegate
/// Playback started; push an initial progress snapshot to JS.
- (void)audioPlayDidStarted:(id)userinfo {
    NSMutableDictionary *mDict = [NSMutableDictionary new];
    // NOTE(review): isPlay is @false here even though playback just started —
    // timerHandler: flips it to @true one second later. Looks inverted; confirm
    // how the JS side uses this before changing it.
    mDict[@"isPlay"] = @false;
    mDict[@"playingRealTime"] = @(_playLength);
    mDict[@"playingRestTime"] = @(MAX(0, self.ezAudioManager.getAudioDuration - _playLength));
    mDict[@"duration"] = @(self.ezAudioManager.getAudioDuration);

    // Notify JS.
    [self postMessgeNotif:mDict messageType:PlayingProgress];
}

// System volume is too low while playing a recording. Intentionally a no-op.
- (void)audioPlayVolumeTooLow {

}

// A playback error occurred; the play session ends at the same time.
// Intentionally a no-op — no failure event is forwarded to JS today.
- (void)audioPlayDidFailed:(id)userinfo {

}

// Playback finished. To support back-to-back playback only the current item is
// stopped; the play session is not released. The final snapshot is built
// BEFORE stopPlaying resets state, then sent to JS.
- (void)audioPlayDidFinished:(id)userinfo {

    NSMutableDictionary *mDict = [NSMutableDictionary new];
    mDict[@"isPlay"] = @false;
    mDict[@"playingRealTime"] = @(_playLength);
    mDict[@"playingRestTime"] = @(MAX(0, self.ezAudioManager.getAudioDuration - _playLength));
    mDict[@"duration"] = @(self.ezAudioManager.getAudioDuration);
    _playLength = 0;

    // Tear down the timer / proximity monitoring for this item.
    [self stopPlaying];

    // Notify JS.
    [self postMessgeNotif:mDict messageType:PlayingProgress];

    /*
    if ([data objectForKey:@"ifPlayEndVoice"]) {
        // Play the "playback ended" sound effect
        NSString *path = [[[NSBundle ChatBundle] resourcePath] stringByAppendingPathComponent:@"play_end.mp3"];
        if(path){
            [[AudioManagerAdapter shareInstance].defaultImAudioManager startPlayingWithPath:path delegate:nil userinfo:nil continuePlaying:NO];
        }
    }
     */
}

// Playback was stopped. To support back-to-back playback only the current item
// is stopped; the play session is kept alive. Sends a final progress snapshot
// to JS and resets the progress counter.
- (void)audioPlayDidStopped:(id)userinfo {
    NSTimeInterval totalDuration = self.ezAudioManager.getAudioDuration;
    NSTimeInterval remaining = totalDuration - _playLength;

    NSMutableDictionary *payload = [NSMutableDictionary new];
    payload[@"isPlay"] = @false;
    payload[@"playingRealTime"] = @(_playLength);
    payload[@"playingRestTime"] = @(remaining > 0 ? remaining : 0);
    payload[@"duration"] = @(totalDuration);
    _playLength = 0;

    [self postMessgeNotif:payload messageType:PlayingProgress];
}

#pragma mark -- AudioPlayerDelegate

/// New PCM data is available from the AudioPlayer — recompute and publish the spectrum.
- (void)onHandlePCM:(AudioPlayer *)player {
    [self fftHandler];
}

@end

