//
//  EZAudioManager.m
//  RNRecorder
//
//  Created by GJS on 2018/12/6.
//  Copyright © 2018年 Facebook. All rights reserved.
//

#import "EZAudioManager.h"
#import <AVFoundation/AVFoundation.h>
#import "EZAudio.h"
// Douban audio stream player
#import "DouAudioStreamPlayer.h"
#import "DouTrack.h"
//
#import "VoiceConverter.h"

#define ERROR_AUDIO_DOMAIN @"LLAudioManager_Domain"

//
// By default this will record a file to the application's documents directory
// (within the application's sandbox)
//
//#define kAudioFilePath @"test.m4a"
#define kAudioFilePath @"test.wav"

static vDSP_Length const FFTViewControllerFFTWindowSize = 4096;

// Name of the temporary directory for AMR audio files
#define AMR_AUDIO_TMP_FOLDER @"amrAudioTmp"

// Name of the temporary directory for WAV audio files
#define WAV_AUDIO_TMP_FOLDER @"wavAudioTmp"

// Minimum duration (seconds) required for a valid recording
#define MIN_RECORD_TIME_REQUIRED 1

// Maximum recording duration allowed (seconds)
#define MAX_RECORD_TIME_ALLOWED 60

// Length of the rolling array used to average decibel readings
#define AVERAGE_POWER_COUNT 20

@interface EZAudioManager () <EZAudioPlayerDelegate, EZMicrophoneDelegate, EZRecorderDelegate, EZAudioFFTDelegate, AudioStreamPlayerDelegate>

@property (weak, nonatomic) id<AudioRecordDelegate> recordDelegate;
@property (weak, nonatomic) id<AudioPlayDelegate> playerDelegate;
@property (nonatomic) id userinfo;

@property (nonatomic) AVAudioSession *audioSession;

//------------------------------------------------------------------------------

//
// The microphone component
//
@property (nonatomic, strong) EZMicrophone *microphone;

//------------------------------------------------------------------------------

//
// The audio player that will play the recorded file
//
@property (nonatomic, strong) EZAudioPlayer *player;

//------------------------------------------------------------------------------

//
// The recorder component
//
@property (nonatomic, strong) EZRecorder *recorder;

//------------------------------------------------------------------------------

//
// Used to calculate a rolling FFT of the incoming audio data.
//
@property (nonatomic, strong) EZAudioFFTRolling *fft;

//------------------------------------------------------------------------------

//
// The Douban stream player that can play the recorded file or an audio stream
//
@property (nonatomic, strong) DouAudioStreamPlayer *douAudiostreamPlayer;

//------------------------------------------------------------------------------

// Recorder settings (sample rate, format, bit depth, channel count).
// Fix: copy (not strong) — NSDictionary has a mutable subclass.
@property (nonatomic, copy) NSDictionary *recordSetting;

// Delayed "recording did start" callback; cancelled when recording stops early.
@property (nonatomic, copy) dispatch_block_t block;

// AVAudioSession category active before recording/playback, restored afterwards.
@property (nonatomic, copy) NSString *previousCategory;

@property (nonatomic) BOOL isCancelRecording;
@property (nonatomic) BOOL isFinishRecording;

// Fix: copy (not strong) for NSString-typed properties to guard against
// mutable strings being handed in.
@property (nonatomic, copy) NSString *basePath;
@property (nonatomic, copy) NSString *amrAudioTmpPath;  // temp folder for AMR files
@property (nonatomic, copy) NSString *wavAudioTmpPath;  // temp folder for WAV files

@property (nonatomic, strong) NSTimer *timer;           // per-second duration callback timer
@property (nonatomic, assign) NSTimeInterval maxRecordTime;
@property (nonatomic, assign) NSTimeInterval recordDuration;
// Rolling window of per-buffer decibel readings.
// Deliberately strong, NOT copy: it is mutated in place from the audio thread.
@property (nonatomic, strong) NSMutableArray *averagePowerArray;
@property (nonatomic, assign) BOOL isPlaySessionActive;

@end

@implementation EZAudioManager

@synthesize audioTmpPath = _audioTmpPath;

// Process-wide singleton accessor; thread-safe via dispatch_once.
+ (instancetype)shareInstance{
    static EZAudioManager *sharedManager = nil;
    static dispatch_once_t token;
    dispatch_once(&token, ^{
        sharedManager = [[self alloc] init];
    });
    return sharedManager;
}

//------------------------------------------------------------------------------
#pragma mark - Dealloc
//------------------------------------------------------------------------------

- (void)dealloc
{
    // Stop observing the EZAudioPlayer notifications registered in
    // setupNotifications.
    [[NSNotificationCenter defaultCenter] removeObserver:self];
}

//------------------------------------------------------------------------------
#pragma mark - Setup
//------------------------------------------------------------------------------

- (instancetype)init {
    self = [super init];
    if (self) {
        // Cache the shared audio session, create the temp-folder hierarchy,
        // then configure the session / microphone / player / FFT.
        self.audioSession = [AVAudioSession sharedInstance];
        [self setupAudioTmpPath];
        [self setupAudioManager];
    }
    
    return self;
}

// Creates the base audio buffer directory plus the AMR and WAV scratch
// folders beneath it, recording each path only if its directory was created.
- (void)setupAudioTmpPath {
    NSString *baseDir = self.audioTmpPath;
    if (![[self class] createDirectory:baseDir]) {
        return;
    }
    self.basePath = baseDir;
    
    NSString *amrDir = [baseDir stringByAppendingPathComponent:AMR_AUDIO_TMP_FOLDER];
    if ([[self class] createDirectory:amrDir]) {
        self.amrAudioTmpPath = amrDir;
    }
    
    NSString *wavDir = [baseDir stringByAppendingPathComponent:WAV_AUDIO_TMP_FOLDER];
    if ([[self class] createDirectory:wavDir]) {
        self.wavAudioTmpPath = wavDir;
    }
}

// Lazily defaults the buffer root to <home>/Library/appdata/audiobuffer.
- (NSString *)audioTmpPath {
    if (_audioTmpPath == nil) {
        _audioTmpPath = [NSString stringWithFormat:@"%@/Library/appdata/audiobuffer", NSHomeDirectory()];
    }
    return _audioTmpPath;
}

// Custom setter: re-creates the temp-folder hierarchy whenever the root
// path actually changes.
- (void)setAudioTmpPath:(NSString *)audioTmpPath {
    BOOL unchanged = [_audioTmpPath isEqualToString:audioTmpPath];
    if (unchanged) {
        return;
    }
    _audioTmpPath = audioTmpPath;
    [self setupAudioTmpPath];
}

// Lazily built recorder settings: 8 kHz, 16-bit linear PCM, mono.
// Modernized to dictionary/number literals (same keys and values as before).
- (NSDictionary *)recordSetting {
    if (!_recordSetting) {
        _recordSetting = @{
            // Sample rate (affects audio quality)
            AVSampleRateKey: @8000.0f,
            // Recording format
            AVFormatIDKey: @(kAudioFormatLinearPCM),
            // Linear PCM bit depth: 8, 16, 24 or 32
            AVLinearPCMBitDepthKey: @16,
            // Channel count: 1 or 2
            AVNumberOfChannelsKey: @1,
        };
    }
    
    return _recordSetting;
}

// Lazy Douban stream player, wired to this manager as its delegate.
- (DouAudioStreamPlayer *)douAudiostreamPlayer {
    if (_douAudiostreamPlayer == nil) {
        _douAudiostreamPlayer = [[DouAudioStreamPlayer alloc] initWithDelegate:self];
    }
    return _douAudiostreamPlayer;
}

// One-time configuration of the audio session, microphone, player and
// rolling FFT. Called from init.
- (void)setupAudioManager {
    
    //
    // Setup the AVAudioSession. EZMicrophone will not work properly on iOS
    // if you don't do this!
    //
    // Fix: check each method's BOOL return value instead of testing the
    // NSError out-parameter, which is only meaningful on failure.
    //
    AVAudioSession *session = [AVAudioSession sharedInstance];
    NSError *error = nil;
    if (![session setCategory:AVAudioSessionCategoryPlayAndRecord error:&error])
    {
        NSLog(@"Error setting up audio session category: %@", error.localizedDescription);
    }
    error = nil;
    if (![session setActive:YES error:&error])
    {
        NSLog(@"Error setting up audio session active: %@", error.localizedDescription);
    }
    
    // Create the microphone and player with this manager as their delegate.
    self.microphone = [EZMicrophone microphoneWithDelegate:self];
    self.player = [EZAudioPlayer audioPlayerWithDelegate:self];
    
    //
    // Create an instance of the EZAudioFFTRolling to keep a history of the
    // incoming audio data and calculate the FFT (results are delivered via
    // EZAudioFFTDelegate).
    //
    self.fft = [EZAudioFFTRolling fftWithWindowSize:FFTViewControllerFFTWindowSize
                                         sampleRate:self.microphone.audioStreamBasicDescription.mSampleRate
                                           delegate:self];
    
    //
    // Override the output to the speaker. Do this after creating the EZAudioPlayer
    // to make sure the EZAudioDevice does not reset this.
    //
    error = nil;
    if (![session overrideOutputAudioPort:AVAudioSessionPortOverrideSpeaker error:&error])
    {
        NSLog(@"Error overriding output to the speaker: %@", error.localizedDescription);
    }
    
    //
    // Setup notifications
    //
    [self setupNotifications];
    
    //
    // Log out where the file is being written to within the app's documents directory
    //
    NSLog(@"File written to application sandbox's documents directory: %@",[self testFilePathURL]);
    
    //
    // The microphone is started on demand (see toggleMicrophone:), not here.
    //
    //[self.microphone startFetchingAudio];
}

//------------------------------------------------------------------------------

// Subscribes to play-state and end-of-file notifications posted by
// self.player. The observers are removed in dealloc.
- (void)setupNotifications
{
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(playerDidChangePlayState:)
                                                 name:EZAudioPlayerDidChangePlayStateNotification
                                               object:self.player];
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(playerDidReachEndOfFile:)
                                                 name:EZAudioPlayerDidReachEndOfFileNotification
                                               object:self.player];
}

//------------------------------------------------------------------------------
#pragma mark - Notifications
//------------------------------------------------------------------------------

// When the player starts playing, detach the recorder delegate on the main
// queue so recorder callbacks cannot fire mid-playback.
- (void)playerDidChangePlayState:(NSNotification *)notification
{
    __weak typeof(self) weakSelf = self;
    dispatch_async(dispatch_get_main_queue(), ^{
        EZAudioPlayer *player = notification.object;
        if ([player isPlaying])
        {
            weakSelf.recorder.delegate = nil;
        }
    });
}

//------------------------------------------------------------------------------

// EZAudioPlayer finished the file: report a successful play completion on
// the main queue.
- (void)playerDidReachEndOfFile:(NSNotification *)notification
{
    // Fix: the original declared weakSelf but then captured self strongly
    // inside the block; use weakSelf consistently.
    __weak typeof (self) weakSelf = self;
    dispatch_async(dispatch_get_main_queue(), ^{
        [weakSelf handlePlayFinished:YES];
    });
}

//------------------------------------------------------------------------------
#pragma mark - Actions
//------------------------------------------------------------------------------

// Plays a recorded file (or the default test file when url is nil) through
// the EZAudioPlayer, stopping capture and finalizing any open recording first.
- (void)playFile:(NSURL *)url
{
    // Stop pulling microphone audio before playback starts.
    [self.microphone stopFetchingAudio];
    
    // Clear the recording flags.
    self.isRecording = NO;
    self.isRecordingPaused = NO;
    
    // Finalize the audio file if a recorder is still holding it open.
    if (self.recorder)
    {
        [self.recorder closeAudioFile];
    }
    
    NSURL *fileURL = url ?: [self testFilePathURL];
    [self.player playAudioFile:[EZAudioFile audioFileWithURL:fileURL]];
}

// Plays a recorded file (or the default test file when url is nil) through
// the Douban stream player, stopping capture and finalizing any open
// recording first.
- (void)douAudioPlayFile:(NSURL *)url
{
    // Stop pulling microphone audio before playback starts.
    [self.microphone stopFetchingAudio];
    
    // Clear the recording flags.
    self.isRecording = NO;
    self.isRecordingPaused = NO;
    
    // Finalize the audio file if a recorder is still holding it open.
    if (self.recorder)
    {
        [self.recorder closeAudioFile];
    }
    
    NSURL *fileURL = url ?: [self testFilePathURL];
    
    // Wrap the file in a track; assigning tracks starts playback automatically.
    DouTrack *track = [[DouTrack alloc] init];
    [track setArtist:@"artist"];
    [track setTitle:@"title"];
    [track setAudioFileURL:fileURL];
    self.douAudiostreamPlayer.tracks = @[track];
}

//------------------------------------------------------------------------------

// Starts or stops microphone capture, pausing any stream playback first.
- (void)toggleMicrophone:(BOOL)isOn
{
    [self.douAudiostreamPlayer pause];
    
    if (isOn)
    {
        [self.microphone startFetchingAudio];
    }
    else
    {
        [self.microphone stopFetchingAudio];
    }
}

//------------------------------------------------------------------------------

// Starts (creating a fresh WAV recorder) or stops recording; stream playback
// is paused either way, and the paused flag is always cleared.
- (void)toggleRecording:(BOOL)isOn
{
    [self.douAudiostreamPlayer pause];
    if (isOn)
    {
        // Begin pulling audio and record it to the shared test file URL.
        [self.microphone startFetchingAudio];
        self.recorder = [EZRecorder recorderWithURL:[self testFilePathURL]
                                       clientFormat:[self.microphone audioStreamBasicDescription]
                                           fileType:EZRecorderFileTypeWAV
                                           delegate:self];
    }
    self.isRecording = isOn;
    self.isRecordingPaused = NO;
}

//------------------------------------------------------------------------------
#pragma mark - EZMicrophoneDelegate
//------------------------------------------------------------------------------

// Logs microphone on/off transitions.
- (void)microphone:(EZMicrophone *)microphone changedPlayingState:(BOOL)isPlaying
{
    NSString *state = isPlaying ? @"Microphone On" : @"Microphone Off";
    NSLog(@"changedPlayingState:%@", state);
}

//------------------------------------------------------------------------------

#warning Thread Safety
//
// Note that any callback that provides streamed audio data (like streaming
// microphone input) happens on a separate audio thread that should not be
// blocked. Any UI component fed from here must be updated via a GCD block
// on the main thread.
- (void)   microphone:(EZMicrophone *)microphone
     hasAudioReceived:(float **)buffer
       withBufferSize:(UInt32)bufferSize
 withNumberOfChannels:(UInt32)numberOfChannels
{
    // buffer is an array of per-channel float buffers: buffer[0] is the left
    // channel, buffer[1] (when stereo) the right.
    //
    // Feed channel 0 into the rolling FFT; results are delivered through
    // EZAudioFFTDelegate.
    //
    [self.fft computeFFTWithBuffer:buffer[0] withBufferSize:bufferSize];
    
    // Fix: the original dispatched an empty block to the main queue on every
    // audio callback (the plot update inside it was commented out), which
    // scheduled wasted main-thread work at audio rate. The dead dispatch has
    // been removed; re-add a main-queue hop here if UI updates are restored.
}

//------------------------------------------------------------------------------

- (void)   microphone:(EZMicrophone *)microphone
        hasBufferList:(AudioBufferList *)bufferList
       withBufferSize:(UInt32)bufferSize
 withNumberOfChannels:(UInt32)numberOfChannels
{
    //
    // Getting audio data as a buffer list that can be directly fed into the
    // EZRecorder. This is happening on the audio thread - any UI updating needs
    // a GCD main queue block. This will keep appending data to the tail of the
    // audio file.
    //
    if (self.isRecording)
    {
        [self.recorder appendDataFromBufferList:bufferList
                                 withBufferSize:bufferSize];
        
        // Track this buffer's loudness (dB) for the rolling volume meter.
        // NOTE(review): averagePowerArray is mutated here on the audio thread
        // and copied elsewhere (getAveragePower); confirm the race is
        // acceptable or guard access with a serial queue.
        float decibels = [self powerFromBufferList:bufferList withBufferSize:bufferSize withNumberOfChannels:numberOfChannels];
        if (!self.averagePowerArray) {
            self.averagePowerArray = [NSMutableArray array];
        }
        [self.averagePowerArray addObject:@(decibels)];
        if (self.averagePowerArray.count > AVERAGE_POWER_COUNT) {
            [self.averagePowerArray removeObjectAtIndex:0]; // keep the window at most AVERAGE_POWER_COUNT entries
        }
    }
}

//------------------------------------------------------------------------------
#pragma mark - EZRecorderDelegate
//------------------------------------------------------------------------------

- (void)recorderDidClose:(EZRecorder *)recorder
{
    recorder.delegate = nil;
    
    // [self.recorder closeAudioFile] triggers this recorderDidClose callback,
    // but it also disposes the file (ExtAudioFileDispose(self.info->extAudioFileRef)),
    // so calling self.recorder.duration etc. after this point would crash.
    // That is why willStopRecord captures the duration before closing.
    [self handleRecordFinished:YES];
}

//------------------------------------------------------------------------------

// Recorder time-update callback.
// Fix: the original fetched the formatted time and dispatched an empty block
// to the main queue (the label update inside was commented out); that dead
// work ran on every tick and has been removed. Re-add a main-queue hop here
// if UI updates are restored.
- (void)recorderUpdatedCurrentTime:(EZRecorder *)recorder
{
}

//------------------------------------------------------------------------------
#pragma mark - EZAudioPlayerDelegate
//------------------------------------------------------------------------------

// Playback buffer callback (audio thread).
- (void) audioPlayer:(EZAudioPlayer *)audioPlayer
         playedAudio:(float **)buffer
      withBufferSize:(UInt32)bufferSize
withNumberOfChannels:(UInt32)numberOfChannels
         inAudioFile:(EZAudioFile *)audioFile
{
    //
    // Feed channel 0 into the rolling FFT; results are delivered through
    // EZAudioFFTDelegate.
    //
    [self.fft computeFFTWithBuffer:buffer[0] withBufferSize:bufferSize];
    
    // Fix: the original also dispatched an empty block to the main queue on
    // every buffer (the plot update inside it was commented out); that
    // per-buffer dead work has been removed.
}

//------------------------------------------------------------------------------

// Playback position callback.
// Fix: the original dispatched an empty block to the main queue on every
// position update (the label refresh inside was commented out); the dead
// dispatch has been removed. Re-add a main-queue hop here if UI updates are
// restored.
- (void)audioPlayer:(EZAudioPlayer *)audioPlayer
    updatedPosition:(SInt64)framePosition
        inAudioFile:(EZAudioFile *)audioFile
{
}

//------------------------------------------------------------------------------
#pragma mark - EZAudioFFTDelegate
//------------------------------------------------------------------------------

// FFT result callback.
// Fix: the original computed the max frequency and note name, then dispatched
// an empty block to the main queue — everything that consumed those values
// (frequency label, frequency plot) was commented out, so the dead work has
// been removed. Recompute [fft maxFrequency] here if UI updates are restored.
- (void)        fft:(EZAudioFFT *)fft
 updatedWithFFTData:(float *)fftData
         bufferSize:(vDSP_Length)bufferSize
{
}

#pragma mark - AudioStreamPlayerDelegate

// Maps Douban streamer status changes to the shared play-finished handler.
// Only DouAudioStreamPlayer instances are handled.
- (void)audioStreamPlayer:(AudioStreamPlayer *)player updatedStatus:(id)status {
    if (![player isKindOfClass:[DouAudioStreamPlayer class]]) {
        return;
    }
    
    NSInteger streamerStatus = [status integerValue];
    if (streamerStatus == DOUAudioStreamerFinished) {
        [self handlePlayFinished:YES];   // normal end of stream
    } else if (streamerStatus == DOUAudioStreamerError) {
        [self handlePlayFinished:NO];    // playback failed
    }
    // Playing / Paused / Idle / Buffering require no action.
}

#pragma mark - 录音

// Reports the current microphone permission to the callback as one of
// @"Granted", @"Denied" or @"Undetermined".
- (void)requestRecordPermission:(void (^)(NSString * permission))callback {
    // Fix: invoking a nil block crashes; guard like the other entry points do.
    if (!callback) {
        return;
    }
    switch (self.audioSession.recordPermission) {
        case AVAudioSessionRecordPermissionGranted:
            callback(@"Granted");
            break;
        case AVAudioSessionRecordPermissionDenied:
            callback(@"Denied");
            break;
        case AVAudioSessionRecordPermissionUndetermined:
            callback(@"Undetermined");
            break;
    }
}

// Verifies every precondition for recording (microphone permission, no
// recording already in progress, session category set and activated, temp
// file creatable), then starts recording via toggleRecording: and reports
// the outcome through callback: nil on success, an ERROR_AUDIO_DOMAIN error
// otherwise.
- (void)checkAvailabilityWithDelegate:(id<AudioRecordDelegate>)delegate   callback:(void (^)(NSError *error))callback {
    if (!callback)
        return;
    [self.audioSession requestRecordPermission:^(BOOL granted) {
        // Step 1: we must have permission to access the microphone.
        if (!granted) {
            NSError *error = [NSError errorWithDomain:ERROR_AUDIO_DOMAIN
                                                 code:kLLErrorRecordTypeAuthorizationDenied
                                             userInfo:nil];
            callback(error);
            return;
        }else {
            if ([delegate respondsToSelector:@selector(audioRecordAuthorizationDidGranted)]) {
                [delegate audioRecordAuthorizationDidGranted];
            }
        }
        
        // Step 2: the microphone must not already be recording.
        if (self.isRecording) {
            NSError *error1 = [NSError errorWithDomain:ERROR_AUDIO_DOMAIN
                                                  code:kLLErrorRecordTypeMultiRequest
                                              userInfo:nil];
            
            callback(error1);
            return;
        }
        
        // Step 3: switch the AudioSession category to play-and-record,
        // remembering the previous category so didStopRecord can restore it.
        NSError *error;
        self.previousCategory = self.audioSession.category;
        BOOL success = [self.audioSession
                        setCategory:AVAudioSessionCategoryPlayAndRecord
                        withOptions:AVAudioSessionCategoryOptionDuckOthers
                        error:&error];
        
        if (!success || error) {
            NSError *error1 = [NSError errorWithDomain:ERROR_AUDIO_DOMAIN
                                                  code:kLLErrorRecordTypeInitFailed
                                              userInfo:nil];
            
            callback(error1);
            return;
        }
        
        // Step 4: activate the AudioSession.
        error = nil;
        success = [[AVAudioSession sharedInstance]
                   setActive:YES
                   withOptions:AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation
                   error:&error];
        if (!success || error) {
            NSError *error1 = [NSError errorWithDomain:ERROR_AUDIO_DOMAIN
                                                  code:kLLErrorRecordTypeInitFailed
                                              userInfo:nil];
            
            callback(error1);
            return;
        }
        
        // Step 5: create a temporary recording file.
        // NOTE(review): tmpAudioFile is only nil-checked here; the actual
        // recording is written to [self testFilePathURL] by toggleRecording:.
        // Confirm whether this temp file is still needed.
        NSURL *tmpAudioFile = [self.class wavPathWithName:[self.class randomFileName]];
        if (!tmpAudioFile) {
            NSError *error1 = [NSError errorWithDomain:ERROR_AUDIO_DOMAIN
                                                  code:kLLErrorRecordTypeCreateAudioFileFailed
                                              userInfo:nil];
            
            callback(error1);
            return;
        }
        
        // Step 6: create the recorder (done inside toggleRecording:).
        
        // Step 7: start recording.
        
        [self toggleRecording:YES];
        
        callback(nil);
    }];
}

// Checks availability, then begins recording: wires up the delegate, the
// one-second duration timer, the volume-meter polling loop, and a delayed
// "did start" callback that willStopRecord cancels for takes shorter than
// the minimum duration.
- (void)startRecordingWithDelegate:(id<AudioRecordDelegate>)delegate
{
    [self checkAvailabilityWithDelegate:delegate callback:^(NSError *error) {
        if (!error) {
            // Availability confirmed: reset all recording state.
            self.recordDelegate = delegate;
            self.isFinishRecording = NO;
            self.isCancelRecording = NO;
            self.isRecording = YES;
            self.isRecordingPaused = NO;
            self.recordDuration = 0;
            
            [self.timer invalidate];
            
            // Drive per-second duration callbacks on the main run loop
            // (common modes, so UI tracking does not starve the timer).
            if (self.recordDelegate && [self.recordDelegate respondsToSelector:@selector(audioRecordDurationDidChanged:)]){
                self.timer = [NSTimer timerWithTimeInterval:1 target:self selector:@selector(timerHandler:) userInfo:nil repeats:YES];
                [[NSRunLoop mainRunLoop] addTimer:self.timer forMode:NSRunLoopCommonModes];
            }
            
            // Cap the take length; the delegate may override the default.
            self.maxRecordTime = MAX_RECORD_TIME_ALLOWED;
            if (self.recordDelegate && [self.recordDelegate respondsToSelector:@selector(audioRecordMaxRecordTime)]){
                self.maxRecordTime = [delegate audioRecordMaxRecordTime];
            }
            
            // Begin reporting volume-level changes.
            [self updateVoiceMeter];
            
            // Defer the "did start" notification until just past the minimum
            // valid duration; willStopRecord cancels _block for shorter takes.
            if (self.recordDelegate && [self.recordDelegate respondsToSelector:@selector(audioRecordDidStartRecordingWithError:)]){
                
                _block = dispatch_block_create(0, ^{
                    [delegate audioRecordDidStartRecordingWithError:nil];
                });
                dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)((MIN_RECORD_TIME_REQUIRED + 0.25) * NSEC_PER_SEC)), dispatch_get_main_queue(), _block);
            }
        }else {
            // Availability failed: report the error instead of starting.
            if (delegate && [delegate respondsToSelector:@selector(audioRecordDidStartRecordingWithError:)]){
                
                [delegate audioRecordDidStartRecordingWithError:error];
            }
        }
    }];
}

// Maps an average power reading in decibels to a linear 0.0 .. 1.0 level.
// Readings below the -80 dB floor clamp to 0; readings at/above 0 dB clamp
// to 1. In between, the dB value is converted to amplitude (A = 10^(dB/20)),
// normalized into [minAmp, 1], and square-rooted to flatten the curve.
- (double)volumeFromAveragePower:(float)decibels {
    const float minDecibels = -80.0f; // noise floor; discards very quiet input
    
    if (decibels < minDecibels)
    {
        return 0.0;
    }
    if (decibels >= 0.0f)
    {
        return 1.0;
    }
    
    float minAmp          = powf(10.0f, 0.05f * minDecibels); // amplitude at the floor
    float inverseAmpRange = 1.0f / (1.0f - minAmp);
    float amp             = powf(10.0f, 0.05f * decibels);    // amplitude of this reading
    float adjAmp          = (amp - minAmp) * inverseAmpRange; // proportion of the usable range
    float level           = powf(adjAmp, 1.0f / 2.0f);        // square root flattening
    
    return (double)level;
}

// Returns the arithmetic mean (via the KVC @avg operator) of the buffered
// per-buffer decibel readings, or 0 when none have been collected yet.
- (float)getAveragePower {
    float averagePower = 0;
    if (self.averagePowerArray) {
        // Snapshot first: the array is appended to on the audio thread.
        // NOTE(review): -copy during a concurrent mutation is not atomic;
        // confirm this is acceptable or guard access with a queue/lock.
        NSArray *arr = [self.averagePowerArray copy];
        averagePower = [[arr valueForKeyPath:@"@avg.floatValue"] floatValue];
    }
    return averagePower;
}

// Computes the mean power (in decibels) of the first buffer in bufferList
// using vDSP: square each sample, average, convert the power ratio to dB.
// Returns 0 for a NULL/empty buffer list.
- (float)powerFromBufferList:(AudioBufferList *)bufferList
                     withBufferSize:(UInt32)bufferSize
               withNumberOfChannels:(UInt32)numberOfChannels {
    if (bufferList == NULL || numberOfChannels == 0) {
        return 0;
    }
    
    UInt32 byteSize = bufferList->mBuffers[0].mDataByteSize;
    
    // Fix: clamp the element count to what mBuffers[0] actually holds — the
    // original processed bufferSize*numberOfChannels elements regardless,
    // which can read past the buffer for non-interleaved layouts.
    UInt32 count = bufferSize * numberOfChannels;
    UInt32 available = byteSize / (UInt32)sizeof(float);
    if (count > available) {
        count = available;
    }
    if (count == 0) {
        return 0;
    }
    
    // Fix: allocate byteSize bytes. The original allocated
    // mDataByteSize*sizeof(float), over-allocating 4x (mDataByteSize is
    // already a byte count). Also guard against malloc failure.
    float *data = (float *)malloc(byteSize);
    if (data == NULL) {
        return 0;
    }
    memcpy(data, (float *)bufferList->mBuffers[0].mData, byteSize);
    
    vDSP_vsq(data, 1, data, 1, count);    // square each sample in place
    
    float meanVal = 0.0;
    vDSP_meanv(data, 1, &meanVal, count); // mean of the squares (power)
    
    float one = 1.0;
    vDSP_vdbcon(&meanVal, 1, &one, &meanVal, 1, 1, 0); // power ratio -> dB
    
    free(data);
    
    return meanVal;
}

//处理音量变化
// Polls the average input power roughly 10x/sec on a background queue while
// recording, forwarding a 0..1 volume level to the delegate on the main
// queue, and notifying the delegate once when the take exceeds maxRecordTime.
- (void)updateVoiceMeter {
    
    __weak typeof(self) weakSelf =  self;
    
    __block BOOL didNotifyTooLong = NO;
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        while(weakSelf.isRecording && !weakSelf.isRecordingPaused) {
            
            double lowPassResults = [weakSelf volumeFromAveragePower:[weakSelf getAveragePower]];
            dispatch_async(dispatch_get_main_queue(), ^{
                [weakSelf.recordDelegate audioRecordDidUpdateVoiceMeter:lowPassResults];
            });
            
            // Fix: use weakSelf (the original captured self strongly here,
            // keeping the manager alive for the loop's lifetime) and honor
            // the notify-once flag — the original set isSendMsg but never
            // checked it, so audioRecordDurationTooLong fired every 0.1 s.
            if (!didNotifyTooLong && weakSelf.recorder.currentTime >= weakSelf.maxRecordTime) {
                didNotifyTooLong = YES;
                if (weakSelf.recordDelegate && [weakSelf.recordDelegate respondsToSelector:@selector(audioRecordDurationTooLong)]){
                    dispatch_async(dispatch_get_main_queue(), ^{
                        [weakSelf.recordDelegate audioRecordDurationTooLong];
                    });
                }
            }
            
            [NSThread sleepForTimeInterval:0.1];
        }
    });
}

// Fired once per second while recording; forwards the elapsed time to the
// delegate. recordDelegate/recorder may be nil after teardown — messaging
// nil is a safe no-op.
- (void)timerHandler:(NSTimer *)timer {
    [self.recordDelegate audioRecordDurationDidChanged:self.recorder.currentTime];
}

/**
 *  Stop recording. A take shorter than MIN_RECORD_TIME_REQUIRED is treated
 *  as cancelled (the delegate is told it was too short); otherwise the take
 *  is finalized.
 */
- (void)stopRecording {
    if (!self.isRecording) {
        return;
    }
    
    BOOL tooShort = self.recorder.currentTime < MIN_RECORD_TIME_REQUIRED;
    self.isFinishRecording = !tooShort;
    self.isCancelRecording = tooShort;
    
    if (tooShort &&
        self.recordDelegate &&
        [self.recordDelegate respondsToSelector:@selector(audioRecordDurationTooShort)]) {
        [self.recordDelegate audioRecordDurationTooShort];
    }
    
    [self willStopRecord];
}


/**
 *  Cancel the in-progress recording, then notify the delegate.
 */
- (void)cancelRecording {
    if (!self.isRecording) {
        return;
    }
    
    self.isFinishRecording = NO;
    self.isCancelRecording = YES;
    [self willStopRecord];
    
    id<AudioRecordDelegate> delegate = self.recordDelegate;
    if ([delegate respondsToSelector:@selector(audioRecordDidCancelled)]) {
        [delegate audioRecordDidCancelled];
    }
}

/**
 *  Pause recording: stop pulling microphone audio and mark the session
 *  paused (this also ends the updateVoiceMeter polling loop).
 */
- (void)pauseRecording {
    [self toggleMicrophone:NO];
    self.isRecordingPaused = YES;
}

/**
 *  Resume a paused recording: restart microphone capture, clear the paused
 *  flag, then restart the volume-meter polling loop.
 */
- (void)resumeRecording {
    [self toggleMicrophone:YES];
    self.isRecordingPaused = NO;
    [self updateVoiceMeter];
}

// Tears down the timer/microphone/recorder and cancels the pending
// delayed "did start" block if it has not run yet.
- (void)willStopRecord {
    [self.timer invalidate];
    self.timer = nil;
    [self toggleMicrophone:NO];
    [self toggleRecording:NO];
    // Capture the duration BEFORE closing: closeAudioFile triggers
    // recorderDidClose but also disposes the underlying ExtAudioFile
    // (ExtAudioFileDispose), so recorder.duration would crash afterwards.
    self.recordDuration = self.recorder.duration;
    [self.recorder closeAudioFile];
    
    // Fix: _block is nil when the delegate never scheduled the delayed
    // "did start" callback; dispatch_block_testcancel/dispatch_block_cancel
    // crash on NULL, so guard first.
    if (_block && !dispatch_block_testcancel(_block))
        dispatch_block_cancel(_block);
    _block = nil;
}

// Resets all recording state, restores the previous audio session category,
// and deactivates the session (notifying other audio apps).
- (void)didStopRecord {
    self.recorder.delegate = nil;
    self.recorder = nil;
    self.recordDelegate = nil;
    self.isRecording = NO;
    self.isRecordingPaused = NO;
    self.isFinishRecording = NO;
    self.isCancelRecording = NO;
    self.maxRecordTime = 1<<10;
    
    NSString *categoryToRestore = self.previousCategory;
    if (categoryToRestore.length > 0) {
        [self.audioSession setCategory:categoryToRestore error:nil];
        self.previousCategory = nil;
    }
    
    [self.audioSession setActive:NO withOptions:AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation error:nil];
}

// Routes a finished recording to the delegate, or deletes the WAV file when
// the take failed or was cancelled, then resets all recording state.
- (void)handleRecordFinished:(BOOL)flag {
    NSString *recordPath = [[self testFilePathURL] path];
    
    if (!flag) {
        // The recording failed: notify, then discard the recorded WAV.
        if ([self.recordDelegate respondsToSelector:@selector(audioRecordDidFailed)]) {
            [self.recordDelegate audioRecordDidFailed];
        }
        [[NSFileManager defaultManager] removeItemAtPath:recordPath error:nil];
    }else if (self.isFinishRecording) {
        // Successful take: hand the path and duration to the delegate.
        if ([self.recordDelegate respondsToSelector:@selector(audioRecordDidFinishSuccessed:duration:)]) {
            [self.recordDelegate audioRecordDidFinishSuccessed:recordPath duration:self.recordDuration];
        }
    }else if (self.isCancelRecording) {
        // Cancelled: just discard the recorded WAV.
        [[NSFileManager defaultManager] removeItemAtPath:recordPath error:nil];
    }
    
    [self didStopRecord];
}

#pragma mark - 播放录音

// Builds the standard "playback init failed" error for this manager's domain,
// attaching the underlying AVAudioSession error (when present) so the real
// cause is not discarded.
- (NSError *)ezam_playInitFailedError:(NSError *)underlying {
    NSDictionary *userInfo = underlying ? @{NSUnderlyingErrorKey : underlying} : nil;
    return [NSError errorWithDomain:ERROR_AUDIO_DOMAIN
                               code:kLLErrorPlayTypeInitFailed
                           userInfo:userInfo];
}

// Prepares the audio session for playback and starts playing wavFilePath
// (a local file path or a remote URL string). The callback is invoked
// synchronously with nil on success or an ERROR_AUDIO_DOMAIN error on failure;
// a nil callback is tolerated.
- (void)checkAvailabilityWithFile:(NSString *)wavFilePath callback:(void (^)(NSError *error))callback {
    [self stopCurrentPlaying];

    // Never crash if the caller passed no callback block.
    void (^notify)(NSError *) = ^(NSError *error) {
        if (callback) callback(error);
    };

    NSError *error = nil;
    if (!self.isPlaySessionActive) {
        // Remember the previous category so teardown can restore it, then
        // switch to playback (ducking other apps' audio).
        self.previousCategory = self.audioSession.category;
        BOOL success = [self.audioSession setCategory:AVAudioSessionCategoryPlayback
                                          withOptions:AVAudioSessionCategoryOptionDuckOthers
                                                error:&error];
        // Check the BOOL result, not the out-parameter: Cocoa only guarantees
        // *error is meaningful when the call actually reports failure.
        if (!success) {
            notify([self ezam_playInitFailedError:error]);
            return;
        }

        if ([[self class] isHeadphone]) {
            // Route output to the built-in speaker.
            // NOTE(review): overriding to speaker when headphones ARE attached
            // looks inverted — confirm this condition is intended.
            error = nil;
            success = [self.audioSession overrideOutputAudioPort:AVAudioSessionPortOverrideSpeaker
                                                           error:&error];
            if (!success) {
                notify([self ezam_playInitFailedError:error]);
                return;
            }
        }

        // Activate the session; on later deactivation other apps are notified.
        error = nil;
        success = [[AVAudioSession sharedInstance] setActive:YES
                                                 withOptions:AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation
                                                       error:&error];
        if (!success) {
            notify([self ezam_playInitFailedError:error]);
            return;
        }
    }

    if (!wavFilePath) {
        notify([NSError errorWithDomain:ERROR_AUDIO_DOMAIN
                                   code:kLLErrorPlayTypeFileNotExist
                               userInfo:nil]);
        return;
    }

    // Local files become file:// URLs; anything else (e.g. an http address)
    // is handed to the stream player as a plain URL string.
    NSURL *wavURL = [NSURL URLWithString:wavFilePath];
    if ([[NSFileManager defaultManager] fileExistsAtPath:wavFilePath]) {
        wavURL = [NSURL fileURLWithPath:wavFilePath];
    }

    // Re-assert the playback category right before playing: if the recording
    // SDK left the session in AVAudioSessionCategoryRecord, players would
    // prepareToPlay successfully but -play would return NO, so the category
    // must explicitly be playback here.
    [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback error:nil];

    [self douAudioPlayFile:wavURL];

    notify(nil);
}


// 播放音频，播放音频不需要特殊权限
// Plays the audio at aFilePath (local file or remote URL string). Playback
// itself needs no special permission (unlike recording).
// If the same track is already loaded, this seeks/resumes instead of
// reloading. userinfo may be an NSDictionary whose "position" entry overrides
// the resume position; the same object is passed back in delegate callbacks.
- (void)startPlayingWithPath:(NSString *)aFilePath
                    delegate:(id<AudioPlayDelegate>)delegate
                    userinfo:(id)userinfo continuePlaying:(BOOL)continuePlaying {
    
    // Default to the player's current position; an explicit "position" in
    // userinfo wins.
    NSUInteger position = self.douAudiostreamPlayer.currentTime;
    if ([userinfo isKindOfClass:[NSDictionary class]]) {
        NSDictionary *options = (NSDictionary *)userinfo;
        if ([[options allKeys] containsObject:@"position"]) {
            position = [options[@"position"] integerValue];
        }
    }
    if (self.douAudiostreamPlayer.currentPlayingTrack && [self.douAudiostreamPlayer.currentPlayingTrack.audioFileURL.absoluteString isEqualToString:aFilePath]) { // same file already loaded: just seek and resume
        self.douAudiostreamPlayer.currentTime = position;
        if (!self.isPlaying) {
            [self resumeCurrentPlaying];
        }
        
        return;
    }
    
    // The availability check configures the session and starts playback; its
    // callback runs synchronously, so the state below is set before returning.
    [self checkAvailabilityWithFile:aFilePath callback:^(NSError *error) {
        if (!error) {
            self.playerDelegate = delegate;
            self.userinfo = userinfo;
            self.isPlaying = YES;
            self.isPlaySessionActive = YES;
            
            self.douAudiostreamPlayer.currentTime = position;
            
            if (delegate && [delegate respondsToSelector:@selector(audioPlayDidStarted:)]){
                [delegate audioPlayDidStarted:self.userinfo];
            }
            
            // Warn the caller when starting fresh playback at very low volume.
            if (!continuePlaying && [[self class] currentVolumeLevel] <= kLLSoundVolumeLevelLow) {
                if (delegate && [delegate respondsToSelector:@selector(audioPlayVolumeTooLow)]){
                    [delegate audioPlayVolumeTooLow];
                }
            }
            
        }else {
            // NOTE(review): no user-facing error is surfaced here yet.
            switch (error.code) {
                case kLLErrorPlayTypeInitFailed:
                case kLLErrorPlayTypeFileNotExist:
                case kLLErrorPlayTypePlayError:
                {
                    
                    //NSString *msg = @"遇到问题，暂时无法播放";
                    //todo:ydz
                    /*
                     */
                    break;
                }
                default:
                    break;
            }
            
            [self _stopPlaying];
        }
    }];
}

// Total duration (in seconds) of the track loaded into the stream player.
- (NSTimeInterval)getAudioDuration {
    return [[self.douAudiostreamPlayer streamer] duration];
}

// Fully stops playback: tears down the session, notifies the delegate, then
// detaches the delegate and its userinfo. No-op if no session is active.
- (void)stopPlaying {
    if (!self.isPlaySessionActive) {
        return;
    }

    [self _stopPlaying];

    id<AudioPlayDelegate> delegate = self.playerDelegate;
    if ([delegate respondsToSelector:@selector(audioPlayDidStopped:)]) {
        [delegate audioPlayDidStopped:self.userinfo];
    }

    self.playerDelegate = nil;
    self.userinfo = nil;
}

// Stops the player and returns the audio session to its pre-playback state.
- (void)_stopPlaying {
    [self _stopCurrentPlaying];

    // Restore whatever category was active before playback began.
    NSString *savedCategory = self.previousCategory;
    if (savedCategory.length > 0) {
        [self.audioSession setCategory:savedCategory error:nil];
        self.previousCategory = nil;
    }

    [self.audioSession setActive:NO
                     withOptions:AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation
                           error:nil];
    self.isPlaySessionActive = NO;
}

// Stops the active playback (if any) and informs the delegate, without
// deactivating the audio session or detaching the delegate.
- (void)stopCurrentPlaying {
    if (!self.isPlaying) {
        return;
    }

    [self _stopCurrentPlaying];

    id<AudioPlayDelegate> delegate = self.playerDelegate;
    if ([delegate respondsToSelector:@selector(audioPlayDidStopped:)]) {
        [delegate audioPlayDidStopped:self.userinfo];
    }
}

// Halts the stream player and clears the playing flag.
- (void)_stopCurrentPlaying {
    [self.douAudiostreamPlayer stop];
    self.isPlaying = NO;
}

// Pauses playback (if currently playing) and notifies the delegate.
- (void)pauseCurrentPlaying {
    if (!self.isPlaying) {
        return;
    }

    [self _pauseCurrentPlaying];

    id<AudioPlayDelegate> delegate = self.playerDelegate;
    if ([delegate respondsToSelector:@selector(audioPlayDidPaused:)]) {
        [delegate audioPlayDidPaused:self.userinfo];
    }
}

// Pauses the stream player when it is actually playing; always clears the
// manager's playing flag.
- (void)_pauseCurrentPlaying {
    if ([self.douAudiostreamPlayer isPlaying]) {
        [self.douAudiostreamPlayer pause];
    }
    self.isPlaying = NO;
}

// Resumes paused playback and notifies the delegate; no-op if already playing.
- (void)resumeCurrentPlaying {
    if (self.isPlaying) {
        return;
    }

    [self _resumeCurrentPlaying];

    id<AudioPlayDelegate> delegate = self.playerDelegate;
    if ([delegate respondsToSelector:@selector(audioPlayDidResumed:)]) {
        [delegate audioPlayDidResumed:self.userinfo];
    }
}

// Starts (or continues) the stream player and marks the manager as playing.
- (void)_resumeCurrentPlaying {
    [self.douAudiostreamPlayer play];
    self.isPlaying = YES;
}

// Called when playback ends. flag == NO indicates a playback error.
- (void)handlePlayFinished:(BOOL)flag {
    if (!flag) {
        // Failure: tear down the whole session, then notify and detach.
        [self _stopPlaying];
        
        if ([self.playerDelegate respondsToSelector:@selector(audioPlayDidFailed:)]) {
            [self.playerDelegate audioPlayDidFailed:self.userinfo];
        }
        self.playerDelegate = nil;
        self.userinfo = nil;
    } else {
        // Normal completion: stop the player but keep the session active.
        [self _stopCurrentPlaying];
        
        if ([self.playerDelegate respondsToSelector:@selector(audioPlayDidFinished:)]) {
            [self.playerDelegate audioPlayDidFinished:self.userinfo];
        }
        // NOTE(review): unlike the failure branch, playerDelegate/userinfo are
        // kept here — presumably so the same consumer can replay; confirm
        // this asymmetry is intended.
    }
}

#pragma mark - private util
#pragma mark - 录音文件存储

//------------------------------------------------------------------------------
#pragma mark - Utility
//------------------------------------------------------------------------------

// All candidate Documents directories for the current user (exactly one in an
// iOS sandbox).
- (NSArray *)applicationDocuments
{
    NSArray *searchPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,
                                                               NSUserDomainMask,
                                                               YES);
    return searchPaths;
}

//------------------------------------------------------------------------------

// Path of the app's Documents directory, or nil if the lookup returned
// nothing. (-firstObject is nil-safe, matching the count-guarded original.)
- (NSString *)applicationDocumentsDirectory
{
    NSArray *searchPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,
                                                               NSUserDomainMask,
                                                               YES);
    return [searchPaths firstObject];
}

//------------------------------------------------------------------------------

// file:// URL of the working recording file (kAudioFilePath), rooted at the
// externally configured basePath when set, else the Documents directory.
- (NSURL *)testFilePathURL
{
    NSString *directory = self.basePath ?: [self applicationDocumentsDirectory];
    return [NSURL fileURLWithPath:[NSString stringWithFormat:@"%@/%@",
                                   directory,
                                   kAudioFilePath]];
}

// Generates a quasi-unique file name: epoch seconds followed by a fixed-width
// random suffix.
// Fixes vs. the previous version: the timestamp is formatted as a long (the
// old (int) cast overflows in 2038 on 32-bit int), the suffix is zero-padded
// to 5 digits so names from the same second cannot collide by concatenation
// ambiguity, and arc4random_uniform avoids the modulo bias of arc4random()%n.
+ (NSString *)randomFileName {
    uint32_t salt = arc4random_uniform(100000);
    NSTimeInterval time = [[NSDate date] timeIntervalSince1970];
    NSString *fileName = [NSString stringWithFormat:@"%ld%05u", (long)time, salt];
    
    return fileName;
}

// Destination URL for an AMR file named fileName (".amr" is appended).
// Returns nil when the AMR temp folder is unavailable.
+ (NSURL *)amrPathWithName:(NSString *)fileName {
    if([[self shareInstance] amrAudioTmpPath]){
        // NOTE(review): URLWithString: on a filesystem path returns nil when
        // the path needs percent-escaping (e.g. contains spaces);
        // fileURLWithPath: would be safer, but callers may rely on the current
        // scheme-less URL string — confirm before changing.
        NSURL *folderURL = [NSURL URLWithString:[[self shareInstance] amrAudioTmpPath]];
        NSString *filePathName = [NSString stringWithFormat:@"%@.amr", fileName];
        NSURL *filePath = [folderURL URLByAppendingPathComponent:filePathName];
        
        return filePath;
    }
    
    return nil;
}

// Destination URL for a WAV file named fileName (".wav" is appended).
// Returns nil when the WAV temp folder is unavailable.
+ (NSURL *)wavPathWithName:(NSString *)fileName {
    if([[self shareInstance] wavAudioTmpPath]){
        // NOTE(review): URLWithString: on a filesystem path returns nil when
        // the path needs percent-escaping (e.g. contains spaces);
        // fileURLWithPath: would be safer, but callers may rely on the current
        // scheme-less URL string — confirm before changing.
        NSURL *folderURL = [NSURL URLWithString:[[self shareInstance] wavAudioTmpPath]];
        NSString *filePathName = [NSString stringWithFormat:@"%@.wav", fileName];
        NSURL *filePath = [folderURL URLByAppendingPathComponent:filePathName];
        
        return filePath;
    }
    
    return nil;
}

// Ensures `directory` exists (creating intermediates as needed).
// Returns YES if it already existed or was created successfully.
+ (BOOL)createDirectory:(NSString *)directory{
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:directory]) {
        return YES;
    }

    NSError *error = nil;
    // Check the BOOL return value rather than the NSError out-parameter:
    // Cocoa only guarantees the error is populated on actual failure.
    BOOL created = [fileManager createDirectoryAtPath:directory
                          withIntermediateDirectories:YES
                                           attributes:nil
                                                error:&error];
    if (!created) {
        // localizedDescription is always non-nil (localizedFailureReason often
        // is not), so the log stays informative.
        NSLog(@"创建文件失败 %@", error.localizedDescription);
    }
    return created;
}

// Creates (if needed) directory/folderName and returns it as a URL,
// or nil when creation failed.
+ (NSURL *)createFolderWithName:(NSString *)folderName inDirectory:(NSString *)directory {
    NSString *path = [directory stringByAppendingPathComponent:folderName];
    NSURL *folderURL = nil;
    if([[self class] createDirectory:path]){
        // NOTE(review): URLWithString: yields a scheme-less URL and returns
        // nil for paths containing spaces; fileURLWithPath: is the usual
        // choice — confirm callers don't depend on the current string form.
        folderURL = [NSURL URLWithString:path];
    }
    return folderURL;
}

#pragma mark - static Method
#pragma mark 获得当前的音量
// Current hardware output volume in [0, 1].
// (outputVolume replaces the long-deprecated
// kAudioSessionProperty_CurrentHardwareOutputVolume query.)
+ (float)currentVolumn {
    return [AVAudioSession sharedInstance].outputVolume;
}

// Maps the system output volume onto a 0–16 integer scale.
+ (NSInteger)currentVolumeLevel {
    float volume = [self currentVolumn];
    return (NSInteger)round(volume * 16);
}

// YES when any current audio output route is a wired headphone port.
// Note: only AVAudioSessionPortHeadphones is checked, so Bluetooth audio
// devices do not count as "headphones" here.
+ (BOOL)isHeadphone {
    NSArray *outputs = [[[AVAudioSession sharedInstance] currentRoute] outputs];
    for (AVAudioSessionPortDescription *output in outputs) {
        if ([output.portType isEqualToString:AVAudioSessionPortHeadphones]) {
            return YES;
        }
    }
    return NO;
}

@end
