//
//  StudioManagerTool.m
//  AnotherMediaTool
//
//  Created by fh on 2018/8/31.
//  Copyright © 2018年 fh. All rights reserved.
//

#import "StudioRecordTool.h"
#import <CoreMedia/CoreMedia.h>
#import <AVFoundation/AVFoundation.h>
#import <Photos/Photos.h>

// Error domain used for NSError objects produced by StudioRecordTool
NSString * const StudioRecordToolErrorDomain = @"com.studioRecordTool.instrumentType.error";

/**
 Capture mode for the tool.
 
 - InstrumentTypePhoto: still-photo capture
 - InstrumentTypeVideo: video recording
 - InstrumentTypeAudio: audio recording
 */
typedef NS_ENUM(NSInteger,InstrumentType) {
    InstrumentTypePhoto,
    InstrumentTypeVideo,
    InstrumentTypeAudio
};

@interface StudioRecordTool () <AVCaptureAudioDataOutputSampleBufferDelegate, AVCaptureVideoDataOutputSampleBufferDelegate, AVAudioRecorderDelegate> {
    /** Sample-buffer queue used by the data-output recording path */
    CMBufferQueueRef _previewBufferQueue;
    /** Video resolution width */
    NSInteger _videoX;
    /** Video resolution height */
    NSInteger _videoY;
    /** Number of audio channels */
    int _channels;
    /** Audio sample rate */
    Float64 _samplerate;
    /* YES when the back camera is the active input */
    BOOL _curIsBackCamera;
    
    /* Presentation timestamp of the previous frame */
    CMTime _previousPstTime;
    /* YES while waiting for the first frame */
    BOOL _isFirstFrame;
}

/** Error-monitoring callback */
@property (nonatomic, copy) void(^innerMonitorErrorBlock)(NSError *error);
/** Current mode: photo capture, video recording, or audio recording */
@property (nonatomic, assign) InstrumentType instrumentType;
/** File URL of the finished recording */
@property (nonatomic, strong) NSURL *videoFileURL;
/** Timestamp at which recording started */
@property (atomic, assign) CMTime startTime;
/** Elapsed recording time */
@property (atomic, assign) CGFloat currentRecordTime;
/** Private video-record result callback */
@property (nonatomic, copy) void(^innerVideoRecordResult)(NSURL *outputFileURL, NSError *videoRecordError);
/** Private detailed video-record result callback (adds duration/size) */
@property (nonatomic, copy) void(^innerVideoRecordDetailResult)(NSURL *outputFileURL, NSError *videoRecordError, int videoDuration, float videoSize);
/** Private full-detail video-record result callback (adds thumbnail image) */
@property (nonatomic, copy) void(^innerVideoRecordAllDetailResult)(NSURL *outputFileURL, UIImage *videoImage, NSError *videoRecordError, int videoDuration, float videoSize);
/** Video-record progress callback */
@property (nonatomic, copy) void (^innerVideoRecordProcess)(CGFloat processTime);
/** Private save-photo-to-album result callback */
@property (nonatomic, copy) void(^innerPhotoSaveToPhotosAlbumResult)(BOOL saveResult, NSError *saveError);
/** Private save-video-to-album result callback */
@property (nonatomic, copy) void(^innerVideoSaveToPhotosAlbumResult)(BOOL saveResult, NSError *saveError);

/** Audio-recording output file path */
@property (nonatomic, strong) NSString *audioRecordFilePath;
/** Private audio-record result callback */
@property (nonatomic, copy) void (^innerAudioRecordResult)(NSString *audioFilePath, NSError *error);
/** Private audio-record error */
@property (nonatomic, strong) NSError *innerAudioRecordError;

/** Layer that presents the captured video preview */
@property (nonatomic, readwrite, strong) AVCaptureVideoPreviewLayer *previewLayer;
/** Capture session */
@property (nonatomic, strong) AVCaptureSession *session;
/** Back-camera input */
@property (nonatomic, strong) AVCaptureDeviceInput *backVideoInput;
/** Front-camera input */
@property (nonatomic, strong) AVCaptureDeviceInput *frontVideoInput;
/** Microphone input */
@property (nonatomic, strong) AVCaptureDeviceInput *audioInput;

/** Serial queue on which audio/video samples are written to file */
@property (nonatomic, strong, nonnull) dispatch_queue_t assetWriterQueue;
/** Media writer */
@property (nonatomic, strong) AVAssetWriter *assetWriter;
/** Video writer input */
@property (nonatomic, strong) AVAssetWriterInput *assetWriterVideoInput;
/** Audio writer input */
@property (nonatomic, strong) AVAssetWriterInput *assetWriterAudioInput;
/** Audio connection */
@property (nonatomic, strong) AVCaptureConnection *audioConnection;
/** Video connection */
@property (nonatomic, strong) AVCaptureConnection *videoConnection;
/** Video data output */
@property (nonatomic, strong) AVCaptureVideoDataOutput *videoDataOutput;
/** Audio data output */
@property (nonatomic, strong) AVCaptureAudioDataOutput *audioDataOutput;
/** Still-image output */
@property (nonatomic, strong) AVCaptureStillImageOutput *imageDataOutput;
/** Still-image connection */
@property (nonatomic, strong) AVCaptureConnection *imageConnection;

/** Audio recorder */
@property (nonatomic, strong) AVAudioRecorder *audioRecorder;

/** Whether recording is about to start */
@property (nonatomic, assign) BOOL recordingWillBeStarted;
/** Whether the audio pipeline is ready to capture */
@property (nonatomic, assign) BOOL readyToRecordAudio;
/** Whether the video pipeline is ready to capture */
@property (nonatomic, assign) BOOL readyToRecordVideo;
/** Whether recording is in progress */
@property (nonatomic, readwrite, assign) BOOL isRecording;
/** Whether the asset writer is currently finishing its output */
@property (nonatomic, readwrite, assign) BOOL isFinishWriting;

/** Session preset (resolution) */
@property (nonatomic, readwrite, copy) AVCaptureSessionPreset preset;

/** Background-task identifier used to keep work alive after backgrounding */
@property (nonatomic, assign) UIBackgroundTaskIdentifier backgroundUpdateTask;
@end

@implementation StudioRecordTool
#pragma mark - Life
/// Convenience initializer for video-recording mode: applies the session
/// preset, attaches camera and microphone inputs, enables subject-area focus
/// monitoring, registers notification observers, configures the video data
/// outputs, and starts the session.
- (instancetype)initDefaultVideo {
    if (self = [super init]) {
        [self initVideoConfiguration];
        // Apply the configured resolution preset
        if ([self.session canSetSessionPreset:self.preset]) { // AVCaptureSessionPresetHigh
            self.session.sessionPreset = self.preset;
        }
        if (_curIsBackCamera) {
            // Attach the back-camera input
            if ([self.session canAddInput:self.backVideoInput]) {
                [self.session addInput:self.backVideoInput];
            }
        }else {
            if ([self.session canAddInput:self.frontVideoInput]) {
                [self.session addInput:self.frontVideoInput];
            }
        }
        // Attach the microphone input
        if ([self.session canAddInput:self.audioInput]) {
            [self.session addInput:self.audioInput];
        }
        
        // Enable subject-area monitoring so auto-focus can react to scene changes
        [self changeCurrentCameraProperty:^(AVCaptureDevice *currentCamera) {
            currentCamera.subjectAreaChangeMonitoringEnabled = YES;
        }];
        [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:nil]; // self.backVideoInput.device
     
        // Observe AVCaptureSession interruption events
        [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(handleInterruption:) name:AVCaptureSessionWasInterruptedNotification object:nil];
        
        // App moved to the background
        [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(applicationDidEnterBackgroundNotificationAction:) name:UIApplicationDidEnterBackgroundNotification object:nil];
        
        // Observe the end of a session interruption
        [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(handleEndInterruption:) name:AVCaptureSessionInterruptionEndedNotification object:nil];
        
        // Configure the video-mode outputs
        [self configVideoOutput];
        [self.session startRunning];
        
//        AVAssetWriterInputPixelBufferAdaptor *xx
        
    }
    return self;
}

/// Convenience initializer for photo mode: applies the photo session preset,
/// attaches the active camera input, enables subject-area focus monitoring,
/// configures the still-image output, and starts the session.
- (instancetype)initDefaultPhoto {
    if (self = [super init]) {
        [self initPhotoConfiguration];
        // Apply the configured resolution preset
        if ([self.session canSetSessionPreset:self.preset]) { // AVCaptureSessionPresetPhoto
            self.session.sessionPreset = self.preset;  // AVCaptureSessionPresetPhoto gives the highest resolution (e.g. 3024x4032); AVCaptureSessionPresetHigh / AVCaptureSessionPresetInputPriority give 1080x1920
        }
        if (_curIsBackCamera) {
            // Attach the back-camera input
            if ([self.session canAddInput:self.backVideoInput]) {
                [self.session addInput:self.backVideoInput];
            }
        }else {
            if ([self.session canAddInput:self.frontVideoInput]) {
                [self.session addInput:self.frontVideoInput];
            }
        }
        
        // Enable subject-area monitoring so auto-focus can react to scene changes
        [self changeCurrentCameraProperty:^(AVCaptureDevice *currentCamera) {
            currentCamera.subjectAreaChangeMonitoringEnabled = YES;
        }];
        [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:nil]; // self.backVideoInput.device
        
        // Configure the photo-mode output
        [self configPhotoOutput];
        [self.session startRunning];
    }
    return self;
}

/// Tears down notification observations.
/// Fix: the previous implementation removed four named observations and THEN
/// called the bare -removeObserver:, which already removes every observation
/// registered for this observer — the named removals were redundant.
- (void)dealloc {
    [[NSNotificationCenter defaultCenter] removeObserver:self];
#ifdef DEBUG
    NSLog(@"------- 工具类 %@ 释放了 -------",[self class]);
#endif
}

#pragma mark - Notification
/// Re-applies continuous auto-focus/auto-exposure at the center of the preview
/// when the capture device reports a subject-area change.
- (void)subjectAreaDidChange:(NSNotification *)notification {
    AVCaptureDevice *device = nil;
    if (_curIsBackCamera) {
        device = self.backVideoInput.device;
    }else {
        device = self.frontVideoInput.device;
    }
    // Continuous auto-focus and auto-exposure
    if (device.isFocusPointOfInterestSupported && [device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
        [self changeAVCaptureDevicePropertyWithDevice:device propertyChange:^(AVCaptureDevice *curCaptureDevice) {
            [curCaptureDevice setFocusMode: AVCaptureFocusModeContinuousAutoFocus]; // AVCaptureFocusModeContinuousAutoFocus  AVCaptureFocusModeAutoFocus
            CGPoint cameraPoint = [self.previewLayer captureDevicePointOfInterestForPoint:CGPointMake(CGRectGetMidX(self.previewLayer.frame), CGRectGetMidY(self.previewLayer.frame))];
            // NOTE(review): this call runs while the device is already locked for
            // configuration — confirm focusWithMode:... does not lock it again.
            [self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposureMode:AVCaptureExposureModeAutoExpose atPoint:cameraPoint];
        }];
    }
    self.exposureMode = ExposureModeAutoExpose; // ExposureModeAutoExpose
}

/// Forwards AVCaptureSession interruption reasons to the client callback.
/// On iOS 9+, the "video device unavailable in background" reason is
/// deliberately suppressed; earlier systems always report reason 0.
- (void)handleInterruption:(NSNotification *)notification {
    if (@available(iOS 9.0, *)) {
        NSInteger reason = [notification.userInfo[AVCaptureSessionInterruptionReasonKey] integerValue];
        BOOL suppressed = (reason == AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground);
        if (!suppressed && self.videoRecordWasInterruptedBlock) {
            self.videoRecordWasInterruptedBlock(reason);
        }
    } else if (self.videoRecordWasInterruptedBlock) {
        self.videoRecordWasInterruptedBlock(0);
    }
}

/// Invoked when a capture-session interruption ends; currently only logs.
- (void)handleEndInterruption:(NSNotification *)notification {
    NSLog(@"被打断后回来了！！！");
}

/// Stops any in-progress video recording when the app moves to the background,
/// delivering the result through videoRecordDidEnterBackgroundWasInterruptedBlock.
- (void)applicationDidEnterBackgroundNotificationAction:(NSNotification *)notification {
//#pragma mark - do not stop recording when entering the background
    [self stopVideoRecordWithDetailCompletion:self.videoRecordDidEnterBackgroundWasInterruptedBlock];
    NSLog(@"应用程序进入后台");
}

#pragma mark - Public
/// Installs the capture preview layer at the bottom of the view's layer stack.
/// Fix: the previewFrame parameter was previously ignored and view.bounds was
/// always used; a non-empty previewFrame is now honored, with view.bounds as
/// the backward-compatible fallback.
/// @param view container view for the preview layer (no-op when nil)
/// @param previewFrame desired frame for the preview layer; pass CGRectZero to use view.bounds
- (void)configurePreviewWithView:(UIView *)view frame:(CGRect)previewFrame {
    if (view) {
        self.previewLayer.frame = CGRectIsEmpty(previewFrame) ? view.bounds : previewFrame;
        [view.layer insertSublayer:self.previewLayer atIndex:0];
    }
}
/// Begins a new video recording: on the writer queue, generates a fresh output
/// URL and creates the AVAssetWriter. Sample appending is started elsewhere
/// once recordingWillBeStarted is observed by the capture delegate.
- (void)startVideoRecord {
    if (self.instrumentType != InstrumentTypeVideo) {
        NSString *errorStr = [NSString stringWithFormat:@"模式设置错误,录制失败,当前不是视频录制模式"];
        [self createErrorAndCallbackWithErrorStr:errorStr];
        return;
    }
    // Disable auto screen lock: the video cannot be saved to the album while locked
    [UIApplication sharedApplication].idleTimerDisabled = YES;
    // NOTE(review): @synchronized only serializes the enqueue below, not the
    // block's execution on assetWriterQueue — confirm this is intentional.
    @synchronized(self) {
//        __weak typeof(self) weakSelf = self;
        dispatch_async(self.assetWriterQueue, ^{
            // Reset first-frame timing
            _previousPstTime = kCMTimeZero;
            self.videoFileURL = [self getRandomVideoStorePath];
            NSError *error;
            self.assetWriter = [[AVAssetWriter alloc] initWithURL:self.videoFileURL fileType:AVFileTypeMPEG4 error:&error]; // AVFileTypeMPEG4: MP4 container; AVFileTypeQuickTimeMovie: MOV container
            // Optimize the file layout for network playback
            self.assetWriter.shouldOptimizeForNetworkUse = YES;
            self.recordingWillBeStarted = YES;
            self.isFinishWriting = NO;
            if (error) {
                if (self.innerMonitorErrorBlock) {
                    self.innerMonitorErrorBlock(error);
                }
            }
        });
    }
}

// Stop video recording
/// Finishes the asset writer on the writer queue, then (on the main queue)
/// computes the recording's duration and file size, grabs a thumbnail, and
/// dispatches the result through exactly one of the stored callbacks — or the
/// delegate when no callback is set.
- (void)stopVideoRecord {
    if (self.instrumentType != InstrumentTypeVideo) {
        NSString *errorStr = [NSString stringWithFormat:@"模式设置错误,停止录制失败,当前不是视频录制模式"];
        [self createErrorAndCallbackWithErrorStr:errorStr];
        return;
    }
    // Recording is over; restore automatic screen locking
    [UIApplication sharedApplication].idleTimerDisabled = NO;
    if (self.isFinishWriting == YES) {
        return;
    }
    dispatch_async(self.assetWriterQueue, ^{
        self.isRecording = NO;
        self.readyToRecordVideo = NO;
        self.readyToRecordAudio = NO;
        self.isFinishWriting = YES;
        [self.assetWriter finishWritingWithCompletionHandler:^{
            self.assetWriterVideoInput = nil;
            self.assetWriterAudioInput = nil;
            self.assetWriter = nil;
            self.isFinishWriting = NO;
            self.startTime = CMTimeMake(0, 0);
            self.currentRecordTime = 0;
            dispatch_async(dispatch_get_main_queue(), ^{
                AVURLAsset *asset = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:self.videoFileURL.path]];
                CMTime time = [asset duration];
                int seconds = ceil(time.value/(time.timescale+0.0));
                NSInteger fileSize = [[NSFileManager defaultManager] attributesOfItemAtPath:self.videoFileURL.path error:nil].fileSize;
#ifdef DEBUG
                NSLog(@"\n视频信息 : {\n 视频时间: %@s \n 视频大小: %fMB \n}",@(seconds),fileSize/1024.0/1024.0);
#endif
                AVAssetImageGenerator *imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:asset];
                imageGenerator.appliesPreferredTrackTransform = YES;    // orient the thumbnail correctly
                NSError *error = nil;
                CMTime getTime = CMTimeMake(0,(int32_t)self.configuration.videoExpectedSourceFrameRate);// thumbnail time: CMTime(value, timescale) — value is the frame index, timescale is frames per second
                CMTime actucalTime; // time at which the thumbnail was actually generated
                CGImageRef cgImage = [imageGenerator copyCGImageAtTime:getTime actualTime:&actucalTime error:&error];
                if (error) {
#ifdef DEBUG
                NSLog(@"截取视频图片失败: %@",error.localizedDescription);
#endif
                }
                CMTimeShow(actucalTime);
                // NOTE(review): when thumbnail generation failed, cgImage may be
                // NULL here; imageWithCGImage: would then yield an empty image.
                UIImage *image = [UIImage imageWithCGImage:cgImage];
                CGImageRelease(cgImage);
                
                // Exactly one result path fires, in priority order.
                if (self.innerVideoRecordResult) {
                    self.innerVideoRecordResult(self.videoFileURL, nil);
                }else if(self.innerVideoRecordDetailResult) {
                    self.innerVideoRecordDetailResult(self.videoFileURL, nil, seconds, fileSize/1024.0/1024.0);
                }else if(self.innerVideoRecordAllDetailResult) {
                    self.innerVideoRecordAllDetailResult(self.videoFileURL, image, error, seconds, fileSize/1024.0/1024.0);
                }else {
                    if ([self.delegate respondsToSelector:@selector(didFinishVideoRecordToOutputFileAtURL:recordError:)]) {
                        [self.delegate didFinishVideoRecordToOutputFileAtURL:self.videoFileURL recordError:nil];
                    }
                }
            });
        }];
    });
}

/// Stops video recording, delivering the output file URL (or an error) to the
/// supplied block instead of the delegate.
- (void)stopVideoRecordWithCompletion:(void (^)(NSURL *, NSError *))completion {
    if (self.instrumentType == InstrumentTypeVideo) {
        if (completion) {
            self.innerVideoRecordResult = completion;
        }
        [self stopVideoRecord];
        return;
    }
    // Wrong mode: report immediately without touching the writer.
    if (completion) {
        completion(nil, [self createErrorAndCallbackWithErrorStr:@"模式设置错误,停止录制失败,当前不是视频录制模式"]);
    }
}

/// Stops video recording, delivering URL, error, duration (seconds) and file
/// size (MB) to the supplied block.
- (void)stopVideoRecordWithDetailCompletion:(void (^)(NSURL *, NSError *, int, float))completion {
    if (self.instrumentType == InstrumentTypeVideo) {
        if (completion) {
            self.innerVideoRecordDetailResult = completion;
        }
        [self stopVideoRecord];
        return;
    }
    // Wrong mode: report immediately without touching the writer.
    if (completion) {
        completion(nil, [self createErrorAndCallbackWithErrorStr:@"模式设置错误,停止录制失败,当前不是视频录制模式"], 0, 0);
    }
}
/// Stops video recording, delivering URL, thumbnail, error, duration (seconds)
/// and file size (MB) to the supplied block.
- (void)stopVideoRecordWithAllDetailCompletion:(void (^)(NSURL *, UIImage *, NSError *, int, float))completion {
    if (self.instrumentType == InstrumentTypeVideo) {
        if (completion) {
            self.innerVideoRecordAllDetailResult = completion;
        }
        [self stopVideoRecord];
        return;
    }
    // Wrong mode: report immediately without touching the writer.
    if (completion) {
        completion(nil, nil, [self createErrorAndCallbackWithErrorStr:@"模式设置错误,停止录制失败,当前不是视频录制模式"], 0, 0);
    }
}

/// Registers a block to receive periodic progress updates (elapsed seconds)
/// while video recording runs.
- (void)watchVideoRecordProcessWithBlock:(void (^)(CGFloat))processBlock {
    self.innerVideoRecordProcess = processBlock;
}

/// Takes a photo; the result is reported via the delegate only.
- (void)takePicture {
    [self takePictureWithCompletion:nil];
}

/// Captures a still image asynchronously.
/// Result delivery: the completion block when provided, otherwise the delegate.
/// When saveToSandbox is YES the JPEG data is additionally written to a
/// generated sandbox path, which is reported back on success.
- (void)takePictureWithCompletion:(void (^)(UIImage *, NSString *, NSError *))completion {
    if (self.instrumentType != InstrumentTypePhoto) {
        NSString *errorStr = [NSString stringWithFormat:@"模式设置错误,拍照失败,当前不是拍照模式"];
        if (completion) {
            completion(nil, nil, [self createErrorAndCallbackWithErrorStr:errorStr]);
        }else {
            if (self.delegate && [self.delegate respondsToSelector:@selector(didFinishPhotoPickupToImage:photoFilePath:photographError:)]) {
                [self.delegate didFinishPhotoPickupToImage:nil photoFilePath:nil photographError:[self createErrorAndCallbackWithErrorStr:errorStr]];
            }
        }
        return;
    }
    
    AVCaptureConnection *connection = [self.imageDataOutput connectionWithMediaType:AVMediaTypeVideo];
    if (!connection) {
#ifdef DEBUG
        NSLog(@"拍照失败: connection == nil!!!");
#endif
        NSString *errorStr = [NSString stringWithFormat:@"会话创建错误,拍照失败,会话为空connection == nil"];
        if (completion) {
            completion(nil,nil,[self createErrorAndCallbackWithErrorStr:errorStr]);
        }else {
            if (self.delegate && [self.delegate respondsToSelector:@selector(didFinishPhotoPickupToImage:photoFilePath:photographError:)]) {
                [self.delegate didFinishPhotoPickupToImage:nil photoFilePath:nil photographError:[self createErrorAndCallbackWithErrorStr:errorStr]];
            }
        }
        return;
    }
    //    NSDictionary *outputSettings = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], (id)kCVPixelBufferPixelFormatTypeKey, nil];

    // Option 1 for applying the zoom/crop factor:
  //  [self.imageConnection setVideoScaleAndCropFactor:self.photoVideoScaleAndCropFactor <= 1.0 ? 1.0 : (self.photoVideoScaleAndCropFactor > self.imageConnection.videoMaxScaleAndCropFactor ? self.imageConnection.videoMaxScaleAndCropFactor : self.photoVideoScaleAndCropFactor)];
    
    __weak typeof(self) weakSelf = self;
    //    __strong typeof(weakSelf) strongSelf = weakSelf
    [self.imageDataOutput setOutputSettings:@{AVVideoCodecKey:AVVideoCodecJPEG}];
    // NOTE(review): this handler captures self strongly in several places and
    // weakSelf in one; no retain cycle exists (the block is not stored), but
    // the mixed usage is inconsistent.
    [self.imageDataOutput captureStillImageAsynchronouslyFromConnection:connection completionHandler:^(CMSampleBufferRef  _Nullable imageDataSampleBuffer, NSError * _Nullable error) {
        if (!imageDataSampleBuffer) {
            if (completion) {
                completion(nil,nil,error);
            }else {
                if (self.delegate && [self.delegate respondsToSelector:@selector(didFinishPhotoPickupToImage:photoFilePath:photographError:)]) {
                    [self.delegate didFinishPhotoPickupToImage:nil photoFilePath:nil photographError:error];
                }
            }
            return;
        }
        NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
        UIImage *image = [UIImage imageWithData:imageData];
        NSString *filePath = [self getRandomPhotoStorePath];
        BOOL result = NO;
        if (self.saveToSandbox) {
            result = [imageData writeToFile:filePath atomically:YES];
        }else {
            if (image != nil) {
                result = YES;
            }
        }
        
        if (completion) {
            completion(image,result ? filePath : nil,error);
        }else {
            if (self.delegate && [self.delegate respondsToSelector:@selector(didFinishPhotoPickupToImage:photoFilePath:photographError:)]) {
                [self.delegate didFinishPhotoPickupToImage:image photoFilePath:(result ? filePath : nil) photographError:error];
            }
        }
        if (error) {
            if (weakSelf.innerMonitorErrorBlock) {
                weakSelf.innerMonitorErrorBlock(error);
            }
        }
    }];
    
}
/// Registers a block that asynchronously receives every NSError this tool generates.
- (void)asyncMonitorError:(void (^)(NSError *))errorBlock {
    if (errorBlock) {
        self.innerMonitorErrorBlock = errorBlock;
    }
}
/// Starts the capture session unless it is already running.
- (void)beginCapture {
    if (self.session.isRunning) {
        return;
    }
    [self.session startRunning];
}
/// Stops the capture session if it is currently running.
- (void)endCapture {
    if (!self.session.isRunning) {
        return;
    }
    [self.session stopRunning];
}
/// Switches between the front and back camera inputs and rebuilds the
/// audio/video data outputs.
- (void)toggleCamera {
    // Bracket the changes in begin/commitConfiguration so they apply atomically.
    [self.session beginConfiguration];
    AVCaptureDeviceInput *inputToRemove = _curIsBackCamera ? self.backVideoInput : self.frontVideoInput;
    AVCaptureDeviceInput *inputToAdd    = _curIsBackCamera ? self.frontVideoInput : self.backVideoInput;
    [self.session removeInput:inputToRemove];
    if ([self.session canAddInput:inputToAdd]) {
        [self.session addInput:inputToAdd];
    }
    // The data outputs must be rebuilt for the new camera.
    [self.session removeOutput:self.videoDataOutput];
    [self.session removeOutput:self.audioDataOutput];
    [self configVideoOutput];
    
    _curIsBackCamera = !_curIsBackCamera;
    [self.session commitConfiguration];
    
    // Some capture parameters may need re-applying after the switch.
}

/// Performs a one-shot focus/exposure adjustment at the given point
/// (expressed in preview-layer coordinates).
- (void)focusAtPoint:(CGPoint)point {
    CGPoint cameraPoint = [self.previewLayer captureDevicePointOfInterestForPoint:point];
    [self focusWithMode:AVCaptureFocusModeAutoFocus exposureMode:AVCaptureExposureModeAutoExpose atPoint:cameraPoint];
}

/// Maps percent onto [photoMinScaleAndCropFactor, photoMaxScaleAndCropFactor]
/// and applies the resulting zoom factor to the active camera (never below 1.0).
- (void)setVideoZoomFactorWithPercent:(CGFloat)percent {
    [self changeCurrentCameraProperty:^(AVCaptureDevice *currentCamera) {
        CGFloat range = self.photoMaxScaleAndCropFactor - self.photoMinScaleAndCropFactor;
        CGFloat zoom = self.photoMinScaleAndCropFactor + range * percent;
        // Clamp: a zoom factor below 1.0 is invalid, and the configured
        // maximum bounds the upper end.
        zoom = MAX(zoom, 1.0);
        zoom = MIN(zoom, self.photoMaxScaleAndCropFactor);
        currentCamera.videoZoomFactor = zoom;
    }];
}

/// Saves a photo to the system photos album; the stored completion is fired
/// from the image:didFinishSavingWithError:contextInfo: selector callback.
- (void)savePhoto:(UIImage *)photo toPhotosAlbumWithCompletion:(void (^)(BOOL , NSError *))completion {
    self.innerPhotoSaveToPhotosAlbumResult = completion;
    // Write the image to the photos album
    UIImageWriteToSavedPhotosAlbum(photo, self, @selector(image:didFinishSavingWithError:contextInfo:), nil);
}

/// Saves a video file to the system photos album.
/// Fix: previously, when the file was not compatible with the Saved Photos
/// album, the completion block was silently never invoked; it now fires with
/// NO and a descriptive error.
- (void)saveVideo:(NSURL *)videoURL toPhotosAlbumWithCompletion:(void (^)(BOOL, NSError *))completion {
    self.innerVideoSaveToPhotosAlbumResult = completion;
    if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(videoURL.path)) {
        // The selector callback fires innerVideoSaveToPhotosAlbumResult.
        UISaveVideoAtPathToSavedPhotosAlbum(videoURL.path, self, @selector(video:didFinishSavingWithError:contextInfo:), nil);
    } else if (completion) {
        completion(NO, [self createErrorAndCallbackWithErrorStr:@"视频格式不兼容,无法保存到相册"]);
    }
}

/// Starts audio recording if it is not already in progress.
/// NOTE(review): unlike the video/photo APIs, this does not verify
/// instrumentType before recording — confirm whether a mode check is needed.
- (void)startAudioRecord {
    if (![self.audioRecorder isRecording]) {
        [self.audioRecorder prepareToRecord];
        [self.audioRecorder record];
    }
}

/// Stops audio recording without a completion callback.
- (void)stopAudioRecord {
    [self stopAudioRecordWithCompletion:nil];
}
/// Stops audio recording and reports the file path (or an error) via the block.
/// Fix: the completion is now stored BEFORE calling -stop. -stop triggers the
/// AVAudioRecorderDelegate finish callback, which reads innerAudioRecordResult;
/// assigning it afterwards could race and lose the callback.
/// NOTE(review): nilling audioRecorder immediately after -stop may release the
/// recorder before its delegate callback fires — confirm against the
/// AVAudioRecorderDelegate implementation.
- (void)stopAudioRecordWithCompletion:(void (^)(NSString *, NSError *))completion {
    if (completion) {
        self.innerAudioRecordResult = completion;
    }
    [self.audioRecorder stop];
    self.audioRecorder = nil;
}
/// Collects summary information for a video file: a thumbnail (generated at
/// the configured expected source frame rate), the duration in whole seconds,
/// and the file size in bytes.
- (void)videoInformationGeneratorWithVideoURL:(NSURL *)videoURL complete:(void (^)(UIImage *videoImage, NSError *generatorError,int videoDuration,NSInteger videoFileSize))completeBlock {
    AVURLAsset *urlSet = [AVURLAsset assetWithURL:videoURL];
    AVAssetImageGenerator *imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:urlSet];
    imageGenerator.appliesPreferredTrackTransform = YES;    // orient the thumbnail correctly
    NSError *error = nil;
    CMTime time = CMTimeMake(0,(int32_t)self.configuration.videoExpectedSourceFrameRate); // thumbnail time: CMTime(value, timescale) — value is the frame index, timescale is frames per second
    CMTime actucalTime; // time at which the thumbnail was actually generated
    CGImageRef cgImage = [imageGenerator copyCGImageAtTime:time actualTime:&actucalTime error:&error];
    
    CMTime totalTime = [urlSet duration];
    int seconds = ceil((totalTime.value+0.0)/(totalTime.timescale+0.0));
    NSInteger fileSize = [[NSFileManager defaultManager] attributesOfItemAtPath:videoURL.path error:nil].fileSize;
    if (error) {
#ifdef DEBUG
        NSLog(@"截取视频图片失败:%@",error.localizedDescription);
#endif
    }
    CMTimeShow(actucalTime);
    // NOTE(review): when generation failed, cgImage may be NULL; the resulting
    // image would then be empty — the error is still forwarded to the block.
    UIImage *image = [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);
    if (completeBlock) {
        completeBlock(image,error,seconds,fileSize);
    }
}
/// Class-level variant: collects summary information for a video file — a
/// thumbnail (sampled at frame 0 with a fixed timescale of 30), the duration
/// in whole seconds, and the file size in bytes.
+ (void)videoInformationGeneratorWithVideoURL:(NSURL *)videoURL complete:(void (^)(UIImage *, NSError *, int, NSInteger))completeBlock {
    AVURLAsset *asset = [AVURLAsset assetWithURL:videoURL];
    AVAssetImageGenerator *generator = [AVAssetImageGenerator assetImageGeneratorWithAsset:asset];
    generator.appliesPreferredTrackTransform = YES; // orient the thumbnail correctly
    
    NSError *generatorError = nil;
    CMTime requestedTime = CMTimeMake(0,30); // CMTime(value, timescale): frame index over frames-per-second
    CMTime actualTime;                        // where the thumbnail was actually sampled
    CGImageRef thumbnailRef = [generator copyCGImageAtTime:requestedTime actualTime:&actualTime error:&generatorError];
    
    CMTime duration = [asset duration];
    int seconds = ceil((duration.value+0.0)/(duration.timescale+0.0));
    NSInteger fileSize = [[NSFileManager defaultManager] attributesOfItemAtPath:videoURL.path error:nil].fileSize;
    if (generatorError) {
#ifdef DEBUG
        NSLog(@"截取视频图片失败:%@",generatorError.localizedDescription);
#endif
    }
    CMTimeShow(actualTime);
    UIImage *thumbnail = [UIImage imageWithCGImage:thumbnailRef];
    CGImageRelease(thumbnailRef);
    if (completeBlock) {
        completeBlock(thumbnail,generatorError,seconds,fileSize);
    }
}

#pragma mark - Private
/// Sets the default state for photo mode.
/// Consistency fix: assign through the preset property setter (as
/// initVideoConfiguration does) rather than the _preset ivar directly, so the
/// property's copy semantics are honored.
- (void)initPhotoConfiguration {
    self.preset = AVCaptureSessionPresetPhoto;
    self.photoVideoScaleAndCropFactor = 1.0;
    self.instrumentType = InstrumentTypePhoto;
    self.flashMode = FlashModeOff;
    self.torchMode = TorchModeOff;
    self.exposureMode = ExposureModeContinuousAutoExposure;
    self.coverStorePhoto = NO;
    self.saveToSandbox = YES;
    _curIsBackCamera = YES;
}
/// Sets the default state for video-recording mode: high-quality preset,
/// back camera, flash/torch off, sandbox saving on, and zeroed timing state.
- (void)initVideoConfiguration {
    self.preset = AVCaptureSessionPresetHigh;
    self.instrumentType = InstrumentTypeVideo;
    self.flashMode = FlashModeOff;
    self.torchMode = TorchModeOff;
    self.coverStoreVideo = NO;
    self.saveToSandbox = YES;
    _curIsBackCamera = YES;
    _previousPstTime = kCMTimeZero;
    _isFirstFrame = YES;
    self.configuration = [StudioConfiguration defaultConfiguration];
    self.startTime = CMTimeMake(0, 0);
    self.currentRecordTime = 0;
}
/// Mutates properties of the back-camera device inside the shared
/// lock/unlock helper.
- (void)changeVideoInputDeviceProperty:(void(^)(AVCaptureDevice *captureDevice))propertyChange {
    AVCaptureDevice *rearDevice = [self.backVideoInput device];
    // The shared helper already nil-checks the block, so it can be forwarded
    // directly instead of being re-wrapped.
    [self changeAVCaptureDevicePropertyWithDevice:rearDevice propertyChange:propertyChange];
}
/// Mutates properties of whichever camera is currently active, inside the
/// shared lock/unlock helper.
- (void)changeCurrentCameraProperty:(void(^)(AVCaptureDevice *currentCamera))propertyChangeBlock {
    AVCaptureDeviceInput *activeInput = _curIsBackCamera ? self.backVideoInput : self.frontVideoInput;
    [self changeAVCaptureDevicePropertyWithDevice:activeInput.device propertyChange:propertyChangeBlock];
}
/// Applies a configuration change to a capture device, bracketed by
/// lockForConfiguration:/unlockForConfiguration as AVFoundation requires.
/// On lock failure, builds and reports an error instead of mutating.
- (void)changeAVCaptureDevicePropertyWithDevice:(AVCaptureDevice *)device propertyChange:(void(^)(AVCaptureDevice *curCaptureDevice))propertyChange {
    NSError *lockError;
    if (![device lockForConfiguration:&lockError]) {
        NSString *errorStr = [NSString stringWithFormat:@"设置设备属性过程发生错误,错误信息:%@",lockError.localizedDescription];
        [self createErrorAndCallbackWithErrorStr:errorStr];
        return;
    }
    if (propertyChange) {
        propertyChange(device);
    }
    [device unlockForConfiguration];
}

/**
 Builds an NSError for the given message, forwards it to the error-monitor
 block (if any), and returns it.

 Fix: the previous implementation looked up the literal key @"errorStr" in the
 localization table, discarding the actual message passed in. The message is
 now used as the localization key, with itself as the fallback value when no
 translation exists.

 @param errorStr error message / localization key
 @return the constructed error
 */
- (NSError *)createErrorAndCallbackWithErrorStr:(NSString *)errorStr {
    NSString *localizedReason = [[NSBundle mainBundle] localizedStringForKey:errorStr value:errorStr table:@"StudioManagerTool"];
    // Guard against a nil reason (e.g. nil errorStr): dictionary literals
    // throw on nil values.
    NSDictionary *userInfo = @{NSLocalizedFailureReasonErrorKey : localizedReason ?: @""};
    NSError *error = [[NSError alloc] initWithDomain:StudioRecordToolErrorDomain code:NSFeatureUnsupportedError userInfo:userInfo];
    if (self.innerMonitorErrorBlock) {
        self.innerMonitorErrorBlock(error);
    }
    return error;
}

/**
 Creates a capture input for the given device.

 Fix: failure is now detected by checking the returned object for nil rather
 than the error pointer — Cocoa only guarantees the out-error is meaningful
 when the call actually fails.

 @param captureDevice device to wrap in an input
 @return the input, or nil on failure (the error is also reported through the
         monitor block)
 */
- (AVCaptureDeviceInput *)createInputWithAVCaptureDevice:(AVCaptureDevice *)captureDevice {
    NSError *error = nil;
    AVCaptureDeviceInput *captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
    if (!captureDeviceInput) {
#ifdef DEBUG
        NSLog(@"取得设备输入对象时出错，错误原因：%@",error.localizedDescription);
#endif
        NSString *errorStr = [NSString stringWithFormat:@"取得设备输入对象时出错，错误原因：%@",error.localizedDescription];
        [self createErrorAndCallbackWithErrorStr:errorStr];
        return nil;
    }

    return captureDeviceInput;
}
/**
 Back-facing camera lookup.

 @return the rear camera device, or nil if unavailable
 */
- (AVCaptureDevice *)obtainRearCamera {
    return [self obtainDeviceWithPosition:AVCaptureDevicePositionBack mediaType:AVMediaTypeVideo];
}
/**
 Front-facing camera lookup.

 @return the front camera device, or nil if unavailable
 */
- (AVCaptureDevice *)obtainFrontCamera {
    return [self obtainDeviceWithPosition:AVCaptureDevicePositionFront mediaType:AVMediaTypeVideo];
}
/**
 Microphone lookup.

 @return the system default audio-capture device
 */
- (AVCaptureDevice *)obtainMicrophone {
    return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
}
/**
 Find the capture device at the given position for the given media type.

 On iOS 10+ an AVCaptureDeviceDiscoverySession is used with the widest
 device-type list the running OS supports; earlier systems fall back to
 the deprecated devicesWithMediaType: enumeration.

 @param position  Desired device position; forced to Unspecified for audio
                  (microphones report an unspecified position).
 @param mediaType AVMediaTypeVideo or AVMediaTypeAudio.
 @return The matching device, or nil after reporting an error when none found.
 */
- (AVCaptureDevice *)obtainDeviceWithPosition:(AVCaptureDevicePosition)position mediaType:(AVMediaType)mediaType {

    BOOL isAudio = [mediaType isEqualToString:AVMediaTypeAudio];
    if (@available(iOS 10.0, *)) {
        if (isAudio) {
            position = AVCaptureDevicePositionUnspecified;
        }
        // Pick the device-type list matching the OS version; newer types are
        // unavailable on older systems.
        NSArray<AVCaptureDeviceType> *deviceTypes = nil;
        if (@available(iOS 11.1, *)) {
            deviceTypes = @[AVCaptureDeviceTypeBuiltInMicrophone, AVCaptureDeviceTypeBuiltInWideAngleCamera, AVCaptureDeviceTypeBuiltInTelephotoCamera, AVCaptureDeviceTypeBuiltInDualCamera, AVCaptureDeviceTypeBuiltInTrueDepthCamera];
        } else if (@available(iOS 10.2, *)) {
            deviceTypes = @[AVCaptureDeviceTypeBuiltInMicrophone, AVCaptureDeviceTypeBuiltInWideAngleCamera, AVCaptureDeviceTypeBuiltInTelephotoCamera, AVCaptureDeviceTypeBuiltInDualCamera];
        } else {
            deviceTypes = @[AVCaptureDeviceTypeBuiltInMicrophone, AVCaptureDeviceTypeBuiltInWideAngleCamera, AVCaptureDeviceTypeBuiltInTelephotoCamera];
        }
        AVCaptureDeviceDiscoverySession *discoverySession = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:deviceTypes mediaType:mediaType position:position];
        for (AVCaptureDevice *device in discoverySession.devices) {
            if (device.position == position) {
                return device;
            }
        }
    } else {
        // Fallback on earlier versions.
        NSArray *devicesArr = [AVCaptureDevice devicesWithMediaType:mediaType];
        if (isAudio) {
            return devicesArr.firstObject;
        }
        for (AVCaptureDevice *device in devicesArr) {
            if (device.position == position) {
                return device;
            }
        }
    }

    [self createErrorAndCallbackWithErrorStr:@"硬件设备获取错误"];
    return nil;
}


/**
 Build the file URL the next video recording should be written to.

 When coverStoreVideo is YES a fixed name ("uniqueness.mp4") is reused and
 any existing file at that path is deleted first; otherwise (or when the
 deletion fails) a unique timestamp-based name is generated.

 @return A .mp4 file URL inside videoSaveDirectory, or the Documents
         directory when no custom directory is set.
 */
- (NSURL *)getRandomVideoStorePath {

    NSString *documentsDirectory = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES).firstObject;
    if (self.videoSaveDirectory) {
        documentsDirectory = self.videoSaveDirectory;
    }

    NSFileManager *fileManager = [NSFileManager defaultManager];
    if (self.coverStoreVideo) {
        NSString *fixedPath = [documentsDirectory stringByAppendingPathComponent:@"uniqueness.mp4"]; // .MOV
        if (![fileManager fileExistsAtPath:fixedPath]) {
            return [NSURL fileURLWithPath:fixedPath];
        }
        NSError *error = nil;
        // BUGFIX: judge failure by the method's return value, not the error
        // pointer — the pointer is only meaningful on failure.
        if ([fileManager removeItemAtPath:fixedPath error:&error]) {
            return [NSURL fileURLWithPath:fixedPath];
        }
        if (error && self.innerMonitorErrorBlock) {
            self.innerMonitorErrorBlock(error);
        }
        // Removal failed — fall through to a unique timestamped name.
    }

    NSDateFormatter *formatter = [[NSDateFormatter alloc] init];
    formatter.dateFormat = @"yyyy-MM-dd-HH-mm-ss";
    NSString *fileName = [formatter stringFromDate:[NSDate date]];

    // Probe "<name>-0.mp4", "<name>-1.mp4", ... until an unused path is found.
    int fileNamePostfix = 0;
    NSString *filePath = nil;
    do {
        filePath = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%@-%i.mp4",fileName,fileNamePostfix++]]; // .MOV
    } while ([fileManager fileExistsAtPath:filePath]);

    return [NSURL fileURLWithPath:filePath];
}

/**
 Build the file path the next captured photo should be written to.

 When coverStorePhoto is YES a fixed name ("uniqueness.JPG") is reused and
 any existing file at that path is deleted first; otherwise (or when the
 deletion fails) a unique timestamp-based name is generated.

 @return A .JPG file path inside photoSaveDirectory, or the Documents
         directory when no custom directory is set.
 */
- (NSString *)getRandomPhotoStorePath {

    NSString *storeDirectory = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES).firstObject;
    if (self.photoSaveDirectory) {
        storeDirectory = self.photoSaveDirectory;
    }

    NSFileManager *fileManager = [NSFileManager defaultManager];
    if (self.coverStorePhoto) {
        NSString *fixedPath = [storeDirectory stringByAppendingPathComponent:@"uniqueness.JPG"];
        if (![fileManager fileExistsAtPath:fixedPath]) {
            return fixedPath;
        }
        NSError *error = nil;
        // BUGFIX: judge failure by the method's return value, not the error
        // pointer — the pointer is only meaningful on failure.
        if ([fileManager removeItemAtPath:fixedPath error:&error]) {
            return fixedPath;
        }
        if (error && self.innerMonitorErrorBlock) {
            self.innerMonitorErrorBlock(error);
        }
        // Removal failed — fall through to a unique timestamped name.
    }

    NSDateFormatter *formatter = [[NSDateFormatter alloc] init];
    formatter.dateFormat = @"yyyy-MM-dd-HH-mm-ss";
    NSString *dateStrName = [formatter stringFromDate:[NSDate date]];

    // Probe "<name>-0.JPG", "<name>-1.JPG", ... until an unused path is found.
    int fileNamePostfix = 0;
    NSString *filePath = nil;
    do {
        filePath = [storeDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%@-%i.JPG",dateStrName,fileNamePostfix++]];
    } while ([fileManager fileExistsAtPath:filePath]);

    return filePath;
}

/**
 Build a unique .caf file path ("<dir>/<yyyy-MM-dd-HH-mm-ss>-<n>.caf") for
 the next audio recording.

 @return A path inside audioSaveDirectory, or the Documents directory when
         no custom directory is set.
 */
- (NSString *)getRandomAudioStorePath {
    NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
    dateFormatter.dateFormat = @"yyyy-MM-dd-HH-mm-ss";
    NSString *baseName = [dateFormatter stringFromDate:[NSDate date]];

    NSString *directory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
    if (self.audioSaveDirectory) {
        directory = self.audioSaveDirectory;
    }

    // Probe "<base>-0.caf", "<base>-1.caf", ... until an unused path is found.
    NSFileManager *fileManager = [NSFileManager defaultManager];
    int suffix = 0;
    NSString *candidate = nil;
    do {
        NSString *name = [NSString stringWithFormat:@"%@-%i.caf", baseName, suffix++];
        candidate = [directory stringByAppendingPathComponent:name];
    } while ([fileManager fileExistsAtPath:candidate]);

    return candidate;
}

/**
 Append one captured sample buffer to the asset writer.

 Starts the writer and its session at the first buffer's presentation time,
 then appends the buffer to the matching (video/audio) writer input while
 the writer is in the Writing state. Terminal writer states are logged in
 DEBUG builds.

 @param sampleBuffer The captured sample buffer.
 @param mediaType    AVMediaTypeVideo or AVMediaTypeAudio.
 */
- (void)writeSampleBuffer:(CMSampleBufferRef)sampleBuffer
                   ofType:(NSString *)mediaType {

    if (self.assetWriter.status == AVAssetWriterStatusUnknown) {
        if ([self.assetWriter startWriting]) {
            // Anchor the session to the first buffer so timestamps start at zero.
            CMTime timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
            [self.assetWriter startSessionAtSourceTime:timestamp];
        }else {
#ifdef DEBUG
            NSLog(@"AVAssetWriter startWriting error:%@", self.assetWriter.error);
#endif
            NSString *errorStr = [NSString stringWithFormat:@"AVAssetWriter startWriting error:%@", self.assetWriter.error];
            [self createErrorAndCallbackWithErrorStr:errorStr];
        }
    }
    if (self.assetWriter.status == AVAssetWriterStatusWriting) {
        // BUGFIX: compare NSString media types with -isEqualToString: rather
        // than pointer equality, which only matches the framework constants
        // by coincidence.
        if ([mediaType isEqualToString:AVMediaTypeVideo]) {
            // Frame dropping enforces the configured output frame rate.
            CMSampleBufferRef ref = [self dropFrame:sampleBuffer];
            if (ref && self.assetWriterVideoInput.readyForMoreMediaData) {
                if (![self.assetWriterVideoInput appendSampleBuffer:ref]) {
#ifdef DEBUG
                    NSLog(@"isRecording:%d, willBeStarted:%d", self.isRecording, self.recordingWillBeStarted);
                    NSLog(@"AVAssetWriterInput video appendSapleBuffer error:%@", self.assetWriter.error);
#endif
                    NSString *errorStr = [NSString stringWithFormat:@"AVAssetWriterInput video appendSapleBuffer error:%@", self.assetWriter.error];
                    [self createErrorAndCallbackWithErrorStr:errorStr];
                }
            }
        }else if ([mediaType isEqualToString:AVMediaTypeAudio]) {
            if (self.assetWriterAudioInput.readyForMoreMediaData) {
                if (![self.assetWriterAudioInput appendSampleBuffer:sampleBuffer]) {
#ifdef DEBUG
                    NSLog(@"AVAssetWriterInput audio appendSapleBuffer error:%@", self.assetWriter.error);
#endif
                    NSString *errorStr = [NSString stringWithFormat:@"AVAssetWriterInput audio appendSapleBuffer error:%@", self.assetWriter.error];
                    [self createErrorAndCallbackWithErrorStr:errorStr];
                }
            }
        }
    }
#ifdef DEBUG
    // Terminal states are only observable here; log them for diagnosis.
    if (self.assetWriter.status == AVAssetWriterStatusCompleted) {
        NSLog(@"AVAssetWriterStatusCompleted %@",self.assetWriter.error);
    }
    if (self.assetWriter.status == AVAssetWriterStatusFailed) {
        NSLog(@"AVAssetWriterStatusFailed %@",self.assetWriter.error);
    }
    if (self.assetWriter.status == AVAssetWriterStatusCancelled) {
        NSLog(@"AVAssetWriterStatusCancelled %@", self.assetWriter.error);
    }
#endif
}

/**
 Create and attach the video AVAssetWriterInput using the externally
 supplied configuration.videoOutputSettings.

 @param currentFormatDescription Format description of the incoming video
        frames. Currently unused — the output settings come from
        self.configuration, not from the source format — but kept for
        signature compatibility with the capture callback.
 @return YES when the input was created and added to the writer; NO
         otherwise (an error is reported through the monitor block).
 */
- (BOOL)setupAssetWriterVideoInput:(CMFormatDescriptionRef)currentFormatDescription {

#ifdef DEBUG
     NSLog(@"视频输出设置: %@", self.configuration.videoOutputSettings);
#endif
    if (![self.assetWriter canApplyOutputSettings:self.configuration.videoOutputSettings forMediaType:AVMediaTypeVideo]) {
#ifdef DEBUG
        NSLog(@"Couldn't apply video output settings.");
#endif
        [self createErrorAndCallbackWithErrorStr:@"Couldn't apply video output settings."];
        return NO;
    }
    self.assetWriterVideoInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:self.configuration.videoOutputSettings];
    // Capture delivers frames in real time; the input must not stall the pipeline.
    self.assetWriterVideoInput.expectsMediaDataInRealTime = YES;
    if (![self.assetWriter canAddInput:self.assetWriterVideoInput]) {
#ifdef DEBUG
        NSLog(@"Couldn't add asset writer video input.");
#endif
        [self createErrorAndCallbackWithErrorStr:@"Couldn't add asset writer video input."];
        return NO;
    }
    [self.assetWriter addInput:self.assetWriterVideoInput];
    return YES;
}
/**
 Create and attach the audio AVAssetWriterInput using the externally
 supplied configuration.audioOutputSettings.

 @param currentFormatDescription Format description of the incoming audio
        buffers. Currently unused — the output settings come from
        self.configuration, not from the source format — but kept for
        signature compatibility with the capture callback.
 @return YES when the input was created and added to the writer; NO
         otherwise (an error is reported through the monitor block).
 */
- (BOOL)setupAssetWriterAudioInput:(CMFormatDescriptionRef)currentFormatDescription {

#ifdef DEBUG
    // FIX: diagnostic log no longer ships in release builds.
    NSLog(@"音频输出设置： %@",self.configuration.audioOutputSettings);
#endif
    if (![self.assetWriter canApplyOutputSettings:self.configuration.audioOutputSettings forMediaType:AVMediaTypeAudio]) {
        [self createErrorAndCallbackWithErrorStr:@"Couldn't apply audio output settings."];
        return NO;
    }
    self.assetWriterAudioInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio outputSettings:self.configuration.audioOutputSettings];
    // Capture delivers buffers in real time; the input must not stall the pipeline.
    self.assetWriterAudioInput.expectsMediaDataInRealTime = YES;
    if (![self.assetWriter canAddInput:self.assetWriterAudioInput]) {
        [self createErrorAndCallbackWithErrorStr:@"Couldn't add asset writer audio input."];
        return NO;
    }
    [self.assetWriter addInput:self.assetWriterAudioInput];
    return YES;
}

/**
 Attach the still-image output to the capture session when the session
 accepts it.
 */
- (void)configPhotoOutput {
    AVCaptureOutput *photoOutput = self.imageDataOutput;
    if ([self.session canAddOutput:photoOutput]) {
        [self.session addOutput:photoOutput];
    }
}

/**
 Configure the video/audio data outputs, connections, and the preview
 buffer queue for stream-based recording.

 The outputs and connections are lazily created, so each is nilled first
 to force a fresh instance on reconfiguration.
 */
- (void)configVideoOutput {
    // Reset lazily-created outputs so the getters rebuild them.
    self.videoDataOutput = nil;
    if ([self.session canAddOutput:self.videoDataOutput]) {
        [self.session addOutput:self.videoDataOutput];
    }
    self.audioDataOutput = nil;
    if ([self.session canAddOutput:self.audioDataOutput]) {
        [self.session addOutput:self.audioDataOutput];
    }
    self.videoConnection = nil;
    // Record portrait-oriented video.
    self.videoConnection.videoOrientation = AVCaptureVideoOrientationPortrait;
    self.audioConnection = nil;
    // BUGFIX: release any previously created queue before overwriting the
    // ivar — CMBufferQueueCreate returns a +1 CF object, so re-entry leaked it.
    if (_previewBufferQueue) {
        CFRelease(_previewBufferQueue);
        _previewBufferQueue = NULL;
    }
    OSStatus err = CMBufferQueueCreate(kCFAllocatorDefault, 1, CMBufferQueueGetCallbacksForUnsortedSampleBuffers(), &_previewBufferQueue);
#ifdef DEBUG
    // BUGFIX: the queue-creation error is now logged when creation actually
    // failed; previously the message was tied to a nil-connection check.
    if (err != noErr) {
        NSLog(@"CMBufferQueueCreate error: %d", err);
    }
    if (!self.videoConnection || !self.audioConnection) {
        NSLog(@"Missing capture connection (video:%@ audio:%@)", self.videoConnection, self.audioConnection);
    }
#endif
}

/// Apply the focus/exposure point of interest and modes to whichever camera
/// is currently active (front or back).
- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposureMode:(AVCaptureExposureMode)exposureMode atPoint:(CGPoint)point {
    AVCaptureDevice *activeCamera = _curIsBackCamera ? self.backVideoInput.device : self.frontVideoInput.device;
    [self changeAVCaptureDevicePropertyWithDevice:activeCamera propertyChange:^(AVCaptureDevice *device) {
        // Points of interest are set first, then the modes (original order preserved).
        if (device.isFocusPointOfInterestSupported) {
            device.focusPointOfInterest = point;
        }
        if (device.isExposurePointOfInterestSupported) {
            device.exposurePointOfInterest = point;
        }
        if ([device isExposureModeSupported:exposureMode]) {
            device.exposureMode = exposureMode;
        }
        if ([device isFocusModeSupported:focusMode]) {
            device.focusMode = focusMode;
        }
    }];
}

/**
 Begin a background task so in-flight work can finish after the app is
 backgrounded. Paired with -endBackgroundUpdateTask.
 */
- (void)beingBackgroundUpdateTask {
    // BUGFIX: capture self weakly — UIApplication retains the expiration
    // handler for the task's lifetime, which otherwise keeps this object alive.
    __weak typeof(self) weakSelf = self;
    self.backgroundUpdateTask = [[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:^{
        [weakSelf endBackgroundUpdateTask];
    }];
}

/**
 End the background task started by -beingBackgroundUpdateTask.
 Safe to call more than once.
 */
- (void)endBackgroundUpdateTask {
    // Guard against double-ending: UIBackgroundTaskInvalid must not be
    // passed to -endBackgroundTask:.
    if (self.backgroundUpdateTask == UIBackgroundTaskInvalid) {
        return;
    }
    [[UIApplication sharedApplication] endBackgroundTask:self.backgroundUpdateTask];
    self.backgroundUpdateTask = UIBackgroundTaskInvalid;
}

/// Completion selector for UIImageWriteToSavedPhotosAlbum. Reports failures
/// through innerPhotoSaveToPhotosAlbumResult, substituting a permissions
/// error when Photos access is restricted or denied.
- (void)image:(UIImage *)image didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo {
#ifdef DEBUG
    NSLog(@"保存照片完成之后的线程%@",[NSThread currentThread]);
    if (!error) {
        NSLog(@"照片保存到相册成功");
    }
    NSLog(@"image = %@, error = %@, contextInfo = %@", image, error, contextInfo);
#endif
    if (!error || !self.innerPhotoSaveToPhotosAlbumResult) {
        return;
    }
    PHAuthorizationStatus authStatus = [PHPhotoLibrary authorizationStatus];
    BOOL accessBlocked = (authStatus == PHAuthorizationStatusRestricted || authStatus == PHAuthorizationStatusDenied);
    NSError *reportedError = accessBlocked
        ? [self createErrorAndCallbackWithErrorStr:@"保存照片到相册失败，请检查相册的访问权限"]
        : error;
    self.innerPhotoSaveToPhotosAlbumResult(NO, reportedError);
}
/// Completion selector for UISaveVideoAtPathToSavedPhotosAlbum. Reports
/// failures through innerVideoSaveToPhotosAlbumResult, substituting a
/// permissions error when Photos access is restricted or denied.
- (void)video:(NSString *)videoPath didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo {
#ifdef DEBUG
    NSLog(@"保存视频完成之后的线程%@",[NSThread currentThread]);
    if (!error) {
        NSLog(@"视频保存到相册成功");
    }
    NSLog(@"videoPath = %@, error = %@-%@, contextInfo = %@", videoPath, error, error.localizedDescription, contextInfo);
#endif
    if (!error || !self.innerVideoSaveToPhotosAlbumResult) {
        return;
    }
    PHAuthorizationStatus authStatus = [PHPhotoLibrary authorizationStatus];
    BOOL accessBlocked = (authStatus == PHAuthorizationStatusRestricted || authStatus == PHAuthorizationStatusDenied);
    NSError *reportedError = accessBlocked
        ? [self createErrorAndCallbackWithErrorStr:@"保存视频到相册失败，请检查相册的访问权限"]
        : error;
    self.innerVideoSaveToPhotosAlbumResult(NO, reportedError);
}

/**
 Enforce the configured output frame rate by dropping excess frames
 (fixes the compression-settings frame-rate key not taking effect).

 @param sample The incoming video sample buffer.
 @return The sample when it should be written, or NULL when it must be
         dropped. The returned buffer is NOT retained; callers use it
         synchronously.
 */
- (CMSampleBufferRef)dropFrame:(CMSampleBufferRef)sample {
    CMTime sampleTime = CMSampleBufferGetPresentationTimeStamp(sample);
    // First frame of a recording: remember its timestamp so the interval
    // below becomes zero and the frame is always kept.
    if (CMTimeCompare(_previousPstTime, kCMTimeZero) == 0) {
        _previousPstTime = sampleTime;
#ifdef DEBUG
        NSLog(@"第一帧开始");
        CMTimeShow(sampleTime);
        NSLog(@"第一帧结束");
#endif
    }
    // Distance between this frame and the last frame that was kept.
    CMTime duration = CMTimeSubtract(sampleTime, _previousPstTime);
    // Minimum inter-frame interval implied by the requested output fps.
    CMTime frameInterval = CMTimeMake(1, (int32_t)self.configuration.videoExpectedSourceFrameRate);
#ifdef DEBUG
    CMTimeShow(duration);
#endif
    // Keep the first frame (duration == 0) and any frame at least one output
    // interval after the previous kept frame.
    // BUGFIX: use >= instead of strictly-greater — frames arriving at exactly
    // the target interval were previously dropped, halving an exact-rate feed.
    if (CMTimeCompare(duration, kCMTimeZero) == 0 || CMTimeCompare(duration, frameInterval) >= 0) {
        _previousPstTime = sampleTime;
        return sample;
    }
    return NULL;
}


#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate
// Shared delegate callback for BOTH the video and audio data outputs; the
// connection parameter distinguishes which stream the buffer belongs to.
// Work is handed off to the serial assetWriterQueue; the buffer is retained
// before the hop and released at the end of the async block.
- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
 
    @synchronized(self) {
        CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);
        // Keep the buffer alive across the async dispatch; balanced by the
        // CFRelease at the end of the block.
        CFRetain(sampleBuffer);
        dispatch_async(self.assetWriterQueue, ^{
            if (self.assetWriter && (self.isRecording || self.recordingWillBeStarted)) {
                BOOL wasReadyToRecord = (self.readyToRecordAudio && self.readyToRecordVideo);
                if (connection == self.videoConnection) {
                    // Initialize the video input if this is not done yet
                    if (!self.readyToRecordVideo) {
                        self.readyToRecordVideo = [self setupAssetWriterVideoInput:formatDescription];
                    }
                    // Write video data to file
                    // (only once BOTH inputs are configured, so the writer
                    // session starts with all inputs attached)
                    if (self.readyToRecordVideo && self.readyToRecordAudio) {
                        [self writeSampleBuffer:sampleBuffer ofType:AVMediaTypeVideo];
                    }
                }else if (connection == self.audioConnection) {
                    // Initialize the audio input if this is not done yet
                    if (!self.readyToRecordAudio) {
                        self.readyToRecordAudio = [self setupAssetWriterAudioInput:formatDescription];
                    }
                    // Write audio data to file
                    if (self.readyToRecordAudio && self.readyToRecordVideo) {
                        [self writeSampleBuffer:sampleBuffer ofType:AVMediaTypeAudio];
                    }
                }
                // Flip the recording flags exactly once, on the transition
                // from "not ready" to "both inputs ready".
                BOOL isReadyToRecord = (self.readyToRecordAudio && self.readyToRecordVideo);
                if (!wasReadyToRecord && isReadyToRecord) {
                    self.recordingWillBeStarted = NO;
                    self.isRecording = YES;
                }
                if (isReadyToRecord) {
                    // Elapsed recording time = current buffer PTS minus the
                    // PTS of the first buffer seen (startTime).
                    CMTime dur = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
                    if (self.startTime.value == 0) {
                        self.startTime = dur;
                    }
                    CMTime sub = CMTimeSubtract(dur, self.startTime);
                    self.currentRecordTime = CMTimeGetSeconds(sub);
#ifdef DEBUG
//                    NSLog(@"录制时间: %f",self.currentRecordTime);
#endif
                    // Progress callback is delivered on the main queue.
                    dispatch_async(dispatch_get_main_queue(), ^{
                        if (self.innerVideoRecordProcess) {
                            self.innerVideoRecordProcess(self.currentRecordTime);
                        }
                    });
                }
            }
            CFRelease(sampleBuffer);
        });
    }
}
#pragma mark - AVAudioRecorderDelegate
/// AVAudioRecorderDelegate — called when recording finishes. Delivers the
/// result through the block callback when set, otherwise via the delegate.
- (void)audioRecorderDidFinishRecording:(AVAudioRecorder *)recorder successfully:(BOOL)flag {
    if (self.innerAudioRecordResult) {
        self.innerAudioRecordResult(self.audioRecordFilePath, self.innerAudioRecordError);
    } else if ([self.delegate respondsToSelector:@selector(didFinishAudioRecordToOutputFileAtPath:recordError:)]) {
        [self.delegate didFinishAudioRecordToOutputFileAtPath:self.audioRecordFilePath recordError:self.innerAudioRecordError];
    }
#ifdef DEBUG
    NSLog(@"录音结束");
#endif
}
/// AVAudioRecorderDelegate — an encode error occurred; forward it to the
/// monitor block and remember it for the finish callback.
- (void)audioRecorderEncodeErrorDidOccur:(AVAudioRecorder *)recorder error:(NSError * _Nullable)error {
#ifdef DEBUG
    NSLog(@"录音错误: %@",error);
#endif
    void (^monitorBlock)(NSError *) = self.innerMonitorErrorBlock;
    if (monitorBlock) {
        monitorBlock(error);
    }
    self.innerAudioRecordError = error;
}


#pragma mark - Setter and getter

/// Map the public FlashMode onto AVCaptureFlashMode and apply it to the
/// current camera when supported.
- (void)setFlashMode:(FlashMode)flashMode {
    _flashMode = flashMode;
    AVCaptureFlashMode targetMode;
    switch (flashMode) {
        case FlashModeOn:
            targetMode = AVCaptureFlashModeOn;
            break;
        case FlashModeAuto:
            targetMode = AVCaptureFlashModeAuto;
            break;
        case FlashModeOff:
        default:
            targetMode = AVCaptureFlashModeOff;
            break;
    }
    [self changeCurrentCameraProperty:^(AVCaptureDevice *camera) {
        if ([camera isFlashModeSupported:targetMode]) {
            camera.flashMode = targetMode;
        }
    }];
}
/// Map the public TorchMode onto AVCaptureTorchMode and apply it to the
/// current camera when supported.
- (void)setTorchMode:(TorchMode)torchMode {
    _torchMode = torchMode;
    AVCaptureTorchMode targetMode;
    switch (torchMode) {
        case TorchModeOn:
            targetMode = AVCaptureTorchModeOn;
            break;
        case TorchModeAuto:
            targetMode = AVCaptureTorchModeAuto;
            break;
        case TorchModeOff:
        default:
            targetMode = AVCaptureTorchModeOff;
            break;
    }
    [self changeCurrentCameraProperty:^(AVCaptureDevice *camera) {
        if ([camera isTorchModeSupported:targetMode]) {
            camera.torchMode = targetMode;
        }
    }];
}

/// Map the public ExposureMode onto AVCaptureExposureMode and apply it to
/// the current camera when supported.
- (void)setExposureMode:(ExposureMode)exposureMode {
    _exposureMode = exposureMode;
    AVCaptureExposureMode targetMode;
    switch (exposureMode) {
        case ExposureModeAutoExpose:
            targetMode = AVCaptureExposureModeAutoExpose;
            break;
        case ExposureModeContinuousAutoExposure:
            targetMode = AVCaptureExposureModeContinuousAutoExposure;
            break;
        case ExposureModeCustom:
            targetMode = AVCaptureExposureModeCustom;
            break;
        case ExposureModeLocked:
        default:
            targetMode = AVCaptureExposureModeLocked;
            break;
    }
    [self changeCurrentCameraProperty:^(AVCaptureDevice *camera) {
        if ([camera isExposureModeSupported:targetMode]) {
            camera.exposureMode = targetMode;
        }
    }];
}

/// Store the preset and apply it to the session when the session supports it.
- (void)setPreset:(AVCaptureSessionPreset)preset {
    _preset = preset;
    AVCaptureSession *captureSession = self.session;
    if ([captureSession canSetSessionPreset:preset]) {
        captureSession.sessionPreset = preset;
    }
}

/// Lazily-created capture session shared by all inputs/outputs.
- (AVCaptureSession *)session {
    if (_session == nil) {
        _session = [[AVCaptureSession alloc] init];
    }
    return _session;
}
/// Lazy device input wrapping the rear camera.
- (AVCaptureDeviceInput *)backVideoInput {
    if (_backVideoInput == nil) {
        AVCaptureDevice *rearCamera = [self obtainRearCamera];
        _backVideoInput = [self createInputWithAVCaptureDevice:rearCamera];
    }
    return _backVideoInput;
}
/// Lazy device input wrapping the front camera.
- (AVCaptureDeviceInput *)frontVideoInput {
    if (_frontVideoInput == nil) {
        AVCaptureDevice *frontCamera = [self obtainFrontCamera];
        _frontVideoInput = [self createInputWithAVCaptureDevice:frontCamera];
    }
    return _frontVideoInput;
}
/// Lazy device input wrapping the built-in microphone.
- (AVCaptureDeviceInput *)audioInput {
    if (_audioInput == nil) {
        AVCaptureDevice *microphone = [self obtainMicrophone];
        _audioInput = [self createInputWithAVCaptureDevice:microphone];
    }
    return _audioInput;
}
/// Lazy preview layer bound to the capture session: black backdrop with
/// aspect-fill scaling for both the layer contents and the video feed.
- (AVCaptureVideoPreviewLayer *)previewLayer {
    if (_previewLayer == nil) {
        AVCaptureVideoPreviewLayer *layer =
            [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
        layer.backgroundColor = [UIColor blackColor].CGColor;
        layer.contentsGravity = kCAGravityResizeAspectFill;
        layer.videoGravity = AVLayerVideoGravityResizeAspectFill;
        _previewLayer = layer;
    }
    return _previewLayer;
}
/// Serial queue on which the audio/video sample-buffer delegate callbacks are
/// delivered (see videoDataOutput / audioDataOutput setup).
- (dispatch_queue_t)assetWriterQueue {
    if (_assetWriterQueue == nil) {
        _assetWriterQueue =
            dispatch_queue_create("com.studioManager.movieWritingQueue", DISPATCH_QUEUE_SERIAL);
    }
    return _assetWriterQueue;
}
/// Lazy video data output delivering 32BGRA frames to assetWriterQueue.
/// Frames that arrive too late are discarded instead of being queued, keeping
/// capture latency bounded.
- (AVCaptureVideoDataOutput *)videoDataOutput {
    if (!_videoDataOutput) {
        _videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
        [_videoDataOutput setSampleBufferDelegate:self queue:self.assetWriterQueue];
        _videoDataOutput.alwaysDiscardsLateVideoFrames = YES;
        // kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange is the cheaper
        // alternative should BGRA conversion ever become a bottleneck.
        _videoDataOutput.videoSettings = @{
            (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
        };
    }
    return _videoDataOutput;
}
/// Lazy audio data output delivering sample buffers on assetWriterQueue, so
/// audio and video callbacks are serialized on the same queue.
- (AVCaptureAudioDataOutput *)audioDataOutput {
    if (_audioDataOutput == nil) {
        AVCaptureAudioDataOutput *output = [[AVCaptureAudioDataOutput alloc] init];
        [output setSampleBufferDelegate:self queue:self.assetWriterQueue];
        _audioDataOutput = output;
    }
    return _audioDataOutput;
}
/// Cached video connection pulled from the video data output.
- (AVCaptureConnection *)videoConnection {
    if (_videoConnection == nil) {
        _videoConnection = [self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
    }
    return _videoConnection;
}
/// Cached audio connection pulled from the audio data output.
- (AVCaptureConnection *)audioConnection {
    if (_audioConnection == nil) {
        _audioConnection = [self.audioDataOutput connectionWithMediaType:AVMediaTypeAudio];
    }
    return _audioConnection;
}
/// Lazy still-image output used for photo capture.
/// NOTE(review): AVCaptureStillImageOutput is deprecated since iOS 10 in
/// favor of AVCapturePhotoOutput — consider migrating when the deployment
/// target allows.
- (AVCaptureStillImageOutput *)imageDataOutput {
    if (_imageDataOutput == nil) {
        _imageDataOutput = [[AVCaptureStillImageOutput alloc] init];
    }
    return _imageDataOutput;
}
/// Cached video connection of the still-image output (stills travel over the
/// video media type).
- (AVCaptureConnection *)imageConnection {
    if (_imageConnection == nil) {
        _imageConnection = [self.imageDataOutput connectionWithMediaType:AVMediaTypeVideo];
    }
    return _imageConnection;
}
/// Lazily builds the standalone audio recorder (Apple IMA4, 44.1 kHz, mono)
/// writing to a freshly generated file path. Session/recorder setup failures
/// are logged in DEBUG builds and forwarded to the monitor block.
- (AVAudioRecorder *)audioRecorder {
    if (!_audioRecorder) {
        NSString *cafPath = [self getRandomAudioStorePath];
        self.audioRecordFilePath = cafPath;
        // BUGFIX: cafPath is a plain filesystem path, so it must be wrapped as
        // a file URL. -URLWithString: produced a schemeless URL and returns
        // nil for paths containing spaces or non-ASCII characters.
        NSURL *url = [NSURL fileURLWithPath:cafPath];
        AVAudioSession *session = [AVAudioSession sharedInstance];
        NSError *error = nil;
        // Per Cocoa convention, check the method's return value rather than
        // the error pointer alone.
        if (![session setCategory:AVAudioSessionCategoryRecord error:&error]) {
#ifdef DEBUG
            NSLog(@"初始化AVAudioSession错误: %@", [error description]);
#endif
            if (self.innerMonitorErrorBlock) {
                self.innerMonitorErrorBlock(error);
            }
        }
        // Recording settings. Note: AVLinearPCMBitDepthKey only applies to
        // linear PCM formats and is ignored for IMA4; kept for parity with
        // the previous configuration.
        NSDictionary *setting = @{
            AVFormatIDKey : @(kAudioFormatAppleIMA4),      // compressed, small files
            AVSampleRateKey : @(44100),                    // sample rate in Hz
            AVNumberOfChannelsKey : @(1),                  // mono
            AVLinearPCMBitDepthKey : @(8),
            AVEncoderAudioQualityKey : @(AVAudioQualityHigh),
        };
        error = nil;
        _audioRecorder = [[AVAudioRecorder alloc] initWithURL:url settings:setting error:&error];
        if (!_audioRecorder) {
#ifdef DEBUG
            NSLog(@"初始化AVAudioRecorder错误: %@", [error description]);
#endif
            if (self.innerMonitorErrorBlock) {
                self.innerMonitorErrorBlock(error);
            }
        } else {
            _audioRecorder.meteringEnabled = YES;  // enables level metering for UI
            _audioRecorder.delegate = self;
        }
    }
    return _audioRecorder;
}
/// Minimum zoom/crop factor for still capture. Fixed at 1.0 (no zoom-out).
/// The previous implementation looked up the active camera's device but never
/// used it (dead code removed); earlier experiments with
/// minAvailableVideoZoomFactor were abandoned.
- (CGFloat)photoMinScaleAndCropFactor {
    return 1.0;
}

/// Maximum zoom/crop factor for still capture: the active camera's hardware
/// limit, capped at 8x to keep the extreme digital-zoom range out of reach.
- (CGFloat)photoMaxScaleAndCropFactor {
    AVCaptureDeviceInput *activeInput =
        _curIsBackCamera ? self.backVideoInput : self.frontVideoInput;
    CGFloat hardwareMax = activeInput.device.activeFormat.videoMaxZoomFactor;
    return MIN(hardwareMax, 8.0);
}

//- (CGFloat)testValue {
//    return [self obtainRearCamera].videoZoomFactor;
//}


@end









