//
//  DKCapturePipeline.m
//  DKShortVideo
//
//  Created by Keer_LGQ on 2018/4/1.
//  Copyright © 2018年 DK. All rights reserved.
//

#import "DKCapturePipeline.h"
#import "DKGPUFiterRenderer.h"
#import "MovieRecorder.h"

#import <CoreMedia/CMBufferQueue.h>
#import <CoreMedia/CMAudioClock.h>
#import <AssetsLibrary/AssetsLibrary.h>
#import <ImageIO/CGImageProperties.h>
#import <UIKit/UIKit.h>

#define RETAINED_BUFFER_COUNT 6

#define RECORD_AUDIO 1

@interface DKCapturePipeline ()<AVCaptureAudioDataOutputSampleBufferDelegate, AVCaptureVideoDataOutputSampleBufferDelegate, MovieRecorderDelegate>
{
    NSMutableArray *_previousSecondTimestamps; // NSValue-boxed CMTimes of recent frames, used to compute videoFrameRate

    AVCaptureSession *_captureSession;
    AVCaptureDevice *_videoDevice;
    AVCaptureConnection *_audioConnection;
    AVCaptureConnection *_videoConnection;
    AVCaptureVideoOrientation _videoBufferOrientation;
    
    dispatch_queue_t _sessionQueue;          // serializes all session setup/start/stop work
    dispatch_queue_t _videoDataOutputQueue;  // high-priority serial queue for video sample buffers
    
    BOOL _running;
    BOOL _startCaptureSessionOnEnteringForeground;
    id _applicationWillEnterForegroundNotificationObserver;
    NSDictionary *_videoCompressionSettings;
    NSDictionary *_audioCompressionSettings;

    id<DKBufferRenderProtocol> _renderer;
    BOOL _renderingEnabled; // guarded by @synchronized( _renderer )

    MovieRecorder *_recorder;
    NSURL *_recordingURL;
    DKRecordingStatus _recordingStatus; // guarded by @synchronized( self )

    UIBackgroundTaskIdentifier _pipelineRunningTask;
    
    __weak id<DKCapturePipelineDelegate> _delegate;
    dispatch_queue_t _delegateCallbackQueue;
}
// Preallocated file URLs for the temp movie clips.
// Declared strong (not copy): copying an NSMutableArray through a `copy`
// setter stores an immutable NSArray, which would crash on any later mutation.
@property(nonatomic, strong) NSMutableArray *videoUrlArray;
@property(nonatomic, strong) NSMutableArray *tempedVideoArray;

@property(atomic, readwrite) float videoFrameRate;
@property(atomic, readwrite) CMVideoDimensions videoDimensions;
// Because we specify __attribute__((NSObject)) ARC will manage the lifetime of the backing ivars even though they are CF types.
@property(nonatomic, strong) __attribute__((NSObject)) CVPixelBufferRef currentPreviewPixelBuffer;
@property(nonatomic, strong) __attribute__((NSObject)) CMFormatDescriptionRef outputVideoFormatDescription;
@property(nonatomic, strong) __attribute__((NSObject)) CMFormatDescriptionRef outputAudioFormatDescription;
@end

@implementation DKCapturePipeline

#pragma mark life
// Designated initializer. The delegate is held weakly; all delegate callbacks
// are dispatched asynchronously onto the supplied queue.
- (instancetype)initWithDelegate:(id<DKCapturePipelineDelegate>)delegate callbackQueue:(dispatch_queue_t)queue
{
    NSParameterAssert( delegate != nil );
    NSParameterAssert( queue != nil );
    
    self = [super init];
    if ( self == nil ) {
        return nil;
    }
    
    _previousSecondTimestamps = [NSMutableArray array];
    _recordingOrientation = AVCaptureVideoOrientationPortrait;
    
    // All session configuration and start/stop happens on this serial queue.
    _sessionQueue = dispatch_queue_create( "com.apple.sample.capturepipeline.session", DISPATCH_QUEUE_SERIAL );
    
    // Video frames are produced on a high-priority queue so that downstream
    // consumers (which run at default priority) cannot starve the producer.
    // Audio gets its own default-priority queue elsewhere; AudioDataOutput
    // tolerates more latency since its buffers aren't from a fixed-size pool.
    _videoDataOutputQueue = dispatch_queue_create( "com.apple.sample.capturepipeline.video", DISPATCH_QUEUE_SERIAL );
    dispatch_set_target_queue( _videoDataOutputQueue, dispatch_get_global_queue( DISPATCH_QUEUE_PRIORITY_HIGH, 0 ) );
    
    _renderer = [[DKGPUFiterRenderer alloc] init];
    
    _pipelineRunningTask = UIBackgroundTaskInvalid;
    _delegate = delegate;
    _delegateCallbackQueue = queue;
    
    // Three fixed slots in the temp directory for recorded clips.
    _videoUrlArray = [@[ [self videoURLWithString:@"Movie1.MOV"],
                         [self videoURLWithString:@"Movie2.MOV"],
                         [self videoURLWithString:@"Movie3.MOV"] ] mutableCopy];
    
    return self;
}

// Tears down the session and removes notification observers.
// Clients are expected to call -stopRunning before releasing the pipeline.
- (void)dealloc
{
    [self teardownCaptureSession];
}

#pragma mark Capture Session

// Synchronously builds (if needed) and starts the capture session.
// Blocks the caller until the session queue has finished the work.
- (void)startRunning
{
    dispatch_sync( _sessionQueue, ^{
        [self setupCaptureSession];
        
        if ( _captureSession == nil ) {
            // Setup failed (e.g. non-recoverable device error); nothing to start.
            return;
        }
        
        [_captureSession startRunning];
        _running = YES;
    } );
}

// Synchronously stops recording, stops the session, drains the video pipeline
// and tears the session down. The ordering below is deliberate — do not reorder.
- (void)stopRunning
{
    dispatch_sync( _sessionQueue, ^{
        _running = NO;
        
        // the captureSessionDidStopRunning method will stop recording if necessary as well, but we do it here so that the last video and audio samples are better aligned
        [self stopRecording]; // does nothing if we aren't currently recording
        
        [_captureSession stopRunning];
        
        [self captureSessionDidStopRunning];
        
        [self teardownCaptureSession];
    } );
}
#pragma mark set get
// Enables/disables GPU rendering. Takes the renderer lock so that once this
// setter returns with NO, no render pass is in flight —
// renderVideoSampleBuffer: holds the same lock while it uses the GPU.
- (void)setRenderingEnabled:(BOOL)renderingEnabled
{
    @synchronized( _renderer ) {
        _renderingEnabled = renderingEnabled;
    }
}

// Reads the rendering flag under the same renderer lock as the setter.
- (BOOL)renderingEnabled
{
    @synchronized( _renderer ) {
        return _renderingEnabled;
    }
}

// Returns the current recording state.
// NOTE(review): read without @synchronized( self ), so the value may be
// momentarily stale relative to an in-progress state transition.
- (DKRecordingStatus)getRecorderStatus
{
    return _recordingStatus;
}

// Returns the URL of the most recently recorded temp clip, or the first slot
// when nothing has been recorded yet.
// getCurrentTempedVideoCount counts files in NSTemporaryDirectory(), which can
// exceed the number of preallocated slots when unrelated temp files exist —
// clamp the index so we never read past the end of _videoUrlArray.
- (NSURL *)getLastedTempVideoUrl
{
    NSInteger count = [self getCurrentTempedVideoCount];
    if ( count <= 0 ) {
        return _videoUrlArray.firstObject;
    }
    NSInteger index = MIN( count, (NSInteger)_videoUrlArray.count ) - 1;
    return _videoUrlArray[index];
}

#pragma mark setup
// Builds the AVCaptureSession: audio + video inputs/outputs, frame rate,
// preset, and the recommended compression settings for later recording.
// Idempotent — returns immediately if the session already exists.
// Must be called on _sessionQueue.
- (void)setupCaptureSession
{
    if ( _captureSession ) {
        return;
    }
    
    _captureSession = [[AVCaptureSession alloc] init];
    
    // name:nil subscribes to every notification posted by this session
    // (runtime error, interruption, did-start/stop); dispatched in
    // captureSessionNotification:.
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(captureSessionNotification:) name:nil object:_captureSession];
    _applicationWillEnterForegroundNotificationObserver = [[NSNotificationCenter defaultCenter] addObserverForName:UIApplicationWillEnterForegroundNotification object:[UIApplication sharedApplication] queue:nil usingBlock:^(NSNotification *note) {
        // Retain self while the capture session is alive by referencing it in this observer block which is tied to the session lifetime
        // Client must stop us running before we can be deallocated
        [self applicationWillEnterForeground];
    }];
    
#if RECORD_AUDIO
    /* Audio */
    // NOTE(review): the audio input error is ignored (error:nil); a failure
    // here silently produces a video-only session.
    AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    AVCaptureDeviceInput *audioIn = [[AVCaptureDeviceInput alloc] initWithDevice:audioDevice error:nil];
    if ( [_captureSession canAddInput:audioIn] ) {
        [_captureSession addInput:audioIn];
    }
    
    AVCaptureAudioDataOutput *audioOut = [[AVCaptureAudioDataOutput alloc] init];
    // Put audio on its own queue to ensure that our video processing doesn't cause us to drop audio
    dispatch_queue_t audioCaptureQueue = dispatch_queue_create( "com.apple.sample.capturepipeline.audio", DISPATCH_QUEUE_SERIAL );
    [audioOut setSampleBufferDelegate:self queue:audioCaptureQueue];
    
    if ( [_captureSession canAddOutput:audioOut] ) {
        [_captureSession addOutput:audioOut];
    }
    _audioConnection = [audioOut connectionWithMediaType:AVMediaTypeAudio];
#endif // RECORD_AUDIO
    
    /* Video */
    AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *videoDeviceError = nil;
    AVCaptureDeviceInput *videoIn = [[AVCaptureDeviceInput alloc] initWithDevice:videoDevice error:&videoDeviceError];
    if ( [_captureSession canAddInput:videoIn] ) {
        [_captureSession addInput:videoIn];
        _videoDevice = videoDevice;
    }
    else {
        // No usable video input — the pipeline cannot run at all.
        [self handleNonRecoverableCaptureSessionRuntimeError:videoDeviceError];
        return;
    }
    
    // Pixel format is dictated by the GPU renderer; OpenGLES compatibility is
    // required so rendered buffers can be used as GL textures.
    AVCaptureVideoDataOutput *videoOut = [[AVCaptureVideoDataOutput alloc] init];
    videoOut.videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(_renderer.inputPixelFormat) ,(id)kCVPixelBufferOpenGLESCompatibilityKey:@(YES)};
    [videoOut setSampleBufferDelegate:self queue:_videoDataOutputQueue];
    
    // filter records videos and we prefer not to have any dropped frames in the video recording.
    // By setting alwaysDiscardsLateVideoFrames to NO we ensure that minor fluctuations in system load or in our processing time for a given frame won't cause framedrops.
    // We do however need to ensure that on average we can process frames in realtime.
    // If we were doing preview only we would probably want to set alwaysDiscardsLateVideoFrames to YES.
    videoOut.alwaysDiscardsLateVideoFrames = NO;
    
    if ( [_captureSession canAddOutput:videoOut] ) {
        [_captureSession addOutput:videoOut];
    }
    _videoConnection = [videoOut connectionWithMediaType:AVMediaTypeVideo];
    
    int frameRate;
    NSString *sessionPreset = AVCaptureSessionPresetHigh;
    CMTime frameDuration = kCMTimeInvalid;
    // For single core systems like iPhone 4 and iPod Touch 4th Generation we use a lower resolution and framerate to maintain real-time performance.
    if ( [NSProcessInfo processInfo].processorCount == 1 )
    {
        if ( [_captureSession canSetSessionPreset:AVCaptureSessionPreset640x480] ) {
            sessionPreset = AVCaptureSessionPreset640x480;
        }
        frameRate = 15;
    }
    else
    {
        frameRate = 30;
    }
    
    _captureSession.sessionPreset = sessionPreset;
    
    frameDuration = CMTimeMake( 1, frameRate );
    
    NSError *error = nil;
    if ( [videoDevice lockForConfiguration:&error] ) {
        // Pin min == max duration to lock the capture to a fixed frame rate.
        videoDevice.activeVideoMaxFrameDuration = frameDuration;
        videoDevice.activeVideoMinFrameDuration = frameDuration;
        [videoDevice unlockForConfiguration];
    }
    else {
        NSLog( @"videoDevice lockForConfiguration returned error %@", error );
    }
    
    // Get the recommended compression settings after configuring the session/device.
#if RECORD_AUDIO
    _audioCompressionSettings = [[audioOut recommendedAudioSettingsForAssetWriterWithOutputFileType:AVFileTypeQuickTimeMovie] copy];
#endif
    _videoCompressionSettings = [[videoOut recommendedVideoSettingsForAssetWriterWithOutputFileType:AVFileTypeQuickTimeMovie] copy];
    
    // Remember the native buffer orientation so recording transforms can be
    // computed relative to it later.
    _videoBufferOrientation = _videoConnection.videoOrientation;
    
    return;
}
// Lazily initializes the render pipeline from the first video buffer's format:
// captures the video dimensions, primes the renderer's buffer pool, and picks
// the output format description used for recording.
- (void)setupVideoPipelineWithInputFormatDescription:(CMFormatDescriptionRef)inputFormatDescription
{
    NSLog( @"-[%@ %@] called", [self class], NSStringFromSelector(_cmd) );
    
    [self videoPipelineWillStartRunning];
    
    self.videoDimensions = CMVideoFormatDescriptionGetDimensions( inputFormatDescription );
    [_renderer prepareForInputWithVideoDimensions:_videoDimensions outputRetainedBufferCountHint:RETAINED_BUFFER_COUNT];
    
    // A renderer that produces new buffers (not in-place) may advertise its own
    // output format; otherwise the input format passes straight through.
    BOOL rendererDefinesOutputFormat = ( !_renderer.operatesInPlace && [_renderer respondsToSelector:@selector(outputFormatDescription)] );
    self.outputVideoFormatDescription = rendererDefinesOutputFormat ? _renderer.outputFormatDescription : inputFormatDescription;
}

#pragma mark runing line
// Begins a UIKit background task so in-flight frames can drain if the app is
// backgrounded while the pipeline runs. Balanced by videoPipelineDidFinishRunning.
- (void)videoPipelineWillStartRunning
{
    NSLog( @"-[%@ %@] called", [self class], NSStringFromSelector(_cmd) );
    
    NSAssert( _pipelineRunningTask == UIBackgroundTaskInvalid, @"should not have a background task active before the video pipeline starts running" );
    
    _pipelineRunningTask = [[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:^{
        NSLog( @"video capture pipeline background task expired" );
    }];
}

// Ends the background task started in videoPipelineWillStartRunning.
// Called once the pipeline has fully drained (see teardownVideoPipeline).
- (void)videoPipelineDidFinishRunning
{
    NSLog( @"-[%@ %@] called", [self class], NSStringFromSelector(_cmd) );
    
    NSAssert( _pipelineRunningTask != UIBackgroundTaskInvalid, @"should have a background task active when the video pipeline finishes running" );
    
    [[UIApplication sharedApplication] endBackgroundTask:_pipelineRunningTask];
    _pipelineRunningTask = UIBackgroundTaskInvalid;
}

- (void)videoPipelineDidRunOutOfBuffers
{
    // We have run out of buffers.
    // Tell the delegate so that it can flush any cached buffers.
    [self invokeDelegateCallbackAsync:^{
        [_delegate capturePipelineDidRunOutOfPreviewBuffers:self];
    }];
}

#pragma mark handle

// Handles every notification posted by the capture session (we registered with
// name:nil). All handling is funneled onto the session queue.
- (void)captureSessionNotification:(NSNotification *)notification
{
    dispatch_async( _sessionQueue, ^{
        
        if ( [notification.name isEqualToString:AVCaptureSessionWasInterruptedNotification] )
        {
            NSLog( @"session interrupted" );
            
            [self captureSessionDidStopRunning];
        }
        else if ( [notification.name isEqualToString:AVCaptureSessionInterruptionEndedNotification] )
        {
            NSLog( @"session interruption ended" );
        }
        else if ( [notification.name isEqualToString:AVCaptureSessionRuntimeErrorNotification] )
        {
            [self captureSessionDidStopRunning];
            
            NSError *error = notification.userInfo[AVCaptureSessionErrorKey];
            // The runtime-error NSError carries AVError codes. The previous
            // code compared against AVCaptureSessionInterruptionReason…, which
            // is a different enum (for interruption notifications, not error
            // codes) and happened to mismatch silently.
            if ( error.code == AVErrorDeviceIsNotAvailableInBackground )
            {
                NSLog( @"device not available in background" );
                 
                // Since we can't resume running while in the background we need to remember this for next time we come to the foreground
                if ( _running ) {
                    _startCaptureSessionOnEnteringForeground = YES;
                }
            }
            else if ( error.code == AVErrorMediaServicesWereReset )
            {
                NSLog( @"media services were reset" );
                [self handleRecoverableCaptureSessionRuntimeError:error];
            }
            else
            {
                [self handleNonRecoverableCaptureSessionRuntimeError:error];
            }
        }
        else if ( [notification.name isEqualToString:AVCaptureSessionDidStartRunningNotification] )
        {
            NSLog( @"session started running" );
        }
        else if ( [notification.name isEqualToString:AVCaptureSessionDidStopRunningNotification] )
        {
            NSLog( @"session stopped running" );
        }
    } );
}

// Recoverable errors (e.g. media services reset): just restart the session if
// the client still wants us running.
- (void)handleRecoverableCaptureSessionRuntimeError:(NSError *)error
{
    if ( _running ) {
        [_captureSession startRunning];
    }
}

// Fatal errors: tear everything down and report the failure to the delegate.
- (void)handleNonRecoverableCaptureSessionRuntimeError:(NSError *)error
{
    NSLog( @"fatal runtime error %@, code %i", error, (int)error.code );
    
    _running = NO;
    [self teardownCaptureSession];
    
    [self invokeDelegateCallbackAsync:^{
        [_delegate capturePipeline:self didStopRunningWithError:error];
    }];
}

// Restarts the session on returning to the foreground if a background
// interruption (device-not-available) forced us to stop while _running.
- (void)applicationWillEnterForeground
{
    NSLog( @"-[%@ %@] called", [self class], NSStringFromSelector(_cmd) );
    
    dispatch_sync( _sessionQueue, ^{
        
        if ( _startCaptureSessionOnEnteringForeground )
        {
            NSLog( @"-[%@ %@] manually restarting session", [self class], NSStringFromSelector(_cmd) );
            
            _startCaptureSessionOnEnteringForeground = NO;
            if ( _running ) {
                [_captureSession startRunning];
            }
        }
    } );
}


#pragma mark Recording

// Starts recording into the next free temp-clip slot. Asynchronous: the
// recorder calls back via movieRecorderDidFinishPreparing: / didFailWithError:.
// Throws if already recording (programmer error).
- (void)startRecording
{
    // Every preallocated slot already holds a clip — wipe the old clips so
    // recording restarts from the first slot.
    if ([self getCurrentTempedVideoCount] >= _videoUrlArray.count) {
        [_videoUrlArray enumerateObjectsUsingBlock:^(id  _Nonnull obj, NSUInteger idx, BOOL * _Nonnull stop) {
            NSError *error = nil;
            // Check the BOOL return value, not the error pointer — NSError
            // out-parameters are only guaranteed meaningful on failure.
            if ( ![[NSFileManager defaultManager] removeItemAtURL:obj error:&error] ) {
                @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:@"清除缓存失败" userInfo:nil];
            }
        }];
    }
    // Clamp the slot index: getCurrentTempedVideoCount counts files in the
    // temp directory and could otherwise index past the preallocated URLs.
    NSUInteger slot = MIN( (NSUInteger)[self getCurrentTempedVideoCount], _videoUrlArray.count - 1 );
    _recordingURL = _videoUrlArray[slot];
    @synchronized( self )
    {
        if ( _recordingStatus != DKRecordingStatusIdle ) {
            @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:@"Already recording" userInfo:nil];
        }
        
        [self transitionToRecordingStatus:DKRecordingStatusStartingRecording error:nil];
    }
    
    dispatch_queue_t callbackQueue = dispatch_queue_create( "com.apple.sample.capturepipeline.recordercallback", DISPATCH_QUEUE_SERIAL ); // guarantee ordering of callbacks with a serial queue
    MovieRecorder *recorder = [[MovieRecorder alloc] initWithURL:_recordingURL delegate:self callbackQueue:callbackQueue];
    
#if RECORD_AUDIO
    [recorder addAudioTrackWithSourceFormatDescription:self.outputAudioFormatDescription settings:_audioCompressionSettings];
#endif // RECORD_AUDIO
    
    CGAffineTransform videoTransform = [self transformFromVideoBufferOrientationToOrientation:self.recordingOrientation withAutoMirroring:NO]; // Front camera recording shouldn't be mirrored
    
    [recorder addVideoTrackWithSourceFormatDescription:self.outputVideoFormatDescription transform:videoTransform settings:_videoCompressionSettings];
    _recorder = recorder;
    [recorder prepareToRecord]; // asynchronous, will call us back with recorderDidFinishPreparing: or recorder:didFailWithError: when done
}

// Stops an in-progress recording. No-op unless currently in the Recording
// state. Asynchronous: the recorder calls back when the file is finalized.
- (void)stopRecording
{
    @synchronized( self )
    {
        if ( _recordingStatus != DKRecordingStatusRecording ) {
            return;
        }
        
        [self transitionToRecordingStatus:DKRecordingStatusStoppingRecording error:nil];
    }
    
    [_recorder finishRecording]; // asynchronous, will call us back with recorderDidFinishRecording: or recorder:didFailWithError: when done
}

#pragma mark teardown

// Common cleanup when the session stops for any reason (explicit stop,
// interruption, runtime error).
- (void)captureSessionDidStopRunning
{
    [self stopRecording]; // a no-op if we aren't recording
    [self teardownVideoPipeline];
}

// synchronous, blocks until the pipeline is drained, don't call from within the pipeline
- (void)teardownVideoPipeline
{
    // The session is stopped so we are guaranteed that no new buffers are coming through the video data output.
    // There may be inflight buffers on _videoDataOutputQueue however.
    // Synchronize with that queue to guarantee no more buffers are in flight.
    // Once the pipeline is drained we can tear it down safely.
    
    NSLog( @"-[%@ %@] called", [self class], NSStringFromSelector(_cmd) );
    
    dispatch_sync( _videoDataOutputQueue, ^{
        
        if ( ! self.outputVideoFormatDescription ) {
            return;
        }
        
        self.outputVideoFormatDescription = NULL;
        [_renderer reset];
        self.currentPreviewPixelBuffer = NULL;
        
        NSLog( @"-[%@ %@] finished teardown", [self class], NSStringFromSelector(_cmd) );
        
        [self videoPipelineDidFinishRunning];
    } );
}

// Releases the capture session and unregisters both notification observers.
// Safe to call repeatedly; does nothing when no session exists.
- (void)teardownCaptureSession
{
    if ( _captureSession == nil ) {
        return;
    }
    
    // Remove the catch-all (name:nil) observer registered in setupCaptureSession.
    [[NSNotificationCenter defaultCenter] removeObserver:self name:nil object:_captureSession];
    
    [[NSNotificationCenter defaultCenter] removeObserver:_applicationWillEnterForegroundNotificationObserver];
    _applicationWillEnterForegroundNotificationObserver = nil;
    
    _captureSession = nil;
    
    _videoCompressionSettings = nil;
    _audioCompressionSettings = nil;
}

#pragma mark AVCapture delegate
// Sample-buffer delegate for BOTH outputs. Video buffers arrive on
// _videoDataOutputQueue; audio buffers arrive on the dedicated audio queue
// created in setupCaptureSession.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription( sampleBuffer );
    
    if ( connection == _videoConnection )
    {
        if ( self.outputVideoFormatDescription == NULL ) {
            // Don't render the first sample buffer.
            // This gives us one frame interval (33ms at 30fps) for setupVideoPipelineWithInputFormatDescription: to complete.
            // Ideally this would be done asynchronously to ensure frames don't back up on slower devices.
            [self setupVideoPipelineWithInputFormatDescription:formatDescription];
        }
        else {
            [self renderVideoSampleBuffer:sampleBuffer];
        }
    }
    else if ( connection == _audioConnection )
    {
        // Remember the latest audio format so startRecording can configure the
        // recorder's audio track.
        self.outputAudioFormatDescription = formatDescription;
        
        @synchronized( self ) {
            if ( _recordingStatus == DKRecordingStatusRecording ) {
                [_recorder appendAudioSampleBuffer:sampleBuffer];
            }
        }
    }
}

// Runs one video frame through the GPU renderer, forwards the result to the
// preview delegate and, while recording, to the movie recorder.
// Called on _videoDataOutputQueue.
- (void)renderVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    CVPixelBufferRef renderedPixelBuffer = NULL;
    CMTime timestamp = CMSampleBufferGetPresentationTimeStamp( sampleBuffer );
    
    [self calculateFramerateAtTimestamp:timestamp];
    
    // We must not use the GPU while running in the background.
    // setRenderingEnabled: takes the same lock so the caller can guarantee no GPU usage once the setter returns.
    @synchronized( _renderer )
    {
        if ( _renderingEnabled ) {
            CVPixelBufferRef sourcePixelBuffer = CMSampleBufferGetImageBuffer( sampleBuffer );
            // copyRenderedPixelBuffer follows the Create/Copy rule: we own the
            // returned buffer and must CFRelease it below.
            renderedPixelBuffer = [_renderer copyRenderedPixelBuffer:sourcePixelBuffer];
        }
        else {
            return;
        }
    }
    
    if ( renderedPixelBuffer )
    {
        @synchronized( self )
        {
            // Hand the frame to the preview delegate (dispatched async).
            [self outputPreviewPixelBuffer:renderedPixelBuffer];
            
            if ( _recordingStatus == DKRecordingStatusRecording ) {
                // Append to the movie file while recording.
                [_recorder appendVideoPixelBuffer:renderedPixelBuffer withPresentationTime:timestamp];
            }
        }
        
        CFRelease( renderedPixelBuffer );
    }
    else
    {
        // Renderer's buffer pool is exhausted — ask the delegate to flush.
        [self videoPipelineDidRunOutOfBuffers];
    }
}

// call under @synchronized( self )
- (void)outputPreviewPixelBuffer:(CVPixelBufferRef)previewPixelBuffer
{
    // Keep preview latency low by dropping stale frames that have not been picked up by the delegate yet
    // Note that access to currentPreviewPixelBuffer is protected by the @synchronized lock
    self.currentPreviewPixelBuffer = previewPixelBuffer;
    
    [self invokeDelegateCallbackAsync:^{
        
        CVPixelBufferRef currentPreviewPixelBuffer = NULL;
        @synchronized( self )
        {
            currentPreviewPixelBuffer = self.currentPreviewPixelBuffer;
            if ( currentPreviewPixelBuffer ) {
                CFRetain( currentPreviewPixelBuffer );
                self.currentPreviewPixelBuffer = NULL;
            }
        }
        
        if ( currentPreviewPixelBuffer ) {
            [_delegate capturePipeline:self previewPixelBufferReadyForDisplay:currentPreviewPixelBuffer];
            CFRelease( currentPreviewPixelBuffer );
        }
    }];
}

#pragma mark Recording State Machine

// call under @synchronized( self )
// Central recording state machine: records the new status and notifies the
// delegate of the transition (asynchronously, so the lock is never held
// across a delegate callback).
- (void)transitionToRecordingStatus:(DKRecordingStatus)newStatus error:(NSError *)error
{
    DKRecordingStatus oldStatus = _recordingStatus;
    _recordingStatus = newStatus;
    
    if ( newStatus != oldStatus )
    {
        dispatch_block_t delegateCallbackBlock = nil;
        
        // Any transition back to Idle that carries an error is a failure.
        if ( error && ( newStatus == DKRecordingStatusIdle ) )
        {
            delegateCallbackBlock = ^{ [_delegate capturePipeline:self recordingDidFailWithError:error]; };
        }
        else
        {
            // Map the legal transitions to their delegate notifications.
            if ( ( oldStatus == DKRecordingStatusStartingRecording ) && ( newStatus == DKRecordingStatusRecording ) ) {
                delegateCallbackBlock = ^{ [_delegate capturePipelineRecordingDidStart:self]; };
            }
            else if ( ( oldStatus == DKRecordingStatusRecording ) && ( newStatus == DKRecordingStatusStoppingRecording ) ) {
                delegateCallbackBlock = ^{ [_delegate capturePipelineRecordingWillStop:self]; };
            }
            else if ( ( oldStatus == DKRecordingStatusStoppingRecording ) && ( newStatus == DKRecordingStatusIdle ) ) {
                delegateCallbackBlock = ^{ [_delegate capturePipelineRecordingDidStop:self]; };
            }
        }
        
        if ( delegateCallbackBlock )
        {
            [self invokeDelegateCallbackAsync:delegateCallbackBlock];
        }
    }
}


#pragma mark MovieRecorder Delegate
// Recorder callback (on its serial callback queue): the asset writer is ready,
// move from StartingRecording to Recording.
- (void)movieRecorderDidFinishPreparing:(MovieRecorder *)recorder
{
    @synchronized( self )
    {
        if ( _recordingStatus != DKRecordingStatusStartingRecording ) {
            @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:@"Expected to be in StartingRecording state" userInfo:nil];
            return;
        }
        
        [self transitionToRecordingStatus:DKRecordingStatusRecording error:nil];
    }
}

// Recorder callback: recording failed. Drop the recorder and return to Idle;
// transitionToRecordingStatus:error: reports the error to the delegate.
- (void)movieRecorder:(MovieRecorder *)recorder didFailWithError:(NSError *)error
{
    @synchronized( self )
    {
        _recorder = nil;
        [self transitionToRecordingStatus:DKRecordingStatusIdle error:error];
    }
}

// Recorder callback (on its serial callback queue): the movie file has been
// fully written. Release the recorder and complete the stop transition.
- (void)movieRecorderDidFinishRecording:(MovieRecorder *)recorder
{
    @synchronized( self )
    {
        if ( _recordingStatus != DKRecordingStatusStoppingRecording ) {
            @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:@"Expected to be in StoppingRecording state" userInfo:nil];
        }
        
        // Do the release and the Idle transition under a single lock
        // acquisition. The previous code checked the same invariant in two
        // separate @synchronized blocks with an unsynchronized
        // `_recorder = nil` between them, leaving a window where another
        // thread could observe inconsistent state.
        _recorder = nil;
        [self transitionToRecordingStatus:DKRecordingStatusIdle error:nil];
    }
    // The export to the Saved Photos album via ALAssetsLibrary is
    // intentionally disabled; the clip stays in the temp directory.
    //    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
    //
    //    [library writeVideoAtPathToSavedPhotosAlbum:_recordingURL completionBlock:^(NSURL *assetURL, NSError *error) {
    //
    //        [[NSFileManager defaultManager] removeItemAtURL:_recordingURL error:NULL];
    //
    //        @synchronized( self )
    //        {
    //            if ( _recordingStatus != DKRecordingStatusStoppingRecording ) {
    //                @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:@"Expected to be in StoppingRecording state" userInfo:nil];
    //                return;
    //            }
    //            [self transitionToRecordingStatus:DKRecordingStatusIdle error:error];
    //        }
    //    }];
}


#pragma mark Utilities
// Dispatches a delegate callback onto the client-supplied queue. The
// autorelease pool bounds the lifetime of temporaries created per callback.
- (void)invokeDelegateCallbackAsync:(dispatch_block_t)callbackBlock
{
    dispatch_async( _delegateCallbackQueue, ^{
        @autoreleasepool {
            callbackBlock();
        }
    } );
}


// Auto mirroring: Front camera is mirrored; back camera isn't
// Computes the affine transform that rotates buffers from the capture
// connection's native orientation into the desired recording orientation,
// optionally mirroring front-camera content.
- (CGAffineTransform)transformFromVideoBufferOrientationToOrientation:(AVCaptureVideoOrientation)orientation withAutoMirroring:(BOOL)mirror
{
    CGAffineTransform transform = CGAffineTransformIdentity;
    
    // Calculate offsets from an arbitrary reference orientation (portrait)
    CGFloat orientationAngleOffset = angleOffsetFromPortraitOrientationToOrientation( orientation );
    CGFloat videoOrientationAngleOffset = angleOffsetFromPortraitOrientationToOrientation( _videoBufferOrientation );
    
    // Find the difference in angle between the desired orientation and the video orientation
    CGFloat angleOffset = orientationAngleOffset - videoOrientationAngleOffset;
    transform = CGAffineTransformMakeRotation( angleOffset );
    
    if ( _videoDevice.position == AVCaptureDevicePositionFront )
    {
        if ( mirror ) {
            transform = CGAffineTransformScale( transform, -1, 1 );
        }
        else {
            // Un-mirrored front camera content appears upside down in portrait;
            // compensate with a half turn.
            if ( UIInterfaceOrientationIsPortrait( (UIInterfaceOrientation)orientation ) ) {
                transform = CGAffineTransformRotate( transform, M_PI );
            }
        }
    }
    
    return transform;
}


// Returns the rotation (radians) from portrait to the given orientation.
// Unknown values map to 0, same as portrait.
static CGFloat angleOffsetFromPortraitOrientationToOrientation(AVCaptureVideoOrientation orientation)
{
    switch ( orientation )
    {
        case AVCaptureVideoOrientationPortraitUpsideDown:
            return M_PI;
        case AVCaptureVideoOrientationLandscapeRight:
            return -M_PI_2;
        case AVCaptureVideoOrientationLandscapeLeft:
            return M_PI_2;
        case AVCaptureVideoOrientationPortrait:
        default:
            return 0.0;
    }
}

// Updates videoFrameRate from the timestamps seen in the last second.
// Called on _videoDataOutputQueue for every rendered frame.
- (void)calculateFramerateAtTimestamp:(CMTime)timestamp
{
    [_previousSecondTimestamps addObject:[NSValue valueWithCMTime:timestamp]];
    
    CMTime oneSecond = CMTimeMake( 1, 1 );
    CMTime oneSecondAgo = CMTimeSubtract( timestamp, oneSecond );
    // Drop timestamps older than one second. The element just appended can
    // never be older than oneSecondAgo, so the array never empties and the
    // [0] access is safe.
    while( CMTIME_COMPARE_INLINE( [_previousSecondTimestamps[0] CMTimeValue], <, oneSecondAgo ) ) {
        [_previousSecondTimestamps removeObjectAtIndex:0];
    }
    
    if ( [_previousSecondTimestamps count] > 1 )
    {
        // frames-per-second = intervals / elapsed time across the window.
        const Float64 duration = CMTimeGetSeconds( CMTimeSubtract( [[_previousSecondTimestamps lastObject] CMTimeValue], [_previousSecondTimestamps[0] CMTimeValue] ) );
        const float newRate = (float)( [_previousSecondTimestamps count] - 1 ) / duration;
        self.videoFrameRate = newRate;
    }
}

// Returns how many of the preallocated temp clips currently exist on disk.
// The previous implementation counted EVERY file in NSTemporaryDirectory(),
// which overcounts whenever unrelated temp files are present and could drive
// out-of-bounds indexing in startRecording / getLastedTempVideoUrl. Count only
// the clip files this pipeline owns.
- (NSInteger)getCurrentTempedVideoCount
{
    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSInteger count = 0;
    for ( NSURL *url in _videoUrlArray ) {
        if ( [fileManager fileExistsAtPath:url.path] ) {
            count++;
        }
    }
    return count;
}

// Builds a file URL for the given filename inside the app's temp directory.
- (NSURL *)videoURLWithString:(NSString *)urlString
{
    NSString *path = [NSTemporaryDirectory() stringByAppendingPathComponent:urlString];
    return [NSURL fileURLWithPath:path];
}

// Synchronously extracts a thumbnail from the video at the given time.
// @param videoURL file URL of the video asset.
// @param time     position in SECONDS to sample.
// @return the thumbnail, or nil if generation failed.
+ (UIImage *)thumbnailImageForVideo:(NSURL *)videoURL atTime:(NSTimeInterval)time
{
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:videoURL options:nil];
    NSParameterAssert(asset);
    AVAssetImageGenerator *assetImageGenerator = [[AVAssetImageGenerator alloc] initWithAsset:asset];
    assetImageGenerator.appliesPreferredTrackTransform = YES;
    assetImageGenerator.apertureMode = AVAssetImageGeneratorApertureModeEncodedPixels;
    
    NSError *thumbnailImageGenerationError = nil;
    // `time` is seconds; the previous CMTimeMake(time, 60) treated it as
    // 60ths of a second (so 10s became 10/60s).
    CGImageRef thumbnailImageRef = [assetImageGenerator copyCGImageAtTime:CMTimeMakeWithSeconds(time, 60) actualTime:NULL error:&thumbnailImageGenerationError];
    
    if (!thumbnailImageRef) NSLog(@"thumbnailImageGenerationError %@", thumbnailImageGenerationError);
    
    UIImage *thumbnailImage = thumbnailImageRef ? [[UIImage alloc] initWithCGImage:thumbnailImageRef] : nil;
    
    // copyCGImageAtTime follows the Create/Copy rule — the returned CGImageRef
    // was leaked by the previous implementation. UIImage retains it, so it is
    // safe to release here.
    if (thumbnailImageRef) {
        CGImageRelease(thumbnailImageRef);
    }
    
    return thumbnailImage;
}

@end
