//
//  LYCameraManager.m
//  CustomCamera
//
//  Created by SuJiang on 2018/4/2.
//  Copyright © 2018年 appscomm. All rights reserved.
//

#import "LYCameraManager.h"
#import "MotionOrientation.h"
#import <AssetsLibrary/AssetsLibrary.h>

// Unique KVO context pointers. Each static variable stores its own address, so
// every registration in -addObservers carries a distinct, collision-proof
// context value. -observeValueForKeyPath:... branches on these pointers (not on
// key-path strings) and forwards anything unrecognized to super.
static void * SessionRunningContext = &SessionRunningContext;
static void * FocusModeContext = &FocusModeContext;
static void * ExposureModeContext = &ExposureModeContext;
static void * WhiteBalanceModeContext = &WhiteBalanceModeContext;
static void * LensPositionContext = &LensPositionContext;
static void * ExposureDurationContext = &ExposureDurationContext;
static void * ISOContext = &ISOContext;
static void * ExposureTargetBiasContext = &ExposureTargetBiasContext;
static void * ExposureTargetOffsetContext = &ExposureTargetOffsetContext;
static void * DeviceWhiteBalanceGainsContext = &DeviceWhiteBalanceGainsContext;

@interface LYCameraManager()<AVCaptureVideoDataOutputSampleBufferDelegate>

// Session management.
// sessionQueue serializes all AVCaptureSession mutation; captureQueue receives
// video sample buffers from the data output.
@property (nonatomic) dispatch_queue_t sessionQueue;
@property (nonatomic) dispatch_queue_t captureQueue;
@property (nonatomic, strong) AVCaptureSession *session;
@property (nonatomic, strong) AVCaptureDeviceInput *videoDeviceInput;
@property (nonatomic, strong) AVCaptureDevice *videoDevice;
// Present only in video mode (see -switchToMode:); nil in photo mode.
@property (nonatomic, strong) AVCaptureVideoDataOutput *videoOutput;
@property (nonatomic, strong) AVCaptureStillImageOutput *photoOutput;
@property (nonatomic,strong) AVCaptureConnection *connection;

/**
 *  Tracks the physical device orientation (updated from MotionOrientation
 *  accelerometer notifications).
 */
@property (nonatomic,assign) UIDeviceOrientation deviceOrientation;

// Utilities.
// setupResult records the outcome of authorization + session configuration;
// sessionRunning mirrors session.isRunning as last observed.
@property (nonatomic, assign) LYCameraSetupResult setupResult;
@property (nonatomic, getter=isSessionRunning, assign) BOOL sessionRunning;
@property (nonatomic, assign) UIBackgroundTaskIdentifier backgroundRecordingID;

@end


@implementation LYCameraManager

/// Convenience initializer: builds the manager via the designated -init and
/// immediately switches it into the requested capture mode (photo or video).
- (instancetype) initWithMode:(LYCameraCaptureMode)mode
{
    if ( (self = [self init]) ) {
        [self switchToMode:mode];
    }
    return self;
}

/// Designated initializer. Creates the capture session and its queues, checks
/// camera authorization, and kicks off session configuration asynchronously on
/// the session queue so -init never blocks the main thread.
- (instancetype)init
{
    self = [super init];
    if (self) {
        
        // NOTE(review): self.previewView is likely still nil this early, which
        // makes both the gesture add and the session assignment silent no-ops —
        // confirm when previewView is set relative to init.
        UITapGestureRecognizer *tap = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(focusAndExposeTap:)];
        [self.previewView addGestureRecognizer:tap];
        
        // Create the AVCaptureSession and its queues. sessionQueue serializes
        // all session mutation; captureQueue receives video sample buffers.
        self.session = [[AVCaptureSession alloc] init];
        self.previewView.session = self.session;
        self.sessionQueue = dispatch_queue_create("session queue", DISPATCH_QUEUE_SERIAL);
        // Fixed label typo ("capture queuee"); NULL and DISPATCH_QUEUE_SERIAL
        // are equivalent attributes, but the explicit constant documents intent.
        self.captureQueue = dispatch_queue_create("capture queue", DISPATCH_QUEUE_SERIAL);
        self.setupResult = LYCameraSetupResultSuccess;
        
        // Check video authorization status. Video access is required and audio access is optional.
        // If audio access is denied, audio is not recorded during movie recording.
        switch ([AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo]) {
            case AVAuthorizationStatusAuthorized:
            {
                // The user has previously granted access to the camera.
            }
                break;
                
            case AVAuthorizationStatusNotDetermined:
            {
                // The user has not yet been presented with the option to grant video access.
                // Suspend the session queue so -configureSession (already queued below)
                // cannot run until the access request completes.
                dispatch_suspend( self.sessionQueue );
                [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^( BOOL granted ) {
                    if ( ! granted ) {
                        self.setupResult = LYCameraSetupResultCameraNotAuthorized;
                    }
                    dispatch_resume( self.sessionQueue );
                }];
                break;
            }
            default:
            {
                // The user has previously denied access (or access is restricted).
                self.setupResult = LYCameraSetupResultCameraNotAuthorized;
                break;
            }
        }
        
        // Configure the capture session off the main queue: it is not safe to
        // mutate an AVCaptureSession (or its inputs/outputs/connections) from
        // multiple threads, and -[AVCaptureSession startRunning] is a blocking
        // call — dispatching to sessionQueue keeps the UI responsive.
        dispatch_async( self.sessionQueue, ^{
            [self configureSession];
        });
    }
    return self;
}


/// Starts the capture session on the session queue. If earlier setup failed
/// (authorization or configuration), reports the stored result to the delegate
/// on the main queue instead of starting.
- (void) startSessionRunning
{
    dispatch_async(self.sessionQueue, ^{
        
        LYCameraSetupResult result = self.setupResult;
        if ( result == LYCameraSetupResultSuccess ) {
            // Configuration succeeded: begin observing and spin up the session.
            [self addObservers];
            [self.session startRunning];
            self.sessionRunning = self.session.isRunning;
        }
        else {
            // LYCameraSetupResultCameraNotAuthorized or
            // LYCameraSetupResultSessionConfigurationFailed — surface it.
            dispatch_async( dispatch_get_main_queue(), ^{
                if ( [self.delegate respondsToSelector:@selector(cameraSessionSetupError:)] ) {
                    [self.delegate cameraSessionSetupError:result];
                }
            });
        }
        
    });
}

/// Stops the capture session (if it was ever successfully configured) and
/// removes the observers added in -startSessionRunning.
- (void) stopSessionRunning
{
    dispatch_async( self.sessionQueue, ^{
        if ( self.setupResult != LYCameraSetupResultSuccess ) {
            return;
        }
        [self.session stopRunning];
        [self removeObservers];
    });
}


#pragma mark - session management
/// Builds the capture graph: photo preset, video input (required), audio input
/// (optional), and a JPEG still-image output. Must run on sessionQueue. On any
/// fatal failure, sets setupResult to SessionConfigurationFailed and commits
/// the (partial) configuration before returning.
- (void) configureSession
{
    if (self.setupResult != LYCameraSetupResultSuccess)
    {
        // Authorization already failed; nothing to configure.
        return;
    }
    
    NSError *error = nil;
    
    [self.session beginConfiguration];
    
    self.session.sessionPreset = AVCaptureSessionPresetPhoto;
    
    // Add video input. Failure here is fatal for the session.
    AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
    if (!videoDeviceInput)
    {
        NSLog( @"Could not create video device input: %@", error);
        self.setupResult = LYCameraSetupResultSessionConfigurationFailed;
        [self.session commitConfiguration];
        return;
    }
    
    
    if ( [self.session canAddInput:videoDeviceInput] )
    {
        [self.session addInput:videoDeviceInput];
        self.videoDeviceInput = videoDeviceInput;
        self.videoDevice = videoDevice;
        [self switchFocusMode:AVCaptureFocusModeContinuousAutoFocus];
        
        dispatch_async( dispatch_get_main_queue(), ^{
            /*
             Dispatched to the main queue because AVCaptureVideoPreviewLayer is
             the backing layer for the preview view, and UIView can only be
             manipulated on the main thread. (As an exception, video orientation
             changes on the preview layer's connection need not be serialized
             with other session manipulation.)
             
             Use the status bar orientation as the initial video orientation;
             subsequent orientation changes are handled elsewhere.
             */
            UIInterfaceOrientation statusBarOrientation = [UIApplication sharedApplication].statusBarOrientation;
            AVCaptureVideoOrientation initialVideoOrientation = AVCaptureVideoOrientationPortrait;
            if ( statusBarOrientation != UIInterfaceOrientationUnknown ) {
                initialVideoOrientation = (AVCaptureVideoOrientation)statusBarOrientation;
            }
            
            AVCaptureVideoPreviewLayer *previewLayer = (AVCaptureVideoPreviewLayer *)self.previewView.layer;
            previewLayer.connection.videoOrientation = initialVideoOrientation;
        });
    }
    else {
        NSLog( @"Could not add video device input to the session" );
        self.setupResult = LYCameraSetupResultSessionConfigurationFailed;
        [self.session commitConfiguration];
        return;
    }
    
    
    // Add audio input. Audio is optional — recording simply proceeds without
    // sound — so failures are only logged, never fatal.
    AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    AVCaptureDeviceInput *audioDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error];
    if ( ! audioDeviceInput ) {
        NSLog( @"Could not create audio device input: %@", error );
    }
    // Guarded by the nil check above: previously a creation failure fell
    // through and also logged a misleading "Could not add" message.
    else if ( [self.session canAddInput:audioDeviceInput] ) {
        [self.session addInput:audioDeviceInput];
    }
    else {
        NSLog( @"Could not add audio device input to the session" );
    }
    
    
    // Add the still-image (photo) output, configured for JPEG capture.
    self.photoOutput = [[AVCaptureStillImageOutput alloc] init];
    NSDictionary *outputSettings = @{AVVideoCodecKey: AVVideoCodecJPEG};
    [self.photoOutput setOutputSettings:outputSettings];
    if ([self.session canAddOutput:self.photoOutput])
    {
        [self.session addOutput:self.photoOutput];
    }
    else
    {
        NSLog( @"Could not add photo output to the session" );
        self.setupResult = LYCameraSetupResultSessionConfigurationFailed;
        [self.session commitConfiguration];
        return;
    }
    
    // No AVCaptureMovieFileOutput here: it does not support recording with
    // AVCaptureSessionPresetPhoto. The video data output is attached on demand
    // in -switchToMode:.
    self.backgroundRecordingID = UIBackgroundTaskInvalid;
    
    [self.session commitConfiguration];
}

/// Attempts to restart the session after an interruption. A failure to start
/// (e.g. a phone or FaceTime call still holds audio/video) is communicated via
/// a session runtime error notification; to avoid repeatedly failing we only
/// retry in the runtime error handler when not already resuming here.
- (void) resumeInterruptedSession
{
    dispatch_async( self.sessionQueue, ^{
        [self.session startRunning];
        BOOL nowRunning = self.session.isRunning;
        self.sessionRunning = nowRunning;
        if ( nowRunning ) {
            NSLog(@"resume successfully!");
        }
        else {
            // Tell the delegate (on the main queue) that resuming failed.
            dispatch_async( dispatch_get_main_queue(), ^{
                if ( [self.delegate respondsToSelector:@selector(cameraSessionResumeFailed)] ) {
                    [self.delegate cameraSessionResumeFailed];
                }
            });
        }
    });
}

/// Reconfigures the session for photo or video capture. Photo mode removes the
/// video data output (movie capture is unsupported with the Photo preset);
/// video mode attaches a BGRA video data output delivering frames to
/// captureQueue and switches to the High preset.
- (void) switchToMode:(LYCameraCaptureMode)mode
{
    
    if (mode == LYCameraCaptureModePhoto)
    {
        dispatch_async(self.sessionQueue, ^{
            // Drop the video data output; it is incompatible with
            // AVCaptureSessionPresetPhoto. removeOutput:nil is a harmless no-op.
            [self.session beginConfiguration];
            [self.session removeOutput:self.videoOutput];
            self.session.sessionPreset = AVCaptureSessionPresetPhoto;
            [self.session commitConfiguration];
            
            self.videoOutput = nil;
        });
    }
    else if(mode == LYCameraCaptureModeVideo)
    {
        dispatch_async( self.sessionQueue, ^{
            AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
            output.videoSettings = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };
            output.alwaysDiscardsLateVideoFrames = YES;
            [output setSampleBufferDelegate:self queue:self.captureQueue];
            
            if ( ![self.session canAddOutput:output] ) {
                return;
            }
            [self.session beginConfiguration];
            [self.session addOutput:output];
            self.session.sessionPreset = AVCaptureSessionPresetHigh;
            // Enable automatic video stabilization where the hardware supports it.
            AVCaptureConnection *videoConnection = [output connectionWithMediaType:AVMediaTypeVideo];
            if ( videoConnection.isVideoStabilizationSupported ) {
                videoConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
            }
            [self.session commitConfiguration];
            
            self.videoOutput = output;
            
            dispatch_async( dispatch_get_main_queue(), ^{
                NSLog(@"视频模式成功");
            });
        });
    }
}


// Focus
/// Applies a focus mode to the video device if the device supports it,
/// bracketing the change with lock/unlockForConfiguration.
- (void) switchFocusMode:(AVCaptureFocusMode)mode
{
    AVCaptureDevice *device = self.videoDevice;
    NSError *error = nil;
    if ( ![device lockForConfiguration:&error] ) {
        NSLog( @"Could not lock device for configuration: %@", error );
        return;
    }
    if ( [device isFocusModeSupported:mode] ) {
        device.focusMode = mode;
    }
    else {
        NSLog( @"Focus mode %@ is not supported. Focus mode is %@.", [self stringFromFocusMode:mode], [self stringFromFocusMode:device.focusMode] );
    }
    [device unlockForConfiguration];
}

/// Sets a fixed lens position (0.0–1.0). Only valid while the device is in
/// AVCaptureFocusModeLocked; otherwise logs and does nothing.
- (void) setFocusLensPosition:(CGFloat)position
{
    AVCaptureDevice *device = self.videoDevice;
    if (device.focusMode != AVCaptureFocusModeLocked)
    {
        NSLog(@"非锁定对焦模式下，不可以设置这个值！");
        return;
    }
    NSError *error = nil;
    if ( [device lockForConfiguration:&error] ) {
        [device setFocusModeLockedWithLensPosition:position completionHandler:nil];
        [device unlockForConfiguration];
    }
    else {
        NSLog( @"Could not lock device for configuration: %@", error );
    }
}

/// Tap handler on the preview view: converts the tap from view coordinates to
/// the device's normalized (0–1) space and drives focus/exposure at that point.
- (void) focusAndExposeTap:(UIGestureRecognizer *)gestureRecognizer
{
    AVCaptureVideoPreviewLayer *previewLayer = (AVCaptureVideoPreviewLayer *)self.previewView.layer;
    CGPoint viewPoint = [gestureRecognizer locationInView:gestureRecognizer.view];
    CGPoint devicePoint = [previewLayer captureDevicePointOfInterestForPoint:viewPoint];
    [self focusWithMode:self.videoDevice.focusMode exposeWithMode:self.videoDevice.exposureMode atDevicePoint:devicePoint monitorSubjectAreaChange:YES];
}


/// Points focus and exposure at a normalized device point, skipping locked
/// focus and custom exposure modes, and toggles subject-area monitoring.
/// All device mutation happens on the session queue under a configuration lock.
- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposeWithMode:(AVCaptureExposureMode)exposureMode atDevicePoint:(CGPoint)point monitorSubjectAreaChange:(BOOL)monitorSubjectAreaChange
{
    dispatch_async( self.sessionQueue, ^{
        AVCaptureDevice *device = self.videoDevice;
        
        NSError *lockError = nil;
        if ( ![device lockForConfiguration:&lockError] ) {
            NSLog( @"Could not lock device for configuration: %@", lockError );
            return;
        }
        
        // Setting (focus/exposure)PointOfInterest alone does not start an
        // operation; the mode must be (re)applied for the point to take effect.
        BOOL applyFocus = ( focusMode != AVCaptureFocusModeLocked
                           && device.isFocusPointOfInterestSupported
                           && [device isFocusModeSupported:focusMode] );
        if ( applyFocus ) {
            device.focusPointOfInterest = point;
            device.focusMode = focusMode;
        }
        
        BOOL applyExposure = ( exposureMode != AVCaptureExposureModeCustom
                              && device.isExposurePointOfInterestSupported
                              && [device isExposureModeSupported:exposureMode] );
        if ( applyExposure ) {
            device.exposurePointOfInterest = point;
            device.exposureMode = exposureMode;
        }
        
        device.subjectAreaChangeMonitoringEnabled = monitorSubjectAreaChange;
        [device unlockForConfiguration];
    } );
}

#pragma mark - Exposure
/// Applies an exposure target bias (EV) to the video device.
/// The value is clamped to [minExposureTargetBias, maxExposureTargetBias]:
/// -setExposureTargetBias:completionHandler: raises an
/// NSInvalidArgumentException for out-of-range values, which previously crashed
/// on e.g. unclamped slider input.
- (void) setExposureTargetBias:(CGFloat)bias
{
    AVCaptureDevice *device = self.videoDevice;
    float clampedBias = (float)bias;
    if ( device ) {
        clampedBias = MAX( device.minExposureTargetBias, MIN( device.maxExposureTargetBias, clampedBias ) );
    }
    NSError *error = nil;
    if ( [device lockForConfiguration:&error] ) {
        [device setExposureTargetBias:clampedBias completionHandler:nil];
        [device unlockForConfiguration];
    }
    else {
        NSLog( @"Could not lock device for configuration: %@", error);
    }
}


#pragma mark - Notification handlers
/// The subject in front of the camera moved: re-run focus/exposure at the frame
/// center and disable monitoring until the next user-initiated tap-to-focus.
- (void)subjectAreaDidChange:(NSNotification *)notification
{
    CGPoint centerPoint = CGPointMake( 0.5, 0.5 );
    [self focusWithMode:self.videoDevice.focusMode exposeWithMode:self.videoDevice.exposureMode atDevicePoint:centerPoint monitorSubjectAreaChange:NO];
}

/// Runtime-error handler. If media services were reset, restart the session —
/// but only when we believe it should be running, to avoid fighting with
/// -resumeInterruptedSession (see the comment there).
- (void)sessionRuntimeError:(NSNotification *)notification
{
    NSError *error = notification.userInfo[AVCaptureSessionErrorKey];
    NSLog( @"Capture session runtime error: %@", error );
    
    if ( error.code != AVErrorMediaServicesWereReset ) {
        return;
    }
    dispatch_async( self.sessionQueue, ^{
        if ( self.isSessionRunning ) {
            [self.session startRunning];
            self.sessionRunning = self.session.isRunning;
        }
    } );
}


// Called when the capture session is interrupted (e.g. an incoming phone call,
// or a multi-app layout on iPad — see AVCaptureSessionInterruptionReason).
// Intentionally empty for now.
// TODO: surface the interruption to the delegate / show a "preview paused" UI.
- (void)sessionWasInterrupted:(NSNotification *)notification
{
    
}


// Counterpart of -sessionWasInterrupted:, posted when the interruption ends.
// The session resumes on its own, so this only logs.
- (void)sessionInterruptionEnded:(NSNotification *)notification
{
    NSLog( @"Capture session interruption ended" );
}

#pragma mark - Image orientation
/// Maps a UIDeviceOrientation to the AVCaptureVideoOrientation to stamp on
/// captured media. The enums share raw values for the portrait cases, but the
/// landscape cases are mirrored (rotating the device left turns the captured
/// content right). The previous blind cast also produced invalid orientation
/// values for Unknown(0)/FaceUp(5)/FaceDown(6); those now fall back to portrait.
- (AVCaptureVideoOrientation)avOrientationForDeviceOrientation:(UIDeviceOrientation)deviceOrientation
{
    switch ( deviceOrientation ) {
        case UIDeviceOrientationPortrait:
            return AVCaptureVideoOrientationPortrait;
        case UIDeviceOrientationPortraitUpsideDown:
            return AVCaptureVideoOrientationPortraitUpsideDown;
        case UIDeviceOrientationLandscapeLeft:
            return AVCaptureVideoOrientationLandscapeRight;
        case UIDeviceOrientationLandscapeRight:
            return AVCaptureVideoOrientationLandscapeLeft;
        default:
            // Unknown / FaceUp / FaceDown have no video equivalent.
            return AVCaptureVideoOrientationPortrait;
    }
}

#pragma mark - Accelerometer notifications (tracking device orientation)
/// MotionOrientationChangedNotification handler: records the latest
/// accelerometer-derived device orientation, hopping to the main queue first.
- (void)motionDeviceOrientationChanged:(NSNotification *)notification
{
    dispatch_async(dispatch_get_main_queue(), ^{
        self.deviceOrientation = [MotionOrientation sharedInstance].deviceOrientation;
    });
}


#pragma mark KVO and Notifications

// Registers KVO on the session/device state this class reacts to — each with
// its unique static context pointer — plus NSNotification handlers for
// subject-area changes, runtime errors, interruptions and accelerometer
// orientation. Balanced by -removeObservers; called from -startSessionRunning.
- (void)addObservers
{
    [self addObserver:self forKeyPath:@"session.running" options:NSKeyValueObservingOptionNew context:SessionRunningContext];
    [self addObserver:self forKeyPath:@"videoDevice.focusMode" options:(NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew) context:FocusModeContext];
    [self addObserver:self forKeyPath:@"videoDevice.lensPosition" options:NSKeyValueObservingOptionNew context:LensPositionContext];
    [self addObserver:self forKeyPath:@"videoDevice.exposureMode" options:(NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew) context:ExposureModeContext];
    [self addObserver:self forKeyPath:@"videoDevice.exposureDuration" options:NSKeyValueObservingOptionNew context:ExposureDurationContext];
    [self addObserver:self forKeyPath:@"videoDevice.ISO" options:NSKeyValueObservingOptionNew context:ISOContext];
    [self addObserver:self forKeyPath:@"videoDevice.exposureTargetBias" options:NSKeyValueObservingOptionNew context:ExposureTargetBiasContext];
    [self addObserver:self forKeyPath:@"videoDevice.exposureTargetOffset" options:NSKeyValueObservingOptionNew context:ExposureTargetOffsetContext];
    [self addObserver:self forKeyPath:@"videoDevice.whiteBalanceMode" options:(NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew) context:WhiteBalanceModeContext];
    [self addObserver:self forKeyPath:@"videoDevice.deviceWhiteBalanceGains" options:NSKeyValueObservingOptionNew context:DeviceWhiteBalanceGainsContext];
    
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:self.videoDevice];
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(sessionRuntimeError:) name:AVCaptureSessionRuntimeErrorNotification object:self.session];
    // A session can only run when the app is full screen. It will be interrupted in a multi-app layout, introduced in iOS 9,
    // see also the documentation of AVCaptureSessionInterruptionReason. Add observers to handle these session interruptions
    // and show a preview is paused message. See the documentation of AVCaptureSessionWasInterruptedNotification for other
    // interruption reasons.
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(sessionWasInterrupted:) name:AVCaptureSessionWasInterruptedNotification object:self.session];
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(sessionInterruptionEnded:) name:AVCaptureSessionInterruptionEndedNotification object:self.session];
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(motionDeviceOrientationChanged:) name:MotionOrientationChangedNotification object:nil];
}

// Tears down everything -addObservers registered. The single
// removeObserver: call drops all NSNotificationCenter registrations at once;
// each KVO key path is then removed with its matching context pointer so only
// our own observations are affected.
- (void)removeObservers
{
    [[NSNotificationCenter defaultCenter] removeObserver:self];
    
    [self removeObserver:self forKeyPath:@"session.running" context:SessionRunningContext];
    [self removeObserver:self forKeyPath:@"videoDevice.focusMode" context:FocusModeContext];
    [self removeObserver:self forKeyPath:@"videoDevice.lensPosition" context:LensPositionContext];
    [self removeObserver:self forKeyPath:@"videoDevice.exposureMode" context:ExposureModeContext];
    [self removeObserver:self forKeyPath:@"videoDevice.exposureDuration" context:ExposureDurationContext];
    [self removeObserver:self forKeyPath:@"videoDevice.ISO" context:ISOContext];
    [self removeObserver:self forKeyPath:@"videoDevice.exposureTargetBias" context:ExposureTargetBiasContext];
    [self removeObserver:self forKeyPath:@"videoDevice.exposureTargetOffset" context:ExposureTargetOffsetContext];
    [self removeObserver:self forKeyPath:@"videoDevice.whiteBalanceMode" context:WhiteBalanceModeContext];
    [self removeObserver:self forKeyPath:@"videoDevice.deviceWhiteBalanceGains" context:DeviceWhiteBalanceGainsContext];
}

// Central KVO dispatch. Branches on the static context pointers registered in
// -addObservers (never on key-path strings) and forwards unrecognized contexts
// to super, per KVO convention. Most branch bodies are currently commented out
// (the HUD UI they updated is not wired up); the live branches log
// focus/exposure mode transitions and reset the frame-duration clamp when
// leaving custom exposure.
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
    // KVO reports missing old/new values as NSNull, hence the explicit checks below.
    id oldValue = change[NSKeyValueChangeOldKey];
    id newValue = change[NSKeyValueChangeNewKey];
    
    if ( context == FocusModeContext ) {
        // Log focus-mode transitions (old -> new) on the main queue.
        if ( newValue && newValue != [NSNull null] ) {
            AVCaptureFocusMode newMode = [newValue intValue];
            dispatch_async( dispatch_get_main_queue(), ^{
                if ( oldValue && oldValue != [NSNull null] ) {
                    AVCaptureFocusMode oldMode = [oldValue intValue];
                    NSLog( @"focus mode: %@ -> %@", [self stringFromFocusMode:oldMode], [self stringFromFocusMode:newMode] );
                }
                else {
                    NSLog( @"focus mode: %@", [self stringFromFocusMode:newMode] );
                }
            } );
        }
    }
    else if ( context == LensPositionContext ) {
//        if ( newValue && newValue != [NSNull null] ) {
//            AVCaptureFocusMode focusMode = self.videoDevice.focusMode;
//            float newLensPosition = [newValue floatValue];
//            dispatch_async( dispatch_get_main_queue(), ^{
//                NSLog(@"lensPositionValue: %.1f", newLensPosition);
//            });
//        }
    }
    else if ( context == ExposureModeContext ) {
        // Log exposure-mode transitions, and when leaving custom exposure mode
        // restore the default frame-rate range (see the comment below).
        if ( newValue && newValue != [NSNull null] ) {
            AVCaptureExposureMode newMode = [newValue intValue];
            if ( oldValue && oldValue != [NSNull null] ) {
                AVCaptureExposureMode oldMode = [oldValue intValue];
                /*
                 It’s important to understand the relationship between exposureDuration and the minimum frame rate as represented by activeVideoMaxFrameDuration.
                 In manual mode, if exposureDuration is set to a value that's greater than activeVideoMaxFrameDuration, then activeVideoMaxFrameDuration will
                 increase to match it, thus lowering the minimum frame rate. If exposureMode is then changed to automatic mode, the minimum frame rate will
                 remain lower than its default. If this is not the desired behavior, the min and max frameRates can be reset to their default values for the
                 current activeFormat by setting activeVideoMaxFrameDuration and activeVideoMinFrameDuration to kCMTimeInvalid.
                 */
                if ( oldMode != newMode && oldMode == AVCaptureExposureModeCustom ) {
                    NSError *error = nil;
                    if ( [self.videoDevice lockForConfiguration:&error] ) {
                        self.videoDevice.activeVideoMaxFrameDuration = kCMTimeInvalid;
                        self.videoDevice.activeVideoMinFrameDuration = kCMTimeInvalid;
                        [self.videoDevice unlockForConfiguration];
                    }
                    else {
                        NSLog( @"Could not lock device for configuration: %@", error );
                    }
                }
            }
            dispatch_async( dispatch_get_main_queue(), ^{
                
                //                self.exposureModeControl.selectedSegmentIndex = [self.exposureModes indexOfObject:@(newMode)];
                //                self.exposureDurationSlider.enabled = ( newMode == AVCaptureExposureModeCustom );
                //                self.ISOSlider.enabled = ( newMode == AVCaptureExposureModeCustom );
                
                if ( oldValue && oldValue != [NSNull null] ) {
                    AVCaptureExposureMode oldMode = [oldValue intValue];
                    NSLog( @"exposure mode: %@ -> %@", [self stringFromExposureMode:oldMode], [self stringFromExposureMode:newMode] );
                }
                else {
                    NSLog( @"exposure mode: %@", [self stringFromExposureMode:newMode] );
                }
            } );
        }
    }
    else if ( context == ExposureDurationContext ) {
//        if ( newValue && newValue != [NSNull null] ) {
//            double newDurationSeconds = CMTimeGetSeconds( [newValue CMTimeValue] );
//            AVCaptureExposureMode exposureMode = self.videoDevice.exposureMode;
//
//            double minDurationSeconds = MAX( CMTimeGetSeconds( self.videoDevice.activeFormat.minExposureDuration ), kExposureMinimumDuration );
//            double maxDurationSeconds = CMTimeGetSeconds( self.videoDevice.activeFormat.maxExposureDuration );
//            // Map from duration to non-linear UI range 0-1
//            double p = ( newDurationSeconds - minDurationSeconds ) / ( maxDurationSeconds - minDurationSeconds ); // Scale to 0-1
//            dispatch_async( dispatch_get_main_queue(), ^{
//                if ( exposureMode != AVCaptureExposureModeCustom ) {
//                    self.exposureDurationSlider.value = pow( p, 1 / kExposureDurationPower ); // Apply inverse power
//                }
//                if ( newDurationSeconds < 1 ) {
//                    int digits = MAX( 0, 2 + floor( log10( newDurationSeconds ) ) );
//                    self.exposureDurationValueLabel.text = [NSString stringWithFormat:@"1/%.*f", digits, 1/newDurationSeconds];
//                }
//                else {
//                    self.exposureDurationValueLabel.text = [NSString stringWithFormat:@"%.2f", newDurationSeconds];
//                }
//            } );
//        }
    }
    else if ( context == ISOContext ) {
//        if ( newValue && newValue != [NSNull null] ) {
//            float newISO = [newValue floatValue];
//            AVCaptureExposureMode exposureMode = self.videoDevice.exposureMode;
//
//            dispatch_async( dispatch_get_main_queue(), ^{
//                if ( exposureMode != AVCaptureExposureModeCustom ) {
//                    self.ISOSlider.value = newISO;
//                }
//                self.ISOValueLabel.text = [NSString stringWithFormat:@"%i", (int)newISO];
//            } );
//        }
    }
    else if ( context == ExposureTargetBiasContext ) {
//        if ( newValue && newValue != [NSNull null] ) {
//            float newExposureTargetBias = [newValue floatValue];
//            dispatch_async( dispatch_get_main_queue(), ^{
//                self.exposureTargetBiasValueLabel.text = [NSString stringWithFormat:@"%.1f", newExposureTargetBias];
//            });
//        }
    }
    else if ( context == ExposureTargetOffsetContext ) {
//        if ( newValue && newValue != [NSNull null] ) {
//            float newExposureTargetOffset = [newValue floatValue];
//            dispatch_async( dispatch_get_main_queue(), ^{
//                //                self.exposureTargetOffsetSlider.value = newExposureTargetOffset;
//                //                self.exposureTargetOffsetValueLabel.text = [NSString stringWithFormat:@"%.1f", newExposureTargetOffset];
//            } );
//        }
    }
    else if ( context == WhiteBalanceModeContext ) {
//        if ( newValue && newValue != [NSNull null] ) {
//            AVCaptureWhiteBalanceMode newMode = [newValue intValue];
//            dispatch_async( dispatch_get_main_queue(), ^{
//                //                self.whiteBalanceModeControl.selectedSegmentIndex = [self.whiteBalanceModes indexOfObject:@(newMode)];
//                //                self.temperatureSlider.enabled = ( newMode == AVCaptureWhiteBalanceModeLocked );
//                //                self.tintSlider.enabled = ( newMode == AVCaptureWhiteBalanceModeLocked );
//
//                if ( oldValue && oldValue != [NSNull null] ) {
//                    AVCaptureWhiteBalanceMode oldMode = [oldValue intValue];
//                    NSLog( @"white balance mode: %@ -> %@", [self stringFromWhiteBalanceMode:oldMode], [self stringFromWhiteBalanceMode:newMode] );
//                }
//            });
//        }
    }
    else if ( context == DeviceWhiteBalanceGainsContext ) {
//        if ( newValue && newValue != [NSNull null] ) {
//            AVCaptureWhiteBalanceGains newGains;
//            [newValue getValue:&newGains];
//            AVCaptureWhiteBalanceTemperatureAndTintValues newTemperatureAndTint = [self.videoDevice temperatureAndTintValuesForDeviceWhiteBalanceGains:newGains];
//            AVCaptureWhiteBalanceMode whiteBalanceMode = self.videoDevice.whiteBalanceMode;
//            dispatch_async( dispatch_get_main_queue(), ^{
//                if ( whiteBalanceMode != AVCaptureExposureModeLocked ) {
//                    //                    self.temperatureSlider.value = newTemperatureAndTint.temperature;
//                    //                    self.tintSlider.value = newTemperatureAndTint.tint;
//                }
//
//                //                self.temperatureValueLabel.text = [NSString stringWithFormat:@"%i", (int)newTemperatureAndTint.temperature];
//                //                self.tintValueLabel.text = [NSString stringWithFormat:@"%i", (int)newTemperatureAndTint.tint];
//            } );
//        }
    }
    else if ( context == SessionRunningContext ) {
        // Session start/stop; the dependent UI updates are not yet implemented.
        BOOL isRunning = NO;
        if ( newValue && newValue != [NSNull null] ) {
            isRunning = [newValue boolValue];
        }
        dispatch_async( dispatch_get_main_queue(), ^{
            //            self.cameraButton.enabled = isRunning && ( self.videoDeviceDiscoverySession.devices.count > 1 );
            //            self.recordButton.enabled = isRunning && ( self.captureModeControl.selectedSegmentIndex == AVCamManualCaptureModeMovie );
            //            self.photoButton.enabled = isRunning;
            //            self.HUDButton.enabled = isRunning;
            //            self.captureModeControl.enabled = isRunning;
        } );
    }
    else {
        // Not one of our contexts — a superclass may have registered it.
        [super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
    }
}

#pragma mark Utilities

/// Returns a short human-readable English name for the given focus mode,
/// intended for logging/debug output. Unknown values map to a sentinel string.
- (NSString *)stringFromFocusMode:(AVCaptureFocusMode)focusMode
{
    switch ( focusMode ) {
        case AVCaptureFocusModeLocked:
            return @"Locked";
        case AVCaptureFocusModeAutoFocus:
            return @"Auto";
        case AVCaptureFocusModeContinuousAutoFocus:
            return @"ContinuousAuto";
        default:
            return @"INVALID FOCUS MODE";
    }
}

/// Returns a short human-readable English name for the given exposure mode,
/// intended for logging/debug output. Unknown values map to a sentinel string.
- (NSString *)stringFromExposureMode:(AVCaptureExposureMode)exposureMode
{
    switch ( exposureMode ) {
        case AVCaptureExposureModeLocked:
            return @"Locked";
        case AVCaptureExposureModeAutoExpose:
            return @"Auto";
        case AVCaptureExposureModeContinuousAutoExposure:
            return @"ContinuousAuto";
        case AVCaptureExposureModeCustom:
            return @"Custom";
        default:
            return @"INVALID EXPOSURE MODE";
    }
}

/// Returns a short human-readable English name for the given white balance
/// mode, intended for logging/debug output. Unknown values map to a sentinel.
- (NSString *)stringFromWhiteBalanceMode:(AVCaptureWhiteBalanceMode)whiteBalanceMode
{
    switch ( whiteBalanceMode ) {
        case AVCaptureWhiteBalanceModeLocked:
            return @"Locked";
        case AVCaptureWhiteBalanceModeAutoWhiteBalance:
            return @"Auto";
        case AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance:
            return @"ContinuousAuto";
        default:
            return @"INVALID WHITE BALANCE MODE";
    }
}




#pragma mark - getter & setter
/// Lazily creates the preview view on first access. It starts with a zero
/// frame; the caller is expected to size it (setFrame: or Auto Layout).
- (LYCameraPreviewView *) previewView
{
    if (_previewView == nil) {
        LYCameraPreviewView *view = [[LYCameraPreviewView alloc] initWithFrame:CGRectZero];
        _previewView = view;
    }
    return _previewView;
}

#pragma mark - 动作
/// Captures a single still image from the photo output and delivers it to
/// the handler as a UIImage decoded from the JPEG representation.
/// The connection's video orientation is aligned with the recorded device
/// orientation first, so the resulting image is upright.
/// @param handler Called asynchronously with the captured image, or with nil
///                when the capture failed. May be nil.
- (void) captureWithCallback:(void (^)(UIImage *image))handler
{
    self.connection = [self.photoOutput connectionWithMediaType:AVMediaTypeVideo];
    
    // Match the still image orientation to the current device orientation.
    AVCaptureVideoOrientation avcaptureOrientation = [self avOrientationForDeviceOrientation: self.deviceOrientation];
    [self.connection setVideoOrientation:avcaptureOrientation];
    
    [self.photoOutput captureStillImageAsynchronouslyFromConnection:self.connection completionHandler:^(CMSampleBufferRef  _Nullable imageDataSampleBuffer, NSError * _Nullable error) {
        UIImage *image = nil;
        // Guard: on failure the sample buffer can be NULL, and passing NULL to
        // +jpegStillImageNSDataRepresentation: throws. Deliver nil instead.
        if ( imageDataSampleBuffer != NULL ) {
            NSData *jpegData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
            //原图 (original image)
            image = [UIImage imageWithData:jpegData];
        }
        else {
            NSLog( @"LYCameraManager: still image capture failed: %@", error );
        }
        // Nil-check the optional block before invoking it.
        if ( handler ) {
            handler(image);
        }
    }];
}

/// Captures a still image, writes it to the saved photos album when the app
/// already holds photo-library authorization, then invokes the handler.
/// NOTE(review): ALAssetsLibrary is deprecated since iOS 9 — consider
/// migrating the authorization check to the Photos framework (PHPhotoLibrary).
/// @param handler Called with the captured image (or nil on failure). May be nil.
- (void) captureAndSaveToAlbumWithCallback:(void (^)(UIImage *image))handler
{
    [self captureWithCallback:^(UIImage *image) {
        // Save only when an image was produced AND authorization was already
        // granted; other statuses (not determined/denied/restricted) skip the
        // save silently, matching the original best-effort behavior.
        // Guarding image != nil also avoids the exception
        // UIImageWriteToSavedPhotosAlbum raises for a nil image.
        ALAuthorizationStatus author = [ALAssetsLibrary authorizationStatus];
        if (image && author == ALAuthorizationStatusAuthorized)
        {
            UIImageWriteToSavedPhotosAlbum(image, self, nil, NULL);
        }
        // Nil-check the optional block before invoking it.
        if (handler) {
            handler(image);
        }
    }];
}


#pragma mark - capture delegate
/// AVCaptureVideoDataOutputSampleBufferDelegate callback: repacks each video
/// frame into a buffer whose row stride is 16-byte aligned, wraps it in a
/// CGImage, and forwards it to the delegate.
/// NOTE(review): the CGImageRef is handed to the delegate without a matching
/// CGImageRelease here; per the Create rule the receiver must release it —
/// confirm the delegate implementation does so, otherwise each frame leaks.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {
    if (![self.delegate respondsToSelector:@selector(cameraSessionVideoFrame:)])
    {
        return;
    }
    CVImageBufferRef videoFrame = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (videoFrame == NULL) {
        // Sample buffer carried no image buffer; nothing to deliver.
        return;
    }
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(videoFrame);
    
    size_t width = CVPixelBufferGetWidth(videoFrame);
    size_t height = CVPixelBufferGetHeight(videoFrame);
    
    // Destination row stride: width * 4 bytes (assumes a 4-byte-per-pixel
    // BGRA-style format — TODO confirm against the session's pixel format),
    // rounded up to a multiple of 16 for CGBitmapContext alignment.
    size_t newBytesPerRow = ((width*4+0xf)>>4)<<4;
    
    // calloc (not malloc) so alignment padding bytes are zeroed and the
    // produced image is deterministic; also guard against allocation failure.
    size_t size = newBytesPerRow*height;
    int8_t *bytes = (int8_t *)calloc(size, sizeof(int8_t));
    if (bytes == NULL) {
        return;
    }
    
    CVPixelBufferLockBaseAddress(videoFrame,0);
    
    int8_t *baseAddress = (int8_t *)CVPixelBufferGetBaseAddress(videoFrame);
    
    if (newBytesPerRow == bytesPerRow) {
        // Strides already match: one bulk copy of the whole plane.
        memcpy(bytes, baseAddress, size * sizeof(int8_t));
    }
    else
    {
        // Copy only the actual pixel payload of each row. The original copied
        // newBytesPerRow bytes from every source row, which over-reads the
        // pixel buffer whenever bytesPerRow < newBytesPerRow (and reads past
        // the end of the buffer on the last row).
        size_t payload = (size_t)(width * 4);
        if (payload > bytesPerRow) {
            payload = bytesPerRow;
        }
        for (size_t y = 0; y < height; y++) {
            memcpy(bytes + y * newBytesPerRow,
                   baseAddress + y * bytesPerRow,
                   payload);
        }
    }
    CVPixelBufferUnlockBaseAddress(videoFrame, 0);
    
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(bytes,
                                                    width,
                                                    height,
                                                    8,
                                                    newBytesPerRow,
                                                    colorSpace,
                                                    kCGBitmapByteOrder32Little|
                                                    kCGImageAlphaNoneSkipFirst);
    CGColorSpaceRelease(colorSpace);
    if (newContext == NULL) {
        // Context creation can fail (e.g. unsupported parameters);
        // don't dereference NULL — just drop this frame.
        free(bytes);
        return;
    }
    
    CGImageRef result = CGBitmapContextCreateImage(newContext);
    
    CGContextRelease(newContext);
    
    free(bytes);
    
    [self.delegate cameraSessionVideoFrame:result];
}


@end
