/*
 Copyright (C) 2015 Apple Inc. All Rights Reserved.
 See LICENSE.txt for this sample’s licensing information
 
 Abstract:
 View controller for camera interface.
 */

@import AVFoundation;
@import Photos;

#import "AAPLCameraViewController.h"
#import "AAPLPreviewView.h"
#import "SDNavigationController.h"
#import "SDStorePhotoViewController.h"
#import "SDTabBarController.h"
#import "UIImage+FixOrientation.h"
#import "VPTabBar.h"

#define kPreviewScaleFactory 1.75


static void * CapturingStillImageContext = &CapturingStillImageContext;
static void * SessionRunningContext = &SessionRunningContext;

// Outcome of capture-session setup; checked in -viewWillAppear: before starting the session.
typedef NS_ENUM( NSInteger, AVCamSetupResult ) {
    AVCamSetupResultSuccess,                    // Session configured; safe to start running.
    AVCamSetupResultCameraNotAuthorized,        // User denied camera access (alert offers Settings shortcut).
    AVCamSetupResultSessionConfigurationFailed  // An input or output could not be added to the session.
};

// Private interface: UI elements, capture-session plumbing, and screen state.
@interface AAPLCameraViewController () <AVCaptureFileOutputRecordingDelegate>

// For use in the storyboards.
@property (nonatomic, strong)  AAPLPreviewView *previewView;     // Live preview, backed by AVCaptureVideoPreviewLayer.
@property (nonatomic, strong)  UILabel *cameraUnavailableLabel;  // Faded in when the camera is unavailable (multi-app layout).
@property (nonatomic, strong)  UIButton *resumeButton;           // Shown when an interrupted session may be resumed.
@property (nonatomic, strong)  UIButton *recordButton;
@property (nonatomic, strong)  UIButton *cameraButton;           // Enabled only when more than one camera exists.
@property (nonatomic, strong)  UIButton *stillButton;
@property (nonatomic, strong)  UIButton *photoButton;            // Round shutter button built in -initViews.
@property (nonatomic, strong)  UITextField* focusText;           // Lens-position input, applied by -focusClicked:.
@property (nonatomic, strong)  UITextField* tempText, *tintText; // White-balance temperature / tint inputs, applied by -wbClicked:.

// Session management.
@property (nonatomic) dispatch_queue_t sessionQueue;             // Serial queue; all session mutations go through it.
@property (nonatomic) AVCaptureSession *session;
@property (nonatomic) AVCaptureDeviceInput *videoDeviceInput;
@property (nonatomic) AVCaptureMovieFileOutput *movieFileOutput;
@property (nonatomic) AVCaptureStillImageOutput *stillImageOutput;

// Utilities.
@property (nonatomic) AVCamSetupResult setupResult;              // Decided in -viewDidLoad, read on the session queue.
@property (nonatomic, getter=isSessionRunning) BOOL sessionRunning; // Mirrors session.isRunning.
@property (nonatomic) UIBackgroundTaskIdentifier backgroundRecordingID;

// NOTE(review): the leading underscore yields the double-underscore ivar
// `__showRuler`; consider renaming to `showRuler` (requires updating usages).
@property (nonatomic,assign) BOOL _showRuler;                    // Whether the ruler overlay is currently shown.
@property (nonatomic,strong) UIImageView* rulerImageView;        // Ruler overlay image view (recreated on each show).
@end

@implementation AAPLCameraViewController

/// The camera screen keeps its navigation bar visible.
- (BOOL)navigationBarHidden {
    return NO;
}

/// Builds the camera screen's chrome: nav-bar buttons (thumbnails, shutter,
/// ruler toggle), the live preview with a tap-to-focus recognizer, the round
/// shutter button, and the debug settings controls.
-(void)initViews{
    BOOL naviHidden = [self navigationBarHidden];
    if (!naviHidden && self.navBar != nil)
    {
        // Left: jump back to the picture tab. Right: take a still image.
        [self.navBar customNaviLeftButton:self action:@selector(onThumbsButtonClicked:) image:LOAD_IMAGE_USE_CACHE_INBUNDLE(HomePageBundle, @"actionbar_btn_picture.png")];
        [self.navBar customNaviRightButton:self action:@selector(snapStillImage:) image:LOAD_IMAGE_USE_CACHE_INBUNDLE(HomePageBundle,@"actionbar_btn_takepicture.png")];
        
        // Ruler toggle button placed just right of the back button.
        // (kNaviOrigenY and .right come from project-wide layout helpers.)
        UIImage* rulerImg = LOAD_IMAGE_USE_CACHE_INBUNDLE(HomePageBundle, @"actionbar_btn_ruler_h.png");
        UIButton* rulerBtn = [UIButton buttonWithType:UIButtonTypeCustom];
        [rulerBtn setImage:rulerImg forState:UIControlStateNormal];
        [rulerBtn setTitleColor:[UIColor blackColor] forState:UIControlStateNormal];
        rulerBtn.frame = CGRectMake(self.navBar.backButton.right + 15, kNaviOrigenY, rulerImg.size.width, 44);
        ///rulerBtn.centerY = navigationBar.centerY;
        [rulerBtn addTarget:self action:@selector(onRulerButtonClicked:) forControlEvents:UIControlEventTouchUpInside];
        [self.navBar addSubview:rulerBtn];
    }
    
    
    // Preview fills everything below the navigation bar.
    _previewView = [[AAPLPreviewView alloc] initWithFrame:CGRectMake(0, kNavBarHeight, kScreenWidth, kScreenHeight - kNavBarHeight)];
    [self.view addSubview:_previewView];
    
    // Tap anywhere on the preview to focus/expose at that point.
    UITapGestureRecognizer* tagGestureReconginer = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(focusAndExposeTap:)];
    [_previewView addGestureRecognizer:tagGestureReconginer];
    
    // Round white shutter button, centered near the bottom edge.
    _photoButton = [[UIButton alloc] initWithFrame:CGRectMake(CGRectGetWidth(self.view.frame)/2-40, CGRectGetHeight(self.view.frame)-100, 80, 80)];
    [_photoButton addTarget:self action:@selector(takePhotoTap:) forControlEvents:UIControlEventTouchUpInside];
    _photoButton.backgroundColor = [UIColor whiteColor];
    _photoButton.layer.cornerRadius = 40;
    _photoButton.layer.masksToBounds = YES;
    [self.view addSubview:_photoButton];
    
    [self addSettings];
}

/// Left nav-bar action: discards the burst-capture cache and switches the app
/// back to the first tab.
- (void)onThumbsButtonClicked:(id)sender {
    // Clear the burst-shot cache before leaving the camera.
    [[SDPhotoArray shareInstanse] removeAllPhoto];

    SDTabBarController *tabBar = [SDWindowManager defaultManager].getTabBarController;
    [tabBar.myTabBar selectTabAtIndex:0];
    tabBar.selectedIndex = 0;
}

/// Toggles the on-screen ruler overlay on each tap.
- (void)onRulerButtonClicked:(id)sender {
    __showRuler = !__showRuler;
    if (!__showRuler) {
        [self removeRulerViewFromOverLayView];
        return;
    }
    [self addRulerViewToOverLayeView];
}

/// Presents the store-photo screen for a freshly captured image, wrapped in a
/// navigation controller. Presentation is dispatched to the main queue since
/// this may be invoked from a capture callback.
- (void)onPresentStorePhotoView:(UIImage*)image
{
    SDStorePhotoViewController *storeController = [[SDStorePhotoViewController alloc] init];
    storeController.comparePhotoInfo = _comparePhotoInfo;
    storeController.photoImage = image;

    dispatch_async(dispatch_get_main_queue(), ^{
        SDNavigationController *nav = [[SDNavigationController alloc] initWithRootViewController:storeController];
        [self presentViewController:nav animated:YES completion:nil];
    });
}


/// Shows the ruler overlay, horizontally centered near the bottom of the view.
///
/// Note: -removeRulerViewFromOverLayView always nils out `_rulerImageView`, so
/// after the call below the image view must be recreated; the former
/// `_rulerImageView == nil` lazy-init check was dead code and was removed.
-(void)addRulerViewToOverLayeView{
    [self removeRulerViewFromOverLayView];
    
    UIImage* rulerImg = LOAD_IMAGE_USE_CACHE_INBUNDLE(HomePageBundle, @"takepicture_ruler.png");
    _rulerImageView = [[UIImageView alloc] initWithFrame:CGRectMake(0, self.view.height - 100, rulerImg.size.width, rulerImg.size.height)];
    _rulerImageView.image = rulerImg;
    
    [self.view addSubview:_rulerImageView];
    // Center horizontally (uses the project's UIView frame-category setters).
    _rulerImageView.left = (self.view.width - _rulerImageView.width)/2;
}

/// Removes the ruler overlay (if present) and releases it.
/// Messaging nil is a no-op in Objective-C, so no explicit nil check is needed.
-(void)removeRulerViewFromOverLayView{
    [_rulerImageView removeFromSuperview];
    _rulerImageView = nil;
}

/// Builds the manual-control widgets: text fields for lens position and
/// white-balance temperature/tint, plus buttons that apply them.
/// The focus field and its button are created but intentionally NOT added to
/// the view hierarchy (matching the previous, commented-out behavior).
-(void)addSettings{
    _focusText = [self sd_settingTextFieldWithFrame:CGRectMake(10, 60, 80, 44) text:@"0.3"];
//    [self.view addSubview:_focusText];

    UIButton* focusBtn = [self sd_settingButtonWithFrame:CGRectMake(120, 60, 100, 44)
                                                   title:@"应用焦距"
                                                  action:@selector(focusClicked:)];
//    [self.view addSubview:focusBtn];
    (void)focusBtn; // Kept for parity with the original; never added to the view.

    _tempText = [self sd_settingTextFieldWithFrame:CGRectMake(10, 120, 80, 44) text:@"6000"];
    [self.view addSubview:_tempText];

    _tintText = [self sd_settingTextFieldWithFrame:CGRectMake(110, 120, 80, 44) text:@"0"];
    [self.view addSubview:_tintText];

    UIButton* wbBtn = [self sd_settingButtonWithFrame:CGRectMake(220, 120, 100, 44)
                                                title:@"应用白平衡"
                                               action:@selector(wbClicked:)];
    [self.view addSubview:wbBtn];
}

/// Creates a white, black-text settings text field with the given frame and initial text.
- (UITextField *)sd_settingTextFieldWithFrame:(CGRect)frame text:(NSString *)text
{
    UITextField* field = [[UITextField alloc] initWithFrame:frame];
    field.textColor = [UIColor blackColor];
    field.backgroundColor = [UIColor whiteColor];
    [field setFont:[UIFont systemFontOfSize:13.f]];
    field.text = text;
    return field;
}

/// Creates a white, black-text settings button wired to `action` on self.
- (UIButton *)sd_settingButtonWithFrame:(CGRect)frame title:(NSString *)title action:(SEL)action
{
    UIButton* button = [UIButton buttonWithType:UIButtonTypeCustom];
    button.backgroundColor = [UIColor whiteColor];
    [button setTitleColor:[UIColor blackColor] forState:UIControlStateNormal];
    button.frame = frame;
    [button setTitle:title forState:UIControlStateNormal];
    [button addTarget:self action:action forControlEvents:UIControlEventTouchUpInside];
    return button;
}

/// Applies the lens position typed into the focus text field.
///
/// AVFoundation requires lensPosition in [0.0, 1.0] — out-of-range values
/// raise an exception — so the input is clamped first. The lock error is also
/// captured and logged instead of being silently discarded.
-(void)focusClicked:(id)sender{
    float lensPosition = [_focusText.text floatValue];
    // Clamp to the range accepted by -setFocusModeLockedWithLensPosition:.
    lensPosition = MAX(0.f, MIN(1.f, lensPosition));

    AVCaptureDevice *device = self.videoDeviceInput.device;
    NSError *error = nil;
    if ([device lockForConfiguration:&error]) {
        // Lock focus at the requested lens position.
        [device setFocusModeLockedWithLensPosition:lensPosition completionHandler:nil];
        [device unlockForConfiguration];
    }
    else {
        NSLog(@"Could not lock device for focus configuration: %@", error);
    }
}

/// Applies the white-balance temperature/tint typed into the text fields.
///
/// BUGFIX: the original returned early when the computed gains were out of
/// range WITHOUT calling -unlockForConfiguration, leaving the device locked
/// for every subsequent adjustment. The device is now unlocked on every path,
/// and the lock error is logged instead of ignored.
-(void)wbClicked:(id)sender{
    float temp = [_tempText.text floatValue];
    float tint = [_tintText.text floatValue];

    AVCaptureDevice *device = self.videoDeviceInput.device;
    NSError *error = nil;
    if (![device lockForConfiguration:&error]) {
        NSLog(@"Could not lock device for white-balance configuration: %@", error);
        return;
    }

    AVCaptureWhiteBalanceTemperatureAndTintValues tempAndTint = {temp, tint};
    AVCaptureWhiteBalanceGains wbGains = [device deviceWhiteBalanceGainsForTemperatureAndTintValues:tempAndTint];

    // Each gain must not exceed maxWhiteBalanceGain; reject out-of-range input.
    BOOL outOfRange = wbGains.blueGain > device.maxWhiteBalanceGain
        || wbGains.greenGain > device.maxWhiteBalanceGain
        || wbGains.redGain > device.maxWhiteBalanceGain;
    if (outOfRange)
    {
        [device unlockForConfiguration]; // BUGFIX: was missing on this path.

        UIAlertController* uac = [UIAlertController alertControllerWithTitle:@"注意" message:@"设置参数超出范围" preferredStyle:UIAlertControllerStyleAlert];
        UIAlertAction* confirm = [UIAlertAction actionWithTitle:@"确定" style:UIAlertActionStyleDefault handler:nil];
        [uac addAction:confirm];
        [self presentViewController:uac animated:YES completion:nil];
        return;
    }

    [device setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:wbGains completionHandler:nil];
    [device unlockForConfiguration];
}

/// Builds the UI, creates the capture session, checks camera authorization,
/// and configures all inputs/outputs on the private session queue so the main
/// thread is never blocked by -startRunning or device locking.
- (void)viewDidLoad
{
    [super viewDidLoad];
    
    [self initViews];
    
    // Create the AVCaptureSession.
    self.session = [[AVCaptureSession alloc] init];
    [self.session setSessionPreset:AVCaptureSessionPresetPhoto];
    
    
    // Setup the preview view.
    self.previewView.session = self.session;
    
    // Communicate with the session and other session objects on this queue.
    self.sessionQueue = dispatch_queue_create( "session queue", DISPATCH_QUEUE_SERIAL );
    
    self.setupResult = AVCamSetupResultSuccess;
    
    // Check video authorization status. Video access is required and audio access is optional.
    // If audio access is denied, audio is not recorded during movie recording.
    switch ( [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo] )
    {
        case AVAuthorizationStatusAuthorized:
        {
            // The user has previously granted access to the camera.
            break;
        }
        case AVAuthorizationStatusNotDetermined:
        {
            // The user has not yet been presented with the option to grant video access.
            // We suspend the session queue to delay session setup until the access request has completed to avoid
            // asking the user for audio access if video access is denied.
            // Note that audio access will be implicitly requested when we create an AVCaptureDeviceInput for audio during session setup.
            dispatch_suspend( self.sessionQueue );
            [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^( BOOL granted ) {
                if ( ! granted ) {
                    self.setupResult = AVCamSetupResultCameraNotAuthorized;
                }
                dispatch_resume( self.sessionQueue );
            }];
            break;
        }
        default:
        {
            // The user has previously denied access.
            self.setupResult = AVCamSetupResultCameraNotAuthorized;
            break;
        }
    }
    
    // Setup the capture session.
    // In general it is not safe to mutate an AVCaptureSession or any of its inputs, outputs, or connections from multiple threads at the same time.
    // Why not do all of this on the main queue?
    // Because -[AVCaptureSession startRunning] is a blocking call which can take a long time. We dispatch session setup to the sessionQueue
    // so that the main queue isn't blocked, which keeps the UI responsive.
    dispatch_async( self.sessionQueue, ^{
        if ( self.setupResult != AVCamSetupResultSuccess ) {
            return;
        }
        
        self.backgroundRecordingID = UIBackgroundTaskInvalid;
        NSError *error = nil;
        
        AVCaptureDevice *videoDevice = [AAPLCameraViewController deviceWithMediaType:AVMediaTypeVideo preferringPosition:AVCaptureDevicePositionBack];
        AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
        
        if ( ! videoDeviceInput ) {
            NSLog( @"Could not create video device input: %@", error );
        }
        
        [self.session beginConfiguration];
        if ( [self.session canAddInput:videoDeviceInput] ) {
            [self.session addInput:videoDeviceInput];
            self.videoDeviceInput = videoDeviceInput;
            // NOTE(review): AVCaptureDevice docs require lockForConfiguration:
            // before setting videoZoomFactor; this unguarded set may throw — verify.
            videoDevice.videoZoomFactor = kPreviewScaleFactory;
            
            dispatch_async( dispatch_get_main_queue(), ^{
                // Why are we dispatching this to the main queue?
                // Because AVCaptureVideoPreviewLayer is the backing layer for AAPLPreviewView and UIView
                // can only be manipulated on the main thread.
                // Note: As an exception to the above rule, it is not necessary to serialize video orientation changes
                // on the AVCaptureVideoPreviewLayer’s connection with other session manipulation.
                
                // Use the status bar orientation as the initial video orientation. Subsequent orientation changes are handled by
                // -[viewWillTransitionToSize:withTransitionCoordinator:].
                UIInterfaceOrientation statusBarOrientation = [UIApplication sharedApplication].statusBarOrientation;
                AVCaptureVideoOrientation initialVideoOrientation = AVCaptureVideoOrientationPortrait;
                if ( statusBarOrientation != UIInterfaceOrientationUnknown ) {
                    initialVideoOrientation = (AVCaptureVideoOrientation)statusBarOrientation;
                }
                
                AVCaptureVideoPreviewLayer *previewLayer = (AVCaptureVideoPreviewLayer *)self.previewView.layer;
                [previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
               
                //[previewLayer setAffineTransform:CGAffineTransformMakeScale(kPreviewScaleFactory, kPreviewScaleFactory)];
                previewLayer.connection.videoOrientation = initialVideoOrientation;
            } );
        }
        else {
            NSLog( @"Could not add video device input to the session" );
            self.setupResult = AVCamSetupResultSessionConfigurationFailed;
        }
        
        // Default device configuration: continuous autofocus plus a fixed
        // 6000K / 0-tint white balance. (Lock error is ignored here.)
        AVCaptureDevice *device = self.videoDeviceInput.device;
        if ([device lockForConfiguration:nil]) {
            // Focus mode (locked lens-position variant kept for reference).
//            [device setFocusModeLockedWithLensPosition:0.3 completionHandler:nil];
            [device setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
            
            AVCaptureWhiteBalanceTemperatureAndTintValues tempAndTint = {6000, 0};
            
            AVCaptureWhiteBalanceGains wbGains = [device deviceWhiteBalanceGainsForTemperatureAndTintValues:tempAndTint];
            [device setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:wbGains completionHandler:nil];
            // Exposure mode (disabled):
            //        if ([device isExposureModeSupported:AVCaptureExposureModeAutoExpose]) {
            //            [device setExposureMode:AVCaptureExposureModeAutoExpose];
            //        }
            // Flash mode (disabled):
            //            if ([device hasFlash]) {
            //                [device setFlashMode:AVCaptureFlashModeAuto];
            //                [device setTorchMode:AVCaptureTorchModeAuto];
            //            }
            // Unlock the device after the changes.
            [device unlockForConfiguration];
        }
        
        // Audio input (optional; failures are logged but not fatal).
        AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
        AVCaptureDeviceInput *audioDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error];
        
        if ( ! audioDeviceInput ) {
            NSLog( @"Could not create audio device input: %@", error );
        }
        
        if ( [self.session canAddInput:audioDeviceInput] ) {
            [self.session addInput:audioDeviceInput];
        }
        else {
            NSLog( @"Could not add audio device input to the session" );
        }
        
        // Movie output, with automatic video stabilization when supported.
        AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
        if ( [self.session canAddOutput:movieFileOutput] ) {
            [self.session addOutput:movieFileOutput];
            AVCaptureConnection *connection = [movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
            if ( connection.isVideoStabilizationSupported ) {
                connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
            }
            self.movieFileOutput = movieFileOutput;
        }
        else {
            NSLog( @"Could not add movie file output to the session" );
            self.setupResult = AVCamSetupResultSessionConfigurationFailed;
        }
        
        // Still-image output producing JPEGs.
        AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
        if ( [self.session canAddOutput:stillImageOutput] ) {
            stillImageOutput.outputSettings = @{AVVideoCodecKey : AVVideoCodecJPEG};
            [self.session addOutput:stillImageOutput];
            self.stillImageOutput = stillImageOutput;
        }
        else {
            NSLog( @"Could not add still image output to the session" );
            self.setupResult = AVCamSetupResultSessionConfigurationFailed;
        }
        
        [self.session commitConfiguration];
    } );
}

/// Once the view is on screen, lay the optional comparison image over the preview.
-(void)viewDidAppear:(BOOL)animated
{
    [super viewDidAppear:animated];
    //   [self performSelector:@selector(autoFocusAtCenter) withObject:NULL afterDelay:0.2];

    [self setCompareImage];
}

/// Restores the app tab bar (hidden in -viewWillAppear:) when leaving the camera.
-(void)viewWillDisappear:(BOOL)animated
{
    [super viewWillDisappear:animated];

    SDTabBarController *tabBar = [SDWindowManager defaultManager].getTabBarController;
    [tabBar.myTabBar setHidden:NO];
}

/// Hides the app tab bar and, on the session queue, either starts the capture
/// session (on success) or presents the appropriate error alert based on the
/// setup result decided in -viewDidLoad.
- (void)viewWillAppear:(BOOL)animated
{
    [super viewWillAppear:animated];
    
    // The camera UI is full screen; hide the shared tab bar while visible
    // (restored in -viewWillDisappear:).
    SDTabBarController* tabBarController = [SDWindowManager defaultManager].getTabBarController;
    tabBarController.myTabBar.hidden = YES;
    
    dispatch_async( self.sessionQueue, ^{
        switch ( self.setupResult )
        {
            case AVCamSetupResultSuccess:
            {
                // Only setup observers and start the session running if setup succeeded.
                [self addObservers];
                [self.session startRunning];
                self.sessionRunning = self.session.isRunning;
                break;
            }
            case AVCamSetupResultCameraNotAuthorized:
            {
                // Alerts must be presented on the main thread.
                dispatch_async( dispatch_get_main_queue(), ^{
                    NSString *message = NSLocalizedString( @"AVCam doesn't have permission to use the camera, please change privacy settings", @"Alert message when the user has denied access to the camera" );
                    UIAlertController *alertController = [UIAlertController alertControllerWithTitle:@"AVCam" message:message preferredStyle:UIAlertControllerStyleAlert];
                    UIAlertAction *cancelAction = [UIAlertAction actionWithTitle:NSLocalizedString( @"OK", @"Alert OK button" ) style:UIAlertActionStyleCancel handler:nil];
                    [alertController addAction:cancelAction];
                    // Provide quick access to Settings.
                    UIAlertAction *settingsAction = [UIAlertAction actionWithTitle:NSLocalizedString( @"Settings", @"Alert button to open Settings" ) style:UIAlertActionStyleDefault handler:^( UIAlertAction *action ) {
                        [[UIApplication sharedApplication] openURL:[NSURL URLWithString:UIApplicationOpenSettingsURLString]];
                    }];
                    [alertController addAction:settingsAction];
                    [self presentViewController:alertController animated:YES completion:nil];
                } );
                break;
            }
            case AVCamSetupResultSessionConfigurationFailed:
            {
                dispatch_async( dispatch_get_main_queue(), ^{
                    NSString *message = NSLocalizedString( @"Unable to capture media", @"Alert message when something goes wrong during capture session configuration" );
                    UIAlertController *alertController = [UIAlertController alertControllerWithTitle:@"AVCam" message:message preferredStyle:UIAlertControllerStyleAlert];
                    UIAlertAction *cancelAction = [UIAlertAction actionWithTitle:NSLocalizedString( @"OK", @"Alert OK button" ) style:UIAlertActionStyleCancel handler:nil];
                    [alertController addAction:cancelAction];
                    [self presentViewController:alertController animated:YES completion:nil];
                } );
                break;
            }
        }
    } );
}

/// Stops the session and tears down observers (on the session queue) when the
/// view goes away. Only applies if setup ever succeeded.
- (void)viewDidDisappear:(BOOL)animated
{
    dispatch_async( self.sessionQueue, ^{
        if ( self.setupResult != AVCamSetupResultSuccess ) {
            return;
        }
        [self.session stopRunning];
        [self removeObservers];
    } );

    [super viewDidDisappear:animated];
}

/// Overlays a half-transparent "compare" photo (tag 10001) on top of the
/// preview, replacing any previous overlay. Does nothing when the image name
/// is empty or the cached file is missing.
-(void)setCompareImage
{
    // Drop any previously installed compare overlay (messaging nil is safe).
    UIImageView *previousOverlay = (UIImageView *)[self.view viewWithTag:10001];
    [previousOverlay removeFromSuperview];

    if (!CHECK_VALID_STRING(_compareImgeName)) {
        return;
    }

    NSString *directory = [SDFile getDirectoryInCacheWithName:kPhotoDiretoryName];
    NSString *filePath = [directory stringByAppendingPathComponent:_compareImgeName];
    if (![SDFile fileExist:filePath]) {
        return;
    }

    UIImageView *overlay = [[UIImageView alloc] initWithFrame:CGRectMake(0, kNavBarHeight, kScreenWidth, kScreenHeight - kNavBarHeight)];
    overlay.tag = 10001;
    [overlay setContentMode:UIViewContentModeScaleAspectFill];
    overlay.image = [UIImage imageWithContentsOfFile:filePath];
    [self.view addSubview:overlay];
    overlay.alpha = 0.5;
}

#pragma mark Orientation

/// Interface rotation is locked while a movie is being recorded.
- (BOOL)shouldAutorotate
{
    BOOL recording = self.movieFileOutput.isRecording;
    return !recording;
}

/// The camera UI is portrait-only.
- (UIInterfaceOrientationMask)supportedInterfaceOrientations
{
    return UIInterfaceOrientationMaskPortrait;
}

/// Keeps the preview layer's video orientation in sync with the device
/// orientation during size transitions. (The app delegate controls the
/// device-orientation notifications this relies on.)
- (void)viewWillTransitionToSize:(CGSize)size withTransitionCoordinator:(id<UIViewControllerTransitionCoordinator>)coordinator
{
    [super viewWillTransitionToSize:size withTransitionCoordinator:coordinator];

    UIDeviceOrientation deviceOrientation = [UIDevice currentDevice].orientation;
    BOOL concreteOrientation = UIDeviceOrientationIsPortrait( deviceOrientation )
        || UIDeviceOrientationIsLandscape( deviceOrientation );
    if ( !concreteOrientation ) {
        return; // Face-up/face-down/unknown: keep the current video orientation.
    }

    AVCaptureVideoPreviewLayer *previewLayer = (AVCaptureVideoPreviewLayer *)self.previewView.layer;
    previewLayer.connection.videoOrientation = (AVCaptureVideoOrientation)deviceOrientation;
}

#pragma mark KVO and Notifications

/// Registers KVO on the session's running state plus the capture-session
/// notifications this controller reacts to. Balanced by -removeObservers.
- (void)addObservers
{
    [self.session addObserver:self forKeyPath:@"running" options:NSKeyValueObservingOptionNew context:SessionRunningContext];
    //[self.stillImageOutput addObserver:self forKeyPath:@"capturingStillImage" options:NSKeyValueObservingOptionNew context:CapturingStillImageContext];

    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
    [center addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:self.videoDeviceInput.device];
    [center addObserver:self selector:@selector(sessionRuntimeError:) name:AVCaptureSessionRuntimeErrorNotification object:self.session];
    // A session can only run when the app is full screen; it is interrupted in
    // multi-app layouts (iOS 9+). These observers drive the paused/unavailable
    // UI — see AVCaptureSessionWasInterruptedNotification for other reasons.
    [center addObserver:self selector:@selector(sessionWasInterrupted:) name:AVCaptureSessionWasInterruptedNotification object:self.session];
    [center addObserver:self selector:@selector(sessionInterruptionEnded:) name:AVCaptureSessionInterruptionEndedNotification object:self.session];
}

/// Unregisters every notification observer and the "running" KVO registration
/// added by -addObservers.
- (void)removeObservers
{
    [[NSNotificationCenter defaultCenter] removeObserver:self];
    //[self.stillImageOutput removeObserver:self forKeyPath:@"capturingStillImage" context:CapturingStillImageContext];
    [self.session removeObserver:self forKeyPath:@"running" context:SessionRunningContext];
}

/// KVO dispatcher, keyed off the static context pointers (KVO convention).
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
    if ( context == CapturingStillImageContext ) {
        // NOTE(review): the KVO registration for this context is commented out
        // in -addObservers, so this branch is currently never entered.
        BOOL isCapturingStillImage = [change[NSKeyValueChangeNewKey] boolValue];
        
        if ( isCapturingStillImage ) {
            // Shutter "blink": flash the preview layer on the main thread.
            dispatch_async( dispatch_get_main_queue(), ^{
                self.previewView.layer.opacity = 0.0;
                [UIView animateWithDuration:0.25 animations:^{
                    self.previewView.layer.opacity = 1.0;
                }];
            } );
        }
    }
    else if ( context == SessionRunningContext ) {
        BOOL isSessionRunning = [change[NSKeyValueChangeNewKey] boolValue];
        
        // UI state must be updated on the main thread.
        dispatch_async( dispatch_get_main_queue(), ^{
            // Only enable the ability to change camera if the device has more than one camera.
            self.cameraButton.enabled = isSessionRunning && ( [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo].count > 1 );
            self.recordButton.enabled = isSessionRunning;
            self.stillButton.enabled = isSessionRunning;
        } );
    }
    else {
        // Unrecognized context: forward to super, per KVO convention.
        [super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
    }
}

/// When the subject area changes, fall back to continuous auto focus/exposure
/// at the center of the frame.
- (void)subjectAreaDidChange:(NSNotification *)notification
{
    CGPoint centerPoint = CGPointMake( 0.5, 0.5 );
    [self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposeWithMode:AVCaptureExposureModeContinuousAutoExposure atDevicePoint:centerPoint monitorSubjectAreaChange:NO];
}

/// Handles AVCaptureSessionRuntimeErrorNotification: auto-restarts after a
/// media-services reset (if the session was running), otherwise exposes the
/// resume button so the user can retry manually.
- (void)sessionRuntimeError:(NSNotification *)notification
{
    NSError *error = notification.userInfo[AVCaptureSessionErrorKey];
    NSLog( @"Capture session runtime error: %@", error );
    
    // Automatically try to restart the session running if media services were reset and the last start running succeeded.
    // Otherwise, enable the user to try to resume the session running.
    if ( error.code == AVErrorMediaServicesWereReset ) {
        dispatch_async( self.sessionQueue, ^{
            if ( self.isSessionRunning ) {
                [self.session startRunning];
                self.sessionRunning = self.session.isRunning;
            }
            else {
                // Session wasn't running; let the user decide via the resume button.
                dispatch_async( dispatch_get_main_queue(), ^{
                    self.resumeButton.hidden = NO;
                } );
            }
        } );
    }
    else {
        // NOTE(review): this runs on whatever thread posted the notification;
        // UIKit work should normally be on the main thread — verify the caller.
        self.resumeButton.hidden = NO;
    }
}

/// Handles AVCaptureSessionWasInterruptedNotification: depending on the
/// reason, either offers a resume button or fades in a "camera unavailable"
/// label.
- (void)sessionWasInterrupted:(NSNotification *)notification
{
    // In some scenarios we want to enable the user to resume the session running.
    // For example, if music playback is initiated via control center while using AVCam,
    // then the user can let AVCam resume the session running, which will stop music playback.
    // Note that stopping music playback in control center will not automatically resume the session running.
    // Also note that it is not always possible to resume, see -[resumeInterruptedSession:].
    BOOL showResumeButton = NO;
    
    // In iOS 9 and later, the userInfo dictionary contains information on why the session was interrupted.
    // Weak-linked symbol test: a non-NULL address means the iOS 9 interruption-reason API exists.
    if ( &AVCaptureSessionInterruptionReasonKey ) {
        AVCaptureSessionInterruptionReason reason = [notification.userInfo[AVCaptureSessionInterruptionReasonKey] integerValue];
        NSLog( @"Capture session was interrupted with reason %ld", (long)reason );
        
        if ( reason == AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient ||
            reason == AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient ) {
            showResumeButton = YES;
        }
        else if ( reason == AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps ) {
            // Simply fade-in a label to inform the user that the camera is unavailable.
            self.cameraUnavailableLabel.hidden = NO;
            self.cameraUnavailableLabel.alpha = 0.0;
            [UIView animateWithDuration:0.25 animations:^{
                self.cameraUnavailableLabel.alpha = 1.0;
            }];
        }
    }
    else {
        // Pre-iOS 9 fallback: no reason available; guess from the app state.
        NSLog( @"Capture session was interrupted" );
        showResumeButton = ( [UIApplication sharedApplication].applicationState == UIApplicationStateInactive );
    }
    
    if ( showResumeButton ) {
        // Simply fade-in a button to enable the user to try to resume the session running.
        self.resumeButton.hidden = NO;
        self.resumeButton.alpha = 0.0;
        [UIView animateWithDuration:0.25 animations:^{
            self.resumeButton.alpha = 1.0;
        }];
    }
}

/// Fades out the resume button and the "camera unavailable" label once the
/// interruption that surfaced them has ended.
- (void)sessionInterruptionEnded:(NSNotification *)notification
{
    NSLog( @"Capture session interruption ended" );

    UIButton *resume = self.resumeButton;
    if ( !resume.hidden ) {
        [UIView animateWithDuration:0.25 animations:^{
            resume.alpha = 0.0;
        } completion:^( BOOL finished ) {
            resume.hidden = YES;
        }];
    }

    UILabel *unavailableLabel = self.cameraUnavailableLabel;
    if ( !unavailableLabel.hidden ) {
        [UIView animateWithDuration:0.25 animations:^{
            unavailableLabel.alpha = 0.0;
        } completion:^( BOOL finished ) {
            unavailableLabel.hidden = YES;
        }];
    }
}

#pragma mark Actions

// Attempts to restart the capture session after an interruption, triggered by
// the resume button. On success the button is hidden; on failure an alert is
// shown instead.
- (IBAction)resumeInterruptedSession:(id)sender
{
    dispatch_async( self.sessionQueue, ^{
        // Starting the session can fail, e.g. if a phone or FaceTime call still
        // owns the audio or video hardware; such a failure is reported through
        // the session runtime error notification. We only retry here — never in
        // the runtime error handler — to avoid a failure/retry loop.
        [self.session startRunning];
        BOOL resumed = self.session.isRunning;
        self.sessionRunning = resumed;
        
        dispatch_async( dispatch_get_main_queue(), ^{
            if ( resumed ) {
                self.resumeButton.hidden = YES;
            }
            else {
                NSString *message = NSLocalizedString( @"Unable to resume", @"Alert message when unable to resume the session running" );
                UIAlertController *alertController = [UIAlertController alertControllerWithTitle:@"AVCam" message:message preferredStyle:UIAlertControllerStyleAlert];
                UIAlertAction *cancelAction = [UIAlertAction actionWithTitle:NSLocalizedString( @"OK", @"Alert OK button" ) style:UIAlertActionStyleCancel handler:nil];
                [alertController addAction:cancelAction];
                [self presentViewController:alertController animated:YES completion:nil];
            }
        } );
    } );
}

// Starts movie recording if idle, or stops it if already recording.
- (IBAction)toggleMovieRecording:(id)sender
{
    // Keep the Camera button disabled until recording finishes, and the Record
    // button disabled until recording has actually started or stopped; both are
    // re-enabled in the AVCaptureFileOutputRecordingDelegate callbacks.
    self.cameraButton.enabled = NO;
    self.recordButton.enabled = NO;
    
    dispatch_async( self.sessionQueue, ^{
        if ( self.movieFileOutput.isRecording ) {
            [self.movieFileOutput stopRecording];
            return;
        }
        
        if ( [UIDevice currentDevice].isMultitaskingSupported ) {
            // Request background execution time. Without it, the
            // -captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error:
            // callback is not delivered until the app returns to the foreground,
            // and there may not be time to save the file to the photo library when
            // the app is backgrounded. The task is ended in that delegate callback
            // once the recorded file has been saved.
            self.backgroundRecordingID = [[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:nil];
        }
        
        // Mirror the preview layer's orientation onto the movie connection so the
        // recorded file matches what the user sees on screen.
        AVCaptureVideoPreviewLayer *previewLayer = (AVCaptureVideoPreviewLayer *)self.previewView.layer;
        AVCaptureConnection *movieConnection = [self.movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
        movieConnection.videoOrientation = previewLayer.connection.videoOrientation;
        
        // The flash is never used while recording video.
        [AAPLCameraViewController setFlashMode:AVCaptureFlashModeOff forDevice:self.videoDeviceInput.device];
        
        // Record into a uniquely-named temporary .mov file; uniqueness guarantees
        // a new recording never overwrites one that is still being saved.
        NSString *uniqueName = [NSProcessInfo processInfo].globallyUniqueString;
        NSString *moviePath = [NSTemporaryDirectory() stringByAppendingPathComponent:[uniqueName stringByAppendingPathExtension:@"mov"]];
        [self.movieFileOutput startRecordingToOutputFileURL:[NSURL fileURLWithPath:moviePath] recordingDelegate:self];
    } );
}

// Switches between the front and back cameras.
// Fix: the AVCaptureDeviceInput creation previously passed error:nil, silently
// discarding the failure reason when the input could not be created; the error
// is now captured and logged (the existing fallback — restoring the previous
// input — is preserved, since canAddInput: returns NO for a nil input).
- (IBAction)changeCamera:(id)sender
{
    // Disable the camera UI until the switch completes; re-enabled on the main
    // queue at the end of the session-queue block below.
    self.cameraButton.enabled = NO;
    self.recordButton.enabled = NO;
    self.stillButton.enabled = NO;
    
    dispatch_async( self.sessionQueue, ^{
        AVCaptureDevice *currentVideoDevice = self.videoDeviceInput.device;
        AVCaptureDevicePosition preferredPosition = AVCaptureDevicePositionUnspecified;
        AVCaptureDevicePosition currentPosition = currentVideoDevice.position;
        
        // Toggle: front (or unspecified) -> back, back -> front.
        switch ( currentPosition )
        {
            case AVCaptureDevicePositionUnspecified:
            case AVCaptureDevicePositionFront:
                preferredPosition = AVCaptureDevicePositionBack;
                break;
            case AVCaptureDevicePositionBack:
                preferredPosition = AVCaptureDevicePositionFront;
                break;
        }
        
        AVCaptureDevice *videoDevice = [AAPLCameraViewController deviceWithMediaType:AVMediaTypeVideo preferringPosition:preferredPosition];
        NSError *error = nil;
        AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
        if ( ! videoDeviceInput ) {
            // canAddInput: below is NO for nil, so the previous input is restored.
            NSLog( @"Could not create video device input: %@", error );
        }
        
        [self.session beginConfiguration];
        
        // Remove the existing device input first, since using the front and back camera simultaneously is not supported.
        [self.session removeInput:self.videoDeviceInput];
        
        if ( [self.session canAddInput:videoDeviceInput] ) {
            // Move the subject-area observer from the old device to the new one.
            [[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:currentVideoDevice];
            
            [AAPLCameraViewController setFlashMode:AVCaptureFlashModeAuto forDevice:videoDevice];
            [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:videoDevice];
            
            [self.session addInput:videoDeviceInput];
            self.videoDeviceInput = videoDeviceInput;
        }
        else {
            // Could not use the new input; restore the previous one.
            [self.session addInput:self.videoDeviceInput];
        }
        
        // The movie connection is recreated when inputs change; re-enable
        // stabilization on the new connection where supported.
        AVCaptureConnection *connection = [self.movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
        if ( connection.isVideoStabilizationSupported ) {
            connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
        }
        
        [self.session commitConfiguration];
        
        dispatch_async( dispatch_get_main_queue(), ^{
            self.cameraButton.enabled = YES;
            self.recordButton.enabled = YES;
            self.stillButton.enabled = YES;
        } );
    } );
}

// Tap-gesture handler for the shutter: captures a still image. The recognizer
// is deliberately not forwarded as the sender.
-(void)takePhotoTap:(UIGestureRecognizer*)gestureRecognizer
{
    [self snapStillImage:nil];
}

// Captures a still image from the current session, optionally composites a
// ruler overlay onto it, presents the store-photo view, and (on iOS 8 only)
// saves the JPEG to the photo library via a temporary file.
// NOTE(review): on iOS 9+ ([PHAssetCreationRequest class] non-nil) the photo-
// library save code is entirely commented out, so nothing is saved there —
// confirm this is intentional (the image is only handed to
// -onPresentStorePhotoView:).
- (void)snapStillImage:(id)sender
{
    // All session interaction happens on the dedicated session queue.
    dispatch_async( self.sessionQueue, ^{
        AVCaptureConnection *connection = [self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
        AVCaptureVideoPreviewLayer *previewLayer = (AVCaptureVideoPreviewLayer *)self.previewView.layer;
        //[connection setVideoScaleAndCropFactor:kPreviewScaleFactory];
        
        
        // Update the orientation on the still image output video connection before capturing.
        connection.videoOrientation = previewLayer.connection.videoOrientation;
        
        // Flash set to Auto for Still Capture.
        [AAPLCameraViewController setFlashMode:AVCaptureFlashModeAuto forDevice:self.videoDeviceInput.device];
        
        // Capture a still image.
        [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:connection completionHandler:^( CMSampleBufferRef imageDataSampleBuffer, NSError *error ) {
            if ( imageDataSampleBuffer ) {
                // The sample buffer is not retained. Create image data before saving the still image to the photo library asynchronously.
                NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
                [PHPhotoLibrary requestAuthorization:^( PHAuthorizationStatus status ) {
                    if ( status == PHAuthorizationStatusAuthorized ) {
                        // To preserve the metadata, we create an asset from the JPEG NSData representation.
                        // Note that creating an asset from a UIImage discards the metadata.
                        // In iOS 9, we can use -[PHAssetCreationRequest addResourceWithType:data:options].
                        // In iOS 8, we save the image to a temporary file and use +[PHAssetChangeRequest creationRequestForAssetFromImageAtFileURL:].
                        // Project macro — presumably loads a cached image from the
                        // HomePage bundle; TODO confirm its nil behavior when the
                        // asset is missing.
                        UIImage* rulerImg = LOAD_IMAGE_USE_CACHE_INBUNDLE(HomePageBundle, @"takepicture_ruler.png");
                        //CGSize rsz = rulerImg.size;
                        UIImage* image = [UIImage imageWithData:imageData];
                        //
                        //     UIImage *newImage   = [SDImage imageWithImage:image scaledToMaxWidth:kScreenWidth maxHeight:kScreenHeight - kNavBarHeight];
                        UIImage *newImage   = image;
                        // __showRuler is the auto-synthesized ivar of the
                        // `_showRuler` property; when set, the ruler overlay is
                        // drawn centered horizontally, 100pt above the bottom edge.
                        if (__showRuler) {
                            newImage = [newImage drawImage:rulerImg inRect:CGRectMake((newImage.size.width-rulerImg.size.width)/2, newImage.size.height - 100,  rulerImg.size.width, rulerImg.size.height)];
                        }
                        
                        //                        UIGraphicsBeginImageContextWithOptions(CGSizeMake(kScreenWidth, kScreenHeight-kNavBarHeight), NO, 0.0);
                        //                        [image drawInRect:CGRectMake(0.0, 0.0, kScreenWidth, kScreenHeight-kNavBarHeight)];
                        //                        [rulerImg drawInRect:CGRectMake((self.view.width - rulerImg.size.width)/2, kScreenHeight-kNavBarHeight - 100, rulerImg.size.width, rulerImg.size.height)];
                        //                        UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
                        //                        UIGraphicsEndImageContext();
                        //
                        //                                                //2448 3264
                        //                                                CGSize sz = image.size;
                        //                                                CGFloat h = sz.height - 500;
                        //                                                rulerImg = [SDImage imageWithImage:rulerImg scaledToWidth:rulerImg.size.width*6];
                        //                                                UIImage* newImage = [image drawImage:rulerImg inRect:CGRectMake((image.size.width-rulerImg.size.width)/2, h,  rulerImg.size.width, rulerImg.size.height)];
                        
                        // Normalize the image's orientation metadata (category on
                        // UIImage from UIImage+FixOrientation.h), then hand it off.
                        newImage = [newImage fixOrientation];
                        [self onPresentStorePhotoView:newImage];
                        
                        // iOS 9+ path: the actual save is commented out below.
                        if ( [PHAssetCreationRequest class] ) {
                            //							[[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
                            //								[[PHAssetCreationRequest creationRequestForAsset] addResourceWithType:PHAssetResourceTypePhoto data:imageData options:nil];
                            //							} completionHandler:^( BOOL success, NSError *error ) {
                            //								if ( ! success ) {
                            //									NSLog( @"Error occurred while saving image to photo library: %@", error );
                            //                                } else {
                            //
                            //                                    UIImage* rulerImg = LOAD_IMAGE_USE_CACHE_INBUNDLE(HomePageBundle, @"takepicture_ruler.png");
                            //                                    CGSize rsz = rulerImg.size;
                            //                                    UIImage* image = [UIImage imageWithData:imageData];
                            //
                            //                                    UIGraphicsBeginImageContextWithOptions(CGSizeMake(kScreenWidth, kScreenHeight), NO, 0.0);
                            //                                    [image drawInRect:CGRectMake(0.0, 0.0, kScreenWidth, kScreenHeight)];
                            //                                    [rulerImg drawInRect:CGRectMake((self.view.width - rulerImg.size.width)/2, self.view.height - 100, rulerImg.size.width, rulerImg.size.height)];
                            //                                    UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
                            //                                    UIGraphicsEndImageContext();
                            //
                            //
                            //                                       newImage = [newImage fixOrientation];
                            //                                        [self onPresentStorePhotoView:image];
                            //                                }
                            //							}];
                        }
                        else {
                            // iOS 8 path: write the JPEG to a unique temporary file,
                            // create the asset from that file, then delete the file.
                            NSString *temporaryFileName = [NSProcessInfo processInfo].globallyUniqueString;
                            NSString *temporaryFilePath = [NSTemporaryDirectory() stringByAppendingPathComponent:[temporaryFileName stringByAppendingPathExtension:@"jpg"]];
                            NSURL *temporaryFileURL = [NSURL fileURLWithPath:temporaryFilePath];
                            
                            [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
                                NSError *error = nil;
                                [imageData writeToURL:temporaryFileURL options:NSDataWritingAtomic error:&error];
                                if ( error ) {
                                    NSLog( @"Error occured while writing image data to a temporary file: %@", error );
                                }
                                else {
                                    [PHAssetChangeRequest creationRequestForAssetFromImageAtFileURL:temporaryFileURL];
                                }
                            } completionHandler:^( BOOL success, NSError *error ) {
                                if ( ! success ) {
                                    NSLog( @"Error occurred while saving image to photo library: %@", error );
                                }
                                
                                // Delete the temporary file.
                                [[NSFileManager defaultManager] removeItemAtURL:temporaryFileURL error:nil];
                            }];
                        }
                    }
                }];
            }
            else {
                NSLog( @"Could not capture still image: %@", error );
            }
        }];
    } );
}

// Tap-to-focus handler: converts the tap location from view coordinates into
// the capture device's coordinate space and triggers a one-shot autofocus and
// auto-exposure at that point.
// Fixes: removed a stray `\` line-continuation at the end of the first
// statement, removed a leftover debug NSLog, and reused the tap point instead
// of asking the gesture recognizer for its location twice.
- (void)focusAndExposeTap:(UIGestureRecognizer *)gestureRecognizer
{
    CGPoint tapPoint = [gestureRecognizer locationInView:gestureRecognizer.view];
    CGPoint devicePoint = [(AVCaptureVideoPreviewLayer *)self.previewView.layer captureDevicePointOfInterestForPoint:tapPoint];
    [self focusWithMode:AVCaptureFocusModeAutoFocus exposeWithMode:AVCaptureExposureModeAutoExpose atDevicePoint:devicePoint monitorSubjectAreaChange:YES];
}

#pragma mark File Output Recording Delegate

// AVCaptureFileOutputRecordingDelegate: recording has started. Re-enable the
// Record button (disabled in -toggleMovieRecording:) and relabel it so the
// user can stop the recording.
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections
{
    dispatch_async( dispatch_get_main_queue(), ^{
        NSString *stopTitle = NSLocalizedString( @"Stop", @"Recording button stop title");
        [self.recordButton setTitle:stopTitle forState:UIControlStateNormal];
        self.recordButton.enabled = YES;
    });
}

// AVCaptureFileOutputRecordingDelegate: recording finished (successfully or
// not). Saves the movie to the photo library when authorized, always deletes
// the temporary file, ends the background task, and re-enables the camera UI.
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
    // Note that currentBackgroundRecordingID is used to end the background task associated with this recording.
    // This allows a new recording to be started, associated with a new UIBackgroundTaskIdentifier, once the movie file output's isRecording property
    // is back to NO — which happens sometime after this method returns.
    // Note: Since we use a unique file path for each recording, a new recording will not overwrite a recording currently being saved.
    UIBackgroundTaskIdentifier currentBackgroundRecordingID = self.backgroundRecordingID;
    self.backgroundRecordingID = UIBackgroundTaskInvalid;
    
    // Shared cleanup: remove the temporary movie file and end the background
    // task. Invoked on every path out of this method (save success, save
    // failure, no authorization, recording error).
    dispatch_block_t cleanup = ^{
        [[NSFileManager defaultManager] removeItemAtURL:outputFileURL error:nil];
        if ( currentBackgroundRecordingID != UIBackgroundTaskInvalid ) {
            [[UIApplication sharedApplication] endBackgroundTask:currentBackgroundRecordingID];
        }
    };
    
    BOOL success = YES;
    
    if ( error ) {
        NSLog( @"Movie file finishing error: %@", error );
        // An error may still mean the file finished writing successfully
        // (e.g. recording stopped because disk space ran low).
        success = [error.userInfo[AVErrorRecordingSuccessfullyFinishedKey] boolValue];
    }
    if ( success ) {
        // Check authorization status.
        [PHPhotoLibrary requestAuthorization:^( PHAuthorizationStatus status ) {
            if ( status == PHAuthorizationStatusAuthorized ) {
                // Save the movie file to the photo library and cleanup.
                [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
                    // In iOS 9 and later, it's possible to move the file into the photo library without duplicating the file data.
                    // This avoids using double the disk space during save, which can make a difference on devices with limited free disk space.
                    if ( [PHAssetResourceCreationOptions class] ) {
                        PHAssetResourceCreationOptions *options = [[PHAssetResourceCreationOptions alloc] init];
                        options.shouldMoveFile = YES;
                        PHAssetCreationRequest *changeRequest = [PHAssetCreationRequest creationRequestForAsset];
                        [changeRequest addResourceWithType:PHAssetResourceTypeVideo fileURL:outputFileURL options:options];
                    }
                    else {
                        [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:outputFileURL];
                    }
                } completionHandler:^( BOOL success, NSError *error ) {
                    if ( ! success ) {
                        NSLog( @"Could not save movie to photo library: %@", error );
                    }
                    cleanup();
                }];
            }
            else {
                cleanup();
            }
        }];
    }
    else {
        cleanup();
    }
    
    // Enable the Camera and Record buttons to let the user switch camera and start another recording.
    dispatch_async( dispatch_get_main_queue(), ^{
        // Only enable the ability to change camera if the device has more than one camera.
        self.cameraButton.enabled = ( [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo].count > 1 );
        self.recordButton.enabled = YES;
        [self.recordButton setTitle:NSLocalizedString( @"Record", @"Recording button record title" ) forState:UIControlStateNormal];
    });
}

// Locks the lens at a fixed position and sets auto-exposure roughly at the
// center of the preview, then plays a pulse animation.
// NOTE(review): several things here look suspect and should be confirmed:
//  - the lens is locked at the hard-coded position 0.3 rather than focusing at
//    `focusPoint`; `focusPoint` is only used for exposure;
//  - `focusPoint` swaps/flips x and y — presumably a portrait-orientation
//    mapping into the device's landscape coordinate space; verify;
//  - the animation scales and then HIDES `_previewView`, which is the live
//    camera preview — this reads like code copied from a focus-indicator
//    animation and would blank the preview; confirm intent.
- (void)autoFocusAtCenter{
    
    //  CGPoint devicePoint = [(AVCaptureVideoPreviewLayer *)self.previewView.layer captureDevicePointOfInterestForPoint:_previewView.center];
    // [self focusWithMode:AVCaptureFocusModeAutoFocus exposeWithMode:AVCaptureExposureModeAutoExpose atDevicePoint:devicePoint monitorSubjectAreaChange:YES];
    
    AVCaptureDevice *device = self.videoDeviceInput.device;
    CGPoint point = _previewView.center;
    CGSize size = _previewView.bounds.size;
    // Map the view-space center into normalized device coordinates (see NOTE above).
    CGPoint focusPoint = CGPointMake( point.y /size.height ,1-point.x/size.width );
    NSError *error;
    if ([device lockForConfiguration:&error]) {
        // Focus mode and focus point: lens locked at a fixed position.
        [device setFocusModeLockedWithLensPosition:0.3 completionHandler:nil];
        
        // Exposure mode and exposure point.
        if ([device isExposureModeSupported:AVCaptureExposureModeAutoExpose ]) {
            [device setExposurePointOfInterest:focusPoint];
            [device setExposureMode:AVCaptureExposureModeAutoExpose];
        }
         [device unlockForConfiguration];
        // Focus animation: pulse to 1.25x and back, then hide (see NOTE above).
        _previewView.center = point;
        _previewView.hidden = NO;
        [UIView animateWithDuration:0.3 animations:^{
            _previewView.transform = CGAffineTransformMakeScale(1.25, 1.25);
        }completion:^(BOOL finished) {
            [UIView animateWithDuration:0.5 animations:^{
                _previewView.transform = CGAffineTransformIdentity;
            } completion:^(BOOL finished) {
                _previewView.hidden = YES;
            }];
        }];
    }
    
}

#pragma mark Device Configuration

// Applies a focus and exposure operation at the given point in device
// coordinates, on the session queue. `monitorSubjectAreaChange` controls
// whether the device posts AVCaptureDeviceSubjectAreaDidChangeNotification.
- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposeWithMode:(AVCaptureExposureMode)exposureMode atDevicePoint:(CGPoint)point monitorSubjectAreaChange:(BOOL)monitorSubjectAreaChange
{
    dispatch_async( self.sessionQueue, ^{
        AVCaptureDevice *device = self.videoDeviceInput.device;
        NSError *lockError = nil;
        if ( ! [device lockForConfiguration:&lockError] ) {
            NSLog( @"Could not lock device for configuration: %@", lockError );
            return;
        }
        
        // Setting a point of interest alone does not start a focus/exposure
        // pass; the corresponding mode must be (re)applied as well.
        BOOL canFocusAtPoint = device.isFocusPointOfInterestSupported && [device isFocusModeSupported:focusMode];
        if ( canFocusAtPoint ) {
            device.focusPointOfInterest = point;
            device.focusMode = focusMode;
        }
        
        BOOL canExposeAtPoint = device.isExposurePointOfInterestSupported && [device isExposureModeSupported:exposureMode];
        if ( canExposeAtPoint ) {
            device.exposurePointOfInterest = point;
            device.exposureMode = exposureMode;
        }
        
        device.subjectAreaChangeMonitoringEnabled = monitorSubjectAreaChange;
        [device unlockForConfiguration];
    } );
}

// Sets the device's flash mode, doing nothing for devices that lack flash
// hardware or do not support the requested mode (e.g. most front cameras).
+ (void)setFlashMode:(AVCaptureFlashMode)flashMode forDevice:(AVCaptureDevice *)device
{
    if ( ! device.hasFlash || ! [device isFlashModeSupported:flashMode] ) {
        return;
    }
    
    NSError *lockError = nil;
    if ( [device lockForConfiguration:&lockError] ) {
        device.flashMode = flashMode;
        [device unlockForConfiguration];
    }
    else {
        NSLog( @"Could not lock device for configuration: %@", lockError );
    }
}

// Returns the first capture device of the given media type at the preferred
// position; falls back to the first device of that type (nil if none exist).
+ (AVCaptureDevice *)deviceWithMediaType:(NSString *)mediaType preferringPosition:(AVCaptureDevicePosition)position
{
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType];
    NSUInteger matchIndex = [devices indexOfObjectPassingTest:^BOOL( AVCaptureDevice *device, NSUInteger idx, BOOL *stop ) {
        return device.position == position;
    }];
    return ( matchIndex != NSNotFound ) ? devices[matchIndex] : devices.firstObject;
}

@end
