//
//  SDCameraViewController.m
//  SKinDiary
//
//  Created by pcyang on 16/7/21.
//  Copyright © 2016年 tencent. All rights reserved.
//

#import <AssetsLibrary/AssetsLibrary.h>

#import "SDCameraViewController.h"
#import "SDNavigationController.h"
#import "SDStorePhotoViewController.h"
#import "SDTabBarController.h"
#import "UIImage+FixOrientation.h"
#import "VPTabBar.h"
#import "SDCameraPreviewView.h"


// Outcome of the one-time capture-session setup performed in -setupAVCapture;
// checked in -viewWillAppear: before the session is started.
typedef NS_ENUM( NSInteger, AVCamSetupResult ) {
    AVCamSetupResultSuccess,                    // Session configured; safe to start running.
    AVCamSetupResultCameraNotAuthorized,        // User denied (or is restricted from) camera access.
    AVCamSetupResultSessionConfigurationFailed  // A required input/output could not be added.
};

// Scale factor shared by the on-screen preview transform and the still-image
// capture, so the captured photo matches the magnified preview.
// (Typed constant preferred over #define.)
static const CGFloat kPreviewScaleFactory = 2.135921;

@interface SDCameraViewController()
{
    // Current effective zoom scale (reserved for pinch-to-zoom; mirrors kPreviewScaleFactory).
    CGFloat effectiveScale;
}
// Live camera preview; its backing layer is an AVCaptureVideoPreviewLayer.
@property (nonatomic,strong) SDCameraPreviewView *previewView;

// Whether the ruler overlay is currently shown.
// NOTE(review): a leading-underscore property name violates Cocoa naming
// conventions (its synthesized ivar becomes `__showRuler`); renaming would
// touch call sites, so it is only flagged here.
@property (nonatomic,assign) BOOL _showRuler;
@property (nonatomic,strong) UIImageView* rulerImageView;

// Serial queue on which all AVCaptureSession mutation happens.
@property (nonatomic) dispatch_queue_t sessionQueue;
@property (nonatomic) AVCaptureSession *session;
@property (nonatomic) AVCaptureDeviceInput *videoDeviceInput;
@property (nonatomic) AVCaptureMovieFileOutput *movieFileOutput;
@property (nonatomic,strong) AVCaptureStillImageOutput *stillImageOutput;

// Utilities.
@property (nonatomic) AVCamSetupResult setupResult;  // Result recorded by -setupAVCapture.
@property (nonatomic, getter=isSessionRunning) BOOL sessionRunning;
@end

@implementation SDCameraViewController
// Build the view hierarchy first, then configure the capture pipeline
// (setupAVCapture wires the session into the preview view created here).
- (void)viewDidLoad
{
    [super viewDidLoad];

    [self initViews];
    [self setupAVCapture];
}

// NOTE(review): removed a redundant -viewDidAppear: override that only called
// super and added no behavior; UIKit's default behavior is identical.

// Restore the tab bar that -viewWillAppear: hid while the camera was on screen.
-(void)viewWillDisappear:(BOOL)animated
{
    [super viewWillDisappear:animated];

    SDTabBarController *tabs = [SDWindowManager defaultManager].getTabBarController;
    [tabs.myTabBar setHidden:NO];
}

// Hides the tab bar for the full-screen camera UI, then — on the session
// queue, so it serializes after -setupAVCapture's configuration block —
// reacts to the recorded setup result: start the session on success, or
// surface an alert (on the main queue) explaining the failure.
- (void)viewWillAppear:(BOOL)animated
{
    [super viewWillAppear:animated];
    
    SDTabBarController* tabBarController = [SDWindowManager defaultManager].getTabBarController;
    tabBarController.myTabBar.hidden = YES;
    
    dispatch_async( self.sessionQueue, ^{
        switch ( self.setupResult )
        {
            case AVCamSetupResultSuccess:
            {
                // Only setup observers and start the session running if setup succeeded.
                [self.session startRunning];
                self.sessionRunning = self.session.isRunning;
                break;
            }
            case AVCamSetupResultCameraNotAuthorized:
            {
                // UIKit alerts must be presented from the main queue.
                dispatch_async( dispatch_get_main_queue(), ^{
                    NSString *message = NSLocalizedString( @"AVCam doesn't have permission to use the camera, please change privacy settings", @"Alert message when the user has denied access to the camera" );
                    UIAlertController *alertController = [UIAlertController alertControllerWithTitle:@"AVCam" message:message preferredStyle:UIAlertControllerStyleAlert];
                    UIAlertAction *cancelAction = [UIAlertAction actionWithTitle:NSLocalizedString( @"OK", @"Alert OK button" ) style:UIAlertActionStyleCancel handler:nil];
                    [alertController addAction:cancelAction];
                    // Provide quick access to Settings.
                    UIAlertAction *settingsAction = [UIAlertAction actionWithTitle:NSLocalizedString( @"Settings", @"Alert button to open Settings" ) style:UIAlertActionStyleDefault handler:^( UIAlertAction *action ) {
                        [[UIApplication sharedApplication] openURL:[NSURL URLWithString:UIApplicationOpenSettingsURLString]];
                    }];
                    [alertController addAction:settingsAction];
                    [self presentViewController:alertController animated:YES completion:nil];
                } );
                break;
            }
            case AVCamSetupResultSessionConfigurationFailed:
            {
                // Generic failure alert; no recovery action is offered.
                dispatch_async( dispatch_get_main_queue(), ^{
                    NSString *message = NSLocalizedString( @"Unable to capture media", @"Alert message when something goes wrong during capture session configuration" );
                    UIAlertController *alertController = [UIAlertController alertControllerWithTitle:@"AVCam" message:message preferredStyle:UIAlertControllerStyleAlert];
                    UIAlertAction *cancelAction = [UIAlertAction actionWithTitle:NSLocalizedString( @"OK", @"Alert OK button" ) style:UIAlertActionStyleCancel handler:nil];
                    [alertController addAction:cancelAction];
                    [self presentViewController:alertController animated:YES completion:nil];
                } );
                break;
            }
        }
    } );
}

// Stop the capture session on the session queue when leaving this screen,
// but only if setup actually succeeded (otherwise there is nothing running).
- (void)viewDidDisappear:(BOOL)animated
{
    dispatch_async( self.sessionQueue, ^{
        if ( self.setupResult != AVCamSetupResultSuccess ) {
            return;
        }
        [self.session stopRunning];
    } );

    [super viewDidDisappear:animated];
}


// This screen always shows its custom navigation bar (hosts the shutter,
// gallery, and ruler buttons installed by -initViews).
- (BOOL)navigationBarHidden
{
    return NO;
}

// Installs the navigation-bar controls (gallery shortcut, shutter, ruler
// toggle) and the full-screen camera preview view.
-(void)initViews{
    // Bar buttons are only added when the navigation bar is visible.
    if (![self navigationBarHidden] && self.navBar != nil) {
        [self.navBar customNaviLeftButton:self
                                   action:@selector(onThumbsButtonClicked:)
                                    image:LOAD_IMAGE_USE_CACHE_INBUNDLE(HomePageBundle, @"actionbar_btn_picture.png")];
        [self.navBar customNaviRightButton:self
                                    action:@selector(snapStillImage:)
                                     image:LOAD_IMAGE_USE_CACHE_INBUNDLE(HomePageBundle, @"actionbar_btn_takepicture.png")];

        // Ruler toggle sits just to the right of the back button.
        UIImage *toggleImage = LOAD_IMAGE_USE_CACHE_INBUNDLE(HomePageBundle, @"actionbar_btn_ruler_h.png");
        UIButton *toggleButton = [UIButton buttonWithType:UIButtonTypeCustom];
        [toggleButton setImage:toggleImage forState:UIControlStateNormal];
        [toggleButton setTitleColor:[UIColor blackColor] forState:UIControlStateNormal];
        toggleButton.frame = CGRectMake(self.navBar.backButton.right + 15, kNaviOrigenY, toggleImage.size.width, 44);
        [toggleButton addTarget:self
                         action:@selector(onRulerButtonClicked:)
               forControlEvents:UIControlEventTouchUpInside];
        [self.navBar addSubview:toggleButton];
    }

    // Live preview fills everything below the navigation bar.
    CGRect previewFrame = CGRectMake(0, kNavBarHeight, kScreenWidth, kScreenHeight - kNavBarHeight);
    _previewView = [[SDCameraPreviewView alloc] initWithFrame:previewFrame];
    [self.view addSubview:_previewView];
}

// Gallery button: jump back to the first tab.
-(void)onThumbsButtonClicked:(id)sender{
    SDTabBarController *tabs = [SDWindowManager defaultManager].getTabBarController;
    [tabs.myTabBar selectTabAtIndex:0];
    [tabs setSelectedIndex:0];
}

// Toggles the on-screen ruler overlay.
-(void)onRulerButtonClicked:(id)sender{
    __showRuler = !__showRuler;
    if (!__showRuler) {
        [self removeRulerViewFromOverLayView];
    }
    else {
        [self addRulerViewToOverLayeView];
    }
}

// Presents the store/confirmation screen for a freshly captured photo.
- (void)onPresentStorePhotoView:(UIImage*)image
{
    SDStorePhotoViewController *storeController = [[SDStorePhotoViewController alloc] init];
    [storeController setPhotoImage:image];
    [self presentViewController:storeController animated:YES completion:nil];
}


// Shows the ruler overlay near the bottom of the view, horizontally centered.
// Any existing overlay is removed first (which also nils the image view, so
// the lazy branch below recreates it on every call).
// NOTE(review): "OverLaye" is a typo in the selector, but callers exist, so
// the name is kept.
-(void)addRulerViewToOverLayeView{
    [self removeRulerViewFromOverLayView];

    if (!_rulerImageView) {
        UIImage *overlayImage = LOAD_IMAGE_USE_CACHE_INBUNDLE(HomePageBundle, @"takepicture_ruler.png");
        CGRect overlayFrame = CGRectMake(0, self.view.height - 100, overlayImage.size.width, overlayImage.size.height);
        _rulerImageView = [[UIImageView alloc] initWithFrame:overlayFrame];
        _rulerImageView.image = overlayImage;
    }

    [self.view addSubview:_rulerImageView];
    _rulerImageView.left = (self.view.width - _rulerImageView.width) / 2;
}

// Tears down the ruler overlay if it is present.
-(void)removeRulerViewFromOverLayView{
    if (_rulerImageView == nil) {
        return;
    }
    [_rulerImageView removeFromSuperview];
    _rulerImageView = nil;
}

// One-time configuration of the AVCaptureSession: preview hookup, camera
// authorization check, and input/output wiring.
//
// -[AVCaptureSession startRunning] is a blocking call which can take a long
// time, so all session mutation is dispatched to self.sessionQueue to keep
// the main queue (and the UI) responsive. The outcome is recorded in
// self.setupResult and acted upon later in -viewWillAppear:.
// (A large stretch of dead, commented-out legacy setup code was removed.)
- (void)setupAVCapture
{
    // Create the AVCaptureSession.
    self.session = [[AVCaptureSession alloc] init];
    [self.session setSessionPreset:AVCaptureSessionPresetPhoto];

    // Setup the preview view.
    self.previewView.session = self.session;

    // Communicate with the session and other session objects on this queue.
    self.sessionQueue = dispatch_queue_create( "session queue", DISPATCH_QUEUE_SERIAL );

    self.setupResult = AVCamSetupResultSuccess;

    // Check video authorization status. Video access is required; audio access
    // is optional (it is requested implicitly when the audio input is created).
    switch ( [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo] )
    {
        case AVAuthorizationStatusAuthorized:
        {
            // The user has previously granted access to the camera.
            break;
        }
        case AVAuthorizationStatusNotDetermined:
        {
            // Suspend the session queue so session setup waits until the access
            // request completes; the handler below resumes the queue.
            dispatch_suspend( self.sessionQueue );
            [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^( BOOL granted ) {
                if ( ! granted ) {
                    self.setupResult = AVCamSetupResultCameraNotAuthorized;
                }
                dispatch_resume( self.sessionQueue );
            }];
            break;
        }
        default:
        {
            // The user has previously denied access (or access is restricted).
            self.setupResult = AVCamSetupResultCameraNotAuthorized;
            break;
        }
    }

    // Mutating an AVCaptureSession (or its inputs/outputs/connections) is not
    // safe from multiple threads, so all configuration runs serially here.
    dispatch_async( self.sessionQueue, ^{
        if ( self.setupResult != AVCamSetupResultSuccess ) {
            return;
        }

        NSError *error = nil;

        AVCaptureDevice *videoDevice = [SDCameraViewController deviceWithMediaType:AVMediaTypeVideo preferringPosition:AVCaptureDevicePositionBack];
        AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];

        if ( ! videoDeviceInput ) {
            // A nil input falls through to canAddInput: (which returns NO) and
            // is reported as a configuration failure below.
            NSLog( @"Could not create video device input: %@", error );
        }

        [self.session beginConfiguration];

        if ( [self.session canAddInput:videoDeviceInput] ) {
            [self.session addInput:videoDeviceInput];
            self.videoDeviceInput = videoDeviceInput;

            dispatch_async( dispatch_get_main_queue(), ^{
                // AVCaptureVideoPreviewLayer backs the preview view, and UIView
                // may only be manipulated on the main thread. Use the status bar
                // orientation as the initial video orientation.
                UIInterfaceOrientation statusBarOrientation = [UIApplication sharedApplication].statusBarOrientation;
                AVCaptureVideoOrientation initialVideoOrientation = AVCaptureVideoOrientationPortrait;
                if ( statusBarOrientation != UIInterfaceOrientationUnknown ) {
                    initialVideoOrientation = (AVCaptureVideoOrientation)statusBarOrientation;
                }

                AVCaptureVideoPreviewLayer *previewLayer = (AVCaptureVideoPreviewLayer *)self.previewView.layer;
                [previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
                // Magnify the preview to match the scale used at capture time.
                [previewLayer setAffineTransform:CGAffineTransformMakeScale(kPreviewScaleFactory, kPreviewScaleFactory)];
                previewLayer.connection.videoOrientation = initialVideoOrientation;
            } );
        }
        else {
            NSLog( @"Could not add video device input to the session" );
            self.setupResult = AVCamSetupResultSessionConfigurationFailed;
        }

        // Audio input (optional; failure is logged but not fatal).
        AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
        AVCaptureDeviceInput *audioDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error];

        if ( ! audioDeviceInput ) {
            NSLog( @"Could not create audio device input: %@", error );
        }

        if ( [self.session canAddInput:audioDeviceInput] ) {
            [self.session addInput:audioDeviceInput];
        }
        else {
            NSLog( @"Could not add audio device input to the session" );
        }

        // Movie output (not used by this screen's capture path, but kept
        // configured to preserve existing behavior).
        AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
        if ( [self.session canAddOutput:movieFileOutput] ) {
            [self.session addOutput:movieFileOutput];
            AVCaptureConnection *connection = [movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
            if ( connection.isVideoStabilizationSupported ) {
                connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
            }
            self.movieFileOutput = movieFileOutput;
        }
        else {
            NSLog( @"Could not add movie file output to the session" );
            self.setupResult = AVCamSetupResultSessionConfigurationFailed;
        }

        // Still-image output — the path used by -snapStillImage:.
        AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
        if ( [self.session canAddOutput:stillImageOutput] ) {
            stillImageOutput.outputSettings = @{AVVideoCodecKey : AVVideoCodecJPEG};
            [self.session addOutput:stillImageOutput];
            self.stillImageOutput = stillImageOutput;
        }
        else {
            NSLog( @"Could not add still image output to the session" );
            self.setupResult = AVCamSetupResultSessionConfigurationFailed;
        }

        [self.session commitConfiguration];
    } );
}

// Maps a UIDeviceOrientation to the corresponding AVCaptureVideoOrientation.
// Note the deliberate left/right swap: rotating the device left turns the
// camera's view right, and vice versa.
//
// Fix: the previous implementation implicitly cast the device orientation to
// the video-orientation enum, producing out-of-range values for FaceUp (5),
// FaceDown (6) and Unknown (0), which have no video equivalent. Those cases
// now fall back to portrait.
- (AVCaptureVideoOrientation)avOrientationForDeviceOrientation:(UIDeviceOrientation)deviceOrientation
{
    switch ( deviceOrientation ) {
        case UIDeviceOrientationPortrait:
            return AVCaptureVideoOrientationPortrait;
        case UIDeviceOrientationPortraitUpsideDown:
            return AVCaptureVideoOrientationPortraitUpsideDown;
        case UIDeviceOrientationLandscapeLeft:
            return AVCaptureVideoOrientationLandscapeRight;
        case UIDeviceOrientationLandscapeRight:
            return AVCaptureVideoOrientationLandscapeLeft;
        default:
            // FaceUp / FaceDown / Unknown: no video-orientation equivalent.
            return AVCaptureVideoOrientationPortrait;
    }
}


// Shutter action: captures a still image at the preview's zoom level,
// composites the ruler overlay onto it, and presents the store-photo screen.
//
// Fixes:
//  - The +[PHPhotoLibrary requestAuthorization:] handler is invoked on an
//    arbitrary queue, but the original did UIKit work there (UIGraphics
//    drawing, reading self.view, presenting a view controller). That work is
//    now dispatched to the main queue.
//  - The capture scale is clamped to connection.videoMaxScaleAndCropFactor
//    (setting a factor above the maximum raises an exception).
- (void)snapStillImage:(id)sender
{
    dispatch_async( self.sessionQueue, ^{
        AVCaptureConnection *connection = [self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
        AVCaptureVideoPreviewLayer *previewLayer = (AVCaptureVideoPreviewLayer *)self.previewView.layer;

        // Match the capture zoom to the preview zoom, within hardware limits.
        CGFloat scaleFactor = MIN( kPreviewScaleFactory, connection.videoMaxScaleAndCropFactor );
        [connection setVideoScaleAndCropFactor:scaleFactor];

        // Update the orientation on the still image output video connection before capturing.
        connection.videoOrientation = previewLayer.connection.videoOrientation;

        // Flash set to Auto for Still Capture.
        [SDCameraViewController setFlashMode:AVCaptureFlashModeAuto forDevice:self.videoDeviceInput.device];

        // Capture a still image.
        [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:connection completionHandler:^( CMSampleBufferRef imageDataSampleBuffer, NSError *error ) {
            if ( ! imageDataSampleBuffer ) {
                NSLog( @"Could not capture still image: %@", error );
                return;
            }
            // The sample buffer is not retained; extract the JPEG data before
            // the asynchronous authorization callback runs.
            NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
            [PHPhotoLibrary requestAuthorization:^( PHAuthorizationStatus status ) {
                if ( status != PHAuthorizationStatusAuthorized ) {
                    return;
                }
                // The authorization handler runs on an arbitrary queue; the
                // drawing and presentation below are UIKit work and must
                // happen on the main queue.
                dispatch_async( dispatch_get_main_queue(), ^{
                    UIImage *rulerImg = LOAD_IMAGE_USE_CACHE_INBUNDLE(HomePageBundle, @"takepicture_ruler.png");
                    UIImage *image = [UIImage imageWithData:imageData];

                    // Composite the ruler overlay at the same place it appears on screen.
                    UIGraphicsBeginImageContextWithOptions(CGSizeMake(kScreenWidth, kScreenHeight-kNavBarHeight), NO, 0.0);
                    [image drawInRect:CGRectMake(0.0, 0.0, kScreenWidth, kScreenHeight-kNavBarHeight)];
                    [rulerImg drawInRect:CGRectMake((self.view.width - rulerImg.size.width)/2, kScreenHeight-kNavBarHeight - 100, rulerImg.size.width, rulerImg.size.height)];
                    UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
                    UIGraphicsEndImageContext();

                    newImage = [newImage fixOrientation];
                    [self onPresentStorePhotoView:newImage];
                } );
            }];
        }];
    } );
}



// Applies the given flash mode to a device, guarded by capability checks and
// the required lock-for-configuration dance. A no-op when the device has no
// flash or does not support the requested mode.
+ (void)setFlashMode:(AVCaptureFlashMode)flashMode forDevice:(AVCaptureDevice *)device
{
    if ( ! device.hasFlash || ! [device isFlashModeSupported:flashMode] ) {
        return;
    }

    NSError *lockError = nil;
    if ( ! [device lockForConfiguration:&lockError] ) {
        NSLog( @"Could not lock device for configuration: %@", lockError );
        return;
    }

    device.flashMode = flashMode;
    [device unlockForConfiguration];
}

// Returns the capture device at the preferred position for the given media
// type, falling back to the first available device (or nil when none exist).
+ (AVCaptureDevice *)deviceWithMediaType:(NSString *)mediaType preferringPosition:(AVCaptureDevicePosition)position
{
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType];

    NSUInteger matchIndex = [devices indexOfObjectPassingTest:^BOOL( AVCaptureDevice *device, NSUInteger idx, BOOL *stop ) {
        return device.position == position;
    }];

    return ( matchIndex != NSNotFound ) ? devices[matchIndex] : devices.firstObject;
}

@end
