//
//  QRCaptureView.m
//  Pods-Runner
//
//  Created by cdx on 2019/10/28.
//

#import "QRCaptureView.h"
#import <AVFoundation/AVFoundation.h>
#import <CoreMotion/CoreMotion.h>

/// Private class extension: the view acts as the face-metadata delegate and
/// as a Flutter method-call handler for its per-instance channel.
@interface QRCaptureView () <AVCaptureMetadataOutputObjectsDelegate, FlutterPlugin>

// Capture session driving the preview layer and outputs; created lazily.
@property(nonatomic, strong) AVCaptureSession *session;
// Per-view method channel ("plugins/qr_capture/method_<viewId>") to Dart.
@property(nonatomic, strong) FlutterMethodChannel *channel;
// Preview layer; weak because the view's layer tree owns it once added.
@property(nonatomic, weak) AVCaptureVideoPreviewLayer *captureLayer;
// Still-photo output; only created/added on iOS 10+.
@property(readonly, nonatomic) AVCapturePhotoOutput *capturePhotoOutput API_AVAILABLE(ios(10));

@end

/// Photo-capture delegate that writes the captured JPEG to disk and then
/// completes the associated FlutterResult exactly once.
@interface FLTSavePhotoDelegate : NSObject <AVCapturePhotoCaptureDelegate>
// Destination file path for the captured JPEG.
@property(readonly, nonatomic) NSString *path;
// Flutter callback; completed with nil on success or a FlutterError on failure.
@property(readonly, nonatomic) FlutterResult result;
// Camera position recorded at init (always front in this plugin).
@property(readonly, nonatomic) AVCaptureDevicePosition cameraPosition;
@end

@implementation FLTSavePhotoDelegate {
    /// Strong self-reference: AVCapturePhotoOutput holds its delegate weakly,
    /// so we keep ourselves alive until the capture callback fires.
    FLTSavePhotoDelegate *selfReference;
}

/// Designated initializer.
/// @param path   Destination file path for the captured JPEG.
/// @param result Flutter result callback; completed exactly once — with nil on
///               success or a FlutterError on failure.
- (instancetype)initWithPath:(NSString *)path
                      result:(FlutterResult)result
{
    self = [super init];
    NSAssert(self, @"super init cannot be nil");
    _path = [path copy];      // defensive copy in case a mutable string is passed
    _result = [result copy];  // blocks stored in ivars should be copied
    
    // This plugin always captures from the front camera (see QRCaptureView).
    _cameraPosition = AVCaptureDevicePositionFront;
    selfReference = self;
    return self;
}

/// AVCapturePhotoCaptureDelegate callback (pre-iOS 11 sample-buffer variant).
/// Converts the sample buffer to JPEG, rotates it upright, writes it to
/// `_path`, and completes `_result`.
- (void)captureOutput:(AVCapturePhotoOutput *)output
didFinishProcessingPhotoSampleBuffer:(CMSampleBufferRef)photoSampleBuffer
previewPhotoSampleBuffer:(CMSampleBufferRef)previewPhotoSampleBuffer
     resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings
      bracketSettings:(AVCaptureBracketedStillImageSettings *)bracketSettings
                error:(NSError *)error API_AVAILABLE(ios(10)) {
    // Release the self-retain no matter how the capture ended.
    selfReference = nil;
    if (error) {
        // BUG FIX: the error path previously returned without completing the
        // result (the call was commented out), leaving the Dart future
        // pending forever. Always complete it.
        _result([FlutterError errorWithCode:@"CaptureError"
                                    message:error.localizedDescription
                                    details:nil]);
        return;
    }
    NSData *data = [AVCapturePhotoOutput
                    JPEGPhotoDataRepresentationForJPEGSampleBuffer:photoSampleBuffer
                    previewPhotoSampleBuffer:previewPhotoSampleBuffer];
    // Re-wrap with an explicit orientation so the saved JPEG is upright when
    // the device is held in portrait (the sensor delivers landscape pixels).
    UIImage *image = [UIImage imageWithCGImage:[UIImage imageWithData:data].CGImage
                                         scale:1.0
                                   orientation:UIImageOrientationRight];
    // TODO(sigurdm): Consider writing file asynchronously.
    BOOL success = [UIImageJPEGRepresentation(image, 1.0) writeToFile:_path atomically:YES];
    if (!success) {
        _result([FlutterError errorWithCode:@"IOError" message:@"Unable to write file" details:nil]);
        return;
    }
    _result(nil);
}

/// Currently unused; kept for parity with the upstream camera plugin.
- (UIImageOrientation)getImageRotation {
    
    // If none of the above, then the device is likely facing straight down or straight up -- just
    // pick something arbitrary
    // TODO: Maybe use the UIInterfaceOrientation if in these scenarios
    return UIImageOrientationLeft;
}
@end


@implementation QRCaptureView

#pragma mark - Accessors

/// Lazily-created capture session shared by the preview layer and outputs.
- (AVCaptureSession *)session {
    if (!_session) {
        _session = [[AVCaptureSession alloc] init];
    }
    return _session;
}

#pragma mark - Lifecycle

/// Creates the platform view, wires up its Flutter method channel, and — when
/// camera permission is granted or not yet determined — configures the capture
/// pipeline: front-camera input, face-metadata output, and (iOS 10+) a
/// still-photo output. Shows an alert when permission was denied.
- (instancetype)initWithFrame:(CGRect)frame viewIdentifier:(int64_t)viewId arguments:(id _Nullable)args registrar:(NSObject<FlutterPluginRegistrar>*)registrar {
    if (self = [super initWithFrame:frame]) {
        // One channel per platform-view instance, keyed by the view id.
        NSString *name = [NSString stringWithFormat:@"plugins/qr_capture/method_%lld", viewId];
        FlutterMethodChannel *channel = [FlutterMethodChannel
                                         methodChannelWithName:name
                                         binaryMessenger:registrar.messenger];
        self.channel = channel;
        [registrar addMethodCallDelegate:self channel:channel];
        
        AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
        if (status == AVAuthorizationStatusAuthorized || status == AVAuthorizationStatusNotDetermined) {
            
            AVCaptureVideoPreviewLayer *layer = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
            self.captureLayer = layer;
            
            layer.backgroundColor = [UIColor blackColor].CGColor;
            [self.layer addSublayer:layer];
            layer.videoGravity = AVLayerVideoGravityResizeAspectFill;
            
            AVCaptureDevice *device;
            if (@available(iOS 10.0, *)) {
                device = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInWideAngleCamera
                                                            mediaType:AVMediaTypeVideo
                                                             position:AVCaptureDevicePositionFront];
            } else {
                // Fallback on earlier versions
                device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
            }
            
            // BUG FIX: -initWithDevice:error: returns nil when no camera is
            // available (e.g. the simulator) and -addInput: throws on nil.
            NSError *inputError = nil;
            AVCaptureDeviceInput *input = [[AVCaptureDeviceInput alloc] initWithDevice:device
                                                                                 error:&inputError];
            AVCaptureMetadataOutput *output = [[AVCaptureMetadataOutput alloc] init];
            if (@available(iOS 10.0, *)) {
                _capturePhotoOutput = [AVCapturePhotoOutput new];
                [_capturePhotoOutput setHighResolutionCaptureEnabled:YES];
            }
            if (input) {
                [self.session addInput:input];
            }
            [self.session addOutput:output];
            // BUG FIX: on iOS < 10 _capturePhotoOutput is nil and -addOutput:
            // throws NSInvalidArgumentException on nil — only add it if created.
            if (_capturePhotoOutput) {
                [self.session addOutput:_capturePhotoOutput];
            }
            self.session.sessionPreset = AVCaptureSessionPresetHigh;
            
            [output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
            // BUG FIX: requesting a metadata type that is not in
            // availableMetadataObjectTypes throws (e.g. when no camera input
            // could be added). Guard before restricting to face detection.
            if ([output.availableMetadataObjectTypes containsObject:AVMetadataObjectTypeFace]) {
                [output setMetadataObjectTypes:@[AVMetadataObjectTypeFace]];
            }
            
            // -startRunning blocks until the session starts; keep it off the
            // main thread.
            dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
                [self.session startRunning];
            });
            
        } else {
            // NOTE(review): UIAlertView is deprecated; consider
            // UIAlertController once a presenting view controller is available.
            UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Tips" message:@"Authorization is required to use the camera, please check your permission settings: Settings> Privacy> Camera" delegate:nil cancelButtonTitle:nil otherButtonTitles:@"Ok", nil];
            [alert show];
        }
    }
    return self;
}

#pragma mark - Photo capture

/// Captures one still photo and writes it to `path`; `result` is completed by
/// the FLTSavePhotoDelegate when the capture finishes or fails.
- (void)captureToFile:(NSString *)path result:(FlutterResult)result API_AVAILABLE(ios(10)) {
    AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings];
    
    [_capturePhotoOutput
     capturePhotoWithSettings:settings
     delegate:[[FLTSavePhotoDelegate alloc] initWithPath:path
                                                  result:result]];
}

- (void)layoutSubviews {
    [super layoutSubviews];
    // Keep the preview layer pinned to the view's bounds.
    self.captureLayer.frame = self.bounds;
}

#pragma mark - Flutter method channel

/// Dispatches method calls from Dart: pause / resume / takePicture /
/// setTorchMode. BUG FIX: every branch now completes `result` — previously
/// pause, resume, setTorchMode, and unknown methods never did, leaving the
/// corresponding Dart futures pending forever.
- (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result {
    if ([call.method isEqualToString:@"pause"]) {
        [self pause];
        result(nil);
    } else if ([call.method isEqualToString:@"resume"]) {
        [self resume];
        result(nil);
    } else if ([call.method isEqualToString:@"takePicture"]) {
        if (@available(iOS 10.0, *)) {
            [self captureToFile:call.arguments[@"path"] result:result];
        } else {
            result(FlutterMethodNotImplemented);
        }
    } else if ([call.method isEqualToString:@"setTorchMode"]) {
        AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
        if (!device.hasTorch) {
            result(nil);
            return;
        }
        NSNumber *isOn = call.arguments;
        // BUG FIX: check the lock's return value before mutating torch state;
        // setting torchMode without a successful lock throws.
        if ([device lockForConfiguration:nil]) {
            device.torchMode = isOn.boolValue ? AVCaptureTorchModeOn : AVCaptureTorchModeOff;
            [device unlockForConfiguration];
        }
        result(nil);
    } else {
        result(FlutterMethodNotImplemented);
    }
}

/// Registration is handled per-instance in the initializer; nothing to do here.
+ (void)registerWithRegistrar:(nonnull NSObject<FlutterPluginRegistrar> *)registrar {}

- (void)resume {
    [self.session startRunning];
}

- (void)pause {
    [self.session stopRunning];
}

#pragma mark - AVCaptureMetadataOutputObjectsDelegate

/// Forwards the first detected face's bounds to Dart via "onCaptured".
/// Bounds are in AVFoundation's normalized (0–1) metadata coordinate space.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection {
    if (metadataObjects.count > 0) {
        AVMetadataFaceObject *metadataObject = metadataObjects.firstObject;
        NSDictionary *dictionary = @{@"x": [NSNumber numberWithFloat:metadataObject.bounds.origin.x],
                                     @"y": [NSNumber numberWithFloat:metadataObject.bounds.origin.y],
                                     @"w": [NSNumber numberWithFloat:metadataObject.bounds.size.width],
                                     @"h": [NSNumber numberWithFloat:metadataObject.bounds.size.height],
        };
        
        if (self.channel) {
            [self.channel invokeMethod:@"onCaptured" arguments:dictionary];
        }
    }
}

- (void)dealloc {
    // BUG FIX: access the ivar directly — the lazy `session` getter would
    // allocate a brand-new session during dealloc if one was never created.
    [_session stopRunning];
}

@end
