//
//  YLCameraCaptureView.swift
//  MyTest
//
//  Created by 徐国梁 on 2020/9/11.
//  Copyright © 2020 edz. All rights reserved.
//


import UIKit
import AVFoundation
import CoreMedia
import CoreVideo
import CoreImage
import ImageIO
import GLKit
import CoreGraphics

/// Delivers the captured photo plus, when border detection ran and succeeded,
/// the rectangle feature that was used (nil otherwise).
typealias CompletionHandler = (_ image: UIImage, _ borderDetectFeature: CIRectangleFeature?)->()

/// Camera preview view that renders filtered frames through a GLKView and can
/// detect, highlight and perspective-correct rectangular documents.
class YLCameraCaptureView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate {
    /// Enables rectangle (document edge) detection on the live preview.
    var enableBorderDetection: Bool = false
    /// Torch (continuous light) state; applied via `setEnableTorch(enableTorch:)`.
    var enableTorch: Bool = false
    /// Flash state; applied via `setEnableFlash(enableFlash:)`.
    var enableFlash: Bool = false
    private var coreImageContext: CIContext!
    private var renderBuffer: GLuint = GLuint()
    private var glkView: GLKView!
    private var isStopped: Bool = false
    // Accumulated confidence that the same rectangle keeps being detected.
    private var imageDedectionConfidence: Float!
    // Periodically allows the next preview frame to run (expensive) detection.
    private var borderDetectTimeKeeper: Timer!
    private var borderDetectFrame: Bool = false
    private var borderDetectLastRectangleFeature: CIRectangleFeature!
    private var isCapturing: Bool = false
    private var rectOverlay: CAShapeLayer! // overlay highlighting the detected edges
    private var captureSession: AVCaptureSession!
    private var captureDevice: AVCaptureDevice!
    private var context: EAGLContext!
    private var stillImageOutput: AVCaptureStillImageOutput!
    private var forceStop: Bool = false
    // High-accuracy rectangle detector (slower; used for actual detection).
    private let highAccuracyRectangleDetector = CIDetector(ofType: CIDetectorTypeRectangle, context: nil, options: [CIDetectorAccuracy: CIDetectorAccuracyHigh])
    // Low-accuracy tracking detector (cheaper; declared but not used below).
    private let rectangleDetetor = CIDetector(ofType: CIDetectorTypeRectangle, context: nil, options: [CIDetectorAccuracy: CIDetectorAccuracyLow, CIDetectorTracking: true])
    
    override init(frame: CGRect) {
        super.init(frame: frame)
        
        // Pause frame processing while the app is not active...
        NotificationCenter.default.addObserver(self, selector: #selector(backgroundMode), name: NSNotification.Name.UIApplicationWillResignActive, object: nil)
        // ...and resume when it becomes active again.
        NotificationCenter.default.addObserver(self, selector: #selector(foregroundMode), name: NSNotification.Name.UIApplicationDidBecomeActive, object: nil)
    }
    
    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
    
    deinit {
        NotificationCenter.default.removeObserver(self)
        // NOTE(review): the repeating Timer retains `self` as its target, so this
        // deinit can only run after stop() has invalidated the timer.
    }
    
    @objc private func backgroundMode() {
        forceStop = true
    }
    
    @objc private func foregroundMode() {
        forceStop = false
    }
    
    //MARK: - engine
    /// Starts the capture session and the periodic edge-detection timer.
    func start() {
        isStopped = false
        captureSession?.startRunning()
        
        // Restart the sampling timer (safe no-op when none exists yet).
        borderDetectTimeKeeper?.invalidate()
        // Allow one edge-detection pass every 0.65 s (detection is expensive).
        borderDetectTimeKeeper = Timer.scheduledTimer(timeInterval: 0.65, target: self, selector: #selector(enableBorderDetectFrame), userInfo: nil, repeats: true)
        borderDetectTimeKeeper.fire()
        hideGLKView(hidden: false, completion: nil)
    }
    
    /// Stops the capture session and the detection timer.
    func stop() {
        isStopped = true
        captureSession?.stopRunning()
        
        // FIX: optional invalidate so calling stop() before start() cannot crash
        // on the implicitly-unwrapped timer.
        borderDetectTimeKeeper?.invalidate()
        hideGLKView(hidden: true, completion: nil)
    }
    
    // Flags the next preview frame for edge detection.
    @objc private func enableBorderDetectFrame() {
        borderDetectFrame = true
    }
    
    /// Turns the torch (continuous light) on or off.
    private func setEnableTorch(enableTorch: Bool) {
        self.enableTorch = enableTorch
        
        guard let device = captureDevice, device.hasTorch, device.hasFlash else { return }
        do {
            try device.lockForConfiguration()
            if enableTorch {
                try device.setTorchModeOn(level: 1)
            } else {
                // FIX: setTorchModeOn(level: 0) is an invalid call (the level must
                // be > 0 and would throw); switching the torch mode off is correct.
                device.torchMode = .off
            }
        } catch {
            //print(error)
        }
        device.unlockForConfiguration()
    }
    
    /// Turns the flash (still-capture light) on or off.
    private func setEnableFlash(enableFlash: Bool) {
        self.enableFlash = enableFlash
        
        guard let device = captureDevice, device.hasTorch, device.hasFlash else { return }
        do {
            try device.lockForConfiguration()
            // FIX: this setter previously drove the torch; the flash mode is what
            // affects still capture.
            device.flashMode = enableFlash ? .on : .off
        } catch {
            print(error)
        }
        device.unlockForConfiguration()
    }
    
    
    /// Lazily creates the GL context and GLKView used to render the preview.
    private func createGLKView() {
        if (context != nil) { return }
        
        context = EAGLContext(api: .openGLES2)
        let view = GLKView(frame: bounds)
        view.autoresizingMask = [.flexibleWidth, .flexibleHeight]
        view.context = context
        view.contentScaleFactor = 1.0
        view.drawableDepthFormat = .format24
        insertSubview(view, at: 0)
        self.glkView = view
        glGenRenderbuffers(1, &renderBuffer)
        glBindRenderbuffer(GLenum(GL_RENDERBUFFER), renderBuffer)
        
        coreImageContext = CIContext(eaglContext: context)
        EAGLContext.setCurrent(context)
    }
    
    /// Configures the capture session: video input, preview data output,
    /// still-image output, flash default and autofocus.
    func setupCameraView() {
        createGLKView()
        
        guard let device = AVCaptureDevice.devices(for: .video).first else { return }
        
        imageDedectionConfidence = 0.0
        
        let session = AVCaptureSession()
        captureSession = session
        session.beginConfiguration()
        captureDevice = device
        
        do {
            let input = try AVCaptureDeviceInput(device: device)
            session.sessionPreset = .photo
            session.addInput(input)
            
            // Live BGRA frames for preview rendering + edge detection.
            let dataOutput = AVCaptureVideoDataOutput()
            dataOutput.alwaysDiscardsLateVideoFrames = true
            dataOutput.videoSettings = [String(kCVPixelBufferPixelFormatTypeKey): kCVPixelFormatType_32BGRA]
            dataOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main)
            session.addOutput(dataOutput)
            
            // Still output used by captureImageWithCompletionHandler(completionHandler:).
            stillImageOutput = AVCaptureStillImageOutput()
            session.addOutput(stillImageOutput)
            
            let connection = dataOutput.connections.first
            connection?.videoOrientation = .portrait
            
            if device.isFlashAvailable {
                try device.lockForConfiguration()
                device.flashMode = .off
                device.unlockForConfiguration()
            }
            // FIX: autofocus setup was nested inside the flash-availability check,
            // so devices without a flash never got autofocus configured.
            if device.isFocusModeSupported(.autoFocus) {
                try device.lockForConfiguration()
                device.focusMode = .autoFocus
                device.unlockForConfiguration()
            }
            session.commitConfiguration()
        } catch {}
    }
    
    /// Focuses/exposes at a point given in this view's coordinate space.
    /// The completion handler is always invoked exactly once.
    func focusAtPoint(point: CGPoint, completionHandler: @escaping (()->())) {
        guard let device = captureDevice else {
            // FIX: previously returned silently; callers waiting on the
            // completion would hang forever.
            completionHandler()
            return
        }
        let frameSize = bounds.size
        // Map the UIKit touch point into the device's normalized, landscape
        // point-of-interest space.
        let pointOfInterest = CGPoint(x: point.y / frameSize.height, y: 1 - (point.x / frameSize.width))
        
        if device.isFocusPointOfInterestSupported && device.isFocusModeSupported(.autoFocus) {
            do {
                try device.lockForConfiguration()
                
                if device.isFocusModeSupported(.continuousAutoFocus) {
                    device.focusMode = .continuousAutoFocus
                    device.focusPointOfInterest = pointOfInterest
                }
                
                if device.isExposurePointOfInterestSupported && device.isExposureModeSupported(.continuousAutoExposure) {
                    device.exposurePointOfInterest = pointOfInterest
                    device.exposureMode = .continuousAutoExposure
                }
                
                device.unlockForConfiguration()
            } catch {
                
            }
            // FIX: the completion used to fire only when continuous auto-exposure
            // was supported; fire it once after configuration instead.
            completionHandler()
        } else {
            completionHandler()
        }
    }
    
    /// Fades the GL preview in or out.
    private func hideGLKView(hidden: Bool, completion: (()->())?) {
        UIView.animate(withDuration: 0.1, animations: {
            self.glkView?.alpha = hidden ? 0.0 : 1.0
        }) { _ in
            completion?()
        }
    }
    
    /// Returns the rectangle feature with the largest half-perimeter, or nil
    /// when the list contains no rectangle features.
    private func biggestRectangleInRectangles(rectangles: [CIFeature]) -> CIRectangleFeature? {
        // FIX: conditional cast instead of `as!`, so a stray non-rectangle
        // feature cannot crash the app.
        let rects = rectangles.compactMap { $0 as? CIRectangleFeature }
        guard var biggestRectangle = rects.first else { return nil }
        
        var halfPerimiterValue: Float = 0
        for rect in rects {
            // Half perimeter = top-edge length + left-edge length.
            let width = hypotf(Float(rect.topLeft.x - rect.topRight.x), Float(rect.topLeft.y - rect.topRight.y))
            let height = hypotf(Float(rect.topLeft.x - rect.bottomLeft.x), Float(rect.topLeft.y - rect.bottomLeft.y))
            let currentHalfPerimiterValue = height + width
            
            if halfPerimiterValue < currentHalfPerimiterValue {
                halfPerimiterValue = currentHalfPerimiterValue
                biggestRectangle = rect
            }
        }
        
        return biggestRectangle
    }
    
    //MARK: - filters
    /// Desaturated, contrast-boosted "enhance" look (black & white document style).
    private func filteredImageUsingEnhanceFilterOnImage(image: CIImage) -> CIImage {
        return (CIFilter(name: "CIColorControls", withInputParameters: [kCIInputImageKey: image, "inputBrightness": 0, "inputContrast": 1.14, "inputSaturation": 0])?.outputImage)!
    }
    
    /// Slight contrast boost applied to every preview frame.
    private func filteredImageUsingContrastFilterOnImage(image: CIImage) -> CIImage {
        return (CIFilter(name: "CIColorControls", withInputParameters: ["inputContrast": 1.1, kCIInputImageKey: image])?.outputImage)!
    }
    
    /// Maps the quadrilateral described by `rectangleFeature` onto an upright rectangle.
    func correctPerspectiveForImage(image: CIImage, rectangleFeature: CIRectangleFeature) -> CIImage {
        // Native Swift dictionary instead of NSMutableDictionary + forced cast.
        let rectangleCoordinates: [String: Any] = [
            "inputTopLeft": CIVector(cgPoint: rectangleFeature.topLeft),
            "inputTopRight": CIVector(cgPoint: rectangleFeature.topRight),
            "inputBottomLeft": CIVector(cgPoint: rectangleFeature.bottomLeft),
            "inputBottomRight": CIVector(cgPoint: rectangleFeature.bottomRight)
        ]
        return image.applyingFilter("CIPerspectiveCorrection", parameters: rectangleCoordinates)
    }
    
    //MARK: - AVCaptureVideoDataOutputSampleBufferDelegate
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        if (forceStop || isStopped || isCapturing || !CMSampleBufferIsValid(sampleBuffer)) { return }
        
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
        var image = CIImage(cvPixelBuffer: pixelBuffer)
        image = filteredImageUsingContrastFilterOnImage(image: image)
        
        if enableBorderDetection {
            if borderDetectFrame {
                // Run the expensive high-accuracy detector on this sampled frame
                // and remember the biggest rectangle found.
                let features = highAccuracyRectangleDetector?.features(in: image) ?? []
                borderDetectLastRectangleFeature = biggestRectangleInRectangles(rectangles: features)
                borderDetectFrame = false
            }
            
            if borderDetectLastRectangleFeature != nil {
                // Seeing a rectangle again raises our confidence in it.
                imageDedectionConfidence += 0.5
                // Only draw the highlight once the rectangle has been stable
                // for several consecutive frames.
                if rectangleDetectionConfidenceHighEnough(confidence: imageDedectionConfidence) {
                    drawBorderDetectRectWithImageRect(imageRect: image.extent, topLeft: borderDetectLastRectangleFeature.topLeft, topRight: borderDetectLastRectangleFeature.topRight, bottomLeft: borderDetectLastRectangleFeature.bottomLeft, bottomRight: borderDetectLastRectangleFeature.bottomRight)
                }
            } else {
                // Lost the rectangle: reset confidence and clear the highlight.
                imageDedectionConfidence = 0.0
                rectOverlay?.path = nil
            }
        }
        
        if (context != nil && coreImageContext != nil) {
            // Render the filtered frame into the GL-backed preview.
            coreImageContext.draw(image, in: bounds, from: image.extent)
            context.presentRenderbuffer(Int(GL_RENDERBUFFER))
            
            glkView.setNeedsDisplay()
        }
    }
    
    /// Draws a translucent fill + white outline over the detected rectangle,
    /// dimming everything outside it via the even-odd fill rule.
    func drawBorderDetectRectWithImageRect(imageRect: CGRect, topLeft: CGPoint, topRight: CGPoint, bottomLeft: CGPoint, bottomRight: CGPoint) {
        
        if (rectOverlay == nil) {
            // FIX: CAShapeLayer(layer:) is the presentation-copy initializer;
            // a fresh layer should be created with the plain initializer.
            rectOverlay = CAShapeLayer()
            rectOverlay.fillRule = kCAFillRuleEvenOdd
            rectOverlay.fillColor = kColor(73, 130, 180, 0.4).cgColor
            rectOverlay.strokeColor = UIColor.white.cgColor
            rectOverlay.lineWidth = 5.0
        }
        if (rectOverlay.superlayer == nil) {
            layer.masksToBounds = true
            layer.addSublayer(rectOverlay)
        }
        
        // Convert from image space into this view's UIKit coordinate space.
        let featureRect = transfromRealRectWithImageRect(imageRect: imageRect, topLeft: topLeft, topRight: topRight, bottomLeft: bottomLeft, bottomRight: bottomRight)
        
        // Outline of the detected rectangle.
        let path = UIBezierPath()
        path.move(to: featureRect.topLeft)
        path.addLine(to: featureRect.topRight)
        path.addLine(to: featureRect.bottomRight)
        path.addLine(to: featureRect.bottomLeft)
        path.close()
        // Background mask slightly larger than the view; even-odd with the
        // rectangle path leaves the rectangle un-dimmed.
        let rectPath = UIBezierPath(rect: CGRect(x: -5, y: -5, width: frame.size.width + 10, height: frame.size.height + 10))
        rectPath.usesEvenOddFillRule = true
        rectPath.append(path)
        rectOverlay.path = rectPath.cgPath
    }
    
    /// Captures a still image. When border detection is enabled and confident,
    /// the photo is perspective-corrected before being handed to the callback.
    func captureImageWithCompletionHandler(completionHandler: @escaping CompletionHandler) {
        if (isCapturing) { return }
        
        // FIX: mark the capture as in-flight (this was mistakenly set to `false`,
        // which defeated the re-entrancy guard above).
        isCapturing = true
        // Do not fire the flash for this capture.
        enableFlash = false
        
        // Find the still output's video connection.
        var videoConnection: AVCaptureConnection!
        for connection in stillImageOutput.connections {
            for port in connection.inputPorts where port.mediaType == .video {
                videoConnection = connection
                break
            }
            if (videoConnection != nil) { break }
        }
        
        stillImageOutput.captureStillImageAsynchronously(from: videoConnection) { [weak self] (imageSampleBuffer, error) in
            // FIX: bind `self` and the sample data once instead of force-unwrapping
            // optional chains, which crashed if the view was torn down mid-capture
            // or the capture produced no buffer.
            guard let self = self,
                  let imageSampleBuffer = imageSampleBuffer,
                  let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(imageSampleBuffer) else {
                self?.isCapturing = false
                return
            }
            
            if self.enableBorderDetection, var enhancedImage = CIImage(data: imageData) {
                enhancedImage = self.filteredImageUsingContrastFilterOnImage(image: enhancedImage)
                var rectangleFeature: CIRectangleFeature?
                // FIX: the old code compared the Bool result against nil (always
                // true); actually honor the confidence threshold here.
                if self.rectangleDetectionConfidenceHighEnough(confidence: self.imageDedectionConfidence ?? 0) {
                    // Re-detect on the full-resolution still and keep the biggest rectangle.
                    let features = self.highAccuracyRectangleDetector?.features(in: enhancedImage) ?? []
                    rectangleFeature = self.biggestRectangleInRectangles(rectangles: features)
                    if let feature = rectangleFeature {
                        enhancedImage = self.correctPerspectiveForImage(image: enhancedImage, rectangleFeature: feature)
                    }
                }
                
                // Render rotated to .right orientation into a UIImage.
                let size = CGSize(width: enhancedImage.extent.size.height, height: enhancedImage.extent.size.width)
                UIGraphicsBeginImageContext(size)
                UIImage(ciImage: enhancedImage, scale: 1.0, orientation: .right).draw(in: CGRect(origin: .zero, size: size))
                let image = UIGraphicsGetImageFromCurrentImageContext()
                UIGraphicsEndImageContext()
                
                if let image = image {
                    completionHandler(image, rectangleFeature)
                }
            } else if let image = UIImage(data: imageData) {
                // Border detection disabled: return the raw capture.
                completionHandler(image, nil)
            }
            
            self.isCapturing = false
            // Remove the detection highlight, if any.
            self.rectOverlay?.path = nil
        }
    }
    
    
    /// Composites a translucent steel-blue quadrilateral highlight over `image`.
    func drawHighlightOverlayForPoints(image: CIImage, topLeft: CGPoint, topRight: CGPoint, bottomLeft: CGPoint, bottomRight: CGPoint) -> CIImage {
        // FIX: CIColor components are in the 0...1 range; the original 73/130/180
        // values saturate when rendered, producing white instead of steel blue.
        var overlay = CIImage(color: CIColor(red: 73.0 / 255.0, green: 130.0 / 255.0, blue: 180.0 / 255.0, alpha: 0.5))
        overlay = overlay.cropped(to: image.extent)
        
        // Warp the overlay so its corners land on the detected quadrilateral.
        overlay = overlay.applyingFilter("CIPerspectiveTransformWithExtent", parameters: ["inputExtent": CIVector(cgRect: image.extent), "inputTopLeft": CIVector(cgPoint: topLeft), "inputTopRight": CIVector(cgPoint: topRight),
                                                                                          "inputBottomLeft": CIVector(cgPoint: bottomLeft), "inputBottomRight": CIVector(cgPoint: bottomRight)])
        
        return overlay.composited(over: image)
    }
    
    /// Converts image-space feature corners into this view's coordinate space.
    private func transfromRealRectWithImageRect(imageRect: CGRect, topLeft: CGPoint, topRight: CGPoint, bottomLeft: CGPoint, bottomRight: CGPoint) -> TransformCIFeatureRect {
        let previewRect = self.frame
        
        return MADCGTransfromHelper.transfromRealCIRectInPreviewRect(previewRect, imageRect, topLeft, topRight, bottomLeft, bottomRight)
    }
    
    /// True once the rectangle has been re-detected enough consecutive frames
    /// (confidence grows +0.5 per sighting, so > 1.0 means at least three).
    func rectangleDetectionConfidenceHighEnough(confidence: Float) -> Bool {
        return (confidence > 1.0)
    }
}


/// Four corner points of a detected rectangle, expressed in preview/UIKit space.
struct CIFeatureRect {
    var topLeft: CGPoint
    var topRight: CGPoint
    var bottomRight: CGPoint
    var bottomLeft: CGPoint
}
typealias TransformCIFeatureRect = CIFeatureRect

/// Maps CIDetector feature corners (image space, y axis pointing up) into a
/// preview rectangle's UIKit space (y axis pointing down).
class MADCGTransfromHelper: NSObject {
    /// Converts corners given in Core Image coordinates (flips the y axis).
    class func transfromRealCIRectInPreviewRect(_ previewRect: CGRect, _ imageRect: CGRect, _ topLeft: CGPoint, _ topRight: CGPoint, _ bottomLeft: CGPoint, _ bottomRight: CGPoint) -> TransformCIFeatureRect {
        return md_transfromRealRectInPreviewRect(previewRect, imageRect, false, topLeft, topRight, bottomLeft, bottomRight)
    }
    
    /// Converts corners that are already in UIKit coordinates (no y flip).
    class func transfromRealCGRectInPreviewRect(_ previewRect: CGRect, _ imageRect: CGRect, _ topLeft: CGPoint, _ topRight: CGPoint, _ bottomLeft: CGPoint, _ bottomRight: CGPoint) -> TransformCIFeatureRect {
        return md_transfromRealRectInPreviewRect(previewRect, imageRect, true, topLeft, topRight, bottomLeft, bottomRight)
    }
    
    /// Shared implementation: scales image-space points into the preview rect,
    /// optionally flipping the y axis into UIKit orientation first.
    class func md_transfromRealRectInPreviewRect(_ previewRect: CGRect, _ imageRect: CGRect, _ isUICoordinate: Bool, _ topLeft: CGPoint, _ topRight: CGPoint, _ bottomLeft: CGPoint, _ bottomRight: CGPoint) -> TransformCIFeatureRect {
        
        // Per-axis scale from the image rect to the preview rect; the feature
        // coordinates are relative to the CIImage.
        let scaleX = previewRect.width / imageRect.width
        let scaleY = previewRect.height / imageRect.height
        
        // Translate so the flipped y axis lands inside the preview...
        var mapping = CGAffineTransform(translationX: 0, y: previewRect.height)
        if !isUICoordinate {
            // ...flipping only when the input is in Core Image coordinates.
            mapping = mapping.scaledBy(x: 1, y: -1)
        }
        // Finally apply the image-to-preview scaling.
        mapping = mapping.scaledBy(x: scaleX, y: scaleY)
        
        return TransformCIFeatureRect(
            topLeft: topLeft.applying(mapping),
            topRight: topRight.applying(mapping),
            bottomRight: bottomRight.applying(mapping),
            bottomLeft: bottomLeft.applying(mapping)
        )
    }
}

