//
//  CameraKitPhotoSession.swift
//  CameraKit
//
//  Created by Will on 06/07/2020.
//  Copyright © 2020 Will. All rights reserved.
//

import UIKit
import AVFoundation
import Photos

extension CameraKitSession.FlashMode {

    /// The `AVCaptureDevice.FlashMode` value corresponding to this
    /// CameraKit flash mode (a one-to-one mapping).
    var captureFlashMode: AVCaptureDevice.FlashMode {
        switch self {
        case .off:
            return .off
        case .on:
            return .on
        case .auto:
            return .auto
        }
    }
}

/// A `CameraKitSession` specialised for still-photo capture.
///
/// Owns an `AVCapturePhotoOutput` for stills plus an `AVCaptureVideoDataOutput`
/// used to report scene brightness to the delegate, and can optionally run
/// on-device face detection, drawing green boxes into `overlayView` for each
/// detected face.
@objc public class CameraKitPhotoSession: CameraKitSession, AVCapturePhotoCaptureDelegate, AVCaptureMetadataOutputObjectsDelegate, AVCaptureVideoDataOutputSampleBufferDelegate {

    // Tracks physical device orientation so captured photos get the right
    // UIImage.Orientation (started in init, stopped in deinit).
    fileprivate var orientation: Orientation = Orientation()

    /// Detection modes that can be run on the capture stream.
    @objc public enum CameraDetection: UInt {
        case none, faces
    }

    /// Which camera to use. Changing this swaps the session's device input
    /// (see `captureDeviceInput.didSet`); failures are logged, not thrown.
    @objc public var cameraPosition = CameraPosition.back {
        didSet {
            do {
                let deviceInput = try CameraKitSession.captureDeviceInput(type: cameraPosition.deviceType)
                captureDeviceInput = deviceInput
            } catch let error {
                print(error.localizedDescription)
            }
        }
    }

    /// Enables/disables face detection. Switching modes removes any existing
    /// metadata outputs and face boxes before reconfiguring.
    @objc public var cameraDetection = CameraDetection.none {
        didSet {
            if oldValue == cameraDetection { return }

            // Fix: was `session!.outputs`, which crashes when session is nil;
            // every other access in this class uses optional chaining.
            if let session = session {
                for output in session.outputs where output is AVCaptureMetadataOutput {
                    session.removeOutput(output)
                }
            }

            faceDetectionBoxes.forEach({ $0.removeFromSuperview() })
            faceDetectionBoxes = []

            if cameraDetection == .faces {
                let metadataOutput = AVCaptureMetadataOutput()
                // Fix: guard with canAddOutput — AVCaptureSession raises an
                // NSException if an unsupported output is force-added.
                if let session = session, session.canAddOutput(metadataOutput) {
                    session.addOutput(metadataOutput)

                    // metadataObjectTypes must be set AFTER the output is
                    // attached, otherwise availableMetadataObjectTypes is empty.
                    metadataOutput.setMetadataObjectsDelegate(self, queue: .main)
                    if metadataOutput.availableMetadataObjectTypes.contains(.face) {
                        metadataOutput.metadataObjectTypes = [.face]
                    }
                }
            }
        }
    }

    /// Flash behaviour applied to the next `capture(_:_:)` call.
    @objc public var flashMode = CameraKitSession.FlashMode.off

    /// The active camera input. Setting it detaches the old input, clears any
    /// stale face boxes, and attaches the new input if the session accepts it.
    var captureDeviceInput: AVCaptureDeviceInput? {
        didSet {
            faceDetectionBoxes.forEach({ $0.removeFromSuperview() })
            faceDetectionBoxes = []

            if let oldValue = oldValue {
                session?.removeInput(oldValue)
            }

            if let captureDeviceInput = captureDeviceInput, let session = session, session.canAddInput(captureDeviceInput) {
                session.addInput(captureDeviceInput)
            }
        }
    }

    /// Still-photo output; attached in init and re-attached lazily in capture.
    let photoOutput = AVCapturePhotoOutput()

    // One green box per currently detected face, added to `overlayView`.
    var faceDetectionBoxes: [UIView] = []

    /// Creates a photo session and attaches the photo and video-data outputs.
    /// - Parameters:
    ///   - position: Initial camera (front/back).
    ///   - detection: Initial detection mode.
    @objc public init(position: CameraPosition = .back, detection: CameraDetection = .none) {
        super.init()

        // defer so the property observers (didSet) run — assignments inside
        // init before the defer fires would not trigger them for configuration.
        defer {
            cameraPosition = position
            cameraDetection = detection
        }

        // Video-data output feeds captureOutput(_:didOutput:from:) for
        // brightness reporting.
        let output = AVCaptureVideoDataOutput()
        output.setSampleBufferDelegate(self, queue: DispatchQueue.main)
        session?.sessionPreset = .high

        if let photoSession = session, photoSession.canAddOutput(photoOutput) {
            photoSession.addOutput(photoOutput)
            photoOutput.isHighResolutionCaptureEnabled = true
            if let connection = photoOutput.connection(with: .video) {
                // `resolution` is still .zero at init time, so this takes the
                // device-orientation branch; the check matters only if init
                // ordering ever changes.
                if resolution.width > 0, resolution.height > 0 {
                    connection.videoOrientation = .portrait
                } else {
                    connection.videoOrientation = UIDevice.current.orientation.videoOrientation
                }
            }
        } else {
            session?.commitConfiguration()
            return
        }

        if let photoSession = session, photoSession.canAddOutput(output) {
            photoSession.addOutput(output)
        } else {
            session?.commitConfiguration()
            return
        }
        orientation.start()
    }

    @objc deinit {
        orientation.stop()
        faceDetectionBoxes.forEach({ $0.removeFromSuperview() })
    }

    // Per-capture callbacks; reset to no-ops after each capture completes.
    var captureCallback: (UIImage, AVCaptureResolvedPhotoSettings) -> Void = { (_, _) in }
    var errorCallback: (Error) -> Void = { (_) in }

    /// Captures a still photo.
    /// - Parameters:
    ///   - callback: Invoked with the oriented image and resolved settings.
    ///   - error: Invoked if the capture or decoding fails.
    @objc public func capture(_ callback: @escaping (UIImage, AVCaptureResolvedPhotoSettings) -> Void, _ error: @escaping (Error) -> Void) {
        captureCallback = callback
        errorCallback = error

        // Re-attach the photo output if something removed it since init.
        if let photoSession = session,
            !photoSession.outputs.contains(photoOutput),
            photoSession.canAddOutput(photoOutput) {
            photoSession.addOutput(photoOutput)
        }

        if !(session?.outputs.contains(photoOutput) ?? false) {
            print("photoSession addOutput 失败")
            return
        }

        let settings = AVCapturePhotoSettings()
        settings.flashMode = flashMode.captureFlashMode
        settings.isAutoStillImageStabilizationEnabled = photoOutput.isStillImageStabilizationSupported

        // Request a preview image in the first supported pixel format, if any.
        if let previewFormat = settings.availablePreviewPhotoPixelFormatTypes.first {
            settings.previewPhotoFormat = [kCVPixelBufferPixelFormatTypeKey as String: previewFormat]
        }

        photoOutput.capturePhoto(with: settings, delegate: self)
    }

    /// Reads the EXIF brightness value from each video frame and forwards it
    /// to the delegate under the key "brightness".
    public func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        let metadataDict = CMCopyDictionaryOfAttachments(allocator: nil, target: sampleBuffer, attachmentMode: kCMAttachmentMode_ShouldPropagate)
        if let metadataDict = metadataDict {
            let metadata = NSMutableDictionary.init(dictionary: metadataDict)
            let exifMetadata = metadata.object(forKey: kCGImagePropertyExifDictionary)
            if let exifMetadata = exifMetadata as? NSDictionary {
                let brightnessValue = exifMetadata.object(forKey: kCGImagePropertyExifBrightnessValue)
                if let brightnessValue = brightnessValue as? Double, let delegate = delegate {
                    delegate.didChangeValue(session: self, value: brightnessValue, key: "brightness")
                }
            }
        }
    }

    /// Switches between the front and back cameras.
    @objc public func togglePosition() {
        cameraPosition = cameraPosition == .back ? .front : .back
    }

    /// Optical/digital zoom factor. Applied to the device and echoed to the
    /// delegate under the key "zoom".
    @objc public override var zoom: Double {
        didSet {
            guard let device = captureDeviceInput?.device else {
                return
            }

            do {
                try device.lockForConfiguration()
                device.videoZoomFactor = CGFloat(zoom)
                device.unlockForConfiguration()
            } catch {
                // Best effort: zoom silently keeps its previous factor.
            }

            if let delegate = delegate {
                delegate.didChangeValue(session: self, value: zoom, key: "zoom")
            }
        }
    }

    /// Requested output resolution; used by `processPhotoData` to decide
    /// whether to attempt a crop. Setting it only re-applies the .high preset.
    @objc public var resolution = CGSize.zero {
        didSet {
            guard let deviceInput = captureDeviceInput else {
                return
            }

            do {
                try deviceInput.device.lockForConfiguration()
                session?.sessionPreset = .high
                deviceInput.device.unlockForConfiguration()
            } catch {
                // Best effort: preset stays unchanged if the lock fails.
            }
        }
    }

    /// Focuses at `point` (device coordinates) using continuous auto focus,
    /// when the device supports a focus point of interest.
    @objc public override func focus(at point: CGPoint) {
        if let device = captureDeviceInput?.device, device.isFocusPointOfInterestSupported {
            do {
                try device.lockForConfiguration()
                device.focusPointOfInterest = point
                device.focusMode = .continuousAutoFocus
                device.unlockForConfiguration()
            } catch let error {
                print("Error while focusing at point \(point): \(error)")
            }
        }
    }

    @available(iOS 11.0, *)
    public func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        // Reset callbacks on every exit path so stale closures never fire.
        defer {
            captureCallback = { (_, _) in }
            errorCallback = { (_) in }
        }

        if let error = error {
            errorCallback(error)
            return
        }

        guard let data = photo.fileDataRepresentation() else {
            errorCallback(CameraKitPhotoError.error("Cannot get photo file data representation"))
            return
        }

        processPhotoData(data: data, resolvedSettings: photo.resolvedSettings)
    }

    /// Decodes the captured JPEG data, applies the tracked device orientation,
    /// and invokes exactly one of `captureCallback` / `errorCallback`.
    private func processPhotoData(data: Data, resolvedSettings: AVCaptureResolvedPhotoSettings) {
        guard UIImage(data: data) != nil else {
            errorCallback(CameraKitPhotoError.error("Cannot get photo"))
            return
        }

        // Fix: the original force-unwrapped both of these; a non-JPEG payload
        // (e.g. HEIF) makes CGImage(jpegDataProviderSource:) return nil and
        // would have crashed. Report the failure instead.
        // (Also removed: a CIContext.jpegRepresentation re-encode whose result
        // was discarded, and a CIImage guard that returned without calling any
        // callback — dead work and a silent-failure path.)
        guard let dataProvider = CGDataProvider(data: data as CFData),
              let cgImageRef = CGImage(jpegDataProviderSource: dataProvider, decode: nil, shouldInterpolate: true, intent: CGColorRenderingIntent.defaultIntent) else {
            errorCallback(CameraKitPhotoError.error("Cannot decode photo data as JPEG"))
            return
        }
        let image = UIImage(cgImage: cgImageRef, scale: 1.0, orientation: self.orientation.getImageOrientation())

        let width = resolution.width
        let height = resolution.height
        let isMirrored = cameraPosition == .front

        // NOTE(review): the cropped result is discarded and the full image is
        // delivered on both paths — it looks like the crop was meant to be
        // passed to the callback. Behavior kept as-is pending confirmation.
        if resolution.width > 0, resolution.height > 0,
           CameraKitUtils.cropAndScale(image, width: Int(width), height: Int(height), orientation: UIDevice.current.orientation, mirrored: isMirrored) != nil {
            captureCallback(image, resolvedSettings)
            return
        }

        captureCallback(image, resolvedSettings)
    }

    /// Keeps one green box per detected face and positions each box using the
    /// preview layer's coordinate transform.
    public func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        let faceMetadataObjects = metadataObjects.filter({ $0.type == .face })

        // Grow or shrink the pool of box views to match the face count.
        if faceMetadataObjects.count > faceDetectionBoxes.count {
            for _ in 0..<faceMetadataObjects.count - faceDetectionBoxes.count {
                let view = UIView()
                view.layer.borderColor = UIColor.green.cgColor
                view.layer.borderWidth = 1
                overlayView?.addSubview(view)
                faceDetectionBoxes.append(view)
            }
        } else if faceMetadataObjects.count < faceDetectionBoxes.count {
            for _ in 0..<faceDetectionBoxes.count - faceMetadataObjects.count {
                faceDetectionBoxes.popLast()?.removeFromSuperview()
            }
        }

        for i in 0..<faceMetadataObjects.count {
            if let transformedMetadataObject = previewLayer?.transformedMetadataObject(for: faceMetadataObjects[i]) {
                faceDetectionBoxes[i].frame = transformedMetadataObject.bounds
            } else {
                // Hide boxes we cannot map into preview-layer coordinates.
                faceDetectionBoxes[i].frame = CGRect.zero
            }
        }
    }
}
