//
//  CameraVC.swift
//  自定义相机
//
//  Created by slience on 2019/8/9.
//  Copyright © 2019 shehuiren. All rights reserved.
//

import UIKit
import AVFoundation

/// On-screen capture window: inset 30pt on each side, 100pt from the top.
/// The bottom toolbar is 100pt tall, with 100pt of spacing above and below it
/// (hence the height of screen − 100 − 100 − 100).
let rect = CGRect(x: 30, y: 100, width: UIDevice.width()-60, height: UIDevice.hight()-100-100-100)


/// Custom camera screen: shows a full-screen live preview behind a dimmed
/// overlay with a transparent capture window (`rect`) punched out, and hands
/// the cropped photo back through `complete`.
class CameraVC: UIViewController {

    /// Dimmed overlay view; a shape-layer mask cuts the capture window out of it.
    @IBOutlet weak var backView : UIView!

    let session = AVCaptureSession()

    // Kept as `Any?` so the class compiles regardless of deployment target:
    // `photoOutput` holds an AVCapturePhotoOutput (iOS 10+),
    // `imageOutput` holds an AVCaptureStillImageOutput (pre-iOS 10).
    var photoOutput: Any?

    var imageOutput : Any?

    var device : AVCaptureDevice?

    /// Called with the cropped capture (or nil on failure) before dismissal.
    var complete :((UIImage?)->())?

    override func viewDidLoad() {
        super.viewDidLoad()

        backView.backgroundColor = UIColor.black.withAlphaComponent(0.7)

        session.sessionPreset = .inputPriority

        device = AVCaptureDevice.default(for: .video)

        // Attach the camera input to the session.
        if let dev = device,
           let input = try? AVCaptureDeviceInput(device: dev),
           session.canAddInput(input) {
            session.addInput(input)
        }

        // Attach the photo output to the session. Hold the concrete object
        // locally so no force cast (`as!`) is needed when adding it.
        if #available(iOS 10.0, *) {
            let output = AVCapturePhotoOutput()
            photoOutput = output
            if session.canAddOutput(output) {
                session.addOutput(output)
            }
        } else {
            let output = AVCaptureStillImageOutput()
            imageOutput = output
            if session.canAddOutput(output) {
                session.addOutput(output)
            }
        }

        backView.layoutIfNeeded()

        // Full-screen live preview inserted beneath the dimmed overlay.
        let previewLayer = AVCaptureVideoPreviewLayer(session: session)
        previewLayer.frame = CGRect(x: 0, y: 0, width: UIDevice.width(), height: UIDevice.hight())
        self.view.layer.insertSublayer(previewLayer, at: 0)

        // Punch the transparent capture window (`rect`) out of the overlay
        // using an even-odd-style mask (outer rect + reversed inner rect).
        let path = UIBezierPath(roundedRect: CGRect(x: 0, y: 0, width: UIDevice.width(), height: UIDevice.hight()), cornerRadius: 0)
        path.append(UIBezierPath(roundedRect: rect, cornerRadius: 0).reversing())
        let shapeLayer = CAShapeLayer()
        shapeLayer.path = path.cgPath
        backView.layer.mask = shapeLayer

        session.startRunning()
    }

    /// Shutter button action.
    @IBAction func cameraClick(_ sender :Any){
        startCamera()
    }

    /// Close button action.
    @IBAction func back(_ sender :Any){
        self.dismiss(animated: true, completion: nil)
    }

}

// MARK: - Capture delegate & shutter logic
extension CameraVC : AVCapturePhotoCaptureDelegate{

    /// iOS 11+ capture callback: decode the photo, crop to the capture window,
    /// deliver it through `complete`, then dismiss.
    @available(iOS 11.0, *)
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?){

        session.stopRunning()

        // BUG FIX: the original force-unwrapped `fileDataRepresentation()`,
        // crashing whenever capture failed. Report nil to the caller instead.
        guard error == nil,
              let imageData = photo.fileDataRepresentation(),
              let image = UIImage(data: imageData) else {
            complete?(nil)
            dismiss(animated: true, completion: nil)
            return
        }

        complete?(clipWithImageRect(image: image.fixOrientation()))
        dismiss(animated: true, completion: nil)
    }

    /// iOS 10 capture callback (sample-buffer variant of the above).
    @available(iOS 10.0, *)
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?, previewPhoto previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?){

        session.stopRunning()

        // BUG FIX: safe unwrapping instead of `imageData!`.
        guard let buffer = photoSampleBuffer,
              let imageData = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: buffer, previewPhotoSampleBuffer: previewPhotoSampleBuffer),
              let image = UIImage(data: imageData) else {
            complete?(nil)
            dismiss(animated: true, completion: nil)
            return
        }

        complete?(clipWithImageRect(image: image.fixOrientation()))
        dismiss(animated: true, completion: nil)
    }

    /// Triggers a still capture through whichever output the OS version supports.
    func startCamera(){

        if #available(iOS 10.0, *) {
            let photoSetting: AVCapturePhotoSettings

            if #available(iOS 11.0, *) {
                photoSetting = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
            } else {
                // Fallback on earlier versions
                photoSetting = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecJPEG])
            }

            (photoOutput as? AVCapturePhotoOutput)?.capturePhoto(with: photoSetting, delegate: self)

        } else {

            guard let connection = (imageOutput as? AVCaptureStillImageOutput)?.connection(with: .video) else {
                print("拍摄失败")
                return
            }

            // BUG FIX: the original locked the device configuration but never
            // unlocked it, leaking the lock; it also set `flashMode` without
            // checking `hasFlash`, which raises on flashless devices.
            if let dev = device, dev.hasFlash {
                do {
                    try dev.lockForConfiguration()
                    dev.flashMode = .auto
                    dev.unlockForConfiguration()
                } catch {
                    print("error")
                }
            }

            (imageOutput as? AVCaptureStillImageOutput)?.captureStillImageAsynchronously(from: connection, completionHandler: { [weak self] (imageDataSampleBuffer: CMSampleBuffer?, error: Error?) in

                self?.session.stopRunning()

                // BUG FIX: safe unwrapping instead of `imageData!`.
                guard let buff = imageDataSampleBuffer,
                      let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(buff),
                      let image = UIImage(data: imageData) else {
                    return
                }

                self?.complete?(self?.clipWithImageRect(image: image.fixOrientation()))
                self?.dismiss(animated: true, completion: nil)
            })
        }
    }

}

/*

 enum UIImageOrientation : Int {
 case up            // 0: default orientation
 case down          // 1: rotated 180°
 case left          // 2: rotated 90° counterclockwise
 case right         // 3: rotated 90° clockwise
 case upMirrored    // 4: flipped horizontally
 case downMirrored  // 5: flipped vertically
 case leftMirrored  // 6: rotated 90° CCW, then mirrored
 case rightMirrored // 7: rotated 90° CW, then mirrored
 }

 */

// MARK: - Cropping
extension CameraVC{

    /// Crops the on-screen capture window out of the full-resolution photo.
    ///
    /// The photo is in pixel coordinates while the window (`rect`) is in
    /// points, so the window's edges are scaled by photo-size / screen-size
    /// before cropping.
    ///
    /// - Parameter image: The captured (orientation-fixed) photo, or nil.
    /// - Returns: The cropped region, or nil if the input or the crop fails.
    func clipWithImageRect( image: UIImage?) -> UIImage? {

        guard let bgImage = image, let cgImage = bgImage.cgImage else {
            return nil
        }

        // Screen-to-pixel scale: the window is inset 30pt on each side,
        // 100pt from the top, and 200pt (toolbar + gap) from the bottom.
        let x = bgImage.size.width / UIDevice.width() * 30
        let y = bgImage.size.height / UIDevice.hight() * 100
        let w = (UIDevice.width() - 30 * 2) / UIDevice.width() * bgImage.size.width
        let h = (UIDevice.hight() - 100 * 3) / UIDevice.hight() * bgImage.size.height

        let clipFrame = CGRect(x: x, y: y, width: w, height: h)

        // BUG FIX: the original force-unwrapped the crop result and crashed
        // whenever the rect fell outside the image bounds.
        guard let cropped = cgImage.cropping(to: clipFrame) else {
            return nil
        }

        // Wrap with a .left orientation so the crop displays upright
        // (same net result as the original's double UIImage wrap).
        return UIImage(cgImage: cropped, scale: 1, orientation: .left)
    }
}

// 修复图片旋转
// MARK: - Orientation fix
extension UIImage {
    /// Re-renders the image so its pixel data matches `.up` orientation.
    ///
    /// The camera stores rotated pixel data and only flags the rotation via
    /// EXIF orientation; `CGImage.cropping(to:)` ignores that flag, so the
    /// pixels must be physically rotated before cropping.
    ///
    /// - Returns: An upright copy, or `self` if no fix is needed or the
    ///   bitmap context cannot be created.
    func fixOrientation() -> UIImage {
        if self.imageOrientation == .up {
            return self
        }
        // BUG FIX: the original force-unwrapped `cgImage`/`colorSpace`/the
        // context/`makeImage()`; fall back to `self` instead of crashing.
        guard let cgImage = self.cgImage, let colorSpace = cgImage.colorSpace else {
            return self
        }

        // Rotation component of the orientation.
        var transform = CGAffineTransform.identity
        switch self.imageOrientation {
        case .down, .downMirrored:
            transform = transform.translatedBy(x: self.size.width, y: self.size.height)
            transform = transform.rotated(by: .pi)
        case .left, .leftMirrored:
            transform = transform.translatedBy(x: self.size.width, y: 0)
            transform = transform.rotated(by: .pi / 2)
        case .right, .rightMirrored:
            transform = transform.translatedBy(x: 0, y: self.size.height)
            transform = transform.rotated(by: -.pi / 2)
        default:
            break
        }
        // Mirror component of the orientation.
        switch self.imageOrientation {
        case .upMirrored, .downMirrored:
            transform = transform.translatedBy(x: self.size.width, y: 0)
            transform = transform.scaledBy(x: -1, y: 1)
        case .leftMirrored, .rightMirrored:
            transform = transform.translatedBy(x: self.size.height, y: 0)
            transform = transform.scaledBy(x: -1, y: 1)
        default:
            break
        }

        guard let ctx = CGContext(data: nil,
                                  width: Int(self.size.width),
                                  height: Int(self.size.height),
                                  bitsPerComponent: cgImage.bitsPerComponent,
                                  bytesPerRow: 0,
                                  space: colorSpace,
                                  bitmapInfo: cgImage.bitmapInfo.rawValue) else {
            return self
        }
        ctx.concatenate(transform)
        // 90°-rotated sources must be drawn with swapped width/height.
        switch self.imageOrientation {
        case .left, .leftMirrored, .right, .rightMirrored:
            ctx.draw(cgImage, in: CGRect(x: 0, y: 0, width: size.height, height: size.width))
        default:
            ctx.draw(cgImage, in: CGRect(x: 0, y: 0, width: size.width, height: size.height))
        }
        guard let fixed = ctx.makeImage() else {
            return self
        }
        return UIImage(cgImage: fixed)
    }
}
