//
//  PzHomeGuideView.swift
//  Pz
//
//  Created by 蓝鳍互娱 on 2024/6/26.
//

import Foundation
import UIKit
import SnapKit
import AVFoundation
/// Front-camera photo capture view: renders a live preview with a
/// rounded-rect "window" mask, checks/requests camera permission, and
/// captures JPEG stills via `AVCapturePhotoOutput`.
class PzDefPhotoView: UIView {
    /// Capture settings; recreated per shot in `talkCam()` because an
    /// `AVCapturePhotoSettings` instance must not be reused across captures.
    var photoSet: AVCapturePhotoSettings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
    /// Delivers the captured, cropped photo as a base64-encoded JPEG string.
    var photoBase64Block: ((_ base64: String?) -> Void)?

    /// Spec model whose `specs1` string drives the crop aspect ratio
    /// (parsed by the `AVCapturePhotoCaptureDelegate` callback).
    var photoSubMdl: PzPhotoSubModel?

    /// Set once the preview layer has actually been installed, so that
    /// `layoutSubviews` does not force-create the lazy capture stack.
    private var isPreviewInstalled = false

    override init(frame: CGRect) {
        super.init(frame: frame)
        self.backgroundColor = UIColor.clear
        creatSubView()
    }
    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    override func layoutSubviews() {
        super.layoutSubviews()
        // Keep the preview layer tracking the view's bounds. The original
        // set the frame only once at lazy-init time, leaving the preview
        // stale after any later resize/rotation.
        if isPreviewInstalled {
            previewLayer.frame = bounds
        }
    }

    lazy var photoImgView: UIImageView = {
        let imageView = UIImageView()
        imageView.isUserInteractionEnabled = true
        imageView.contentMode = .scaleAspectFill
        return imageView
    }()
    lazy var captureDevice: AVCaptureDevice = {
        // NOTE(review): a front wide-angle camera is assumed to exist; this
        // is only reached after `checkCameraLimit` confirms camera support.
        // Fail loudly with a reason instead of a bare force-unwrap crash.
        guard let device = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) else {
            fatalError("Front wide-angle camera is unavailable on this device")
        }
        return device
    }()
    lazy var captureDeviceInput: AVCaptureDeviceInput = {
        do {
            return try AVCaptureDeviceInput(device: self.captureDevice)
        } catch {
            // Surfaces the underlying error instead of an anonymous
            // force-unwrap crash.
            fatalError("Unable to create camera input: \(error)")
        }
    }()
    lazy var photoOutput: AVCapturePhotoOutput = {
        let photoOutput = AVCapturePhotoOutput()
        return photoOutput
    }()
    lazy var captureSession: AVCaptureSession = {
        let captureSession = AVCaptureSession()
        if captureSession.canSetSessionPreset(.hd1280x720) {
            captureSession.sessionPreset = .hd1280x720
        }
        return captureSession
    }()
    lazy var previewLayer: AVCaptureVideoPreviewLayer = {
        let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.videoGravity = .resizeAspectFill
        previewLayer.frame = self.bounds
        return previewLayer
    }()

    /// Builds the subview hierarchy: installs the preview layer (when the
    /// camera is usable), the dark mask with its capture window, and the
    /// result image view.
    func creatSubView() {
        checkCameraLimit { [weak self] granted in
            // The closure escapes into `AVCaptureDevice.requestAccess`;
            // capture weakly so a dismissed view is not kept alive.
            guard let self = self, granted else { return }
            self.layer.insertSublayer(self.previewLayer, at: 0)
            self.isPreviewInstalled = true
            // Batch input/output mutations into one configuration pass.
            self.captureSession.beginConfiguration()
            if self.captureSession.canAddInput(self.captureDeviceInput) {
                self.captureSession.addInput(self.captureDeviceInput)
            }
            if self.captureSession.canAddOutput(self.photoOutput) {
                self.captureSession.addOutput(self.photoOutput)
            }
            self.captureSession.commitConfiguration()
            self.startRunning()
        }
        photoSize()
        self.addSubviews([photoImgView])
        photoImgView.snp.makeConstraints { make in
            make.center.equalToSuperview()
            make.width.equalTo(307.fitScale())
            make.height.equalTo(378.fitScale())
        }
    }

    /// Draws the dimmed overlay with a 307x378pt transparent capture window
    /// punched out via the even-odd fill rule.
    /// NOTE(review): `self.width`/`self.height` are read at creation time;
    /// if the view has not been laid out yet the window is mis-positioned —
    /// TODO confirm the caller always sizes the view before init completes.
    func photoSize() {
        let shapeLayer = CAShapeLayer()
        shapeLayer.position = self.layer.position
        shapeLayer.bounds = CGRect(x: 0, y: 0, width: 378, height: 307)
        self.layer.addSublayer(shapeLayer)

        let shapeRectPath = UIBezierPath(roundedRect: CGRect(x: (self.width - 307) / 2, y: (self.height - 378) / 2, width: 307, height: 378), cornerRadius: 0)
        let path = UIBezierPath(rect: self.bounds)
        path.append(shapeRectPath)

        let fillLayer = CAShapeLayer()
        fillLayer.path = path.cgPath
        fillLayer.fillRule = .evenOdd
        fillLayer.fillColor = UIColor.black.cgColor
        fillLayer.opacity = 0.6
        self.layer.addSublayer(fillLayer)
    }

    /// Checks camera hardware availability and authorization status, then
    /// calls `completion` on the main queue with whether capture may proceed.
    /// Shows alerts (and offers the Settings deep-link) when blocked.
    func checkCameraLimit(_ completion: @escaping (Bool) -> Void) {
        // Verify the camera hardware is available at all.
        let cameraAvailable = UIImagePickerController.isSourceTypeAvailable(.camera)
        if !cameraAvailable {
            completion(false)
            let alertCtrl = UIAlertController(title: "", message: "设备不支持相机", preferredStyle: .alert)
            alertCtrl.addAction(UIAlertAction(title: "OK", style: .cancel, handler: { _ in
                self.resetCam()
            }))
            UIApplication.shared.delegate?.window??.rootViewController?.present(alertCtrl, animated: true)
            return
        }
        switch AVCaptureDevice.authorizationStatus(for: .video) {
        case .denied, .restricted:
            // `.restricted` previously fell into the success path even though
            // capture is impossible; treat it like `.denied`.
            let alertCtrl = UIAlertController(title: "无法打开照相机", message: "前往设置打开相机权限", preferredStyle: .alert)
            alertCtrl.addAction(UIAlertAction(title: "好的", style: .cancel, handler: { _ in
                self.resetCam()
            }))
            alertCtrl.addAction(UIAlertAction(title: "设置", style: .default, handler: { _ in
                if let settingUrl = URL(string: UIApplication.openSettingsURLString) {
                    if UIApplication.shared.canOpenURL(settingUrl) {
                        UIApplication.shared.open(settingUrl)
                    }
                }
            }))
            UIApplication.shared.delegate?.window??.rootViewController?.present(alertCtrl, animated: true)
            completion(false)
        case .notDetermined:
            AVCaptureDevice.requestAccess(for: .video) { granted in
                if granted {
                    DispatchQueue.main.async {
                        completion(true)
                    }
                } else {
                    DispatchQueue.main.async {
                        completion(false)
                    }
                    self.closeCam()
                }
            }
        default:
            // `.authorized` (and any future status) proceeds as before.
            DispatchQueue.main.async {
                completion(true)
            }
        }
    }

    /// Clears the last captured still.
    func resetCam() {
        photoImgView.image = nil
    }

    /// Dismisses the presenting controller (user declined camera access).
    func closeCam() {
        UIApplication.shared.delegate?.window??.rootViewController?.dismiss(animated: true, completion: {

        })
    }

    /// Toggles between the front and back camera with a fade transition,
    /// rolling back to the previous input if the new one is rejected.
    func changeCam() {
        let desiredPosition: AVCaptureDevice.Position = (captureDevice.position == .back) ? .front : .back
        let videoDevices = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera, .builtInDualCamera, .builtInTrueDepthCamera], mediaType: .video, position: .unspecified)
        for device in videoDevices.devices where device.position == desiredPosition {
            // Keep scanning further matching devices if this one cannot
            // provide an input (the original gave up after the first match).
            guard let newCameraInput = try? AVCaptureDeviceInput(device: device) else { continue }
            let transition = CATransition()
            transition.type = .fade
            transition.duration = 0.5
            previewLayer.add(transition, forKey: "")
            captureSession.beginConfiguration()
            captureSession.removeInput(captureDeviceInput)
            if captureSession.canAddInput(newCameraInput) {
                captureSession.addInput(newCameraInput)
                captureDeviceInput = newCameraInput
                captureDevice = device
            } else {
                // Restore the old input so the session is never left empty.
                captureSession.addInput(captureDeviceInput)
            }
            captureSession.commitConfiguration()
            break
        }
    }

    /// Stops the session off the main thread (startRunning/stopRunning block).
    func stopRunning() {
        DispatchQueue.global(qos: .userInitiated).async {
            self.captureSession.stopRunning()
        }
    }

    /// Starts the session off the main thread.
    func startRunning() {
        DispatchQueue.global(qos: .userInitiated).async {
            self.captureSession.startRunning()
        }
    }

    /// Triggers a still capture; results arrive via the
    /// `AVCapturePhotoCaptureDelegate` conformance.
    func talkCam() {
        // A fresh settings object is required for every capture.
        photoSet = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
        if captureSession.isRunning {
            photoOutput.capturePhoto(with: photoSet, delegate: self)
        }
    }

    /// Redraws `image` into `size` and returns the result (empty image on
    /// failure).
    func cutImg(image: UIImage, size: CGSize) -> UIImage {
        UIGraphicsBeginImageContextWithOptions(size, false, 0)
        // Always balance the Begin call — the original leaked the context
        // when snapshotting failed, because the early return skipped
        // UIGraphicsEndImageContext().
        defer { UIGraphicsEndImageContext() }
        image.draw(in: CGRect(x: 0, y: 0, width: size.width, height: size.height))
        return UIGraphicsGetImageFromCurrentImageContext() ?? UIImage()
    }
}
// MARK: - AVCapturePhotoCaptureDelegate
extension PzDefPhotoView: AVCapturePhotoCaptureDelegate {
    /// Receives the captured photo, crops it to the aspect ratio described by
    /// `photoSubMdl.specs1` (apparently of the form "<width> x <height>px" —
    /// TODO confirm against the model's producer), displays the result, and
    /// forwards it as a base64-encoded JPEG through `photoBase64Block`.
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: (any Error)?) {
        guard error == nil,
              let imageData = photo.fileDataRepresentation(),
              let image = UIImage(data: imageData),
              let specs = photoSubMdl?.specs1 else { return }
        let widthParts = specs.components(separatedBy: " x ")
        let heightParts = widthParts.last?.components(separatedBy: "px")
        // Require a strictly positive width: the original parsed a "0"
        // fallback and then divided by it, producing NaN/inf crop sizes
        // whenever the spec string failed to parse.
        guard let imgW = Float(widthParts.first ?? ""), imgW > 0,
              let imgH = Float(heightParts?.first ?? "") else { return }
        let ratio = imgH / imgW
        let croppedImage = cutImg(image: image, size: CGSize(width: 307, height: 307.0 * Double(ratio)))
        photoImgView.image = croppedImage
        let jpegData = croppedImage.jpegData(compressionQuality: 0.5)
        let base64Img = jpegData?.base64EncodedString(options: .lineLength64Characters)
        photoBase64Block?(base64Img)
    }
}
