//
//  ARViewController.swift
//  ARDEMO_1
//
//  Created by 37Design on 2021/5/7.
//

import Foundation
import ARKit
import SceneKit
import Metal
import SnapKit

/// AR demo screen: hosts an `ARSCNView` with a Metal-backed overlay
/// (`ARMetalImageView`) that post-processes the camera feed and scene snapshot.
class ARViewController: ViewController {

    var arSceneView: ARSCNView!            // the AR scene view hosting SceneKit content
    var planeNode: SCNReferenceNode!       // a plane model
    var metalView: ARMetalImageView!       // Metal overlay fed camera + snapshot textures each frame
    var isRendering = false                // latch guarding renderer(_:updateAtTime:) against overlapping work
    var backButton = UIButton(type: UIButton.ButtonType.system)
    var cameraImg = UIImage()              // last camera frame converted to UIImage

    override func viewDidLoad() {
        // Bug fix: the original never called super.viewDidLoad().
        super.viewDidLoad()

        arSceneView = ARSCNView(frame: self.view.frame)
        arSceneView.scene = SCNScene()
        arSceneView.showsStatistics = true

        metalView = ARMetalImageView(frame: self.view.frame)
        self.view.addSubview(arSceneView)
        self.arSceneView.addSubview(metalView)
        // Bug fix: original read `sceneView`, which is declared nowhere in this
        // file — presumed typo for `arSceneView` (verify no parent property exists).
        arSceneView.debugOptions = [SCNDebugOptions.showFeaturePoints]
        arSceneView.delegate = self

        backButton.setTitle("Back", for: UIControl.State.normal)
        backButton.alpha = 0.7
        self.view.addSubview(backButton)
        backButton.snp.makeConstraints { (make) in
            make.size.equalTo(100)
            make.left.equalToSuperview().offset(20)
            make.top.equalToSuperview().offset(40)
        }
        backButton.addTarget(self, action: #selector(backAction), for: UIControl.Event.touchUpInside)

        // Configure the camera (HDR/exposure).
        setupCamera()
    }

    /// Dismisses this screen when the back button is tapped.
    @objc func backAction() {
        self.dismiss(animated: true, completion: nil)
    }

    // MARK: - Scene content setup

    /// Enables HDR camera settings for the most realistic appearance
    /// with environmental lighting and physically based materials.
    func setupCamera() {
        guard let camera = arSceneView.pointOfView?.camera else {
            fatalError("Expected a valid `pointOfView` from the scene.")
        }

        camera.wantsHDR = true
        camera.exposureOffset = -1
        camera.minimumExposure = -1
        camera.maximumExposure = 3
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        // Start a fresh world-tracking session every time the screen appears.
        let configuration = ARWorldTrackingConfiguration()
        configuration.planeDetection = .horizontal
        configuration.isLightEstimationEnabled = true
        arSceneView.session.run(configuration, options: [.resetTracking, .removeExistingAnchors])
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        arSceneView.session.pause()
    }

    /// Builds a light node positioned above/behind the origin, tilted to face down.
    /// NOTE(review): the light type is `.omni`, so the spot cone angles below have
    /// no effect — kept for behavior parity; confirm whether `.spot` was intended.
    func getLightNode() -> SCNNode {
        let spotLight = SCNLight()
        spotLight.type = .omni
        spotLight.spotInnerAngle = 45
        spotLight.spotOuterAngle = 45
        let spotNode = SCNNode()
        spotNode.light = spotLight
        // Use Float.pi instead of the C macro M_PI.
        spotNode.eulerAngles = SCNVector3Make(-Float.pi / 2, 0, 0)
        spotNode.position = SCNVector3(0, 10, 20)

        return spotNode
    }
}



// Reused across frames: creating a CIContext per frame is expensive (it owns
// GPU/CPU rendering resources), so the original per-frame allocation is hoisted.
private let ciContext = CIContext(options: nil)

// MARK: - ARSCNViewDelegate
extension ARViewController: ARSCNViewDelegate {

    /// Per-frame hook: converts the captured camera image into view space,
    /// snapshots the SceneKit render, and hands both to the Metal overlay.
    func renderer(_ renderer: SCNSceneRenderer, updateAtTime time: TimeInterval) {
        guard let frame = arSceneView.session.currentFrame else { return }
        // Skip frames that carry no light estimate yet.
        guard frame.lightEstimate != nil else { return }

        // Drop this frame if the previous one is still being processed.
        if isRendering { return }
        isRendering = true

        let pixelBuffer = frame.capturedImage

        DispatchQueue.main.async { [weak self] in
            guard let self = self else { return }
            // Bug fix: always release the latch. The original reset `isRendering`
            // only on the success path, so a failing guard below would stall
            // frame processing permanently.
            defer { self.isRendering = false }

            // `statusBarOrientation` is deprecated since iOS 13; prefer the
            // window scene's orientation and keep the old call as a fallback.
            let orientation: UIInterfaceOrientation
            if #available(iOS 13.0, *) {
                orientation = self.arSceneView.window?.windowScene?.interfaceOrientation ?? .portrait
            } else {
                orientation = UIApplication.shared.statusBarOrientation
            }
            let viewportSize = self.arSceneView.bounds.size

            // Map the captured camera image from sensor space into view space.
            var image = CIImage(cvPixelBuffer: pixelBuffer)
            let transform = frame.displayTransform(for: orientation, viewportSize: viewportSize).inverted()
            image = image.transformed(by: transform)

            guard let cameraImage = ciContext.createCGImage(image, from: image.extent) else { return }
            self.cameraImg = UIImage(cgImage: cameraImage)
            guard let snapshotImage = self.arSceneView.snapshot().cgImage else { return }
            self.metalView.registerTexturesFor(cameraImage: cameraImage, snapshotImage: snapshotImage)
            self.metalView.time = Float(time)
        }
    }

    /// Called when ARKit adds a node for a newly detected anchor: attaches the
    /// virtual object and makes sure the scene's fill light exists.
    func renderer(_ renderer: SCNSceneRenderer, didAdd node: SCNNode, for anchor: ARAnchor) {
        let virtualNode = VirtualObjectNode()

        // Bug fix: this callback fires for every new anchor, and the original
        // added a brand-new light node each time, accumulating lights. Install
        // the light once, keyed by node name.
        let lightName = "ARViewController.fillLight"
        if self.arSceneView.scene.rootNode.childNode(withName: lightName, recursively: false) == nil {
            let spotNode = self.getLightNode()
            spotNode.name = lightName
            self.arSceneView.scene.rootNode.addChildNode(spotNode)
        }
        self.arSceneView.autoenablesDefaultLighting = true

        // Scene-graph mutation tied to this anchor's node happens on the main queue.
        DispatchQueue.main.async { [weak self] in
            guard self != nil else { return }
            node.addChildNode(virtualNode)
        }
    }

    func renderer(_ renderer: SCNSceneRenderer, didUpdate node: SCNNode, for anchor: ARAnchor) {
        print("\(self.classForCoder)/" + #function + "更新")
    }

    func renderer(_ renderer: SCNSceneRenderer, didRemove node: SCNNode, for anchor: ARAnchor) {
        print("\(self.classForCoder)/" + #function + "删除")
    }

    // MARK: - ARSessionObserver

    func session(_ session: ARSession, cameraDidChangeTrackingState camera: ARCamera) {
        print("trackingState: \(camera.trackingState)")
    }

}
