//
//  YCCameraSessionManager.swift
//  YCCamera
//
//  Created by Billy on 2024/11/6.
//

import AVFoundation
import UIKit

/// Callbacks emitted by `YCCameraSessionManager`.
protocol YCCameraSessionDelegate: AnyObject {
    /// Delivered once per video frame. Called synchronously on the processing
    /// queue the manager was initialized with (the queue registered via
    /// `setSampleBufferDelegate` in `configureVideoOutput`).
    func cameraSession(_ manager: YCCameraSessionManager, didOutput sampleBuffer: CMSampleBuffer)
    /// Delivered on the main queue after a photo finishes processing.
    /// `image` is a preview-quality `UIImage` decoded from the file data;
    /// `ciImage` is the full-resolution `CIImage` for further processing.
    func cameraSession(_ manager: YCCameraSessionManager, didCapturePhoto image: UIImage, ciImage: CIImage)
    /// Delivered on the main queue just before the photo is taken
    /// (e.g. to play a shutter animation).
    func cameraSession(_ manager: YCCameraSessionManager, willCapturePhoto: Void)
}

/// Owns an `AVCaptureSession` configured for still-photo capture plus a live
/// video-data stream, and exposes camera controls (switch, zoom, focus, capture).
/// All session mutation happens on the private `sessionQueue`.
class YCCameraSessionManager: NSObject {
    
    // MARK: - Properties
    
    /// Receives video frames, captured photos and the will-capture notification.
    weak var delegate: YCCameraSessionDelegate?
    
    private let session = AVCaptureSession()
    /// Serial queue that owns all session configuration and start/stop work.
    private let sessionQueue = DispatchQueue(label: "com.yc.camera.sessionQueue")
    /// Queue on which video sample buffers are delivered to the delegate.
    private let processingQueue: DispatchQueue
    
    /// Current camera input; exposed read-only so callers can query device parameters.
    private(set) var videoInput: AVCaptureDeviceInput?
    private var imageOutput: AVCapturePhotoOutput?
    private var videoOutput: AVCaptureVideoDataOutput?
    
    /// Which camera (front/back) is currently active.
    private(set) var currentPosition: AVCaptureDevice.Position = .back
    /// Whether `startSession` has successfully started the session.
    private(set) var isRunning: Bool = false
    /// Best-effort re-entrancy guard for `switchCamera`.
    /// NOTE(review): set on the caller's thread, cleared on sessionQueue —
    /// not strictly thread-safe; confirm whether stricter synchronization is needed.
    private var isSwitching: Bool = false
    
    // MARK: - Initialization
    
    /// - Parameter processingQueue: queue on which video frames are delivered.
    init(processingQueue: DispatchQueue) {
        self.processingQueue = processingQueue
        super.init()
    }
    
    // MARK: - Public Methods
    
    /// Configures the camera session (input + outputs) asynchronously on the session queue.
    func setupSession() {
        sessionQueue.async { [weak self] in
            self?.configureSession()
        }
    }
    
    /// Starts the capture session if it is not already running.
    func startSession() {
        sessionQueue.async { [weak self] in
            guard let self = self, !self.isRunning else { return }
            self.session.startRunning()
            // startRunning() can fail (e.g. missing permission); mirror the
            // session's actual state instead of assuming success.
            self.isRunning = self.session.isRunning
            print("🎬 相机会话启动完成")
        }
    }
    
    /// Stops the capture session if it is running.
    func stopSession() {
        sessionQueue.async { [weak self] in
            guard let self = self, self.isRunning else { return }
            self.session.stopRunning()
            self.isRunning = false
            print("⏹ 相机会话已停止")
        }
    }
    
    /// Switches between the front and back cameras.
    ///
    /// - Parameter completion: invoked with `true` only when the new camera was
    ///   actually installed. Invoked with `false` when a switch is already in
    ///   progress or no input exists (synchronously, on the caller's thread),
    ///   or — on the main queue — when the target device is unavailable or the
    ///   session rejects the new input (the previous camera is then restored).
    func switchCamera(completion: @escaping (Bool) -> Void) {
        // Fast rejection on the caller's thread (best-effort debounce; see
        // the note on `isSwitching`).
        guard !isSwitching else {
            completion(false)
            return
        }
        
        guard let currentInput = videoInput else {
            completion(false)
            return
        }
        
        isSwitching = true
        
        sessionQueue.async { [weak self] in
            guard let self = self else {
                // Keep completion delivery consistent with the other async paths.
                DispatchQueue.main.async {
                    completion(false)
                }
                return
            }
            
            // Reports the outcome on the main queue and clears the guard flag.
            let finish: (Bool) -> Void = { success in
                self.isSwitching = false
                DispatchQueue.main.async {
                    completion(success)
                }
            }
            
            // Pause the session while reconfiguring.
            self.session.stopRunning()
            
            let targetPosition: AVCaptureDevice.Position = currentInput.device.position == .back ? .front : .back
            
            // Resolve the target camera and wrap it in an input.
            guard let targetDevice = self.getCamera(position: targetPosition),
                  let newInput = try? AVCaptureDeviceInput(device: targetDevice) else {
                self.session.startRunning()
                finish(false)
                return
            }
            
            self.session.beginConfiguration()
            
            // Remove the old input and ALL outputs; outputs are rebuilt below so
            // their connection settings (mirroring, orientation) match the new position.
            self.session.removeInput(currentInput)
            for output in self.session.outputs {
                self.session.removeOutput(output)
            }
            
            let switched: Bool
            if self.session.canAddInput(newInput) {
                self.session.addInput(newInput)
                self.videoInput = newInput
                self.currentPosition = targetPosition
                
                // Rebuild outputs for the new camera position.
                self.reconfigureOutputs(for: targetPosition)
                switched = true
            } else {
                // Restore the original input AND its outputs (they were removed
                // above), and report failure — previously this path still
                // reported success and left the session with no outputs.
                self.session.addInput(currentInput)
                self.reconfigureOutputs(for: currentInput.device.position)
                switched = false
            }
            
            self.session.commitConfiguration()
            
            // Resume the session and report the real outcome.
            self.session.startRunning()
            finish(switched)
        }
    }
    
    /// Captures a still photo (JPEG). The result arrives via the
    /// `AVCapturePhotoCaptureDelegate` conformance and is forwarded to `delegate`.
    ///
    /// - Parameters:
    ///   - flashMode: requested flash mode; silently ignored when the photo
    ///     output does not support it (setting an unsupported mode raises an
    ///     Objective-C exception, e.g. `.on` on a flashless front camera).
    ///   - orientation: orientation applied to the photo connection.
    func capturePhoto(
        flashMode: AVCaptureDevice.FlashMode = .off,
        orientation: AVCaptureVideoOrientation = .portrait
    ) {
        guard let imageOutput = imageOutput else {
            print("❌ imageOutput 未初始化")
            return
        }
        
        let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
        // Only apply the flash mode when supported to avoid an exception.
        if imageOutput.supportedFlashModes.contains(flashMode) {
            settings.flashMode = flashMode
        }
        
        if let connection = imageOutput.connection(with: .video) {
            connection.videoOrientation = orientation
            
            // Front camera: disable stabilization/fusion and mirror the image
            // so the photo matches the mirrored preview.
            if currentPosition == .front {
                settings.isAutoStillImageStabilizationEnabled = false
                if #available(iOS 13.0, *) {
                    // NOTE(review): isAutoDualCameraFusionEnabled is deprecated
                    // on iOS 13+ (replaced by isAutoVirtualDeviceFusionEnabled);
                    // kept as-is to avoid changing capture behavior — confirm
                    // against the deployment target.
                    settings.isAutoDualCameraFusionEnabled = false
                }
                connection.isVideoMirrored = true
            }
        }
        
        imageOutput.capturePhoto(with: settings, delegate: self)
    }
    
    /// Sets the zoom factor, clamped to [1.0, min(app max, device max)].
    func setZoomFactor(_ factor: CGFloat) {
        guard let device = videoInput?.device else { return }
        
        do {
            try device.lockForConfiguration()
            let maxZoom = min(YCCameraConstants.Camera.maxZoomFactor, device.maxAvailableVideoZoomFactor)
            device.videoZoomFactor = min(max(1.0, factor), maxZoom)
            device.unlockForConfiguration()
        } catch {
            print("❌ 设置缩放失败: \(error)")
        }
    }
    
    /// Sets the focus and exposure point of interest.
    ///
    /// - Parameters:
    ///   - point: point of interest in the device coordinate space
    ///     (presumably (0,0)–(1,1); callers convert from layer coordinates — TODO confirm).
    ///   - mode: focus mode to apply when supported by the device.
    ///   - exposureMode: exposure mode to apply when supported by the device.
    func setFocusPoint(
        _ point: CGPoint,
        mode: AVCaptureDevice.FocusMode = .continuousAutoFocus,
        exposureMode: AVCaptureDevice.ExposureMode = .continuousAutoExposure
    ) {
        guard let device = videoInput?.device else { return }
        
        do {
            try device.lockForConfiguration()
            
            // Each capability is applied only if the device supports it.
            if device.isFocusModeSupported(mode) {
                device.focusMode = mode
            }
            if device.isFocusPointOfInterestSupported {
                device.focusPointOfInterest = point
            }
            if device.isExposureModeSupported(exposureMode) {
                device.exposureMode = exposureMode
            }
            if device.isExposurePointOfInterestSupported {
                device.exposurePointOfInterest = point
            }
            
            device.unlockForConfiguration()
        } catch {
            print("❌ 设置焦点失败: \(error)")
        }
    }
    
    /// Maximum usable zoom factor (app-configured cap vs. device capability);
    /// returns 1.0 when no camera input is installed.
    func getMaxZoomFactor() -> CGFloat {
        guard let device = videoInput?.device else { return 1.0 }
        return min(YCCameraConstants.Camera.maxZoomFactor, device.maxAvailableVideoZoomFactor)
    }
    
    /// Whether the current camera has a flash.
    func hasFlash() -> Bool {
        return videoInput?.device.hasFlash ?? false
    }
    
    /// Current capture device (for reading camera parameters), if any.
    func getCurrentDevice() -> AVCaptureDevice? {
        return videoInput?.device
    }
    
    // MARK: - Private Methods
    
    /// Builds the session: photo preset, camera input, video-data output and
    /// photo output. Must run on `sessionQueue`.
    private func configureSession() {
        session.beginConfiguration()
        session.sessionPreset = .photo
        
        // Camera input.
        guard let device = getCamera(position: currentPosition),
              let input = try? AVCaptureDeviceInput(device: device) else {
            session.commitConfiguration()
            print("❌ 无法配置相机输入")
            return
        }
        
        if session.canAddInput(input) {
            session.addInput(input)
            videoInput = input
        }
        
        // Live video frames for the delegate.
        configureVideoOutput(for: currentPosition)
        
        // Still photo output.
        let imageOutput = AVCapturePhotoOutput()
        if session.canAddOutput(imageOutput) {
            session.addOutput(imageOutput)
            self.imageOutput = imageOutput
        }
        
        session.commitConfiguration()
    }
    
    /// Creates and installs the video-data output (BGRA frames on
    /// `processingQueue`), configuring the connection's orientation,
    /// stabilization and front-camera mirroring.
    private func configureVideoOutput(for position: AVCaptureDevice.Position) {
        let videoOutput = AVCaptureVideoDataOutput()
        videoOutput.setSampleBufferDelegate(self, queue: processingQueue)
        videoOutput.videoSettings = [
            kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA
        ]
        
        if session.canAddOutput(videoOutput) {
            session.addOutput(videoOutput)
            self.videoOutput = videoOutput
            
            if let connection = videoOutput.connection(with: .video) {
                connection.videoOrientation = .portrait
                
                if connection.isVideoStabilizationSupported {
                    connection.preferredVideoStabilizationMode = .auto
                }
                
                // Mirror the front camera so the preview matches a mirror.
                if position == .front {
                    connection.isVideoMirrored = true
                }
                
                print("📹 视频输出配置完成 - 镜像: \(connection.isVideoMirrored), 方向: \(connection.videoOrientation)")
            }
        }
    }
    
    /// Rebuilds both outputs after a camera switch (the old outputs are removed
    /// by the caller before this runs).
    private func reconfigureOutputs(for position: AVCaptureDevice.Position) {
        // Video-data output with position-appropriate connection settings.
        configureVideoOutput(for: position)
        
        // Fresh photo output.
        let imageOutput = AVCapturePhotoOutput()
        if session.canAddOutput(imageOutput) {
            session.addOutput(imageOutput)
            self.imageOutput = imageOutput
        }
    }
    
    /// Returns the preferred built-in wide-angle camera for `position`,
    /// falling back to the system default device for that position.
    private func getCamera(position: AVCaptureDevice.Position) -> AVCaptureDevice? {
        let discoverySession = AVCaptureDevice.DiscoverySession(
            deviceTypes: [.builtInWideAngleCamera],
            mediaType: .video,
            position: position
        )
        
        let devices = discoverySession.devices
        
        switch position {
        case .back:
            // Back camera: prefer a non-virtual device with usable zoom.
            // NOTE(review): isVirtualDevice is available from iOS 13; gating it
            // behind iOS 16 is stricter than necessary — confirm the intended
            // deployment target before relaxing.
            return devices.first { device in
                if #available(iOS 16.0, *) {
                    return device.position == .back &&
                           device.isVirtualDevice == false &&
                           device.maxAvailableVideoZoomFactor > 1.0
                } else {
                    return device.position == .back &&
                           device.maxAvailableVideoZoomFactor > 1.0
                }
            } ?? AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back)
            
        case .front:
            // Front camera: first front-facing wide-angle device.
            return devices.first { device in
                return device.position == .front
            } ?? AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)
            
        default:
            // .unspecified and any future positions: no camera.
            return nil
        }
    }
}

// MARK: - AVCaptureVideoDataOutputSampleBufferDelegate
extension YCCameraSessionManager: AVCaptureVideoDataOutputSampleBufferDelegate {
    /// Forwards each live video frame to the delegate. Called (and forwarded)
    /// synchronously on `processingQueue`, the queue registered via
    /// `setSampleBufferDelegate` in `configureVideoOutput` — the delegate must
    /// not block here or frames will be dropped.
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        delegate?.cameraSession(self, didOutput: sampleBuffer)
    }
}

// MARK: - AVCapturePhotoCaptureDelegate
extension YCCameraSessionManager: AVCapturePhotoCaptureDelegate {
    /// Notifies the delegate (on the main queue) that the photo is about to be
    /// taken, so the UI can play a shutter animation.
    func photoOutput(_ output: AVCapturePhotoOutput, willCapturePhotoFor resolvedSettings: AVCaptureResolvedPhotoSettings) {
        DispatchQueue.main.async { [weak self] in
            guard let self = self else { return }
            self.delegate?.cameraSession(self, willCapturePhoto: ())
        }
    }
    
    /// Extracts a full-resolution `CIImage` plus a preview `UIImage` from the
    /// captured photo and forwards both to the delegate on the main queue.
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        // A failed capture must not be processed as a photo — the original
        // implementation ignored `error` entirely.
        if let error = error {
            print("❌ 拍照失败: \(error)")
            return
        }
        
        // Prefer the pixel buffer (full resolution, no compression round-trip).
        // Note: `photo.pixelBuffer` is only non-nil for uncompressed capture
        // formats; with JPEG settings the cgImageRepresentation fallback is used.
        var ciImage: CIImage?
        if let pixelBuffer = photo.pixelBuffer {
            let image = CIImage(cvPixelBuffer: pixelBuffer)
            print("📸 使用 pixelBuffer 获取图像: \(image.extent.width)x\(image.extent.height)")
            ciImage = image
        } else if let cgImage = photo.cgImageRepresentation() {
            let image = CIImage(cgImage: cgImage)
            print("📸 使用 cgImageRepresentation 获取图像: \(image.extent.width)x\(image.extent.height)")
            ciImage = image
        }
        
        guard let finalCIImage = ciImage else {
            print("❌ 无法获取完整分辨率的 CIImage")
            return
        }
        
        // Preview-quality UIImage (decoded from the encoded file data,
        // used for thumbnail display).
        guard let imageData = photo.fileDataRepresentation(),
              let previewImage = UIImage(data: imageData) else {
            print("❌ 无法获取预览图像")
            return
        }
        
        DispatchQueue.main.async { [weak self] in
            guard let self = self else { return }
            self.delegate?.cameraSession(self, didCapturePhoto: previewImage, ciImage: finalCIImage)
        }
    }
}

