//
//  VideoBuffer.swift
//  WatermarkCamera
//
//  Created by AxcLogo on 4/7/2024.
//

import UIKit
import Foundation
import AxcBedrock

// MARK: - [VideoBuffer.Articulation]

extension VideoBuffer {
    /// Capture definition (video clarity) presets.
    enum Articulation {
        /// Restores a fixed preset from its `Info.rawValue`.
        /// `.custom` (rawValue 0) is intentionally not restorable here, since
        /// its name and line count cannot be recovered from an `Int` alone.
        init?(rawValue: Int) {
            let presets: [Articulation] = [._540p, ._720p, ._1080p]
            guard let matched = presets.first(where: { $0.info.rawValue == rawValue }) else {
                return nil
            }
            self = matched
        }

        case _540p

        case _720p

        case _1080p

        /// User-defined definition.
        case custom(name: String, row_p: Int)

        /// Display metadata for a definition preset.
        struct Info {
            /// Human-readable display name.
            var name: String

            /// Resolution line count (the "p" value, e.g. 540/720/1080).
            var row_p: Int

            /// Stable identifier; 0 is reserved for `.custom`.
            var rawValue: Int
        }

        /// Metadata describing this definition.
        var info: Info {
            switch self {
            case ._540p:
                return Info(name: "低清", row_p: 540, rawValue: 1)

            case ._720p:
                return Info(name: "高清", row_p: 720, rawValue: 2)

            case ._1080p:
                return Info(name: "超清", row_p: 1080, rawValue: 3)

            case .custom(let name, let row_p):
                return Info(name: name, row_p: row_p, rawValue: 0)
            }
        }

        /// Convenience accessor for `info.rawValue`.
        var rawValue: Int { info.rawValue }
    }
}

extension VideoBuffer {
    /// Starts collecting the video buffer stream.
    /// - Parameters:
    ///   - articulation: Capture definition (clarity) to use for this session.
    ///   - aspectRatio: Capture aspect ratio to use for this session.
    ///   - block: Callback invoked with each collected `CVBuffer`.
    func beginCollectVideoBuffer(articulation: VideoBuffer.Articulation,
                                 aspectRatio: Eyepiece.AspectRatio,
                                 block: @escaping AxcBlock.OneParam<CVBuffer>) {
        collectVideoBufferBlock = block
        self.articulation = articulation
        self.aspectRatio = aspectRatio
    }

    /// Stops collecting the stream: clears the callback so no further
    /// buffers are delivered.
    func endCollectVideoBuffer() {
        collectVideoBufferBlock = nil
    }
}

// MARK: - [VideoBuffer]

/// Receives frames from a GPUImage pipeline and hands each `CVBuffer` to an
/// optional collection callback; subclasses may override `putCVBuffer(_:)`
/// to push frames further downstream (e.g. an encoder or streamer).
class VideoBuffer: NSObject {
    // Open

    /// Pushes a frame buffer downstream.
    /// Intended as an override point for subclasses; the base implementation
    /// does nothing.
    func putCVBuffer(_ cvBuffer: CVBuffer) { }

    // Internal

    /// Capture definition (clarity); defaults to 720p.
    var articulation: Articulation = ._720p

    /// Capture aspect ratio; defaults to 16:9.
    var aspectRatio: Eyepiece.AspectRatio = .ar_16_9

    // Private

    /// Callback fired for every collected buffer; nil while collection is
    /// stopped (see `beginCollectVideoBuffer` / `endCollectVideoBuffer`).
    private var collectVideoBufferBlock: AxcBlock.OneParam<CVBuffer>?
}

// MARK: OutputProtocol

extension VideoBuffer: OutputProtocol {
    /// Attaches this buffer as a render target of the given GPUImage output,
    /// so frames flow into the `GPUImageInput` methods below.
    func setOutput(_ output: GPUImageOutput) {
        output.addTarget(self)
    }
}

// MARK: GPUImageInput

extension VideoBuffer: GPUImageInput {
    /// Maximum output size requested from the upstream GPUImage pipeline,
    /// derived from the current definition and aspect ratio.
    /// NOTE(review): `row_p` (e.g. 720 for "720p") is used here as the WIDTH
    /// and the height is derived via `ratioHW` — for a "p" value one would
    /// normally expect it to be the height. Confirm `ratioHW`'s definition
    /// in `Eyepiece.AspectRatio` and whether this orientation is intentional.
    func maximumOutputSize() -> CGSize {
        let width = articulation.info.row_p.axc.cgFloat

        let height = width * aspectRatio.ratioHW

        return .init(width: width, height: height)
    }

    /// Called by GPUImage with each rendered framebuffer; extracts the pixel
    /// buffer, forwards it to `putCVBuffer(_:)` (subclass hook), then to the
    /// collection callback if one is set.
    /// NOTE(review): `takeUnretainedValue()` assumes `pixelBuffer()` returns
    /// an Unmanaged reference at +0 — verify against the GPUImage headers
    /// that no retain is expected to be consumed here.
    func setInputFramebuffer(_ newInputFramebuffer: GPUImageFramebuffer!, at textureIndex: Int) {
        guard let newInputFramebuffer = newInputFramebuffer else { return }

        let cvBuffer: CVBuffer = newInputFramebuffer.pixelBuffer().takeUnretainedValue()

        putCVBuffer(cvBuffer)

        collectVideoBufferBlock?(cvBuffer)
    }

    /// No per-frame work needed; buffers are handled in `setInputFramebuffer`.
    func newFrameReady(at frameTime: CMTime, at textureIndex: Int) { }

    /// Single-input target: always texture index 0.
    func nextAvailableTextureIndex() -> Int {
        return 0
    }

    /// Always accept updates from the upstream output.
    func shouldIgnoreUpdatesToThisTarget() -> Bool {
        return false
    }

    /// This target is always active.
    func enabled() -> Bool {
        return true
    }

    /// Full-color input is required.
    func wantsMonochromeInput() -> Bool {
        return false
    }

    /// Input size is ignored; output size is driven by `maximumOutputSize()`.
    func setInputSize(_ newSize: CGSize, at textureIndex: Int) { }

    /// Rotation hints from upstream are ignored.
    func setInputRotation(_ newInputRotation: GPUImageRotationMode, at textureIndex: Int) { }

    /// No teardown needed when the upstream pipeline finishes.
    func endProcessing() { }

    /// Monochrome negotiation is a no-op for this target.
    func setCurrentlyReceivingMonochrome(_ newValue: Bool) { }
}
