//
//  QAAudioUnitPlayer.swift
//  DemoUI
//  Plays PCM audio data via Audio Unit (RemoteIO)

//  Created by 白瑾浩 on 2022/3/5.
//

import Foundation
import AudioUnit
import AVKit

class QAAudioUnitPlayer: NSObject {

    /// The RemoteIO audio unit used for playback; nil if `setupIoUnit()` failed.
    var ioUnit: AudioComponentInstance? = nil

    private(set) var isPlaying = false
    // FIFO of raw PCM bytes waiting to be pulled by the render callback.
    private var cacheBufferData = Data()

    // Serializes access to cacheBufferData between the producer thread
    // (addAudioData) and the real-time render callback.
    private let semaphore = DispatchSemaphore(value: 1)

    override init() {
        super.init()
        let _ = self.setupIoUnit()
    }

    /// Appends raw PCM bytes to the playback cache.
    /// Data arriving while not playing is dropped.
    func addAudioData(data: Data) {
        guard isPlaying else { return }
        semaphore.wait()
        cacheBufferData.append(data)
        semaphore.signal()
    }

    /// Atomically copies `byteCount` bytes from the front of the cache into
    /// `buffer` and removes them from the cache.
    /// - Returns: false (copying nothing) when the cache holds fewer than
    ///   `byteCount` bytes, so the caller can output silence instead.
    private func consumeCacheData(into buffer: UnsafeMutableRawPointer, byteCount: Int) -> Bool {
        semaphore.wait()
        defer { semaphore.signal() }
        guard byteCount > 0, cacheBufferData.count >= byteCount else { return false }
        cacheBufferData.withUnsafeBytes { (raw: UnsafeRawBufferPointer) in
            buffer.copyMemory(from: raw.baseAddress!, byteCount: byteCount)
        }
        cacheBufferData.removeFirst(byteCount)
        return true
    }

    /// Creates and configures the RemoteIO unit: enables output on the output
    /// bus, sets the 16-bit signed-integer LPCM stream format, and installs
    /// the render callback that feeds from `cacheBufferData`.
    /// - Returns: true on success; false (with a diagnostic print) on the
    ///   first failing AudioUnit call.
    private func setupIoUnit() -> Bool {
        var ioDes = AudioComponentDescription(
            componentType: kAudioUnitType_Output,
            componentSubType: kAudioUnitSubType_RemoteIO,
            componentManufacturer: kAudioUnitManufacturer_Apple,
            componentFlags: 0,
            componentFlagsMask: 0)
        guard let inputComp = AudioComponentFindNext(nil, &ioDes) else {
            print("outputComp init error")
            return false
        }
        if AudioComponentInstanceNew(inputComp, &ioUnit) != noErr {
            print("io AudioComponentInstanceNew error")
            return false
        }
        guard let unit = ioUnit else { return false }

        // kAudioOutputUnitProperty_EnableIO takes a UInt32 flag. The original
        // used `Int`, which passes the wrong byte size (8) on 64-bit platforms.
        var enableIO: UInt32 = 1
        if AudioUnitSetProperty(unit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Output, AudioConst.OutputBus, &enableIO, UInt32(MemoryLayout.size(ofValue: enableIO))) != noErr {
            print("can't enable output io")
            return false
        }

        // 16-bit signed interleaved PCM at AudioConst.SampleRate.
        var ioFormat = AudioStreamBasicDescription(
            mSampleRate: Float64(AudioConst.SampleRate),
            mFormatID: kAudioFormatLinearPCM,
            mFormatFlags: kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked,
            mBytesPerPacket: UInt32(2 * AudioConst.Channels),
            mFramesPerPacket: 1,
            mBytesPerFrame: UInt32(2 * AudioConst.Channels),
            mChannelsPerFrame: UInt32(AudioConst.Channels),
            mBitsPerChannel: AudioConst.mBitsPerChannel,
            mReserved: 0)
        if AudioUnitSetProperty(unit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, AudioConst.OutputBus, &ioFormat, UInt32(MemoryLayout.size(ofValue: ioFormat))) != noErr {
            print("set StreamFormat error")
            return false
        }

        // Pass self unretained (the player outlives the unit it owns); the
        // callback bridges it back with Unmanaged.fromOpaque. This replaces
        // the original unsafeBitCast, which bypassed ARC bookkeeping.
        let selfPointer = Unmanaged.passUnretained(self).toOpaque()

        var playCallback = AURenderCallbackStruct(inputProc: { (inRefCon, _, _, _, _, ioData) -> OSStatus in
            // Runs on the real-time audio thread: no printing, no locking
            // beyond the short semaphore hold inside consumeCacheData.
            let player = Unmanaged<QAAudioUnitPlayer>.fromOpaque(inRefCon).takeUnretainedValue()
            guard let bufferList = ioData else { return noErr }
            let buffer = bufferList.pointee.mBuffers
            let len = Int(buffer.mDataByteSize)
            guard let dst = buffer.mData else { return noErr }
            // Underrun (not enough cached bytes): output silence rather than
            // stale or partial data.
            if !player.consumeCacheData(into: dst, byteCount: len) {
                memset(dst, 0, len)
            }
            return noErr
        }, inputProcRefCon: selfPointer)

        if AudioUnitSetProperty(unit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, AudioConst.OutputBus, &playCallback, UInt32(MemoryLayout.size(ofValue: playCallback))) != noErr {
            print("SetRenderCallback error")
            return false
        }
        return true
    }

    /// Initializes and starts the RemoteIO unit. No-op (with a diagnostic)
    /// if setup failed, instead of force-unwrap crashing as before.
    public func startPlay() {
        guard let unit = ioUnit else {
            print("startPlay: ioUnit not set up")
            return
        }
        isPlaying = true
        var error = AudioUnitInitialize(unit)
        if error != noErr {
            print("AudioUnitInitialize error: \(error)")
        }
        error = AudioOutputUnitStart(unit)
        if error != noErr {
            print("AudioOutputUnitStart error")
        }
    }

    /// Stops playback and discards any cached audio.
    public func stopPlay() {
        isPlaying = false
        if let unit = ioUnit {
            // Stop the running unit BEFORE uninitializing it; the original
            // order (uninitialize, then stop) was reversed.
            AudioOutputUnitStop(unit)
            AudioUnitUninitialize(unit)
        }
        // Clear under the semaphore so a concurrent render callback cannot
        // observe a half-cleared buffer.
        semaphore.wait()
        cacheBufferData.removeAll()
        semaphore.signal()
    }
}


/// Configures the shared AVAudioSession for playback and activates it.
/// Also registers the app for remote-control events (lock screen / headset).
func setupAudioSession() {
    let session = AVAudioSession.sharedInstance()
    do {
        try session.setCategory(AVAudioSession.Category.playback)
        try session.setPreferredSampleRate(Double(AudioConst.SampleRate))
        // WSBufferDuration is in milliseconds; the API expects seconds.
        try session.setPreferredIOBufferDuration(Double(WSBufferDuration) / 1000.0)
        try session.setActive(true)
        // Let the application receive remote-control events.
        UIApplication.shared.beginReceivingRemoteControlEvents()
    } catch {
        print(error.localizedDescription)
    }
}



//var WSBufferDuration: Int {
//    get {
//        var value = UserDefaults.standard.integer(forKey: "key_WSBufferDuration")
//        if value == 0{
//            value = 16
//        }
//        return value
//    }
//    set { UserDefaults.standard.set(newValue, forKey: "WSBufferDuration") }
//}
// Minimum sample value constant (not referenced within this file).
let sampleMinValue: Int = 64
// Preferred audio IO buffer duration in milliseconds (divided by 1000 in setupAudioSession).
var WSBufferDuration: Int = 16
// Multiplied with WSmDataByteSize to bound the playback cache (see addAudioData's log).
var audioPlayCacheBufferLen: Int = 5
// Byte size of one cached audio chunk.
var WSmDataByteSize: Int = 4096

/// Fixed audio parameters shared by the player and the session setup.
struct AudioConst {
    static let SampleRate: Int = 8000 // 48000 / 44100 also possible; ios_sound.pcm is sampled at 8000 Hz
    static let Channels: UInt32 = 1   // mono
    static let InputBus: AudioUnitElement = 1   // RemoteIO input element
    static let OutputBus: AudioUnitElement = 0  // RemoteIO output element
    static let mBitsPerChannel: UInt32 = 16     // 16-bit signed PCM samples
}

