//
// AudioSpectrum02
// A demo project for blog: https://juejin.im/post/5c1bbec66fb9a049cb18b64c
// Created by: potato04 on 2019/1/13
//

import Foundation
import AVFoundation

/// Receives real-time spectrum data generated while `AudioSpectrumPlayer` is playing.
protocol AudioSpectrumPlayerDelegate: AnyObject {
    /// Called from the audio tap each time a new spectrum frame is analysed.
    /// NOTE(review): `spectra` presumably holds one `[Float]` of frequency
    /// amplitudes per audio channel — confirm against `RealtimeAnalyzer.analyse(with:)`.
    func player(_ player: AudioSpectrumPlayer, didGenerateSpectrum spectra: [[Float]])
    //func onPlayComplete()
}

/// Plays an audio file through `AVAudioEngine` and streams FFT spectrum
/// frames to its delegate via a tap installed on the main mixer node.
class AudioSpectrumPlayer {

    /// Receives spectrum frames produced during playback.
    weak var delegate: AudioSpectrumPlayerDelegate?
    private let engine = AVAudioEngine()
    private let player = AVAudioPlayerNode()

    // Currently loaded file and its timing metadata (populated in `play(withFileName:)`).
    var audioFile: AVAudioFile!
    var songLengthSamples: AVAudioFramePosition!
    var sampleRateSong: Float = 0
    var lengthSongSeconds: Float = 0
    // Offset (seconds) of the most recent schedule start; used to translate the
    // player node's render time into an absolute position within the song.
    var startInSongSeconds: Float = 0

    /// FFT window size in frames. Setting it rebuilds the analyzer and
    /// (re)installs the mixer tap that feeds fixed-size buffers to it.
    public var bufferSize: Int? {
        didSet {
            guard let bufferSize = bufferSize else { return }
            analyzer = RealtimeAnalyzer(fftSize: bufferSize)
            // Only one tap per bus is allowed — remove any previous tap first.
            engine.mainMixerNode.removeTap(onBus: 0)
            engine.mainMixerNode.installTap(onBus: 0,
                                            bufferSize: AVAudioFrameCount(bufferSize),
                                            format: nil) { [weak self] buffer, _ in
                guard let strongSelf = self, strongSelf.player.isPlaying else { return }
                // Force the buffer to exactly the FFT size the analyzer expects.
                buffer.frameLength = AVAudioFrameCount(bufferSize)
                let spectra = strongSelf.analyzer.analyse(with: buffer)
                // FIX: optional chaining instead of `delegate != nil` + `delegate!` —
                // the weak delegate can be released between the check and the call.
                strongSelf.delegate?.player(strongSelf, didGenerateSpectrum: spectra)
            }
        }
    }

    public var analyzer: RealtimeAnalyzer!

    /// Builds the engine graph and starts it.
    /// - Parameter bufferSize: FFT window size in frames (default 512).
    init(bufferSize: Int = 512) {
        engine.attach(player)
        engine.connect(player, to: engine.mainMixerNode, format: nil)
        engine.prepare()
        // FIX: `try!` crashed the app whenever the engine failed to start
        // (e.g. audio-session or hardware problems); fail soft and report.
        do {
            try engine.start()
        } catch {
            print("AudioSpectrumPlayer: failed to start engine: \(error)")
        }
        // `defer` so the assignment runs the `didSet` observer — property
        // observers do not fire for direct assignments inside `init`.
        defer {
            self.bufferSize = bufferSize
        }
    }

    /// Loads the file at `fileName` and starts playback from the beginning.
    /// Silently returns when the URL is nil or the file cannot be opened.
    func play(withFileName fileName: URL?) {
        guard let audioFileURL = fileName,
              let audioFile = try? AVAudioFile(forReading: audioFileURL) else { return }
        player.stop()
        startInSongSeconds = 0
        self.audioFile = audioFile
        songLengthSamples = audioFile.length
        let songFormat = audioFile.processingFormat
        sampleRateSong = Float(songFormat.sampleRate)
        lengthSongSeconds = Float(songLengthSamples) / sampleRateSong
        player.scheduleFile(audioFile, at: nil, completionHandler: nil)
        player.play()
    }

    /// Stops playback and resets the tracked position to the song start.
    func stop() {
        player.stop()
        startInSongSeconds = 0
    }

    /// Pauses playback, keeping the scheduled audio so it can resume.
    func pause() {
        player.pause()
    }

    /// Resumes playback after `pause()`.
    func continuePlay() {
        player.play()
    }

    /// Current playback position in seconds, or 0 when not playing.
    /// NOTE(review): while paused this returns 0 and the position is lost —
    /// callers driving a progress bar may want the last known position instead.
    func getCurrentPosition() -> Float {
        guard player.isPlaying,
              let nodeTime = player.lastRenderTime,
              let playerTime = player.playerTime(forNodeTime: nodeTime),
              // FIX: avoid division by zero if nothing was ever loaded.
              sampleRateSong > 0 else { return 0 }
        return startInSongSeconds + Float(playerTime.sampleTime) / sampleRateSong
    }

    /// Seeks to `time` (seconds from the start of the file) and resumes playback.
    func seekTo(time: Float) {
        // FIX: guard the implicitly-unwrapped file state — calling seekTo
        // before any file was loaded crashed on the nil unwrap.
        guard let audioFile = audioFile, let songLengthSamples = songLengthSamples else { return }
        startInSongSeconds = time
        player.stop()
        let startSample = floor(time * sampleRateSong)
        let lengthSamples = Float(songLengthSamples) - startSample
        // FIX: a seek at/past the end produced a non-positive frame count and the
        // Float -> AVAudioFrameCount (UInt32) conversion trapped at runtime.
        guard lengthSamples > 0 else { return }
        player.scheduleSegment(audioFile,
                               startingFrame: AVAudioFramePosition(startSample),
                               frameCount: AVAudioFrameCount(lengthSamples),
                               at: nil,
                               completionHandler: { self.player.pause() })
        player.play()
    }

    /// Sets the player node's output volume (0.0 ... 1.0).
    func setVolume(volume: Float) {
        player.volume = volume
    }
}
