//
//  FWAudioView.swift
//  YueChat
//
//  Created by IMac on 2024/9/28.
//

import AVFoundation
import UIKit

/// Full-screen "hold to talk" voice-recording view, loaded from a nib.
///
/// Creates an `AVAudioRecorder` (and immediately starts it — see
/// `setupFromNib()`), samples a level meter from a `CADisplayLink`, and renders
/// a mirrored waveform via a `CAShapeLayer` inside a `CAReplicatorLayer` on
/// `Layerview`.
///
/// NOTE(review): `kBlue`, `kScreenWidth`, `kScreenHeight`, `navHeight`,
/// `tabBarHeight` and `AudioPlayer` are project-level symbols defined outside
/// this file.
class FWAudioView: UIView {

    // MARK: - Nib outlets (implicitly unwrapped; nil until the nib loads)
    @IBOutlet weak var bgView: UIView!
    // NOTE(review): "Constranints" is a typo for "Constraints"; renaming would require re-wiring the nib.
    @IBOutlet weak var tipsTopConstranints: NSLayoutConstraint!
    @IBOutlet weak var voiceInputBg: UIImageView!
    // NOTE(review): Swift convention is lowerCamelCase ("layerView"); nib-wired, so left as-is.
    @IBOutlet weak var Layerview: UIView!
    @IBOutlet weak var tipsLabel: UILabel!
    @IBOutlet weak var audioBtn: UIButton!
    
    // MARK: - Owner callbacks (no-op defaults, so they are always safe to invoke)
    var AudioViewStartBlock: () -> Void = {}                        // fired from audioStart()
    var AudioViewStopBlock: () -> Void = {}                         // fired from audioClose()
    var audioCompletionBlock: (_ file:String) -> Void = {file in }  // receives the MP3 path from sendButtonAction()
    
//    var levelLayer : CAShapeLayer?
    // Rebuilt on every meter tick by updateLevelLayer(); one vertical bar per level.
    var levelPath = UIBezierPath()

    // Rolling window of the 20 most recent normalized levels (0.05...1.0);
    // updateMeter() inserts the newest value at index 0 and drops the oldest.
    var currentLevels: [CGFloat] = [0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05]
    
    // Waveform layer, created on first access. Side effects: attaches itself to
    // `replicatorL`, which is in turn added to `Layerview`'s layer tree.
    // NOTE(review): the `levelLayer = layer` assignment inside the initializer
    // is redundant — the lazy machinery stores the returned value anyway.
    lazy var levelLayer: CAShapeLayer = {
        let layer = CAShapeLayer()
        layer.frame = CGRect(x: UIScreen.main.bounds.width / 2.8, y: 0, width: UIScreen.main.bounds.width / 2.0, height: 50)
        layer.strokeColor = UIColor.white.cgColor
        layer.lineWidth = CGFloat(levelWidth)
        levelLayer = layer
        replicatorL.addSublayer(layer)
        Layerview.layer.addSublayer(replicatorL)
        return layer
    }()
    
    // NOTE(review): dead code path — `levelLayer` is non-optional, so
    // `levelLayer == nil` is always false (and merely touching it triggers the
    // lazy initializer above); this method reduces to `return levelLayer`.
    func getLevelLayer () -> CAShapeLayer {
        if levelLayer == nil {
            let layer = CAShapeLayer()
            layer.frame = CGRect(x: UIScreen.main.bounds.width / 2.8, y: 0, width: UIScreen.main.bounds.width / 2.0, height: 50)
            layer.strokeColor = UIColor.white.cgColor
            layer.lineWidth = CGFloat(levelWidth)
            replicatorL.addSublayer(layer)
            Layerview.layer.addSublayer(replicatorL)
            levelLayer = layer
        }
        return levelLayer
        
//        return layer
    }
    
    
    // Duplicates `levelLayer` once, rotated 180° about the z axis, so the
    // waveform reads as symmetric.
    // NOTE(review): `replicatorL = layer` inside the initializer is redundant,
    // same pattern as `levelLayer` above.
    lazy var replicatorL: CAReplicatorLayer = {
        let layer = CAReplicatorLayer()
        layer.frame = bounds
        layer.instanceCount = 2
        layer.instanceTransform = CATransform3DMakeRotation(CGFloat(Float.pi), 0, 0, 1)
//        layerView.layer.addSublayer(layer)
        replicatorL = layer
//        self.getLevelLayer()
        return layer
    }()
    var curCafFilePath = String()   // path of the raw linear-PCM (.caf) capture file
    var mp3FilePath = String()      // path of the MP3 produced by stopAudioRecord()
    
    private let levelWidth: CGFloat = 3.0    // stroke width of each waveform bar
    private let levelMargin: CGFloat = 2.0   // horizontal gap between bars

    private var audioSession: AVAudioSession = AVAudioSession.sharedInstance()
    // Created in setupFromNib(); implicitly unwrapped, so any use before that
    // setup runs (or after a failed recorder init) will crash.
    private var audioRecorder: AVAudioRecorder!
    // Exponent scale for the dB→linear mapping in levels(): pow(10, ALPHA * dB).
    private let ALPHA: CGFloat = 0.02
    // Drives updateMeter().
    // NOTE(review): CADisplayLink retains its target, so an active link keeps
    // `self` alive and `deinit` below will not run until the link is invalidated.
    private var levelTimer: CADisplayLink?

    // NOTE(review): never referenced in this file — confirm whether it is still needed.
    var audioViewBlock: () -> Void = {}
    
    /// Nib entry point: tints the input background, sets an initial full-screen
    /// frame (immediately overridden inside setupFromNib()), then runs setup.
    override func awakeFromNib() {
        super.awakeFromNib()
        // Initialization code
        voiceInputBg.backgroundColor = kBlue
        self.frame = CGRect(x: 0, y: 0, width: kScreenWidth, height: kScreenHeight)
        setupFromNib()
    }
    
    /// One-time configuration: frame, button styling, touch handlers, audio
    /// session category, and the recorder itself.
    /// NOTE(review): `audioRecorder.record()` is called at the end, so capture
    /// begins as soon as the view wakes from the nib, not on first touch — and
    /// because recordStart() guards on `!isRecording`, the first touchDown will
    /// not trigger audioStart() until the recorder has been stopped once.
    /// Confirm this is intentional.
    private func setupFromNib() {

//        AudioServicesPlaySystemSound(SystemSoundID(1010))

        self.frame = CGRect(x: 0, y: UIScreen.main.bounds.height - 200  - navHeight - tabBarHeight, width: UIScreen.main.bounds.width, height: 200 + tabBarHeight)
        
        // Clear the button title
        audioBtn.setTitle("", for: .normal)

        
        // Round the corners of the input background
        voiceInputBg.layer.masksToBounds = true
        voiceInputBg.layer.cornerRadius = 15
        
        // Background view color
        bgView.backgroundColor = UIColor.white
        
        // Button image
        audioBtn.setImage(UIImage(named: "talk_icon"), for: .normal)
        
        // Wire up touch handlers.
        // NOTE(review): only touchDown and touchDragExit are active; the
        // commented-out targets below mean recordCancel(), recordFinish() and
        // recordTouchDragEnter() are currently unreachable.
        audioBtn.addTarget(self, action: #selector(recordStart), for: .touchDown)
//        audioBtn.addTarget(self, action: #selector(recordCancel), for: .touchUpOutside)
//        audioBtn.addTarget(self, action: #selector(recordFinish), for: .touchUpInside)
        audioBtn.addTarget(self, action: #selector(recordTouchDragExit), for: .touchDragExit)
//        audioBtn.addTarget(self, action: #selector(recordTouchDragEnter), for: .touchDragEnter)
        
        do {
            try audioSession.setCategory(.playAndRecord, mode: .default, options: [])
            try audioSession.setActive(true)
        } catch {
            print(error.localizedDescription)  // session failure is logged, not surfaced
        }
        
//        let audioFilePath = generateAudioFilePath(withDate: Date(), andExt: "caf")
        // Destination for the raw capture; the local
        // generateAudioFilePath(withDate:andExt:) below is unused in favor of
        // this AudioPlayer helper.
        let audioFilePath = AudioPlayer.share().generateAudioFilePath(with: Date(), andExt: "caf")
//        generateAudioFilePath(date: Date(), ext: "caf")
        curCafFilePath = audioFilePath!
        let audioFileUrl = URL(fileURLWithPath: audioFilePath!)
        
        // 11.025 kHz, 16-bit, 2-channel linear PCM at low encoder quality.
        let recordSettings: [String: Any] = [
            AVSampleRateKey: 11025.0,
            AVFormatIDKey: kAudioFormatLinearPCM,
            AVLinearPCMBitDepthKey: 16,
            AVNumberOfChannelsKey: 2,
            AVEncoderAudioQualityKey: AVAudioQuality.low.rawValue
        ]
        
        do {
            audioRecorder = try AVAudioRecorder(url: audioFileUrl, settings: recordSettings)
            audioRecorder.isMeteringEnabled = true   // required for averagePower(forChannel:) in levels()
            audioRecorder.prepareToRecord()
            audioRecorder.record()
        } catch {
            print(error.localizedDescription)  // recorder stays nil on failure (see IUO note above)
        }
    }

    
    // Builds an audio file path with a date-derived name and the given extension:
    // <Caches>/soundFile/<millisecond-timestamp>.<ext>, creating the directory
    // if needed.
    // NOTE(review): unused in this file — setupFromNib() uses the AudioPlayer
    // variant. The "file://" prefix is stripped by string replacement; `URL.path`
    // would be the conventional route. If the prefix were ever absent, the
    // method falls back to returning the *directory* URL string with no
    // filename, which looks unintended — confirm.
        func generateAudioFilePath(withDate date: Date, andExt ext: String) -> String {
            let dateFormatter = DateFormatter()
            dateFormatter.dateFormat = "yyyy-MM-dd HH:mm:ss"
            let dateString = dateFormatter.string(from: date)
            
            // Millisecond timestamp used as the file name.
            let dateStr = "\(timeStampWithDate(dateString))"
            
//            let cachePath = NSSearchPathForDirectoriesInDomains(.cachesDirectory, .userDomainMask, true).first!
            
            let homeDirectoryURL = FileManager.default.urls(for: .cachesDirectory, in: .userDomainMask).first
//            let cachePath = homeDirectoryURL?.absoluteString
//            let directoryPath = "\(cachePath ?? "")/soundFile"
            
            let path = homeDirectoryURL?.appendingPathComponent("soundFile")
            
            if path!.absoluteString.hasPrefix("file://") {
                
                let p = path!.absoluteString.replacingOccurrences(of: "file://", with: "")
    //            path.absoluteString = path.absoluteString.replacingOccurrences(of: "file://", with: "")
                
                do {
                    try FileManager.default.createDirectory(atPath: p, withIntermediateDirectories: true)
    //                FileManager.default.createDirectory(at: path, withIntermediateDirectories: true, attributes: nil)
                } catch {
                    print("Failed to create directory at path: \(p) due to error: \(error)")
                }
                
                let filePath = "\(p)/\(dateStr).\(ext)"
                return filePath
            }
            
//            if !FileManager.default.fileExists(atPath: directoryPath) {
//                try? FileManager.default.createDirectory(atPath: directoryPath, withIntermediateDirectories: true, attributes: nil)
//            }
            
//            let filePath = "\(directoryPath)/\(dateStr).\(ext)"
            return path!.absoluteString
        }
    
    /// Parses `dateString` ("yyyy-MM-dd HH:mm:ss") and returns its Unix time in
    /// milliseconds; returns 0 when parsing fails.
    func timeStampWithDate(_ dateString: String) -> TimeInterval {
            let dateFormatter = DateFormatter()
            dateFormatter.dateFormat = "yyyy-MM-dd HH:mm:ss"
            if let date = dateFormatter.date(from: dateString) {
                return date.timeIntervalSince1970 * 1000 // convert to milliseconds
            }
            return 0
        }
    
    // MARK: - ControlEvents

    /// touchDown handler: enters the "recording" UI state.
    /// NOTE(review): see setupFromNib() — the recorder is already running when
    /// the view appears, so this guard skips audioStart() on the first touch.
    @objc private func recordStart(_ button: UIButton) {
        if !audioRecorder.isRecording {
            print("录音开始")
            audioStart()
        }
    }

    /// touchUpOutside handler — currently not wired up (target commented out in setupFromNib()).
    @objc private func recordCancel(_ button: UIButton) {
        if audioRecorder.isRecording {
            audioClose()
            print("录音取消了")
        }
    }

    /// touchUpInside handler — currently not wired up (target commented out in
    /// setupFromNib()). Sends the recording, then tears down the UI.
    @objc private func recordFinish(_ button: UIButton) {
        if audioRecorder.isRecording {
            print("录音发送")
            sendButtonAction()
            audioClose()
        }
    }

    /// touchDragExit handler: switches the UI to the red
    /// "release finger to cancel sending" state.
    @objc private func recordTouchDragExit(_ button: UIButton) {
        if audioRecorder.isRecording {
            print("松开手指，取消发送")
            tipsLabel.text = NSLocalizedString("松开手指，取消发送", comment: "")
            voiceInputBg.backgroundColor = UIColor.red
            audioBtn.setImage(UIImage(named: "talk_icon_red"), for: .normal)
            // stopAnimate()
        }
    }

    /// touchDragEnter handler — currently not wired up (target commented out in
    /// setupFromNib()). Restores the normal "recording" appearance.
    @objc private func recordTouchDragEnter(_ button: UIButton) {
        if audioRecorder.isRecording {
            print("正在录音")
            tipsLabel.text = NSLocalizedString("正在录音", comment: "")
            voiceInputBg.backgroundColor = kBlue
            audioBtn.setImage(UIImage(named: "talk_icon"), for: .normal)
            // startAnimate()
        }
    }

    // NOTE(review): shaped like a UIGestureRecognizerDelegate method, but this
    // class declares no such conformance and nothing in this file calls it —
    // confirm whether it is dead code.
    func gestureRecognizer(_ gestureRecognizer: UIPanGestureRecognizer, shouldRecognizeSimultaneouslyWith otherGestureRecognizer: UIGestureRecognizer) -> Bool {
        return true
    }

    
    // MARK: - Audio Recording Methods

    /// Enters the recording UI state: notifies the owner, shows the waveform,
    /// and starts (or resumes) the meter display link.
    @objc private func audioStart() {
        AudioViewStartBlock()
        Layerview.isHidden = false
        startMeterTimer()
        tipsLabel.text = NSLocalizedString("手指上滑，取消发送", comment: "")
        
//        let yPosition = UIScreen.main.bounds.height - 200 - tabBarHeight - navHeight
//        self.frame = CGRect(x: 0, y: yPosition, width: UIScreen.main.bounds.width, height: 200 + tabBarHeight)
        // tipsTopConstraints.constant = 60
    }

    /// Tears down the recording UI: stops the recorder, deletes any previous
    /// MP3 (cancelButtonAction), pauses the meter, restores idle appearance.
    /// NOTE(review): `levelTimer!` force-unwraps — this crashes if audioClose()
    /// ever runs before startMeterTimer() has created the display link.
    @objc private func audioClose() {
        AudioViewStopBlock()
        
        audioRecorder.stop()
        cancelButtonAction()
        levelTimer!.isPaused = true
        Layerview.isHidden = true
        tipsLabel.text = NSLocalizedString("按住说话", comment: "")
        
//        let yPosition = UIScreen.main.bounds.height - 200 - tabBarHeight - navHeight
//        self.frame = CGRect(x: 0, y: yPosition, width: UIScreen.main.bounds.width, height: 200 + tabBarHeight)
        // tipsTopConstraints.constant = 60
        audioBtn.setImage(UIImage(named: "talk_icon"), for: .normal)
        voiceInputBg.backgroundColor = kBlue
    }
    
    /// Rebuilds `levelPath` as one vertical tick per entry of `currentLevels`,
    /// centered vertically in `levelLayer` (tick height = level × layer height),
    /// then installs it as the layer's path.
    private func updateLevelLayer() {
        levelPath = UIBezierPath()
        let height = levelLayer.frame.height
        for (index, level) in currentLevels.enumerated() {
            let x = CGFloat(index) * (levelWidth + levelMargin) + 5
            let pathH = level * height
            let startY = height / 2.0 - pathH / 2.0
            let endY = height / 2.0 + pathH / 2.0
            levelPath.move(to: CGPoint(x: x, y: startY))
            levelPath.addLine(to: CGPoint(x: x, y: endY))
        }
        levelLayer.path = levelPath.cgPath
    }
    
    /// Creates the display link sampling updateMeter() ~10×/s (frameInterval 6
    /// pre-iOS 10), or resumes an existing one.
    /// NOTE(review): the link retains `self` (see `levelTimer` declaration), and
    /// stopMeterTimer() invalidates without nilling the property, so after an
    /// invalidate this method takes the `isPaused = false` branch on a dead
    /// link instead of recreating it. The indentation below is misleading but
    /// the braces balance — the second `else` pairs with `if levelTimer == nil`.
    private func startMeterTimer() {

        if levelTimer == nil {
            // Stop the metering timer first (if ever needed)
            // stopMeterTimer()
            
            levelTimer = CADisplayLink(target: self, selector: #selector(updateMeter))
            
            if #available(iOS 10.0, *) {
                levelTimer?.preferredFramesPerSecond = 10
            } else {
                levelTimer?.frameInterval = 6
            }
                
                levelTimer?.add(to: .current, forMode: .common)
            } else {
                levelTimer?.isPaused = false
            }
    }
    
    /// Returns the current average-power meter reading mapped to 0.05...1.0 via
    /// pow(10, ALPHA * dB). Side effect: calls updateMeters() on the recorder.
    /// NOTE(review): the final `max(min(...))` clamp is unreachable given the
    /// two early returns above it.
    func levels () -> Float {
        audioRecorder.updateMeters()
        let aveChannel = pow(10, (ALPHA * Double(audioRecorder.averagePower(forChannel: 0))))
        
        if aveChannel <= 0.05 {
            return 0.05
        }
        if aveChannel >= 1.0 {
            return 1.0
        }
        return Float(max(min(aveChannel, 1.0), 0.05))
    }
    
    /// Permanently invalidates the display link (does not nil `levelTimer` —
    /// see the note on startMeterTimer()).
    private func stopMeterTimer() {
        levelTimer?.invalidate()
    }
    
    // Full history of every sample taken; grows unbounded for the lifetime of
    // the view. NOTE(review): appended to but never read in this file — confirm
    // it is still needed.
    var allLevels:[Float]  = {
        return []
    }()
    
    /// Display-link callback: shifts the newest level into `currentLevels` and
    /// redraws the waveform.
    /// NOTE(review): `level` binds the *method* and `level()` is invoked twice,
    /// so the value pushed into `currentLevels` and the one appended to
    /// `allLevels` come from two separate meter reads and may differ.
    @objc private func updateMeter() {
        let level = levels
        currentLevels.removeLast()
        currentLevels.insert(CGFloat(level()), at: 0)
        allLevels.append(level())
        updateLevelLayer()
    }
    
    
    /// Deletes the last converted MP3, if any; failures are only logged.
    private func cancelButtonAction() {
        if !mp3FilePath.isEmpty {
            do {
                try FileManager.default.removeItem(at: URL(fileURLWithPath: mp3FilePath))
            } catch {
                print(error.localizedDescription)
            }
        }
    }
    
    /// Finalizes the recording (stop + CAF→MP3 conversion) and hands the MP3
    /// path to the owner via `audioCompletionBlock`.
    private func sendButtonAction() {
        stopAudioRecord()
        audioCompletionBlock(mp3FilePath)
    }
    

    
    /// Stops the recorder and converts the captured PCM/CAF file to MP3 via the
    /// project's AudioPlayer helper, storing the result in `mp3FilePath`.
    private func stopAudioRecord() {
        if audioRecorder.isRecording {
            audioRecorder.stop()
//            mp3FilePath = LGAudioPlayer.share().audio_PCMtoMP3(withFilePath: curCafFilePath)
            mp3FilePath = AudioPlayer.share().audioPCMtoMP3(curCafFilePath)
//            curCafFilePath //audio_PCMtoMP3WithFilePath(filePath: curCafFilePath)
        }
    }
    
//    private func generateAudioFilePath(date: Date, ext: String) -> String {
//        let formatter = DateFormatter()
//        formatter.dateFormat = "yyyy-MM-dd HH:mm:ss"
////        let dateString = formatter.string(from: date)
//        let dateStr = String(Date().timeIntervalSince1970)
//        let directoryPath = getAudioDirectoryPath()
//        let filePath = "\(directoryPath)/\(dateStr).\(ext)"
//        return filePath
//    }
//    
//    private func getAudioDirectoryPath() -> String {
//        let cachePath = NSSearchPathForDirectoriesInDomains(.cachesDirectory, .userDomainMask, true).first!
//        let directoryPath = "\(cachePath)/soundFile"
//        
//        do {
//            try FileManager.default.createDirectory(atPath: directoryPath, withIntermediateDirectories: true)
////            FileManager.default.createDirectory(atPath: directoryPath, withIntermediateDirectories: true)
//        } catch {
//            
//        }
//        return directoryPath
//    }
    
//    private func audio_PCMtoMP3WithFilePath(filePath: String) -> String {
//        
//        if filePath.isEmpty {
//            return ""
//        }
//        
//        let cafFilePath = filePath
//        let mp3FileName = generateAudioFilePath(date: Date(), ext: "mp3")
//        var read = Int()
//        var write = Int()
//        
//  
//        // Get C strings using the specified encoding
//        let macOSRomanEncoding = String.Encoding(rawValue: 1)
//        let pcmStr = cafFilePath.cString(using: macOSRomanEncoding)!
//        let mpStr = mp3FileName.cString(using: macOSRomanEncoding)!
//        let pcm = fopen(pcmStr, "rb")
//        fseek(pcm, 4*1024, SEEK_CUR)
////            try FileHandle(forReadingFrom: URL(fileURLWithPath: cafFilePath))
//        let mp3 = fopen(mpStr, "wb")
//        
//        // Concrete encoding logic omitted here, since the Swift standard library has no direct support for it
//        let PCM_SIZE = 8192
//        let MP3_SIZE = 8192
//        
//        var pcm_buffer = [Int16](repeating: 0, count: PCM_SIZE * 2)
//        var mp3_buffer = [UInt8](repeating: 0, count: MP3_SIZE)
//        
////        var pcm_buffer = [Int16](repeating: 0, count: PCM_SIZE * 2)
////        var mp3_buffer = [Int16](repeating: 0, count: MP3_SIZE)
//        
//        let lame = lame_init()
//        lame_set_num_channels(lame, 2)
//        lame_set_in_samplerate(lame, Int32(11025.0))
//        lame_set_brate(lame, 16)
//        lame_set_mode(lame, MPEG_mode(3))
//        lame_set_quality(lame, 2)
//        lame_init_params(lame)
//        
//        
//        
//        do {
////                var ptr1 = withUnsafePointer(to: &pcm_buffer, { $0 })
//            let pcmP = withUnsafeMutablePointer(to: &pcm_buffer, { UnsafeMutableRawPointer($0) })
//            
//            // Define a PCM data buffer
//            var pcmBuffer = [Int16](repeating: 0, count: PCM_SIZE * 2)
//            
//
//            let mpBf = withUnsafeMutablePointer(to: &mp3_buffer, { UnsafeMutableRawPointer($0) })
//            read = fread(pcmP, MemoryLayout<Int16>.size, PCM_SIZE, pcm)
//
//            if read == 0 {
//                write = Int(lame_encode_flush(lame, mpBf, Int32(MP3_SIZE)))
//            }else {
////                    write = Int(lame_encode_buffer_interleaved(lame, pcmBufferPointer, Int32(read), mpBf, Int32(MP3_SIZE)))
//                
////                let bufferSize = pcmBuffer.count * MemoryLayout<Int16>.size
//
//                pcmBuffer.withUnsafeMutableBytes { buffer in
//                    let pcmBufferPointer = buffer.bindMemory(to: Int16.self).baseAddress!
//                    // Operate using pcmBufferPointer
//                    // e.g. pass it to a C or C++ function
//                    write = Int(lame_encode_buffer_interleaved(lame, pcmBufferPointer, Int32(read), mpBf, Int32(MP3_SIZE)))
//                }
//                fwrite(mp3_buffer, write, 1, mp3)
//            }
//        }
//        while read != 0{
//            
//        }
//                
//                lame_close(lame)
//                fclose(mp3)
//                fclose(pcm)
//
////        
//        
//        return mp3FileName
//    }
    
    /// NOTE(review): while the display link is active it retains `self`, so
    /// this deinit is only reached after the link has been invalidated
    /// elsewhere — the stopMeterTimer() call here cannot break that cycle by
    /// itself.
    deinit {
        stopMeterTimer()
        
//        NotificationCenter.default.removeObserver(self, name: Notification.Name(FZ_EVENT_AUDIO_STATE), object: nil)
    }
    
    // Note: definitions of some variables and methods (e.g. cancelLab, endLab,
    // wordLab) are omitted here and must be included in the actual class definition.
}
