/*
 * IJKSDLAudioUnitController.m
 *
 * Copyright (c) 2013 Bilibili
 * Copyright (c) 2013 Zhang Rui <bbcallen@gmail.com>
 *
 * based on https://github.com/kolyvan/kxmovie
 *
 * This file is part of ijkPlayer.
 *
 * ijkPlayer is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * ijkPlayer is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with ijkPlayer; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#import "IJKSDLAudioUnitController.h"
#import "IJKSDLAudioKit.h"
#include "ijksdl/ijksdl_log.h"

#import <AVFoundation/AVFoundation.h>

@implementation IJKSDLAudioUnitController {
    AUGraph _audioGraph;        // processing graph: render callback -> EQ -> RemoteIO output
    AudioUnit _EQUnit;          // iPod EQ effect unit (system preset equalizer)
    AudioUnit _outUnit;         // RemoteIO output unit (speaker/headphones)
    BOOL _isPaused;             // when YES the render callback fills silence
    BOOL _isRecognize;          // song-recognition (ShazamKit) switch
    NSTimeInterval _lastRecognizeTime; // epoch seconds of the start of the last recognition window
    
}

/// Designated initializer: builds the AUGraph (render callback -> iPod EQ ->
/// RemoteIO), applies the stream format from `aSpec`, and creates the
/// ShazamKit session used for song recognition.
/// Returns nil for a NULL spec, an unsupported format/channel count, or a
/// graph that fails to create/initialize.
- (id)initWithAudioSpec:(const SDL_AudioSpec *)aSpec recognizeMusic:(RecognizeMusicBlock)recognizeMusic blockType:(BlockType)blockType
{
    self = [super init];
    if (self) {
        self.recognizeMusic = recognizeMusic;
        self.blockType = blockType;
        if (aSpec == NULL) {
            self = nil;
            return nil;
        }
        _spec = *aSpec;

        // Only signed 16-bit native-endian PCM is supported by this pipeline.
        if (aSpec->format != AUDIO_S16SYS) {
            NSLog(@"aout_open_audio: unsupported format %d\n", (int)aSpec->format);
            return nil;
        }

        if (aSpec->channels > 6) {
            NSLog(@"aout_open_audio: unsupported channels %d\n", (int)aSpec->channels);
            return nil;
        }

        // FIX: check graph creation instead of silently continuing with NULL.
        OSStatus status = NewAUGraph(&_audioGraph);
        if (status != noErr || _audioGraph == NULL) {
            NSLog(@"aout_open_audio: NewAUGraph failed (%d)\n", (int)status);
            return nil;
        }
        AUGraphOpen(_audioGraph);

        // EQ effect node (system-provided preset equalizer).
        AudioComponentDescription EQUnitDesc;
        EQUnitDesc.componentType = kAudioUnitType_Effect;
        EQUnitDesc.componentSubType = kAudioUnitSubType_AUiPodEQ;
        EQUnitDesc.componentManufacturer = kAudioUnitManufacturer_Apple;
        EQUnitDesc.componentFlags = 0;
        EQUnitDesc.componentFlagsMask = 0;
        AUNode EQNode;
        AUGraphAddNode(_audioGraph, &EQUnitDesc, &EQNode);

        // RemoteIO output node.
        AudioComponentDescription outUnitDesc;
        outUnitDesc.componentType = kAudioUnitType_Output;
        outUnitDesc.componentSubType = kAudioUnitSubType_RemoteIO;
        outUnitDesc.componentManufacturer = kAudioUnitManufacturer_Apple;
        outUnitDesc.componentFlags = 0;
        outUnitDesc.componentFlagsMask = 0;
        AUNode outNode;
        AUGraphAddNode(_audioGraph, &outUnitDesc, &outNode);

        AUGraphConnectNodeInput(_audioGraph, EQNode, 0, outNode, 0);
        AUGraphNodeInfo(_audioGraph, EQNode, &EQUnitDesc, &_EQUnit);
        AUGraphNodeInfo(_audioGraph, outNode, &outUnitDesc, &_outUnit);

        // Stream format: S16 interleaved PCM, forced to stereo.
        _spec.format = AUDIO_S16SYS;
        _spec.channels = 2;
        AudioStreamBasicDescription audioFormat;
        IJKSDLGetAudioStreamBasicDescriptionFromSpec(&_spec, &audioFormat);

        AudioUnitSetProperty(_EQUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &audioFormat, sizeof(audioFormat));
        AudioUnitSetProperty(_EQUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 0, &audioFormat, sizeof(audioFormat));
        AudioUnitSetProperty(_outUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &audioFormat, sizeof(audioFormat));

        // Allow up to 4096 frames per render slice so playback keeps running
        // when the screen locks (Apple Technical Q&A QA1606).
        // FIX: the original fetched EQNode a second time and mislabeled it a
        // "mixer"; set the property directly on both units we already hold.
        UInt32 maxFPS = 4096;
        AudioUnitSetProperty(_EQUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &maxFPS, sizeof(maxFPS));
        AudioUnitSetProperty(_outUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &maxFPS, sizeof(maxFPS));

        // Feed the EQ node's input from our render callback, which pulls
        // decoded PCM out of the player.
        AURenderCallbackStruct callback;
        callback.inputProcRefCon = (__bridge void*) self;
        callback.inputProc = (AURenderCallback) RenderCallback;
        AUGraphSetNodeInputCallback(_audioGraph, EQNode, 0, &callback);

        // FIX: report a failed initialize instead of pretending success.
        status = AUGraphInitialize(_audioGraph);
        if (status != noErr) {
            NSLog(@"aout_open_audio: AUGraphInitialize failed (%d)\n", (int)status);
            return nil;
        }

        // Derive buffer sizes etc. from the final spec.
        SDL_CalculateAudioSpec(&_spec);

        CAShow(_audioGraph); // dump graph layout for debugging

        // ShazamKit session for "recognize this song".
        _session = [[SHSession alloc] init];
        _session.delegate = self;
    }
    return self;
}

/// Wraps raw interleaved S16 PCM bytes in an AVAudioPCMBuffer matching the
/// player's current spec, for feeding into ShazamKit.
/// Returns nil when the data is empty or a format/buffer cannot be created.
- (AVAudioPCMBuffer *)loadPCMBufferFromData:(NSData *)pcmData {
    if (pcmData.length == 0) {
        NSLog(@"无法读取PCM文件数据");
        return nil;
    }

    // 16-bit interleaved PCM at the spec's sample rate / channel count.
    AVAudioFormat *audioFormat = [[AVAudioFormat alloc] initWithCommonFormat:AVAudioPCMFormatInt16
                                                                  sampleRate:_spec.freq
                                                                    channels:_spec.channels
                                                                 interleaved:YES];
    if (!audioFormat) {
        return nil;
    }

    UInt32 bytesPerFrame = audioFormat.streamDescription->mBytesPerFrame;
    if (bytesPerFrame == 0) {
        return nil;
    }

    // Whole frames only; a trailing partial frame is dropped.
    AVAudioFrameCount frameCount = (AVAudioFrameCount)(pcmData.length / bytesPerFrame);
    if (frameCount == 0) {
        return nil;
    }

    AVAudioPCMBuffer *pcmBuffer = [[AVAudioPCMBuffer alloc] initWithPCMFormat:audioFormat frameCapacity:frameCount];
    if (!pcmBuffer) {
        return nil;
    }
    pcmBuffer.frameLength = frameCount;

    // FIX: copy only the whole-frame byte count. The original copied
    // pcmData.length, which overruns the buffer whenever the data length is
    // not an exact multiple of mBytesPerFrame.
    memcpy(pcmBuffer.int16ChannelData[0], pcmData.bytes, (size_t)frameCount * bytesPerFrame);

    return pcmBuffer;
}

/// AUGraph input callback for the EQ node: pulls decoded PCM from the player
/// into each output buffer, optionally MP3-encodes it (recording) and
/// periodically feeds it to ShazamKit (recognition).
static OSStatus RenderCallback(void                        *inRefCon,
                               AudioUnitRenderActionFlags  *ioActionFlags,
                               const AudioTimeStamp        *inTimeStamp,
                               UInt32                      inBusNumber,
                               UInt32                      inNumberFrames,
                               AudioBufferList             *ioData)
{
    @autoreleasepool {
        IJKSDLAudioUnitController* auController = (__bridge IJKSDLAudioUnitController *) inRefCon;

        // Paused (or controller gone): fill every buffer with silence.
        // Messaging nil for .spec.silence yields 0, which is fine for S16 PCM.
        if (!auController || auController->_isPaused) {
            for (UInt32 i = 0; i < ioData->mNumberBuffers; i++) {
                AudioBuffer *ioBuffer = &ioData->mBuffers[i];
                memset(ioBuffer->mData, auController.spec.silence, ioBuffer->mDataByteSize);
            }
            return noErr;
        }

        // Pull decoded audio into each output buffer.
        for (int i = 0; i < (int)ioData->mNumberBuffers; i++) {
            AudioBuffer *ioBuffer = &ioData->mBuffers[i];
            (*auController.spec.callback)(auController.spec.userdata, ioBuffer->mData, ioBuffer->mDataByteSize);

            if (auController.isRecording) {
                // NOTE(review): VLAs, ObjC messaging and file I/O on the
                // render thread are not real-time safe; consider moving
                // encoding onto a worker queue.
                short int pcm_buffer[inNumberFrames * auController.spec.channels];
                // FIX: LAME documents the worst-case MP3 output as
                // 1.25 * samples + 7200 bytes; the original inNumberFrames * 2
                // could be overrun by the encoder.
                unsigned char mp3_buffer[(inNumberFrames * 5) / 4 + 7200];

                // FIX: never copy more than the VLA can hold.
                size_t copyBytes = ioBuffer->mDataByteSize;
                if (copyBytes > sizeof(pcm_buffer))
                    copyBytes = sizeof(pcm_buffer);
                memcpy(pcm_buffer, ioBuffer->mData, copyBytes);

                int mp3_bytes = lame_encode_buffer_interleaved(auController.lame, pcm_buffer, inNumberFrames, mp3_buffer, (int)sizeof(mp3_buffer));

                // FIX: lame returns a negative code on error; only treat the
                // slice as recorded on success (0 bytes = buffered, still ok).
                if (mp3_bytes >= 0 && auController.mp3_file != NULL) {
                    if (mp3_bytes > 0)
                        fwrite(mp3_buffer, mp3_bytes, 1, auController.mp3_file);

                    // Accumulate recorded duration: frames / sample-rate seconds.
                    const float sampleRate = auController.spec.freq;
                    float duration = (inNumberFrames / sampleRate);
                    auController.recordTime += duration;
                    float allDuration = auController.recordTime;
                    if (auController.blockType) {
                        auController.blockType("recordTime", &allDuration);
                    }
                }
            }
        }

        // Song recognition: within each ~5 s cycle, stream ~3 s of PCM
        // (the [2, 5) window) to ShazamKit.
        if (auController && auController->_isRecognize) {
            NSTimeInterval seconds = [[NSDate date] timeIntervalSince1970];
            if (seconds - auController->_lastRecognizeTime >= 2 && seconds - auController->_lastRecognizeTime < 5) {
                // Flatten all output buffers into one NSData blob.
                NSMutableData *audioData = [NSMutableData data];
                for (UInt32 i = 0; i < ioData->mNumberBuffers; i++) {
                    AudioBuffer *ioBuffer = &ioData->mBuffers[i];
                    [audioData appendBytes:ioBuffer->mData length:ioBuffer->mDataByteSize];
                }
                AVAudioPCMBuffer *pcmBuffer = [auController loadPCMBufferFromData:[audioData copy]];
                // FIX: loadPCMBufferFromData: may return nil; don't pass a
                // nil buffer to ShazamKit.
                if (pcmBuffer) {
                    [auController.session matchStreamingBuffer:pcmBuffer atTime:nil];
                }
            }

            if (seconds - auController->_lastRecognizeTime >= 5) {
                auController->_lastRecognizeTime = seconds;
            }
        }

        return noErr;
    }
}

// Under ARC dealloc performs only non-memory cleanup: -close stops the
// output unit and tears down the audio resources.
- (void)dealloc
{
    [self close];
}

// Activates the shared audio session and starts the output unit.
// No-op when the unit was never created (init failed) or was closed.
- (void)play
{
    if (!_outUnit) {
        return;
    }

    _isPaused = NO;

    NSError *error = nil;
    BOOL activated = [[AVAudioSession sharedInstance] setActive:YES error:&error];
    if (!activated) {
        NSLog(@"AudioUnit: AVAudioSession.setActive(YES) failed: %@\n", error ? [error localizedDescription] : @"nil");
    }

    OSStatus status = AudioOutputUnitStart(_outUnit);
    if (noErr != status) {
        NSLog(@"AudioUnit: AudioOutputUnitStart failed (%d)\n", (int)status);
    }
}

// Stops the output unit and marks playback paused so the render callback
// outputs silence if it still fires.
- (void)pause
{
    if (!_outUnit) {
        return;
    }

    _isPaused = YES;

    OSStatus status = AudioOutputUnitStop(_outUnit);
    if (noErr != status) {
        ALOGE("AudioUnit: failed to stop AudioUnit (%d)\n", (int)status);
    }
}

// Resets the output unit's internal render state (e.g. after a seek).
- (void)flush
{
    if (_outUnit) {
        AudioUnitReset(_outUnit, kAudioUnitScope_Global, 0);
    }
}

// Stops rendering without changing the paused flag or tearing anything down.
- (void)stop
{
    if (!_outUnit) {
        return;
    }

    OSStatus status = AudioOutputUnitStop(_outUnit);
    if (noErr != status) {
        ALOGE("AudioUnit: failed to stop AudioUnit (%d)", (int)status);
    }
}

// Stops playback and tears down the audio graph.
// FIX: both units were created by the AUGraph, so dispose the graph
// (DisposeAUGraph releases every node's unit). The original called
// AudioComponentInstanceDispose on _outUnit alone, which is invalid for a
// graph-owned unit and leaked the graph and the EQ unit.
- (void)close
{
    [self stop];

    if (!_outUnit)
        return;

    // Detach our render callback before tearing down.
    AURenderCallbackStruct callback;
    memset(&callback, 0, sizeof(AURenderCallbackStruct));
    AudioUnitSetProperty(_outUnit,
                         kAudioUnitProperty_SetRenderCallback,
                         kAudioUnitScope_Input, 0, &callback,
                         sizeof(callback));

    if (_audioGraph) {
        AUGraphUninitialize(_audioGraph);
        AUGraphClose(_audioGraph);
        DisposeAUGraph(_audioGraph);
        _audioGraph = NULL;
    }
    _EQUnit = NULL;
    _outUnit = NULL;
}



// SHSessionDelegate: a match was found; forward each media item's metadata
// to the registered C callback as UTF-8 strings.
// FIX: invoking a nil block crashes, and the optional fields
// (artist/artworkURL/appleMusicURL/appleMusicID) can be nil, which made
// -cStringUsingEncoding: hand the callback NULL pointers — guard the block
// and substitute empty strings.
- (void)session:(SHSession *)session didFindMatch:(SHMatch *)match {
    if (!self.recognizeMusic) {
        return;
    }
    for (SHMediaItem *mediaItem in match.mediaItems) {
        const char *title         = mediaItem.title.UTF8String ?: "";
        const char *artist        = mediaItem.artist.UTF8String ?: "";
        const char *artworkURL    = mediaItem.artworkURL.absoluteString.UTF8String ?: "";
        const char *appleMusicURL = mediaItem.appleMusicURL.absoluteString.UTF8String ?: "";
        const char *appleMusicID  = mediaItem.appleMusicID.UTF8String ?: "";
        self.recognizeMusic(title, artist, artworkURL, appleMusicURL, appleMusicID);
    }
}

// SHSessionDelegate: no match for this signature; report empty metadata.
// FIX: guard against a nil block — invoking a nil block is a crash in ObjC.
- (void)session:(SHSession *)session didNotFindMatchForSignature:(SHSignature *)signature error:(NSError *)error {
    if (self.recognizeMusic) {
        self.recognizeMusic("", "", "", "", "");
    }
}

// Starts recording the rendered audio as MP3 at `path`.
// FIX: only flip _isRecording after fopen succeeds (otherwise the render
// callback encodes into a NULL file and stopRecord would fclose(NULL)),
// and refuse to start twice, which leaked the previous LAME context and
// file handle.
- (void)startRecord:(char *)path{
    if (_isRecording) {
        printf("已在录制中，忽略重复的 startRecord\n");
        return;
    }
    _recordTime = 0;
    printf("开始录制 路径 %s\n",path);

    // Configure the LAME encoder to match the playback spec.
    _lame = lame_init();
    lame_set_in_samplerate(_lame, _spec.freq);    // input sample rate
    lame_set_num_channels(_lame, _spec.channels); // input channel count
    lame_set_brate(_lame, 128);                   // target bitrate, kbps
    lame_set_mode(_lame, STEREO);
    lame_set_quality(_lame, 2);                   // 2 = near-best quality, slower
    lame_init_params(_lame);

    _mp3_file = fopen(path, "ab"); // append so repeated sessions extend the file
    if (_mp3_file == NULL) {
        printf("无法打开录制文件 %s\n", path);
        lame_close(_lame);
        _lame = NULL;
        return;
    }
    _isRecording = YES;
}

// Stops recording, draining LAME's internal buffer so the MP3 ends cleanly.
// FIX: without lame_encode_flush the final buffered frames were silently
// dropped; also null out the handles so a stale FILE*/lame pointer cannot
// be reused.
- (void)stopRecord{
    if (_isRecording) {
        _isRecording = NO; // stop the render callback from encoding first

        // Drain any samples still buffered inside the encoder.
        unsigned char mp3_buffer[7200];
        int mp3_bytes = lame_encode_flush(_lame, mp3_buffer, (int)sizeof(mp3_buffer));
        if (mp3_bytes > 0 && _mp3_file != NULL) {
            fwrite(mp3_buffer, mp3_bytes, 1, _mp3_file);
        }

        if (_mp3_file != NULL) {
            fclose(_mp3_file);
            _mp3_file = NULL;
        }
        lame_close(_lame);
        _lame = NULL;
    }
    printf("结束录制,录制的时长 %f 秒\n",_recordTime);
}

// Select one of the system-provided iPod EQ factory presets by index:
//  0 Disabled          1 Acoustic         2 Bass Booster     3 Bass Reducer
//  4 Classical         5 Dance            6 Deep             7 Electronic
//  8 Flat              9 Hip-Hop         10 Jazz            11 Latin
// 12 Loudness         13 Lounge          14 Piano           15 Pop
// 16 R&B              17 Rock            18 Small Speakers  19 Spoken Word
// 20 Treble Booster   21 Treble Reducer  22 Vocal Booster
// Applies the factory EQ preset at `index` to the EQ unit.
// FIX: bounds-check the index first — CFArrayGetValueAtIndex aborts the
// process on an out-of-range index — and report a failed property set
// instead of leaving `status` unused.
- (void)setEQ:(NSInteger)index
{
    NSArray *presetsArray = self.iPodEQPresetsArray; // strong local keeps the array alive
    if (presetsArray == nil || index < 0 || index >= (NSInteger)presetsArray.count) {
        NSLog(@"setEQ: preset index %ld out of range", (long)index);
        return;
    }
    AUPreset *preset = (AUPreset *)CFArrayGetValueAtIndex((__bridge CFArrayRef)presetsArray, index);
    if (preset != nil) {
        NSLog(@"使用系统预设的均衡器： %@ %d",preset->presetName,(int)preset->presetNumber);
        OSStatus status = AudioUnitSetProperty(_EQUnit, kAudioUnitProperty_PresentPreset, kAudioUnitScope_Global, 0, preset, sizeof(AUPreset));
        if (status != noErr) {
            NSLog(@"setEQ: AudioUnitSetProperty(PresentPreset) failed (%d)", (int)status);
        }
    }
}

// Toggles song recognition: index 1 enables it, any other value disables.
- (void)setRecognize:(NSInteger)index{
    NSLog(@"设置识别 %zd",index);
    _isRecognize = (index == 1) ? YES : NO;
}

// Returns the EQ unit's factory preset list (an array of AUPreset values),
// or nil when the property cannot be read.
// FIX: kAudioUnitProperty_FactoryPresets hands back a CFArray the caller is
// responsible for releasing — the plain __bridge cast leaked it on every
// call; use CFBridgingRelease to transfer ownership to ARC. Also replace
// assert(), which is compiled out in release builds, with a runtime check.
- (NSArray*)iPodEQPresetsArray
{
    CFArrayRef presets = NULL;
    UInt32 size = sizeof(presets);
    OSStatus status = AudioUnitGetProperty(_EQUnit, kAudioUnitProperty_FactoryPresets, kAudioUnitScope_Global, 0, &presets, &size);
    if (status != noErr || presets == NULL) {
        ALOGE("iPodEQPresetsArray: AudioUnitGetProperty failed (%d)", (int)status);
        return nil;
    }
    return CFBridgingRelease(presets);
}

@end


