//
//  NeoNuiManager.m
//  GLUZSwift
//
//  Created by xyanl on 2025/4/3.
//
/**
 Usage flow

 1. Initialize the SDK and the recording instance.
 2. Configure parameters according to business needs.
 3. Call nui_dialog_start to begin recognition.
 4. Open the recorder when the audio-state callback onNuiAudioStateChanged fires.
 5. Supply recorded audio data in the onNuiNeedAudioData callback.
 6. Read recognition results in the EVENT_ASR_PARTIAL_RESULT and EVENT_SENTENCE_END event callbacks.
 7. Call nui_dialog_cancel to end recognition.
 8. When done, release SDK resources with the nui_release interface.
 */
#import "NeoNuiManager.h"
#import "nuisdk.framework/Headers/NeoNui.h"
#import <ZegoExpressEngine/ZegoExpressEngine.h>

@interface NeoNuiManager ()<ZegoAudioDataHandler, NeoNuiSdkDelegate>

/// Lazily-created NeoNui SDK instance (see the -neoNui getter); this manager is its delegate.
@property(nonatomic,strong) NeoNui* neoNui;
/// Microphone PCM buffer: filled by onCapturedAudioData, drained by onNuiNeedAudioData:length:.
/// NOTE(review): also used as the @synchronized lock object, and re-created on STATE_OPEN —
/// confirm the reassignment cannot race with the audio callbacks.
@property(nonatomic,strong) NSMutableData *recordedVoiceData;
/// Current recognition language; selects the websocket endpoint and ITN setting.
@property (nonatomic, assign) NeoNuiLanguageType languageType;
/// YES: microphone capture (Zego audio observer) is running; NO: it is stopped.
@property (nonatomic, assign) BOOL isOpenAudio;

/// Manager lifecycle state (running / paused / stopped); used to detect unexpected SDK pauses.
@property (nonatomic, assign) NeoNuiState state;

@end

@implementation NeoNuiManager

/// Process-wide singleton accessor (thread-safe via dispatch_once).
+ (NeoNuiManager *)shared {
    static NeoNuiManager *instance = nil;
    static dispatch_once_t once;
    dispatch_once(&once, ^{
        instance = [[self alloc] init];
    });
    return instance;
}

/// Lazily creates the underlying NeoNui SDK instance and registers self as its delegate.
- (NeoNui *)neoNui {
    if (!_neoNui) {
        NeoNui *instance = [NeoNui get_instance];
        instance.delegate = self;
        _neoNui = instance;
    }
    return _neoNui;
}

/// Configures language + delegate, then initializes the SDK off the main thread
/// and starts the first recognition dialog.
- (void)initNeoNui:(NeoNuiLanguageType)languageType delegate:(nonnull id<NeoNuiDelegate>)delegate {
    _languageType = languageType;
    _delegate = delegate;
    // SDK initialization must not run on the main thread.
    dispatch_async(dispatch_get_global_queue(0, 0), ^{
        // Account-related fields must be filled in per genInitParams before the service is reachable.
        NSString *initParams = [self genInitParams];
        NuiResultCode resultCode = [self.neoNui nui_initialize:[initParams UTF8String]
                                                      logLevel:NUI_LOG_LEVEL_NONE
                                                       saveLog:NO];
        NSLog(@"[NeoNui] 初始化结果: %d", resultCode);
        NSString *runtimeParams = [self genParams];
        [self.neoNui nui_set_params:[runtimeParams UTF8String]];
        NSLog(@"[NeoNui] %s", [self.neoNui nui_get_version]);
        [self startNeoNui];
    });
}

/// Switches the recognition language: re-opens capture if needed, then releases
/// and re-initializes the SDK (off the main thread) with the new endpoint.
- (void)switchLanguage:(NeoNuiLanguageType)languageType {
    if (!self.isOpenAudio) {
        // Microphone capture was stopped; restart it before switching.
        [self startAudioDataObserver];
    }

    // SDK (re)initialization must not run on the main thread.
    dispatch_async(dispatch_get_global_queue(0, 0), ^{
        [self stopNeoNui];
        self.languageType = languageType;
        // FIX: pass saveLog:NO to match initNeoNui:delegate: — the two init paths
        // previously used different logging settings for no documented reason.
        NuiResultCode code = [self.neoNui nui_initialize:[[self genInitParams] UTF8String] logLevel:NUI_LOG_LEVEL_NONE saveLog:NO];
        NSLog(@"[NeoNui] 初始化结果: %d", code);
        [self.neoNui nui_set_params:[[self genParams] UTF8String]];
        NSLog(@"[NeoNui] version: %s", [self.neoNui nui_get_version]);
        [self startNeoNui];
    });
}

/// Builds the SDK initialization JSON (account/ticket parameters).
/// @return JSON string consumed by nui_initialize.
- (NSString*)genInitParams {
    // NOTE(review): app_key/token are placeholders and apikey is hard-coded for the
    // self-hosted gateway below — confirm they should not come from the login session.
    // NSString *token = [[NSUserDefaults standardUserDefaults] stringForKey:@"token"];
    NSMutableDictionary *ticketJsonDict = @{
        @"app_key": @"default",
        @"apikey": @"3bc9127c78fe827bb726108bb3559ba4",
        @"token": @"default",
    }.mutableCopy;

    // Per-language websocket endpoint. NeoNuiLanguageTypeFileZh deliberately sets no
    // "url" here — presumably configured elsewhere for file recognition; TODO confirm.
    if (self.languageType == NeoNuiLanguageTypeZh) {
        ticketJsonDict[@"url"] = @"wss://1an04la087286.vicp.fun/wsttrans/ws/zh?apikey=3bc9127c78fe827bb726108bb3559ba4";
    } else if (self.languageType == NeoNuiLanguageTypeRu) {
        ticketJsonDict[@"url"] = @"wss://1an04la087286.vicp.fun/wsttrans/ws/ru?apikey=3bc9127c78fe827bb726108bb3559ba4";
    }

    // Required. Service modes per SDK docs:
    //   FullMix = 0, FullCloud = 1 (online realtime), FullLocal = 2,
    //   AsrMix = 3, AsrCloud = 4 (online one-shot), AsrLocal = 5.
    // Modes other than 1/4 require local-feature registration.
    ticketJsonDict[@"service_mode"] = @"1";

    // Save debug audio; only effective when the SDK was initialized with save_log=true.
    ticketJsonDict[@"save_wav"] = @"true";
    // Debug directory for intermediate audio files; must exist and be writable.
    ticketJsonDict[@"debug_path"] = [self createDir];

    // Required; a unique id makes server-side troubleshooting easier.
    // FIX: -setObject:forKey: raises on nil, and "mobile" is absent before login —
    // fall back to a default instead of crashing.
    NSString *mobile = [[NSUserDefaults standardUserDefaults] stringForKey:@"mobile"];
    ticketJsonDict[@"device_id"] = (mobile.length > 0) ? mobile : @"default";

    return [self jsonStringFromDictionary:ticketJsonDict];
}

/// Builds the per-dialog parameter JSON (nls_config + service_type).
/// @return JSON string consumed by nui_set_params.
- (NSString*)genParams {
    // ITN (inverse text normalization: Chinese numerals -> Arabic digits, default false)
    // is only meaningful for Chinese.
    BOOL enableITN = (self.languageType == NeoNuiLanguageTypeZh);

    NSDictionary *nlsConfig = @{
        @"enable_intermediate_result": @(YES),        // stream partial results
        @"enable_inverse_text_normalization": @(enableITN),
        @"enable_punctuation_prediction": @(YES),     // add punctuation in post-processing
        @"sample_rate": @(16000),                     // audio sample rate
        @"sr_format": @"pcm",                         // PCM encoding
    };

    // Required: service type of the speech service; realtime transcription is "4".
    NSDictionary *params = @{
        @"nls_config": nlsConfig,
        @"service_type": @(SERVICE_TYPE_SPEECH_TRANSCRIBER),
    };
    return [self jsonStringFromDictionary:params];
}

/// Serializes `dic` to a pretty-printed UTF-8 JSON string.
/// FIX: the original ignored the error out-param and could raise (invalid JSON
/// object) or return nil, which callers then passed to -UTF8String as NULL.
/// @return The JSON string, or @"{}" when serialization fails.
- (NSString *)jsonStringFromDictionary:(NSDictionary *)dic {
    if (dic == nil || ![NSJSONSerialization isValidJSONObject:dic]) {
        return @"{}";
    }
    NSError *error = nil;
    NSData *data = [NSJSONSerialization dataWithJSONObject:dic options:NSJSONWritingPrettyPrinted error:&error];
    if (data == nil) {
        NSLog(@"[NeoNui] JSON serialize failed: %@", error);
        return @"{}";
    }
    NSString *jsonStr = [[NSString alloc] initWithData:data encoding:NSUTF8StringEncoding];
    return jsonStr ?: @"{}";
}

/// Starts a recognition dialog (push-to-talk mode) and marks the manager running.
- (void)startNeoNui {
    NSLog(@"[NeoNui] startNeoNui");
    self.state = NeoNuiStateRunning;
    const char *dialogParam = [@"" UTF8String];
    [self.neoNui nui_dialog_start:MODE_P2T dialogParam:dialogParam];
}

/// Pauses recognition by cancelling the current dialog (SDK resources are kept).
- (void)pauseNeoNui {
    NSLog(@"[NeoNui] pauseNeoNui");
    self.state = NeoNuiStatePause;
    [self.neoNui nui_dialog_cancel:NO];
}

/// Stops recognition and releases all SDK resources; nui_initialize must be
/// called again before the next session.
- (void)stopNeoNui {
    NSLog(@"[NeoNui] stopNeoNui");
    self.state = NeoNuiStateStop;
    [self.neoNui nui_release];
}

#pragma mark - 音频采集控制用于翻译

// Start audio capture: enable Zego's raw audio data observer and route the
// callbacks to this manager (see the ZegoAudioDataHandler section below).
- (void)startAudioDataObserver {
    // Desired audio data callbacks (bitmask): locally captured + playback streams.
    ZegoAudioDataCallbackBitMask bitmask = ZegoAudioDataCallbackBitMaskCaptured | ZegoAudioDataCallbackBitMaskPlayer;
    
    // Desired audio format: mono, 16 kHz — matches the "sample_rate" sent in genParams.
    ZegoAudioFrameParam *param = [[ZegoAudioFrameParam alloc] init];
    param.channel = ZegoAudioChannelMono;
    param.sampleRate = ZegoAudioSampleRate16K;
    // https://doc-zh.zego.im/article/api?doc=express-audio-sdk_API~objectivec_ios~class~ZegoExpressEngine#create-engine-with-profile-event-handler
    // Automatic gain control on.
    [[ZegoExpressEngine sharedEngine] enableAGC:YES];
    // Acoustic echo cancellation on.
    [[ZegoExpressEngine sharedEngine] enableAEC:YES];
    // Noise suppression on.
    [[ZegoExpressEngine sharedEngine] enableANS:YES];
    // Transient noise suppression on.
    [[ZegoExpressEngine sharedEngine] enableTransientANS:YES];
    
    // Enable raw audio data delivery with the mask/format above.
    [[ZegoExpressEngine sharedEngine] startAudioDataObserver:bitmask param:param];
    // Receive the audio data callbacks on this object.
    [[ZegoExpressEngine sharedEngine] setAudioDataHandler:self];
    NSLog(@"[NeoNui] 开始音频采集");
    
    self.isOpenAudio = YES;
    
}

// Stop audio capture: flag is flipped synchronously, teardown runs in background.
- (void)stopAudioDataObserver {
    NSLog(@"[NeoNui] 停止音频采集");
    // Flip the flag first so e.g. switchLanguage: sees capture as stopped.
    self.isOpenAudio = NO;
    
    // Stop asynchronously so the (often main-thread) caller is not blocked.
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        NSLog(@"[NeoNui] 开始异步停止音频采集");
        
        // Heavier cleanup on the background thread: cancel the current dialog,
        // release the SDK, then stop Zego's raw audio observer.
        [self pauseNeoNui];
        [self stopNeoNui];
        [[ZegoExpressEngine sharedEngine] stopAudioDataObserver];
        
        NSLog(@"[NeoNui] 异步停止音频采集完成");
    });
}

#pragma mark - NeoNuiSdkDelegate
/// NeoNuiSdkDelegate: central dispatch for recognition lifecycle events.
/// @param nuiEvent   Event type reported by the SDK.
/// @param dialog     Dialog handle (unused here).
/// @param wuw        Wake-word result (unused here).
/// @param asr_result JSON payload for result events; error text for EVENT_ASR_ERROR. May be NULL.
/// @param finish     YES when one task lifecycle ended (error or completion).
/// @param code       SDK result/error code.
- (void)onNuiEventCallback:(NuiCallbackEvent)nuiEvent
                    dialog:(long)dialog
                 kwsResult:(const char *)wuw
                 asrResult:(const char *)asr_result
                  ifFinish:(BOOL)finish
                   retCode:(int)code {
    
    if (nuiEvent == EVENT_TRANSCRIBER_COMPLETE) {
        NSLog(@"[NeoNui] EVENT_TRANSCRIBER_COMPLETE 停止语音识别后上报。");
        dispatch_async(dispatch_get_main_queue(), ^{
            if (self.delegate && [self.delegate respondsToSelector:@selector(neoNuiRealtimeTranslationEnd:)]) {
                [self.delegate neoNuiRealtimeTranslationEnd:finish];
            }
        });
        
    } else if (nuiEvent == EVENT_SENTENCE_START) {
        NSLog(@"[NeoNui] EVENT_SENTENCE_START 检测到一句话开始。");
    } else if (nuiEvent == EVENT_SENTENCE_END) {
        // Realtime transcription: one sentence ended; full sentence result available.
        NSString *result = [self payloadResultFromAsrJSON:asr_result];
        if (result) {
            // Deliver on the main thread (UI update; works around an animation glitch).
            dispatch_async(dispatch_get_main_queue(), ^{
                if (self.delegate && [self.delegate respondsToSelector:@selector(neoNuiRealtimeTranslation:languageType:isEnd:)]) {
                    [self.delegate neoNuiRealtimeTranslation:result languageType:self.languageType isEnd:YES];
                }
            });
        }
        
    } else if (nuiEvent == EVENT_ASR_PARTIAL_RESULT) {
        // Intermediate result for the sentence currently in progress.
        NSString *result = [self payloadResultFromAsrJSON:asr_result];
        if (result) {
            NSLog(@"[NeoNui] 语音识别中间结果。 %@", result);
            // Deliver on the main thread (UI update; works around an animation glitch).
            dispatch_async(dispatch_get_main_queue(), ^{
                if (self.delegate && [self.delegate respondsToSelector:@selector(neoNuiRealtimeTranslation:languageType:isEnd:)]) {
                    [self.delegate neoNuiRealtimeTranslation:result languageType:self.languageType isEnd:NO];
                }
            });
        }
        
    } else if (nuiEvent == EVENT_ASR_RESULT) {
        NSLog(@"[NeoNui] EVENT_ASR_RESULT 语音识别最终结果。");
    } else if (nuiEvent == EVENT_VAD_START) {
        NSLog(@"[NeoNui] EVENT_VAD_START 检测到人声起点。");
    } else if (nuiEvent == EVENT_VAD_END) {
        NSLog(@"[NeoNui] EVENT_VAD_END 检测到人声尾点。");
    } else if (nuiEvent == EVENT_ASR_ERROR) {
        // For EVENT_ASR_ERROR, asr_result carries the error message; record the error
        // code (and the task_id inside the message) for troubleshooting.
        NSLog(@"[NeoNui] EVENT_ASR_ERROR error[%d]", code);
        if (code == 240069) {
            NSLog(@"[NeoNui] EVENT_ASR_ERRO socket已关闭。内部服务错误，需要客户端进行重试。");
            // Restart must not run on the main thread.
            dispatch_async(dispatch_get_global_queue(0, 0), ^{
                [self startNeoNui];
            });
        }
        
    } else if (nuiEvent == EVENT_MIC_ERROR) {
        NSLog(@"[NeoNui] MIC ERROR 录音错误，表示SDK连续2秒未收到任何音频，可检查录音系统是否正常。");
    } else {
        NSLog(@"[NeoNui] onNuiEventCallback event %d finish %d", nuiEvent, finish);
    }
    
    // finish == YES means this task's lifecycle ended (error or completion);
    // a new recognition may be started.
    if (finish) {
        dispatch_async(dispatch_get_main_queue(), ^{
            NSLog(@"[NeoNui] finish 为真（可能是发生错误，也可能是完成识别）表示一次任务生命周期结束，可以开始新的识别");
        });
    }
}

/// Safely extracts payload.result from an SDK event's JSON C string.
/// FIX: the original fed asr_result straight into +stringWithUTF8String: and
/// NSJSONSerialization with no guards — a NULL pointer or non-JSON payload
/// raises NSInvalidArgumentException inside the SDK callback.
/// @return The result string, or nil when absent or unparseable.
- (NSString *)payloadResultFromAsrJSON:(const char *)asr_result {
    if (asr_result == NULL) {
        return nil;
    }
    NSString *json = [NSString stringWithUTF8String:asr_result];
    NSData *jsonData = [json dataUsingEncoding:NSUTF8StringEncoding];
    if (jsonData == nil) {
        return nil;
    }
    NSDictionary *dic = [NSJSONSerialization JSONObjectWithData:jsonData options:NSJSONReadingMutableContainers error:nil];
    if (![dic isKindOfClass:[NSDictionary class]]) {
        return nil;
    }
    NSDictionary *payloadDic = dic[@"payload"];
    if (![payloadDic isKindOfClass:[NSDictionary class]]) {
        return nil;
    }
    NSString *result = payloadDic[@"result"];
    return [result isKindOfClass:[NSString class]] ? result : nil;
}

/// NeoNuiSdkDelegate pull-callback: fill `audioData` with up to `len` bytes of
/// buffered microphone PCM.
/// @return Number of bytes written; 0 when the buffer is empty.
- (int)onNuiNeedAudioData:(char *)audioData length:(int)len {
    static int emptyCount = 0; // consecutive empty polls, only for throttled logging
    // FIX: guard against a NULL destination or non-positive length from the SDK.
    if (audioData == NULL || len <= 0) {
        return 0;
    }
    // NOTE(review): the lock object is the buffer itself, which STATE_OPEN replaces;
    // if a session restart races with this callback the two sides may lock different
    // objects — consider @synchronized(self) on all three users of the buffer.
    @synchronized(_recordedVoiceData) {
        NSUInteger available = _recordedVoiceData.length;
        if (available == 0) {
            if (emptyCount++ >= 50) {
                NSLog(@"_recordedVoiceData length = %lu! empty 50times.", (unsigned long)_recordedVoiceData.length);
                emptyCount = 0;
            }
            return 0;
        }
        // FIX: copy directly out of the buffer and trim the consumed prefix in place,
        // replacing the original's two intermediate -subdataWithRange: copies per call.
        // Also removes the unreachable trailing `return 0`.
        int provided = (available > (NSUInteger)len) ? len : (int)available;
        memcpy(audioData, _recordedVoiceData.bytes, (size_t)provided);
        [_recordedVoiceData replaceBytesInRange:NSMakeRange(0, (NSUInteger)provided) withBytes:NULL length:0];
        emptyCount = 0;
        return provided;
    }
}

// Toggle recording according to the audio state requested by the SDK.
- (void)onNuiAudioStateChanged:(NuiAudioState)state {
    if (state == STATE_OPEN) {
        // Fresh PCM buffer for the new recognition session.
        // NOTE(review): reassigning the buffer also swaps the @synchronized lock
        // object used by the audio callbacks — confirm this cannot race with them.
        self.recordedVoiceData = [NSMutableData data];
        NSLog(@"[NeoNui] onNuiAudioStateChanged 开始 STATE_OPEN(%u)", state);
        
    } else if (state == STATE_PAUSE) {
        NSLog(@"[NeoNui] onNuiAudioStateChanged 暂停 STATE_PAUSE(%u)", state);
        if (self.state == NeoNuiStateRunning) {
            // self.state is still Running, so this pause was not requested by us;
            // the SDK paused on its own — restart the dialog to keep recognizing.
            NSLog(@"[NeoNui] onNuiAudioStateChanged 重启");
            [self startNeoNui];
        }
        
    } else if (state == STATE_CLOSE) {
        NSLog(@"[NeoNui] onNuiAudioStateChanged 关闭 STATE_CLOSE(%u)", state);
    }
}

/// Audio energy (RMS) event from the SDK; forwarded to the delegate as-is.
- (void)onNuiRmsChanged:(float)rms {
    // NSLog(@"[NeoNui] onNuiRmsChanged rms=%f", rms);
    id<NeoNuiDelegate> delegate = self.delegate;
    if (delegate && [delegate respondsToSelector:@selector(neoNuiRmsChanged:)]) {
        [delegate neoNuiRmsChanged:rms];
    }
}

#pragma mark - ZegoAudioDataHandler
// ZegoAudioDataHandler — implement the callbacks matching the Bitmask options above.
/// Locally captured audio; delivered once publishing has started.
- (void)onCapturedAudioData:(const unsigned char *)data dataLength:(unsigned int)dataLength param:(ZegoAudioFrameParam *)param {
    // Buffer the PCM for the realtime-transcription pull callback
    // (onNuiNeedAudioData:length: drains this under the same lock).
    @synchronized(_recordedVoiceData) {
        [_recordedVoiceData appendBytes:data length:dataLength];
    }
}


/// Returns the app's Documents directory path.
- (NSString *)dirDoc {
    NSArray<NSString *> *searchPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = searchPaths.firstObject;
    NSLog(@"app_home_doc: %@",documentsDirectory);
    return documentsDirectory;
}

//create dir for saving files
/// Ensures the Documents/voices directory exists and returns its path.
/// @return The directory path (returned even if creation failed, matching the
///         original behavior; callers treat it as a debug path only).
- (NSString *)createDir {
    NSString *documentsPath = [self dirDoc];
    NSString *voicesDirectory = [documentsPath stringByAppendingPathComponent:@"voices"];
    // withIntermediateDirectories:YES also returns YES when the directory already exists.
    NSError *error = nil;
    BOOL created = [[NSFileManager defaultManager] createDirectoryAtPath:voicesDirectory
                                             withIntermediateDirectories:YES
                                                              attributes:nil
                                                                   error:&error];
    if (created) {
        NSLog(@"文件夹创建成功");
    } else {
        // FIX: the original passed error:nil and discarded the failure reason,
        // making directory-creation problems undiagnosable.
        NSLog(@"文件夹创建失败: %@", error);
    }
    return voicesDirectory;
}


@end
