//
//  VoiceRecognition.m
//  NiuBi
//
//  Created by chuangjia on 29/9/15.
//  Copyright © 2015 Danpin Inc. All rights reserved.
//

#import "VoiceRecognition.h"
#import "BDVoiceRecognitionClient.h"
#import "BDVRSConfig.h"
#define API_KEY @"9HiuaugOu2oo8VZga3N1LsRO" // 请修改为您在百度开发者平台申请的API_KEY
#define SECRET_KEY @"1e4089206d8916598cabd0d547a3ef92" // 请修改您在百度开发者平台申请的SECRET_KEY
#define APPID @"6927298" // 请修改为您在百度开发者平台申请的APP ID

// Private class extension: adopts the Baidu SDK delegate protocol and stores
// the client callback.
@interface VoiceRecognition ()<MVoiceRecognitionClientDelegate>
{
    
}
// Callback registered via -monitorVoiceRecognition:. Invoked as (payload, tag)
// where the tag strings used in this file are:
//   @"1" — partial (flush) recognition text
//   @"2" — final recognition candidate text
//   @"5" — error message text
@property(nonatomic,copy) void(^actionBlock)(id,id);
@end

@implementation VoiceRecognition
// Returns the process-wide VoiceRecognition singleton.
// The instance is created lazily, exactly once, via dispatch_once.
+ (instancetype)sharedInstance
{
    static dispatch_once_t onceToken;
    static VoiceRecognition *sharedRecognizer = nil;
    dispatch_once(&onceToken, ^{
        sharedRecognizer = [[VoiceRecognition alloc] init];
    });
    return sharedRecognizer;
}

// Designated initializer. Configures the shared Baidu recognition client
// (keys, mode, language, level metering) as a side effect.
// Fix: return type changed from `id` to `instancetype` — the modern,
// type-safe convention for init methods; callers are unaffected.
- (instancetype)init {
    self = [super init];
    if (self) {
        [self initVoiceRecognition];
    }
    return self;
}
// No non-memory cleanup required; under ARC ivars are released automatically.
// (As a dispatch_once singleton this instance is never deallocated in practice.)
-(void)dealloc
{
//    NSLog(@"%s", __PRETTY_FUNCTION__);
}
// Registers the callback that receives recognition results and errors.
// The block is copied; any previously registered callback is replaced.
-(void)monitorVoiceRecognition:(void(^)(id,id))callback
{
    _actionBlock = [callback copy];
}
// One-time configuration of the Baidu recognition SDK: developer keys,
// recognition mode, city ID, NLU flag, contact recognition, language,
// and optional volume-level metering.
-(void)initVoiceRecognition
{
    BDVoiceRecognitionClient *client = [BDVoiceRecognitionClient sharedInstance];
    BDVRSConfig *config = [BDVRSConfig sharedInstance];

    // Developer credentials issued by the Baidu developer platform.
    [client setApiKey:API_KEY withSecretKey:SECRET_KEY];
    // Recognition mode; the SDK defaults to input mode.
    [client setPropertyList:@[config.recognitionProperty]];
    // City ID — only effective when the properties include EVoiceRecognitionPropertyMap.
    [client setCityID: 1];
    // Natural-language understanding — only effective in search mode.
    [client setConfig:@"nlu" withFlag:config.isNeedNLU];
    // Enable contact-name recognition.
    [client setConfig:@"enable_contacts" withFlag:YES];
    // Recognition language.
    [client setLanguage:config.recognitionLanguage];

    // Optional: monitor the input volume level.
    if (config.voiceLevelMeter)
    {
        // If metering cannot be started, reset the switch so the stored
        // setting matches the actual SDK state.
        if (![client listenCurrentDBLevelMeter])
        {
            config.voiceLevelMeter = NO;
        }
    }
    else
    {
        [client cancelListenCurrentDBLevelMeter];
    }
}
//
// Starts a recognition session with self as the SDK delegate.
// Fixes: the original built a status string that was never used (dead local)
// and returned silently on failure. Start-failure codes
// (EVoiceRecognitionStartWork*) are already mapped to user-facing messages by
// -createErrorViewWithErrorType:, so failures are now routed there and reach
// the registered actionBlock (tag @"5") instead of being swallowed.
-(void)startVoiceRecognition
{
    int startStatus = [[BDVoiceRecognitionClient sharedInstance] startVoiceRecognition:self];
    if (startStatus != EVoiceRecognitionStartWorking) // start failed — report it
    {
        [self createErrorViewWithErrorType:startStatus];
    }
}
// Tells the SDK the user has finished speaking; recognition of the captured
// audio continues and results arrive through the delegate callbacks.
- (void)finishRecord
{
    BDVoiceRecognitionClient *client = [BDVoiceRecognitionClient sharedInstance];
    [client speakFinish];
}
// Aborts the current recognition session; no result will be delivered.
-(void)cancelVoiceRecognition
{
    BDVoiceRecognitionClient *client = [BDVoiceRecognitionClient sharedInstance];
    [client stopVoiceRecognition];
}
#pragma mark - MVoiceRecognitionClientDelegate
// SDK work-status delegate. Forwards recognition text to the registered
// actionBlock: partial results with tag @"1", final results with tag @"2".
// Fixes vs. original:
//  - FlushData used objectAtIndex:0, which throws on an empty array; use the
//    nil-safe firstObject instead.
//  - Search-mode Finish delivered candidate 0 to the callback TWICE (once in
//    the loop, once in a trailing re-send block); each candidate is now
//    delivered exactly once.
//  - Input-mode Finish composed the result and then discarded it; it is now
//    delivered with tag @"2" like the search-mode results.
//  - Removed the shadowed/unused tmpString locals.
- (void)VoiceRecognitionClientWorkStatus:(int) aStatus obj:(id)aObj
{
    switch (aStatus)
    {
        case EVoiceRecognitionClientWorkStatusFlushData: // continuous partial result
        {
            NSString *text = [aObj firstObject]; // nil-safe on an empty payload
            if (self.actionBlock) {
                self.actionBlock(text,@"1");
            }
            break;
        }
        case EVoiceRecognitionClientWorkStatusFinish: // recognition completed with a result
        {
            [self createRunLogWithStatus:aStatus];

            if ([[BDVoiceRecognitionClient sharedInstance] getRecognitionProperty] != EVoiceRecognitionPropertyInput)
            {
                // Search-mode result is an array of candidate strings,
                // e.g. ["公园", "公元"]. Deliver each candidate once.
                NSArray *candidates = (NSArray *)aObj;
                for (NSString *candidate in candidates)
                {
                    if (self.actionBlock) {
                        self.actionBlock(candidate,@"2");
                    }
                }
            }
            else
            {
                // Input-mode result must be assembled from the raw payload.
                NSString *composed = [[BDVRSConfig sharedInstance] composeInputModeResult:aObj];
                if (self.actionBlock) {
                    self.actionBlock(composed,@"2");
                }
            }
            break;
        }
        case EVoiceRecognitionClientWorkStatusReceiveData:
        {
            // Segment result for continuous input mode.
            // NOTE(review): the original composed this result and discarded it;
            // the call is kept (in case the SDK expects it) but nothing is
            // forwarded — confirm whether it should also reach actionBlock.
            (void)[[BDVRSConfig sharedInstance] composeInputModeResult:aObj];
            break;
        }
        case EVoiceRecognitionClientWorkStatusEnd: // user finished speaking; awaiting server result
        {
            [self createRunLogWithStatus:aStatus];
            break;
        }
        case EVoiceRecognitionClientWorkStatusCancel:
        {
            [self createRunLogWithStatus:aStatus];
            break;
        }
        case EVoiceRecognitionClientWorkStatusStartWorkIng: // engine started; user may speak
        {
            break;
        }
        // Lifecycle notifications that require no handling here.
        case EVoiceRecognitionClientWorkStatusNone:
        case EVoiceRecognitionClientWorkPlayStartTone:
        case EVoiceRecognitionClientWorkPlayStartToneFinish:
        case EVoiceRecognitionClientWorkStatusStart:
        case EVoiceRecognitionClientWorkPlayEndToneFinish:
        case EVoiceRecognitionClientWorkPlayEndTone:
        {
            break;
        }
        case EVoiceRecognitionClientWorkStatusNewRecordData:
        {
            break;
        }
        default:
        {
            break;
        }
    }
}
// SDK error delegate. Forwards the error to the shared error handler, which
// maps it to a user-facing message and calls actionBlock with tag @"5".
- (void)VoiceRecognitionClientErrorStatus:(int) aStatus subStatus:(int)aSubStatus
{
    // aSubStatus carries the specific error code, so it is forwarded instead
    // of the coarser aStatus.
    [self createErrorViewWithErrorType:aSubStatus];
}
// SDK network-status delegate: toggles the status-bar network activity
// indicator while the recognizer is talking to the server.
- (void)VoiceRecognitionClientNetWorkStatus:(int) aStatus
{
    if (aStatus == EVoiceRecognitionClientNetWorkStatusStart)
    {
        [self createRunLogWithStatus:aStatus];
        [[UIApplication sharedApplication] setNetworkActivityIndicatorVisible:YES];
    }
    else if (aStatus == EVoiceRecognitionClientNetWorkStatusEnd)
    {
        [self createRunLogWithStatus:aStatus];
        [[UIApplication sharedApplication] setNetworkActivityIndicatorVisible:NO];
    }
}

// Debug hook: receives every SDK work/network status transition.
// Fix: the original contained an 80-line switch whose every assignment and
// the final NSLog were commented out — `statusMsg` was always nil and unused,
// so the entire switch was dead code. It is removed; behavior is unchanged
// (the method was already a no-op). To restore status logging, log here, e.g.:
//     NSLog(@"VoiceRecognitionClientWorkStatus===%d", aStatus);
// Status codes observed by this class include the EVoiceRecognitionClientWork*
// lifecycle values (none / tones / start / receive / finish / end / cancel /
// error) and the EVoiceRecognitionClientNetWork* start/end values.
- (void)createRunLogWithStatus:(int)aStatus
{
    (void)aStatus; // intentionally a no-op; logging is disabled
}


// Maps an SDK error / start-status code to its user-facing message (shipped
// in Chinese, unchanged) and delivers it via actionBlock with tag @"5".
// Unrecognized codes are delivered as an empty string, as before.
- (void)createErrorViewWithErrorType:(int)aStatus
{
    // Codes with a dedicated message. The original switch compiled, so all
    // constants are distinct and safe to use as dictionary keys.
    NSDictionary *messageByCode = @{
        @(EVoiceRecognitionClientErrorStatusIntrerruption)      : @"录音中断",
        @(EVoiceRecognitionClientErrorStatusChangeNotAvailable) : @"麦克风临时被占用",
        @(EVoiceRecognitionClientErrorStatusUnKnow)             : @"一般错误",
        @(EVoiceRecognitionClientErrorStatusNoSpeech)           : @"咋不说话呢？",
        @(EVoiceRecognitionClientErrorStatusShort)              : @"说的太少啦",
        @(EVoiceRecognitionClientErrorStatusException)          : @"前端库出现异常",
        @(EVoiceRecognitionClientErrorNetWorkStatusError)       : @"网络连接错误, 请重试",
        @(EVoiceRecognitionClientErrorNetWorkStatusUnusable)    : @"没有网络连接",
        @(EVoiceRecognitionClientErrorNetWorkStatusTimeOut)     : @"网络超时",
        @(EVoiceRecognitionClientErrorNetWorkStatusParseError)  : @"服务器解析错误",
        @(EVoiceRecognitionStartWorkNoAPIKEY)                   : @"没有设置API KEY",
        @(EVoiceRecognitionStartWorkGetAccessTokenFailed)       : @"获取token出错",
        @(EVoiceRecognitionStartWorkDelegateInvaild)            : @"没有语音识别代理方法",
        @(EVoiceRecognitionStartWorkNetUnusable)                : @"没有网络",
        @(EVoiceRecognitionStartWorkRecorderUnusable)           : @"没检测到麦克风",
        @(EVoiceRecognitionStartWorkNOMicrophonePermission)     : @"没麦克风权限，请在系统“设置”→“隐私”→“麦克风”中打开开关",
    };

    // Server-side error codes share one formatted message.
    NSSet *serverErrorCodes = [NSSet setWithArray:@[
        @(EVoiceRecognitionClientErrorNetWorkStatusServerNoFindResult),        // no match found
        @(EVoiceRecognitionClientErrorNetWorkStatusServerSpeechQualityProblem),// audio too quiet
        @(EVoiceRecognitionClientErrorNetWorkStatusServerParamError),          // protocol parameter error
        @(EVoiceRecognitionClientErrorNetWorkStatusServerRecognError),         // recognition failed
        @(EVoiceRecognitionClientErrorNetWorkStatusServerAppNameUnknownError), // appName validation error
        @(EVoiceRecognitionClientErrorNetWorkStatusServerUnknownError),        // unknown error
    ]];

    NSString *errorMsg = messageByCode[@(aStatus)];
    if (errorMsg == nil)
    {
        if ([serverErrorCodes containsObject:@(aStatus)])
        {
            errorMsg = [NSString stringWithFormat:@"%@-%d",@"服务器返回错误！！",aStatus] ;
        }
        else
        {
            errorMsg = @"";
        }
    }
    if (self.actionBlock) {
        self.actionBlock(errorMsg,@"5");
    }
}
@end
