	//
	//  JsVoiceInputView.m
	//  DuTe
	//
	//  Created by Gikki Ares on 2020/12/23.
	//  Copyright © 2020 vgemv. All rights reserved.
	//

#import "JsVoiceInputView.h"
#import<Speech/Speech.h>
#import<AVFoundation/AVFoundation.h>
#import <JsUtilitySdk/JsUtilitySdk.h>
//#import <JsUtilitySdk/JsUtilitySdk.h>

#define Prefix(x) Js##x

// Minimal view-model for the voice-input view. `action` is a one-shot UI
// command consumed by updateUi: — observed values there are:
//   0 = layout refresh only, 1 = enter recording state, 2 = return to idle.
@interface Prefix(VoiceInputViewVm) : NSObject

// One-shot UI command; updateUi: resets it to 0 after handling it.
@property (nonatomic,assign) int action;

@end

@implementation Prefix(VoiceInputViewVm)


@end


// Private class extension: speech-recognition plumbing and UI state.
@interface Prefix(VoiceInputView)()<
SFSpeechRecognizerDelegate,
Prefix(ButtonLiiOneDelegate),
SFSpeechRecognitionTaskDelegate
>

{
	Prefix(ButtonLiiOne) * mgaoButtonLii;  // record/stop toggle button
	UILabel * lb_inputTint;                // hint label shown while recording
	UIView * mv_separator;                 // hairline separator across the top
	UIImageView * miv;                     // microphone icon inside the button
	NSString * mstr_text;                  // latest transcription text
	
	BOOL mb_isRecording;                   // YES while a capture session is active
}

@property (nonatomic,strong)Prefix(VoiceInputViewVm) * vm; // view-model driving updateUi:

@property(nonatomic,strong)SFSpeechRecognizer*speechRecognizer;// speech recognizer (lazy, zh-CN locale)

@property (nonatomic,strong) SFSpeechAudioBufferRecognitionRequest *recognitionRequest;// live-audio recognition request

@property (nonatomic, strong) SFSpeechRecognitionTask *recognitionTask;// in-flight recognition task

@property (nonatomic,strong) AVAudioEngine *audioEngine;// audio engine capturing microphone buffers

@property (nonatomic,strong) NSTimer * mTimer_finishWatchDog; // 1 s silence watchdog that ends the input

@end

@implementation Prefix(VoiceInputView)

// Designated UIView initializer; performs one-time setup via commonInit.
- (instancetype)initWithFrame:(CGRect)frame {
	self = [super initWithFrame:frame];
	if (self) {
		[self commonInit];
	}
	return self;
}

// One-time setup: requests speech-recognition authorization, configures the
// shared audio session for recording, and builds the initial UI.
- (void)commonInit {
	// Ask for speech-recognition authorization up front so the permission
	// prompt appears before the first recording attempt.
	// NOTE(review): the callback may arrive on a background thread; nothing
	// UI-related happens here, so no main-queue hop is needed.
	[SFSpeechRecognizer requestAuthorization:^(SFSpeechRecognizerAuthorizationStatus status) {
		switch (status) {
			case SFSpeechRecognizerAuthorizationStatusAuthorized:
				// Recognition is available; nothing to do.
				break;
			case SFSpeechRecognizerAuthorizationStatusDenied:
				NSLog(@"用户未授权使用语音识别");
				break;
			case SFSpeechRecognizerAuthorizationStatusRestricted:
				NSLog(@"语音识别在这台设备上受到限制");
				break;
			case SFSpeechRecognizerAuthorizationStatusNotDetermined:
				NSLog(@"语音识别未授权");
				break;
			default:
				break;
		}
	}];

	// Configure the shared audio session for measurement-mode recording.
	// Per Cocoa error conventions, trust the BOOL return value rather than
	// the NSError out-parameter (the original asserted on the error pointer,
	// which may be untouched on success).
	AVAudioSession *audioSession = [AVAudioSession sharedInstance];
	NSError *error = nil;
	BOOL categoryOk = [audioSession setCategory:AVAudioSessionCategoryRecord error:&error];
	BOOL modeOk = [audioSession setMode:AVAudioSessionModeMeasurement error:&error];
	BOOL activeOk = [audioSession setActive:YES
							    withOptions:AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation
									  error:&error];

	// BUG FIX: the original tested (a || b || c), reporting success when ANY
	// step succeeded; recording requires all three to succeed.
	if (!(categoryOk && modeOk && activeOk)) {
		NSLog(@"不支持语音识别");
	}

	_vm = [Prefix(VoiceInputViewVm) new];
	[self updateUi:_vm];
}

// SFSpeechRecognizerDelegate: availability changes are currently ignored.
// NOTE(review): consider disabling the record button when `available` is NO.
- (void)speechRecognizer:(SFSpeechRecognizer*)speechRecognizer availabilityDidChange:(BOOL)available{
	
	
}

// Rebuilds/refreshes the view according to the view-model. Subviews are
// created lazily on first call; `vm.action` selects a one-shot transition
// (1 = recording started, 2 = recording stopped) and is reset afterwards.
- (void)updateUi:(Prefix(VoiceInputViewVm) *)vm {
	// Lazily build the hairline separator once; afterwards only resize it.
	// BUG FIX: the original re-allocated a brand-new UIView into the ivar on
	// every call WITHOUT adding it as a subview, so the separator actually in
	// the hierarchy was orphaned and never sized.
	if (!mv_separator) {
		mv_separator = [UIView new];
		[self addSubview:mv_separator];
		mv_separator.backgroundColor = [UIColor lightGrayColor];
	}
	mv_separator.frame = CGRectMake(0, 0, self.bounds.size.width, 0.5);

	// One-time construction of the record button and its microphone icon.
	if (!mgaoButtonLii) {
		mgaoButtonLii = [Prefix(ButtonLiiOne) new];
		[self addSubview:mgaoButtonLii];
		mgaoButtonLii.delegate = self;
		mgaoButtonLii.frame = [JsGeometryManager frameAlignCenterWithContainerSize:self.frame.size size:CGSizeMake(150, 300)];
		miv = [UIImageView new];
		UIImage * image = [JsBundleManager iconInBundleNamed:@"Microphone_White.png"];
		miv.image = image;
		[mgaoButtonLii.imageView addSubview:miv];

		Prefix(ButtonLiiVm) * btnVm = [mgaoButtonLii vm];
		btnVm.foregroundImageViewFrame = [JsGeometryManager frameAlignCenterWithContainerSize:mgaoButtonLii.frame.size size:CGSizeMake(60, 60) offset:CGPointMake(0, -20)];
		btnVm.labelFont = [UIFont systemFontOfSize:15];
		btnVm.text = @"点击开始语音输入";
		mgaoButtonLii.imageView.backgroundColor = [UIColor darkGrayColor];
		btnVm.isForegroundImageCircle = YES;
		CGSize size = [JsGeometryManager sizeForOneLineStringWithContext:btnVm.text font:btnVm.labelFont];
		btnVm.labelFrame = [JsGeometryManager frameHAlignCenterVBelowWithContainerSize:mgaoButtonLii.frame.size size:size siblingFrame:btnVm.foregroundImageViewFrame offset:CGPointMake(0, 20)];
		[mgaoButtonLii setupUi];
	}

	// Re-layout pass: recentre the button and icon on every update.
	Prefix(ButtonLiiVm) * btnVm = [mgaoButtonLii vm];
	mgaoButtonLii.frame = [JsGeometryManager frameAlignCenterWithContainerSize:self.frame.size size:CGSizeMake(150, 300)];
	CGSize size = [JsGeometryManager sizeForOneLineStringWithContext:btnVm.text font:btnVm.labelFont];
	miv.frame = [JsGeometryManager frameAlignCenterWithContainerSize:btnVm.foregroundImageViewFrame.size size:CGSizeMake(40, 40)];
	[mgaoButtonLii setupUi];

	if (vm.action == 1) {
		// MARK: recording started — red pulsing wave, label slides down.
		// Compute the target frame outside the block so the block does not
		// implicitly capture ivars through bare `mgaoButtonLii`/`btnVm`.
		CGRect rc = [JsGeometryManager frameHAlignCenterVBelowWithContainerSize:mgaoButtonLii.frame.size size:size siblingFrame:btnVm.foregroundImageViewFrame offset:CGPointMake(0, 50)];
		dispatch_async(dispatch_get_main_queue(), ^{
			[self->mgaoButtonLii.imageView js_addWaveAnimationWithWaveColor:[UIColor redColor]];
			[UIView animateWithDuration:0.6 delay:0.6 options:UIViewAnimationOptionCurveLinear animations:^{
				self->mgaoButtonLii.label.frame = rc;
				self->mgaoButtonLii.imageView.backgroundColor = [UIColor redColor];
			} completion:nil];
			self->mgaoButtonLii.label.text = @"点击停止语音输入";
			self->mgaoButtonLii.label.textColor = [UIColor blackColor];
		});
	}
	else if (vm.action == 2) {
		// MARK: recording stopped — restore the idle appearance.
		CGRect rc = [JsGeometryManager frameHAlignCenterVBelowWithContainerSize:mgaoButtonLii.frame.size size:size siblingFrame:btnVm.foregroundImageViewFrame offset:CGPointMake(0, 20)];
		[UIView animateWithDuration:0.2 animations:^{
			self->mgaoButtonLii.label.frame = rc;
			self->mgaoButtonLii.imageView.backgroundColor = [UIColor darkGrayColor];
		}];
		[mgaoButtonLii.imageView js_removeWaveAnimation];
		mgaoButtonLii.label.text = @"点击开始语音输入";
		mgaoButtonLii.label.textColor = [UIColor blackColor];
	}

	self.backgroundColor = [UIColor lightGrayColor];
	// Actions are one-shot: reset so layout-only calls don't replay them.
	vm.action = 0;
	_vm = vm;
}



// NOTE(review): automatic re-layout on bounds change is disabled; layout is
// driven explicitly through updateUi: instead.
- (void)layoutSubviews {
	[super layoutSubviews];
	
//	[self updateUi:_vm];
}

#pragma mark----语音识别

// Lazy getter for the recognizer, locked to the Mandarin (zh-CN) locale.
// Self is set as delegate to receive availability-change callbacks.
- (SFSpeechRecognizer *)speechRecognizer {
	if (_speechRecognizer == nil) {
		NSLocale *mandarin = [[NSLocale alloc] initWithLocaleIdentifier:@"zh-CN"];
		_speechRecognizer = [[SFSpeechRecognizer alloc] initWithLocale:mandarin];
		_speechRecognizer.delegate = self;
	}
	return _speechRecognizer;
}

#pragma mark 2 InnerFunction
// Tears down the active capture session: stops the engine, ends the audio
// stream on the pending request, and cancels the recognition task.
// No-op when not currently recording.
- (void)stopVoiceInput {
	if (!mb_isRecording) {
		return;
	}
	lb_inputTint.hidden = YES;
	[self.audioEngine stop];
	if (_recognitionRequest) {
		[_recognitionRequest endAudio];
	}
	if (_recognitionTask) {
		[_recognitionTask cancel];
		_recognitionTask = nil;
	}
}

// Switches the UI back to the idle ("tap to start") state.
- (void)stopVoiceUi {
	mb_isRecording = NO;
	_vm.action = 2;
	[self updateUi:_vm];
}


#pragma mark---开始录音
// Starts a microphone capture + streaming recognition session. Partial
// results update mstr_text; a 1-second silence watchdog ends the session,
// after which the delegate receives the accumulated text.
- (void)startVoiceInput{
	// Flip the view-model into "recording" mode and refresh the UI.
	// (BUG FIX: the original set mb_isRecording = YES twice.)
	_vm.action = 1;
	mb_isRecording = YES;
	[self updateUi:_vm];

	lb_inputTint.hidden = NO;

	// Cancel any task left over from a previous session.
	if (self.recognitionTask) {
		[self.recognitionTask cancel];
		self.recognitionTask = nil;
	}
	self.recognitionRequest = [[SFSpeechAudioBufferRecognitionRequest alloc] init];
	AVAudioInputNode *inputNode = self.audioEngine.inputNode;
	NSAssert(inputNode,@"录入设备没有准备好");
	NSAssert(self.recognitionRequest, @"请求初始化失败");
	// Deliver partial hypotheses so the watchdog can detect ongoing speech.
	self.recognitionRequest.shouldReportPartialResults = true;
	__weak typeof(self) weakSelf = self;

	// Block-based recognition (delegate variant kept for reference):
	// self.recognitionTask = [self.speechRecognizer recognitionTaskWithRequest:self.recognitionRequest delegate:self];
	self.recognitionTask = [self.speechRecognizer recognitionTaskWithRequest:self.recognitionRequest resultHandler:^(SFSpeechRecognitionResult * _Nullable result, NSError * _Nullable error) {
		// BUG FIX: the original mixed `strongSelf` with direct `self->` ivar
		// access, which captured self strongly anyway and defeated the
		// weak/strong dance. Use strongSelf consistently and bail out early
		// if the view has been deallocated.
		__strong typeof(weakSelf) strongSelf = weakSelf;
		if (!strongSelf) return;

		bool isFinal = false;

		if (result) {
			// Keep the best transcription so far.
			strongSelf->mstr_text = [[result bestTranscription] formattedString];
			NSLog(@"Text is %@", strongSelf->mstr_text);
			isFinal = [result isFinal];
		}

		// Any callback means new activity — reset the silence watchdog.
		if (strongSelf->_mTimer_finishWatchDog) {
			[strongSelf->_mTimer_finishWatchDog invalidate];
			strongSelf->_mTimer_finishWatchDog = nil;
		}

		if (error || isFinal) {
			NSLog(@"This voice input is over");
			[strongSelf.audioEngine stop];
			[inputNode removeTapOnBus:0];
			[strongSelf stopVoiceInput];
			[strongSelf stopVoiceUi];
			if (!strongSelf->mstr_text) {
				strongSelf->mstr_text = @"";
			}
			if (strongSelf->_delegate && [strongSelf->_delegate respondsToSelector:@selector(jsVoiceInputView:getText:)]) {
				[strongSelf->_delegate jsVoiceInputView:strongSelf getText:strongSelf->mstr_text];
			}
		}
		else {
			// No new input within 1 s is treated as end of utterance.
			// BUG FIX: schedule the watchdog on the main queue — this
			// callback can arrive on a background thread with no running
			// run loop, where a scheduled NSTimer would never fire.
			dispatch_async(dispatch_get_main_queue(), ^{
				__strong typeof(weakSelf) innerSelf = weakSelf;
				if (!innerSelf) return;
				if (innerSelf->_mTimer_finishWatchDog) {
					[innerSelf->_mTimer_finishWatchDog invalidate];
				}
				innerSelf->_mTimer_finishWatchDog = [NSTimer scheduledTimerWithTimeInterval:1 repeats:NO block:^(NSTimer * _Nonnull timer) {
					NSLog(@"The vioce Time is up.");
					[weakSelf stopVoiceInput]; // Triggers the completion path above.
				}];
			});
		}
	}];

	AVAudioFormat * recordingFormat = [inputNode outputFormatForBus:0];
	// Remove any previous tap first, otherwise installing a second tap can
	// raise a 'com.apple.coreaudio.avfaudio' exception.
	[inputNode removeTapOnBus:0];
	[inputNode installTapOnBus:0 bufferSize:1024 format:recordingFormat block:^(AVAudioPCMBuffer*_Nonnull buffer,AVAudioTime*_Nonnull when) {
		__strong typeof(weakSelf) strongSelf = weakSelf;
		if (strongSelf.recognitionRequest) {
			[strongSelf.recognitionRequest appendAudioPCMBuffer:buffer];
		}
	}];
	[self.audioEngine prepare];

	// BUG FIX: check the BOOL return value instead of asserting on the
	// NSError pointer (which may be untouched on success).
	NSError * error = nil;
	if (![self.audioEngine startAndReturnError:&error]) {
		NSLog(@"audioEngine failed to start: %@", error);
	}
}

#pragma mark---创建录音引擎

// Lazily created AVAudioEngine used to capture microphone buffers.
- (AVAudioEngine *)audioEngine {
	if (_audioEngine == nil) {
		_audioEngine = [[AVAudioEngine alloc] init];
	}
	return _audioEngine;
}

//
//#pragma mark---识别本地音频文件
//
//- (void)recognizeLocalAudioFile:(UIButton*)sender {
//
//	NSLocale *local =[[NSLocale alloc] initWithLocaleIdentifier:@"zh_CN"];
//
//	SFSpeechRecognizer *localRecognizer =[[SFSpeechRecognizer alloc] initWithLocale:local];
//
//	NSURL *url =[[NSBundle mainBundle] URLForResource:@"录音.m4a" withExtension:nil];
//
//	if(!url)return;
//
//	SFSpeechURLRecognitionRequest *res =[[SFSpeechURLRecognitionRequest alloc] initWithURL:url];
//
//		//	__weak typeof(self) weakSelf = self;
//
//	[localRecognizer recognitionTaskWithRequest:res resultHandler:^(SFSpeechRecognitionResult*_Nullable result,NSError*_Nullable error) {
//		if(error) {
//			NSLog(@"语音识别解析失败,%@",error);
//		}
//		else{
//			NSString * str = result.bestTranscription.formattedString;
//			NSLog(@"%@",str);
//		}
//	}];
//}


#pragma mark
// SFSpeechRecognitionTaskDelegate: called when the task first detects speech
// in the source audio (invoked once per task). Log-only stub.
- (void)speechRecognitionDidDetectSpeech:(SFSpeechRecognitionTask *)task {
	NSLog(@"Did detect speech");
}

// SFSpeechRecognitionTaskDelegate: called repeatedly with partial
// (non-final) hypotheses while audio streams in. Log-only stub.
- (void)speechRecognitionTask:(SFSpeechRecognitionTask *)task didHypothesizeTranscription:(SFTranscription *)transcription {
	NSLog(@"Got  result");
	NSString *partialText = [transcription formattedString];
	NSLog(@"Temp String is %@", partialText);
}

	// Called only for final recognitions of utterances. No more about the utterance will be reported
// SFSpeechRecognitionTaskDelegate: called once with the final recognition of
// an utterance. Stores the text, tears down the session, and forwards the
// result to the delegate (only used when the delegate-based task is active).
- (void)speechRecognitionTask:(SFSpeechRecognitionTask *)task didFinishRecognition:(SFSpeechRecognitionResult *)recognitionResult {
	NSLog(@"Got final result");
	NSString *finalText = [[recognitionResult bestTranscription] formattedString];
	NSLog(@"String is %@", finalText);
	mstr_text = finalText;
	[self stopVoiceInput];
	if (_delegate && [_delegate respondsToSelector:@selector(jsVoiceInputView:getText:)]) {
		[_delegate jsVoiceInputView:self getText:mstr_text];
	}
}

	// SFSpeechRecognitionTaskDelegate: called when the task is no longer
	// accepting new audio but may still be finishing final processing.
	// Log-only stub.
- (void)speechRecognitionTaskFinishedReadingAudio:(SFSpeechRecognitionTask *)task {
	NSLog(@"FinishReading!");
}

	// SFSpeechRecognitionTaskDelegate: called when the task has been
	// cancelled by the client app, the user, or the system. Log-only stub.
- (void)speechRecognitionTaskWasCancelled:(SFSpeechRecognitionTask *)task {
	NSLog(@"Cancelled!");
}

	// SFSpeechRecognitionTaskDelegate: called when recognition of all
	// requested utterances is finished. If `successfully` is NO, the task's
	// error property carries the failure. Log-only stub.
- (void)speechRecognitionTask:(SFSpeechRecognitionTask *)task didFinishSuccessfully:(BOOL)successfully {
	NSLog(@"Finished!");
}

// Button tap toggles between starting and stopping voice capture.
- (void)jsButtonLiiOneClicked:(Prefix(ButtonLiiOne) *)button {
	if (mb_isRecording) {
		[self stopVoiceInput];
	} else {
		[self startVoiceInput];
	}
}

@end

