<template>
  <view></view>
</template>
<script>
	import storage from "@/utils/storage.js";

	/** 先引入Recorder （ 需先 npm install recorder-core ）**/
	import Recorder from 'recorder-core';

	/** 引入RecordApp **/
	import RecordApp from 'recorder-core/src/app-support/app.js'
	//【所有平台必须引入】uni-app支持文件
	import '@/uni_modules/Recorder-UniCore/app-uni-support.js'

	import "recorder-core/src/engine/pcm.js"

	// #ifdef MP-WEIXIN
	//可选引入微信小程序支持文件
	import 'recorder-core/src/app-support/app-miniProgram-wx-support.js'
	// #endif


	// #ifdef MP-TOUTIAO
	//可选引入抖音小程序支持文件
	import 'recorder-core/src/app-support/app-douying-support.js'
	// #endif



	//引入阿里云语音识别插件
	import 'recorder-core/src/extensions/asr.aliyun.short.js'




	export default {
		data() {
			return {
				asrTokenApi: "https://api.osisx.com/buyer/chat/asr/token", //API endpoint that issues the ASR token
				// asrTokenApi: "http://192.168.31.97:8888/buyer/chat/asr/token", //local/dev token endpoint (kept for reference)
				asrLang: "普通话", //recognition language passed to the Aliyun ASR API
	
				asrTxt: "" //latest recognized text (live results are also emitted via getAsrTxt)

					,
				SyncID: 0 //sync guard: incrementing it aborts all earlier in-flight start/stop operations

					,
				reclogs: [] //reserved log list; reclog() currently only writes to the console
			}
		},
		mounted() {
			this.isMounted = true;
			RecordApp.UniPageOnShow(this); //onShow may fire before mounted; run again now that the page is ready
		},
		/*#ifdef VUE3*/
		unmounted() /*#endif*/ /*#ifndef VUE3*/ destroyed() /*#endif*/ {
			RecordApp.Stop(); //clean up resources; if recording was left open it is closed here
		},
		onShow() { //this callback does not fire when used as a child component
			if (this.isMounted) RecordApp.UniPageOnShow(this); //onShow may fire before mounted, when the page is not ready yet
		},
		methods: {
			
			//Start recording, then start speech recognition
			recStart() {
				var sid = ++this.SyncID; //capture the sync id; any later increment cancels this sequence
				if (!this.asrTokenApi) {
					this.reclog("需要提供TokenApi", 1);
					return;
				}
				if (this.asr) {
					this.reclog("上次asr未关闭", 1);
					return;
				}
			
				this.reclog("正在请求录音权限...");
				RecordApp.UniWebViewActivate(this); //in the App environment the current page WebView must be activated first
				RecordApp.RequestPermission(() => {
					this.reclog(this.currentKeyTag() + " 已获得录音权限", 2);
					this.recordStart(sid);
					this.recStart__asrStart(sid);
				}, (msg, isUserNotAllow) => {
					if (isUserNotAllow) { //the user denied the recording permission
						//here you should add per-platform code guiding the user to grant the permission
					}
					this.reclog(this.currentKeyTag() + " " +
						(isUserNotAllow ? "isUserNotAllow," : "") + "请求录音权限失败：" + msg, 1);
				});
			},
			
			//Start speech recognition
			recStart__asrStart(sid) { 
				if (sid != this.SyncID) { //a newer operation superseded this one; bail out
					return;
				}
				//create the recognizer; a new instance is required per session, asr objects cannot be reused
				var asr = this.asr = Recorder.ASR_Aliyun_Short({
					tokenApi: this.asrTokenApi,
					apiArgs: {
						lang: this.asrLang
					},
					apiRequest: uni_ApiRequest //request implementation for the tokenApi
						,
					compatibleWebSocket: uni_WebSocket //returns a WebSocket-compatible object
						,
					asrProcess: (text, nextDuration, abortMsg) => {
						/***live recognition result; must return true to keep recognizing, otherwise recognition times out and stops immediately***/
						if (abortMsg) {
							//recognition failed mid-session
							this.reclog("[asrProcess回调]被终止：" + abortMsg, 1);
							this.recCancel("语音识别出错"); //stop recording immediately; further audio would not be recognized anyway
							return false;
						};
			
						this.$emit('getAsrTxt', text)
						
						return nextDuration <= 2 * 60 * 1000; //allow up to 2 minutes of recognized audio (5s less than the recording limit)
					},
					log: (msg, color) => {
						this.reclog(msg, color == 1 ? "#faa" : "#aaa");
					}
				});
				this.reclog("语言：" + asr.set.apiArgs.lang + "，tokenApi：" + asr.set.tokenApi + "，正在打开语音识别...");
				//open recognition; starting asr first and recording after success is recommended
				asr.start(() => { //no special start/stop coordination needed: calling stop blocks an unfinished start from running its callback
					this.reclog("asr.start已开始语音识别", 2);
					// this.recordStart(sid);
				}, (errMsg) => {
					this.reclog("语音识别开始失败，请重试：" + errMsg, 1);
			
					this.recCancel("语音识别开始失败");
				});
			},
			
			 //Start recording
			recordStart(sid) {
				if (sid != this.SyncID) { //a newer operation superseded this one; bail out
					this.reclog("sync cancel recordStart", "#f60");
					return;
				}

				this.reclog(this.currentKeyTag() + " 正在打开录音...");
				RecordApp.UniWebViewActivate(this); //in the App environment the current page WebView must be activated first
				RecordApp.Start({
					type: "pcm",
					bitRate: 16,
					sampleRate: 16000, //16kHz PCM — the format the Aliyun short-asr extension consumes
					onProcess: (buffers, powerLevel, duration, sampleRate, newBufferIdx, asyncEnd) => {
						if (sid != this.SyncID) return; //stale callback from a cancelled session
						if (this.asr) { //live speech recognition is active: feed it the new audio
							this.asr.input(buffers, sampleRate, newBufferIdx);
						}

					},
				}, () => {
					this.reclog(this.currentKeyTag() + " 已开始录音，请讲话（asrProcess中已限制最多识别60*2-5*(2-1)=115秒）...", 2);

				}, (msg) => {
					this.reclog(this.currentKeyTag() + " 开始录音失败：" + msg, 1);
					this.recCancel("开始录音失败"); //also end speech recognition immediately
				});
			},
			
		

			//Tag describing the current recording backend, used in log messages
			currentKeyTag() {
				if (!RecordApp.Current) return "[?]";
				// #ifdef APP
				var tag2 = "Renderjs+H5";
				if (RecordApp.UniNativeUtsPlugin) {
					tag2 = RecordApp.UniNativeUtsPlugin.nativePlugin ? "NativePlugin" : "UtsPlugin";
				}
				return RecordApp.Current.Key + "(" + tag2 + ")";
				// #endif
				return RecordApp.Current.Key;
			},

			//Stop recording and finish speech recognition
			recStop() {
				++this.SyncID; //invalidate any in-flight start sequence

				this.recCancel();
			},
			//Stop asr and the recorder; cancelMsg (optional) marks this as a cancellation for log wording
			recCancel(cancelMsg) {
				this.reclog("正在停止...");

				var asr2 = this.asr;
				this.asr = null; //clear this.asr first to prevent a double stop
				if (!asr2) {
					this.reclog("未开始识别", 1);
				} else {
					//asr.stop and rec.stop need no particular ordering; running both together is fine
					asr2.stop((text, abortMsg) => {
						if (abortMsg) {
							abortMsg = "发现识别中途被终止(一般无需特别处理)：" + abortMsg;
						};
						this.reclog("语音识别完成" + (abortMsg ? "，" + abortMsg : ""), abortMsg ? "#f60" : 2);
						this.reclog("识别最终结果：" + text, 2);
						this.$emit('getAstTxtResult', text)
					}, (errMsg) => {
						this.reclog("语音识别" + (cancelMsg ? "被取消" : "结束失败") + "：" + errMsg, 1);
					});
				};

				RecordApp.Stop((aBuf, duration, mime) => {
					//recorded audio data received; could be played back for reference
					var recSet = (RecordApp.GetCurrentRecOrNull() || {
						set: {
							type: "pcm"
						}
					}).set; //fall back to a minimal set when the recorder is already gone
					this.reclog("已录制[" + mime + "]：" + this.formatTime(duration, 1) + " " + aBuf.byteLength +
						"字节 " +
						recSet.sampleRate + "hz " + recSet.bitRate + "kbps", 2);

				}, (msg) => {
					this.reclog("结束录音失败：" + msg, 1);
				});
			}
			,
			//Log msg to the console with a [HH:MM:SS] prefix; color is accepted for API symmetry but currently unused
			reclog(msg, color) {
				var now = new Date();
				var t = ("0" + now.getHours()).substr(-2) +
					":" + ("0" + now.getMinutes()).substr(-2) +
					":" + ("0" + now.getSeconds()).substr(-2);
				var txt = "[" + t + "]" + msg;
				console.log(txt);
			}

			,
			//Format a millisecond duration as [hh:]mm:ss, appending ″-prefixed milliseconds when showSS is truthy
			formatTime(ms, showSS) {
				var ss = ms % 1000; //millisecond remainder
				ms = (ms - ss) / 1000;
				var s = ms % 60;
				ms = (ms - s) / 60;
				var m = ms % 60;
				ms = (ms - m) / 60;
				var h = ms,
					v = "";
				if (h > 0) v += (h < 10 ? "0" : "") + h + ":"; //hours only when non-zero
				v += (m < 10 ? "0" : "") + m + ":";
				v += (s < 10 ? "0" : "") + s;
				if (showSS) v += "″" + ("00" + ss).substr(-3);;
				return v;
			}
		}
	}







	/*******************下面的接口实现代码可以直接copy到你的项目里面使用**********************/

	/**实现apiRequest接口，tokenApi的请求实现方法**/
	/**
	 * Implements the apiRequest interface: performs the tokenApi HTTP request.
	 * @param {string} url - token API endpoint
	 * @param {Object} args - request parameters, sent form-encoded
	 * @param {Function} success - called with {appkey, token} on success
	 * @param {Function} fail - called with an error message string on any failure
	 */
	var uni_ApiRequest = function(url, args, success, fail) {

		uni.request({
			url: url,
			data: args,
			method: "POST",
			dataType: "text",
			header: {
				"content-type": "application/x-www-form-urlencoded",
				// accessToken: storage.getAccessToken(),
				uuid: storage.getUuid()
			},
			success: (res) => {
				var resut;
				try {
					resut = JSON.parse(res.data);
				} catch (e) {
					//malformed response body: report instead of throwing out of the callback
					fail("响应数据解析失败");
					return;
				}
				//【自行修改】extract the data according to your own response format and invoke the callback
				if (resut.success && resut.result && resut.result.v) {
					let result = resut.result.v;
					success({
						appkey: result.appkey,
						token: result.token
					});
				} else {
					//bug fix: the original silently dropped unsuccessful responses, invoking neither
					//callback and leaving the ASR start sequence hanging forever
					fail(resut.message || "获取token失败");
				}
			},
			fail: (e) => {
				fail(e.errMsg || "请求出错");
			}
		});
	};

	/**实现compatibleWebSocket接口**/
	/**
	 * Implements the compatibleWebSocket interface: wraps uni.connectSocket into a
	 * browser-WebSocket-like object (onopen/onerror/onclose/onmessage, send/connect/close)
	 * that the Aliyun ASR extension can drive.
	 */
	var uni_WebSocket = function(url) {
		//event callbacks; the caller overwrites these as needed
		var ws = {
			onopen: () => {},
			onerror: (event) => {},
			onclose: (event) => {},
			onmessage: (event) => {}
		};
		var state = ws.storeData = {};

		//send data; data is a string or an ArrayBuffer
		ws.send = (data) => {
			state.wsTask.send({
				data: data
			});
		};

		//open the connection
		ws.connect = () => {
			//report an error exactly once per connection
			var fireError = (message) => {
				if (state.isError) return;
				state.isError = 1;
				ws.onerror({
					message: message
				});
			};

			var task = uni.connectSocket({
				url: url,
				success: () => {},
				fail: (res) => {
					fireError("创建连接出现错误：" + res.errMsg);
				}
			});
			state.wsTask = task;

			task.onOpen(() => {
				if (state.isOpen) return;
				state.isOpen = 1;
				ws.onopen();
			});
			task.onClose((e) => {
				if (state.isClose) return;
				state.isClose = 1;
				ws.onclose({
					code: e.code || -1,
					reason: e.reason || ""
				});
			});
			task.onError((e) => {
				fireError(e.errMsg || "未知错误");
			});
			task.onMessage((e) => {
				ws.onmessage({
					data: e.data
				});
			});
		};

		//close the connection; code/reason are forwarded only when provided
		ws.close = (code, reason) => {
			var args = {};
			if (code != null) args.code = code;
			if (reason != null) args.reason = reason;
			state.wsTask.close(args);
		};
		return ws;
	};
</script>




<!-- #ifdef APP -->
<script module="testMainVue" lang="renderjs">
	/**============= App中在renderjs中引入RecordApp，这样App中也能使用H5录音、音频可视化 =============**/
	/** 先引入Recorder **/
	import Recorder from 'recorder-core';

	/** 引入RecordApp **/
	import RecordApp from 'recorder-core/src/app-support/app.js'
	//【必须引入】uni-app支持文件
	import '../../uni_modules/Recorder-UniCore/app-uni-support.js'


	export default {
		mounted() {
			//Required in App renderjs: register this module with RecordApp, passing the module's this
			RecordApp.UniRenderjsRegister(this);
		},
		methods: {
			//Methods defined here can be called from the logic layer via RecordApp.UniWebViewVueCall(this,'this.xxxFunc()')
			//To call logic-layer methods, use this.$ownerInstance.callMethod("xxxFunc",{args}); binary data must be base64-encoded for transfer
		}
	}
</script>
<!-- #endif -->