<template>
	<!-- Bottom input bar: toggles between a hold-to-talk voice button and a text input -->
	<view class="bottom-btn">
		<view class="input-btn">
			<!-- Voice mode: press and hold to record, slide up to cancel -->
			<view class="voice-content" v-if="inputType == 'voice'">
				<view class="voice-text" @touchstart="start" @longpress="record" @touchend.stop.prevent="end"
					@touchcancel="end" @touchmove="moveHandle">
					<!-- Hint above the button while recording: "release to cancel" vs "release to send" -->
					<view class="voice-tips" v-show="isStart">
						{{ifMoveUp ? '松手取消':'松手发送，上移取消'}}
					</view>
					<!-- Recording background image; swaps to the cancel artwork while the finger is moved up -->
					<view :class="[`start-voice`,`${!!ifMoveUp?'move-start-voice':''}`]" v-if="isStart">

					</view>
					<text v-show="!isStart">按住说话</text>
				</view>
			</view>
			<!-- Text mode: confirm (Enter/send key) submits the message -->
			<input type="text" v-else class="bottom-input" v-model="input" @confirm="submit" />
			<!-- Send button, only shown when there is text to send -->
			<view class="submit-btn" @tap.stop="submit" v-if="!!input&&inputType == 'text'">
			</view>
			<!-- Mode-switch icons, hidden while a recording gesture is in progress -->
			<template v-if="!isStart">
				<view class="input-icon" v-if="inputType == 'voice'" @tap.stop="changeType('text')"></view>
				<view class="voice-icon" v-else-if="inputType == 'text'&&!input" @tap.stop="changeType('voice')">

				</view>
			</template>
		</view>
	</view>
</template>

<script setup>
	import permision from '@/js_sdk/wa-permission/permission.js';
	import {
		ref,
		watch,
		defineEmits,
		inject
	} from "vue"
	// NOTE(review): `defineEmits` is a compiler macro in <script setup> and does
	// not actually need to be imported from 'vue'; the import above is unnecessary
	// but harmless.
	const emits = defineEmits(['sendMessage']);
	// Current content of the text-mode input box
	const input = ref('');
	// Active input mode: 'voice' (hold-to-talk) or 'text'
	const inputType = ref('voice');
	// True while a press-and-hold recording gesture is in progress
	const isStart = ref(false)
	// True once the finger has moved up far enough to cancel the recording
	const ifMoveUp = ref(false)
	// Baseline Y coordinate of the first touchmove sample (cancel-gesture reference)
	const startMoveUpY = ref(0)
	// Toast helper provided by an ancestor component
	const toast = inject("toast")

	// NOTE(review): declared but never read or written elsewhere in this file —
	// candidate for removal.
	let recordTime = 0
	// Path of the recorded audio file (assigned in recorderManager.onStop)
	let voicePath = ''
	// Maximum recording duration in seconds
	const maxTime = 30
	//#ifdef APP-PLUS
	// Recorder manager (APP platform only; also referenced by record()/end())
	const recorderManager = uni.getRecorderManager()
	// Fired after recorderManager.stop(): unless the user cancelled by sliding
	// up, hand the recorded file over to speech recognition.
	recorderManager.onStop(res => {
		clearInterval(startTimer)
		console.log('onStop-----------------------', ifMoveUp.value)
		if (!ifMoveUp.value) {
			voicePath = res.tempFilePath;
			loadFileData(res.tempFilePath);
		}
		// Reset the cancel flag for the next gesture
		ifMoveUp.value = false
	});
	// #endif

	/**
	 * Switch the input bar between modes.
	 * @param {String} type - target mode, 'voice' or 'text'
	 */
	function changeType(type) {
		inputType.value = type;
	}

	/**
	 * touchstart handler for the hold-to-talk button.
	 * On APP it first requests the Android microphone permission; the actual
	 * recording is started later by record() via the @longpress event.
	 * @param {Object} event touch event
	 */
	async function start(event) {
		event.preventDefault();
		//#ifdef APP-PLUS
		const result = await permision.requestAndroidPermission('android.permission.RECORD_AUDIO');
		if (result !== 1) {
			// Permission denied: explain why the microphone is needed and abort
			uni.showModal({
				title: '温馨提示',
				content: '语音识别功能需要您开启麦克风权限才能使用，请开启麦克风使用权限',
				showCancel: false,
				confirmText: '好的'
			});
			return;
		}
		// #endif
		// Reset per-gesture state, show the recording UI, give haptic feedback
		touchInit()
		isStart.value = true
		uni.vibrateShort()
	}
	// Watchdog timer handle and elapsed seconds for the current recording
	let startTimer = null
	let startTime = 0

	/**
	 * @longpress handler: start recording and arm a 1s watchdog that enforces
	 * the maxTime limit.
	 *
	 * Fixes:
	 * - startTime is now reset at the start of every recording; previously it
	 *   kept its value from the prior recording (it was only zeroed when the
	 *   limit was hit), so a later recording could be cut off early.
	 * - recorderManager only exists under APP-PLUS, so the stop() call inside
	 *   the watchdog is now guarded by the same conditional compilation.
	 * - The toast text is derived from maxTime instead of hard-coding 30.
	 */
	function record() {
		clearInterval(startTimer)
		startTime = 0
		const maxTimems = maxTime * 1000;
		// Watchdog: force-stop once maxTime seconds have elapsed
		startTimer = setInterval(() => {
			startTime++
			console.log(startTime, 'startTime')
			if (startTime >= maxTime) {
				toast(`最多录制${maxTime}秒语音`)
				//#ifdef APP-PLUS
				recorderManager.stop();
				// #endif
				startTime = 0
				clearInterval(startTimer)
			}
		}, 1000)
		//#ifdef APP-PLUS
		// Give the native recorder a little slack beyond the JS watchdog
		recorderManager.start({
			duration: maxTimems + 2000
		});
		// #endif
		console.log('开始录音')
	}

	/**
	 * touchend/touchcancel handler: stop recording and reset gesture state.
	 *
	 * Fixes:
	 * - recorderManager is declared only inside an APP-PLUS conditional block,
	 *   so the stop() call is now guarded the same way; previously end() threw
	 *   a ReferenceError on non-APP platforms.
	 * - The watchdog interval is cleared here as well; onStop (which also
	 *   clears it) only exists on APP, so off-APP the timer leaked.
	 * @param {Object} event touch event
	 */
	function end(event) {
		console.log('end-----')
		event.stopPropagation();
		// Release of the button
		event.preventDefault();
		clearInterval(startTimer)
		//#ifdef APP-PLUS
		recorderManager.stop();
		// #endif
		touchInit()
	}

	/**
	 * touchmove handler during a recording gesture.
	 * The first sample establishes a baseline Y; moving more than 20px above
	 * that baseline arms the "release to cancel" state, moving back disarms it.
	 * @param {Object} e touch event
	 */
	function moveHandle(e) {
		e.preventDefault();
		const currentY = e.touches[0].clientY;
		if (!startMoveUpY.value) {
			startMoveUpY.value = currentY;
		}
		ifMoveUp.value = (startMoveUpY.value - currentY) > 20;
	}

	/**
	 * Emit the typed message via 'sendMessage', then clear the input.
	 * No-op when the input is empty.
	 */
	async function submit() {
		const text = input.value;
		if (text === '') return;
		emits('sendMessage', text);
		input.value = '';
	}

	/**
	 * Reset per-gesture recording state (deliberately does not touch ifMoveUp,
	 * which the onStop callback still needs to decide between send and cancel).
	 */
	function touchInit() {
		isStart.value = false;
		startMoveUpY.value = 0;
		voicePath = "";
		console.log(isStart.value, 'isStart touchInit')
	}

	/**
	 * Read the recorded audio file from the app's private sandbox, Base64-encode
	 * it and feed it to speechRecognizing(); on success, emits 'sendMessage'
	 * with the recognized text.
	 * APP (HTML5+ runtime) only — relies on the global `plus` object.
	 * @param {String} recordPath file path from recorderManager.onStop
	 */
	function loadFileData(recordPath) {
		plus.io.requestFileSystem(
			plus.io.PRIVATE_DOC,
			fs => {
				fs.root.getFile(
					recordPath, {
						create: false
					},
					fileEntry => {
						fileEntry.file(
							file => {
								const fileReader = new plus.io.FileReader();
								fileReader.onloadend = evt => {
									// Keep only the data portion of the Data URL
									// (strip the "data:...;base64," prefix)
									// https://developer.mozilla.org/zh-CN/docs/Web/HTTP/Basics_of_HTTP/Data_URLs
									const content = evt.target.result.split(',')[1];
									// console.log('onloadend: ', content.length);
									// Build the recognition request
									// NOTE(review): 'cuid' is a literal placeholder,
									// not a real device/user id — confirm intent.
									const action = speechRecognizing(content, file.size,
										'cuid');
									// Run the recognition; errors are silently ignored
									// (errorHandler inside speechRecognizing already logs)
									action.then(result => {
										emits('sendMessage', result);
										console.log(result, '识别结果')
									}).catch(
										() => {});
								};
								fileReader.readAsDataURL(file);
							},
							fileError => {
								console.log('FileError: ', fileError);
								// uni.showToast({
								// 	icon: 'none',
								// 	title: fileError.message
								// });
							}
						);
					},
					getFileError => {
						console.log('GetFileError: ', getFileError);
						uni.showToast({
							icon: 'none',
							title: getFileError.message
						});
					}
				);
			},
			requestFileSystemError => {
				console.log('RequestFileSystemError: ', requestFileSystemError);
				uni.showToast({
					icon: 'none',
					title: requestFileSystemError.message
				});
			}
		);
	}
	/**
	 * Baidu short-speech recognition (speech-to-text) wrapper.
	 * Fetches an OAuth access token, then POSTs the audio as Base64 JSON and
	 * resolves with the recognized text (trailing Chinese full stop stripped).
	 *
	 * @param {String} speech audio data as a Base64-encoded string
	 * @param {Number} len size of the audio FILE in bytes (NOT the length of `speech`)
	 * @param {String} cuid unique user identifier, used by Baidu for UV stats
	 * @param {Boolean} [showLoading] whether to show a loading indicator
	 * @param {Boolean} [showErrorToast] whether to show an error toast (toast is currently commented out)
	 * @returns {Promise<String>} resolves with the recognized text, rejects with an error message
	 *
	 * NOTE(review): client_id/client_secret are hard-coded in the client — these
	 * credentials ship inside the app package and should be moved server-side.
	 * NOTE(review): the recognition request uses plain http:// — Baidu also
	 * serves this endpoint over https; confirm and switch if possible.
	 */
	function speechRecognizing(speech, len, cuid, showLoading = true, showErrorToast = true) {
		return new Promise((resolve, reject) => {
			// Shared failure path: log (with an error code), hide loading, reject
			const errorHandler = (message, no = '') => {
				if (no) console.log(`${no}: ${message}`);
				showLoading && uni.hideLoading();
				// showErrorToast && uni.showToast({
				// 	icon: 'none',
				// 	title: message,
				// 	duration: 2000
				// });
				reject(message);
			};
			showLoading && uni.showLoading({
				title: '正在识别'
			});
			// Fetch a fresh access token
			// https://ai.baidu.com/ai-doc/REFERENCE/Ck3dwjhhu
			uni.request({
				url: 'https://aip.baidubce.com/oauth/2.0/token',
				data: {
					grant_type: 'client_credentials',
					client_id: 'nn94anui75iuTulAtmG2anri',
					client_secret: 'ai7rAcf4VUzoJv4038agfAF0XZaww3yr'
				},
				success: result => {
					console.log(result);
					if (result.statusCode !== 200) {
						errorHandler(`【${result.statusCode}】${result.errMsg}`, 'F11');
						return;
					}
					if (result.data.error_description) {
						errorHandler(result.data.error_description, 'F12');
						return;
					}
					const token = result.data.access_token;
					// Upload the audio for recognition (JSON upload mode)
					// https://cloud.baidu.com/doc/SPEECH/s/Vk38lxily#json方式上传音频
					uni.request({
						method: 'POST',
						url: 'http://vop.baidu.com/server_api',
						header: {
							'Content-Type': 'application/json'
						},
						data: {
							// NOTE(review): format 'm4a' presumably matches the
							// recorder's default output — confirm against the
							// recorderManager configuration.
							format: 'm4a',
							rate: 16000,
							channel: 1,
							// dev_pid 1537: Mandarin model (per Baidu API docs)
							dev_pid: 1537,
							token: token,
							cuid: cuid,
							len: len,
							speech: speech
						},
						success: res => {
							console.log(res, '结果');
							uni.hideLoading();
							if (res.statusCode !== 200) {
								errorHandler(`【${res.statusCode}】${res.errMsg}`,
									'F2');
								return;
							}
							if (res.data.err_no === 0) {
								// Strip the trailing Chinese full stop Baidu appends
								const result = res.data.result[0].replace('。', '');
								resolve(result);
							} else {
								errorHandler(
									`【${res.data.err_no}】${res.data.err_msg}`,
									'F3');
							}
						},
						fail: err => errorHandler(err.errMsg, 'F1')
					});
				},
				fail: error => errorHandler(error.errMsg, 'F10')
			});
		});
	}
</script>

<style scoped lang="less">
	/* Input bar fixed near the bottom of the screen, horizontally centered */
	.bottom-btn {
		position: fixed;
		bottom: 76rpx;
		left: 50%;
		transform: translateX(-50%);

		/* White rounded container shared by voice and text modes */
		.input-btn {
			position: relative;
			width: 650rpx;
			height: 86rpx;
			line-height: 86rpx;
			text-align: center;
			opacity: 1;
			border-radius: 22rpx;
			background: rgba(255, 255, 255, 1);
			box-shadow: 0rpx 2rpx 4rpx rgba(0, 0, 0, 0.25);
			font-size: 32rpx;
			font-weight: 400;
			letter-spacing: 0px;
			color: rgba(0, 0, 0, 1);
			display: flex;
			align-items: center;
			// padding-right: 100rpx;
			box-sizing: border-box;

			/* Text-mode input; capped width leaves room for the send icon */
			.bottom-input {
				max-width: 540rpx;
				box-sizing: border-box;
				padding-left: 32rpx;
				width: 100%;
				height: 86rpx;
				line-height: 86rpx;
				font-size: 28rpx;
				text-align: left;
			}
		}

		/* NOTE(review): .speak-btn and .keyboard-icon are not referenced by the
		   current template — possible leftovers from an earlier layout. */
		.speak-btn {
			width: 650rpx;
			height: 86rpx;
			background-image: url(../../static/speak.png);
			background-size: 100% 100%;
		}

		.keyboard-icon {
			position: absolute;
			top: 20rpx;
			right: 30rpx;
			width: 58rpx;
			height: 58rpx;
			background-image: url(../../static/keyboard.png);
			background-size: 100% 100%;
		}

		/* Send button, right-aligned inside the bar (text mode) */
		.submit-btn {
			position: absolute;
			top: 20rpx;
			right: 30rpx;
			width: 58rpx;
			height: 58rpx;
			background-image: url(../../static/send.png);
			background-size: 100% 100%;
		}

		/* Switch-to-voice icon (shown in text mode when input is empty) */
		.voice-icon {
			position: absolute;
			top: 20rpx;
			right: 30rpx;
			width: 58rpx;
			height: 58rpx;
			background-image: url(../../static/voiceIcon.png);
			background-size: 100% 100%;
		}

		/* Switch-to-keyboard icon (shown in voice mode) */
		.input-icon {
			position: absolute;
			top: 20rpx;
			right: 30rpx;
			width: 58rpx;
			height: 58rpx;
			background-image: url(../../static/keyboard.png);
			background-size: 100% 100%;
		}

		/* Voice mode: hold-to-talk area */
		.voice-content {
			width: 100%;
			height: 100%;
			position: relative;

			.voice-text {
				font-size: 32rpx;
				font-weight: 400;
				letter-spacing: 0rpx;
				color: rgba(0, 0, 0, 1);
			}

			/* "release to send / cancel" hint floating above the bar */
			.voice-tips {
				position: absolute;
				top: -68rpx;
				left: 50%;
				transform: translateX(-50%);
				font-size: 28rpx;
				font-weight: 400;
				letter-spacing: 0rpx;
				color: rgba(73, 80, 87, 1);
				vertical-align: top;
			}

			/* Recording-in-progress background image */
			.start-voice {
				width: 660rpx;
				height: 106rpx;
				background-image: url(../../static/speak.png);
				background-size: 100% 100%;
			}

			/* Overrides .start-voice while the gesture is in cancel position */
			.move-start-voice {
				background-image: url(../../static/cencel-speak.png);
				background-size: 100% 100%;
			}
		}
	}
</style>