<template>
	<view class="backview">
		<lexiconNavVue title="蜂Talk" />
		<view class="main">
			<view class="top-menu">
				<!-- Hidden inputs that capture hardware-keyboard events on iOS -->
				<view class="input-content-back" v-if="platform == 'ios'">
					<input v-model="inputValue" maxlength="1" type="password" class="input-class" :focus='focusState'
						:auto-blur="false" :show-confirm-bar="false" @input="inputChange" @confirm="enterClicked"
						@keydown.enter="enterClicked" inputmode="none"> </input>
					<input v-model="inputValue1" maxlength="1" type="password" class="input-class" :focus='focusState2'
						:auto-blur="false" :show-confirm-bar="false" @input="inputChange" @confirm="enterClicked"
						@keydown.enter="enterClicked" inputmode="none"> </input>
				</view>
			</view>
			<scroll-view class="message-scroll" :style="{ height: scrollHeight + 'px'}" scroll-y
				:scroll-with-animation='true' :scroll-top="scrollTop" ref="scrollView">
				<view class="chat-container" id="chat-container">
					<!-- fix: v-for requires :key for stable list patching in Vue -->
					<view class="" v-for="(item, index) in messageList" :key="index">
						<view class="right-bubble" v-if="item.sender == 'user'">
							<view class="message message-right">
								{{item.message}}
							</view>
							<view class="avatar avatar2"></view>
						</view>
						<view class="left-bubble" v-else>
							<view class="avatar avatar1"></view>
							<view class="message message-left">
								{{item.message}}
							</view>
						</view>
					</view>
				</view>
			</scroll-view>
			<view class="bottom-menuview">
				<view class="mifeng">
					<mifeng v-if="canvasheight>0" ref="uniWordList2"></mifeng>
				</view>
				<view style="width: 50px; position: absolute;">
					<text class="donghuaType" style="color:#ffffff00;">{{donghua}}</text>
				</view>
				<view class="bottom-content-one">
					<view class="bottom-content-two">
						<view class="voicebegin-img" v-if="recognition== true && !beginAiLoad">
							<image src="../../static/image/ftalk/ai-say1.gif" mode="scaleToFill"
								style="width: 100%; height: 100%; border-radius: 30px;">
							</image>
						</view>
						<view class="voice-back" v-if="!beginAiLoad">
							<view class="voice-content" @click="voiceClicked"
								:style="{ backgroundImage: 'url(' + longPressImage + ')' }">
							</view>
							<view class="voice-str">
								{{voiceStr}}
							</view>
						</view>
						<view class="voice-back" v-if="beginAiLoad">
							<image class="aiLoadClass" src="../../static/image/ftalk/ai-sikao.gif" mode="aspectFit">
							</image>
							<view class="voice-str">
								{{voiceStr}}
							</view>
						</view>
						<view class="delete-back" v-if="recognition== true" @click="resetAiLoadClicked">
							<image class="deleteclass"
								:style="isCancelSpeech ? 'box-shadow: 0px 0px 20px 10px red;' : ''" mode="aspectFit"
								src="../../static/image/ftalk/delete-img1.png"></image>
						</view>
						<view class="delete-back" v-if="showDelete" @click="resetAiLoadClicked">
							<image class="deleteclass" mode="aspectFit" src="../../static/image/ftalk/delete-img1.png">
							</image>
						</view>
					</view>
				</view>
			</view>
		</view>
		<popPromptBox v-if="showPromptbox" :msg="PromptMsg" @confirm="hidePromptbox"></popPromptBox>
		<selectContentView v-if="showTipPromptbox" :msg="msg" confirmTitle="确认" cancelTitle="取消"
			@confirm="confirmCreate" @cancel="cancelClicked"></selectContentView>
		<keyboardListener v-if="platform == 'android'" @keydown="listenOnKeydown"></keyboardListener>

	</view>
</template>

<script>
	// Whether the App runtime is iOS (selects the native permission-check path
	// in getAuthorization). Only evaluated in the App environment.
	var isIos;
	// #ifdef APP-PLUS
	isIos = plus.os.name == 'iOS';
	// #endif
	import mifeng from '../../components/games/mifeng.vue'
	import keyboardListener from '../games/keyboard-listener.vue'
	import sendVoice from '../txasr/sendVoice.js';
	// Built-in recorder used by the tap-to-talk flow (see voiceClicked/dealStop).
	const recorderManager = uni.getRecorderManager();


	// =============== Native recording plugin: start =============================>>>>>>
	/** Import Recorder first (requires: npm install recorder-core) **/
	import Recorder from 'recorder-core'; // NOTE: if the Recorder variable is never referenced, the import may be removed at build time (e.g. vue3 tree-shaking); use `import 'recorder-core'` or touch Recorder.a=1 to keep a strong reference

	/** Import RecordApp **/
	import RecordApp from 'recorder-core/src/app-support/app.js'
	// [Required on every platform] uni-app support file
	import '../../uni_modules/Recorder-UniCore/app-uni-support.js'

	/** Optional: use the native recording plugin in App for better compatibility and UX;
		plugin marketplace: https://ext.dcloud.net.cn/plugin?name=Recorder-NativePlugin (trial has no limits).
		Configure before calling RecordApp.RequestPermission — placing it right after the imports makes it global.
		It can also be enabled per-platform, e.g. iOS check: RecordApp.UniIsApp()==2 */
	RecordApp.UniNativeUtsPlugin = {
		nativePlugin: true
	}; // currently only the native plugin is supported; the uts plugin is unavailable
	// =============== Native recording plugin: end =============================>>>>>>
	// Page component: "蜂Talk" voice chat — record speech, run Tencent
	// one-sentence ASR, send the transcript to the AI backend, and play the
	// TTS reply while animating the bee mascot.
	export default {
		components: {
			mifeng,
			keyboardListener
		},
		data() {
			return {
				//speech 蓝牙
				isMounted: false,
				recpowerx: 0,
				recpowert: "",
				reclogs: [],
				reclogLast: "",

				// 一句话录音结果
				result: '',
				// 更多参数参考腾讯文档,全部兼容
				defaultData: {
					EngSerViceType: '16k_en',
					SourceType: 1,
					VoiceFormat: 'mp3',
					Data: '',
					DataLen: null
				},
				luyin_count: 0,
				base64: '',
				bigBytes: null,

				start_time: '',
				end_time: '',
				last_time: '',




				msg: '',
				showTipPromptbox: false,
				isAuthorization: false,
				canvasheight: 200,
				canvaswidth: 300,
				showPromptbox: false,
				PromptMsg: '',
				recognition: null,
				beginAiLoad: false,
				startX: '', //触摸起点
				start_time: '',
				isCancelSpeech: false, // 是否取消语音识别
				speechRecognitionString: "", // 接收语音识别的字符串
				showDelete: false,
				audioContext: null, //音频对象
				ai_content: '', // ai回答内容
				longPressImage: require('../../static/image/ftalk/voice-nomal.png'),
				voiceStr: 'Tap to start voice recognition.',
				scrollTop: 0,
				scrollHeight: 0,
				newMessage: '',
				messageList: [],
				secretKey: 'V2X3d8ulhirvtkVFtPygTJa0Nl6bCHVe',
				secretId: 'AKIDDYp3Nf0FR4vpeNzW9gK1oYSMCHONMKNB',
				appId: '1323065199',
				donghua: 'donghua_daiji',
				// 更多参数参考腾讯文档,全部兼容
				defaultData: {
					EngSerViceType: '16k_zh-PY',
					SourceType: 1,
					VoiceFormat: 'mp3',
					Data: '',
					DataLen: null
				},
				platform: '',
				// 键盘监听
				focusState: true,
				focusState2: false,
				inputValue: '',
				inputValue1: '',
				isPad: false,
				checkAuthFirst: false
			}
		},
		watch: {
			recognition(newValue, oldValue) {
				var that = this;
				if (newValue == true) { // 语音识别开始
					this.longPressImage = require('../../static/image/ftalk/voice-select.png');
					// scanpen.startRecord();这里后面调用一句话识别
					this.voiceStr = 'Tap to stop voice recognition.';
					// recorderManager.start();
					that.startRecord();
				} else { // 语音识别结束
					that.stopRecord();
					console.log('语音识别结束')
					if (this.beginAiLoad == true) {
						this.voiceStr = 'Thinking, please wait a moment.'
					}
				}
			},
		},
		onUnload() {
			this.audioContext.destroy(() => {
				console.log('销毁音频上下文对象');
			})
			this.audioContext = null;
			// // #ifdef APP-PLUS
			// plus.screen.lockOrientation('portrait-primary');
			// // #endif
		},
		onLoad(options) {
			var that = this;
			uni.getSystemInfo({
				success: function(res) {
					const platform = res.platform.toLowerCase(); // 全部转化为小写字母 ios/android
					that.platform = platform
				}
			})
			// const systemInfo = uni.getSystemInfoSync();
			// this.isPad = systemInfo.screenWidth > 600 && systemInfo.pixelRatio < 3;
			// if (this.isPad) {
			// 	// #ifdef APP-PLUS
			// 	plus.screen.lockOrientation('default');
			// 	// #endif
			// }
			this.dealStop();
			// this.getAuthorization();
			this.checkAuth()
		},
		onShow() {
			// Forward page-show to RecordApp, but only once the page is mounted
			// (onShow can fire before mounted, when the page isn't ready yet).
			// NOTE(review): verify that mounted() sets isMounted = true — it starts
			// false in data(), and if never set this call can never run.
			if (this.isMounted) this.uniPage__onShow();
		},
		onReady() {
			// if (this.isPad) {
			// 	// #ifdef APP-PLUS
			// 	plus.screen.lockOrientation('landscape-primary');
			// 	const that = this
			// 	uni.createSelectorQuery().select('.top-menu').boundingClientRect(rect => {
			// 		console.log('uni.getSystemInfoSync()', uni.getSystemInfoSync());
			// 		that.scrollHeight = uni.getSystemInfoSync().windowWidth - rect.bottom - 340;
			// 	}).exec();
			// 	// #endif
			// } else {
			const that = this
			uni.createSelectorQuery().select('.top-menu').boundingClientRect(rect => {
				that.scrollHeight = uni.getSystemInfoSync().windowHeight - rect.bottom - 340;
			}).exec();
			// }
		},
		mounted() {
			var that = this;
			that.audioContext = uni.createInnerAudioContext();
			that.audioContext.volume = that.fayinguan_volume_value;
			// 监听错误事件
			that.audioContext.onError((res) => {
				console.error('音频加载或播放错误1', res);
				//MediaError（媒体错误）：表示音频播放过程中出现了媒体相关的错误。
				if (res && res.errMsg == 'MediaError') {

				}
				// SrcError（资源路径错误）：表示音频资源路径设置不正确的错误
				if (res && res.errMsg == 'SrcError') {
					uni.showToast({
						title: 'SrcError，请重试~',
						icon: 'error'
					})
				}
			});
			// 监听音频开始播放事件
			that.audioContext.onPlay(() => {
				console.log('音频开始播放')
				that.donghua = 'donghua_shuohua';
			});
			// 监听播放中事件
			that.audioContext.onTimeUpdate(() => {});
			// 监听音频播放完毕事件
			that.audioContext.onEnded(() => {
				that.donghua = 'donghua_daiji';
				that.getFileList();
				console.log('音频播放完毕');
			});
		},
		// Compiled as unmounted() on Vue 3 and destroyed() on Vue 2 via the
		// /*#ifdef*/ conditional-compilation markers — keep those markers intact.
		/*#ifdef VUE3*/
		unmounted() /*#endif*/ /*#ifndef VUE3*/ destroyed() /*#endif*/ {
			RecordApp.Stop(); // Release resources; closes the recorder if it was left open
		},
		methods: {

			// START ------ speech 录音（蓝牙耳机收音）
			// [Must be called] on every page onShow, passing the current component
			// instance, so RecordApp tracks the active page.
			uniPage__onShow() {
				RecordApp.UniPageOnShow(this);
			},


			// 请求权限
			recReq() {
				this.reclog("正在请求录音权限...");
				RecordApp.UniWebViewActivate(this); //App环境下必须先切换成当前页面WebView
				RecordApp.RequestPermission(() => {
					this.reclog("已获得录音权限，可以开始录音了", 2);
					this.recStart()
				}, (msg, isUserNotAllow) => {
					if (isUserNotAllow) { //用户拒绝了录音权限
						//这里你应当编写代码进行引导用户给录音权限，不同平台分别进行编写
						this.startRecording = false
						this.showAuthTip()
					}
					this.reclog(
						(isUserNotAllow ? "isUserNotAllow," : "") + "请求录音权限失败：" + msg, 1);
				});
			},

			// Populate the mic-permission explainer dialog.
			// NOTE(review): these confirmTip* fields are not declared in data() and
			// no element in this template binds them, so in Vue 2 the assignments
			// are not reactive — confirm this dialog is actually rendered anywhere.
			showAuthTip() {
				// Prompt dialog content
				this.confirmTipShow = true
				this.confirmTipMsg = 'App需要您的同意，才能访问麦克风进行语音识别转换/录制音频，如禁止将无法识别语音转换/录制音频发送，请在“设置”中允许“拼词之星”访问麦克风'
				this.confirmTipConfirmText = '确定'
				this.confirmTipCancelText = ''
			},

			// Start recording via RecordApp (mp3 @ 16kHz). Encoded chunks are taken
			// off in takeoffEncodeChunk and accumulated into bigBytes so the whole
			// clip can be base64-encoded for ASR when recording stops (recStop).
			recStart() {
				var that = this;

				// Reset the per-recording accumulators.
				that.luyin_count = 0;
				that.bigBytes = new Uint8Array(0)

				// NOTE(review): file / filePath appear unused by the rest of the flow.
				var file = Date.now() + ".mp3";
				this.filePath = "";

				RecordApp.UniWebViewActivate(this); // App environment: must switch to the current page's WebView first
				RecordApp.Start({
					type: "mp3",
					sampleRate: 16000,
					bitRate: 16,
					onProcess: (buffers, powerLevel, duration, sampleRate, newBufferIdx, asyncEnd) => {
						// Live level/duration readout.
						this.recpowerx = powerLevel;
						this.recpowert = duration + " / " + powerLevel;
					},
					onProcess_renderjs: `function(buffers,powerLevel,duration,sampleRate,newBufferIdx,asyncEnd){
						//App中是在renderjs中进行的可视化图形绘制
						if(this.waveView){
							this.waveView.input(buffers[buffers.length-1],powerLevel,sampleRate);
						}
					}`,
					takeoffEncodeChunk: (chunkBytes) => {
						that.luyin_count++;
						// Always-true guard (a string literal used as an inline label);
						// appends this chunk to the running bigBytes buffer.
						if ('base64音频文件专用') {
							var temp = new Uint8Array(chunkBytes.length + that.bigBytes.length)
							temp.set(that.bigBytes);
							temp.set(chunkBytes, that.bigBytes.length);
							that.bigBytes = temp
						}
					},
					start_renderjs: `function(){
						//App中可以放一个函数，在Start成功时renderjs中会先调用这里的代码，this是renderjs模块的this（也可以用This变量）
						//放一些仅在renderjs中才生效的事情，比如初始化，不提供也行
					}`,
					stop_renderjs: `function(aBuf,duration,mime){
						//App中可以放一个函数，在Stop成功时renderjs中会先调用这里的代码，this是renderjs模块的this（也可以用This变量）
						this.audioData=aBuf; //留着给Stop时进行转码成wav播放
					}`
				}, () => {
					this.reclog("录制中", 2);

					// Build the waveform visualization. In App it is drawn inside
					// renderjs; H5/mini-programs draw in the logic layer, hence two
					// equivalent code paths (sizes must match the canvas style).
					RecordApp.UniFindCanvas(this, [".recwave-WaveView"], `
						this.waveView=Recorder.WaveView({compatibleCanvas:canvas1, width:300, height:100});
					`, (canvas1) => {
						this.waveView = Recorder.WaveView({
							compatibleCanvas: canvas1,
							width: 300,
							height: 100
						});
					});
				}, (msg) => {
					this.reclog("开始录音失败：" + msg, 1);
				});
			},

			// 停止录音
			recStop() {
				const that = this
				RecordApp.Stop((aBuf, duration, mime) => {
					that.base64 = uni.arrayBufferToBase64(that.bigBytes.buffer)
					that.end_time = new Date().getTime();
					var diff_time = that.end_time - that.start_time;
					if (that.end_time - that.start_time < 1000) {
						uni.showToast({
							title: '录音时长太短',
							icon: 'error'
						})
					} else {
						that.fn_tx_asr();
					}
				}, (msg) => {
					that.reclog("结束录音失败：" + msg, 1);
				});
			},

			// 腾讯一句话 uniapp
			async fn_tx_asr() {
				var that = this;
				// base64录音内容
				that.defaultData.Data = that.base64;
				// 录音长度
				that.defaultData.DataLen = uni.base64ToArrayBuffer(that.base64).byteLength;
				// 一句话识别返回结果
				const res = await sendVoice({
					secretId: that.secretId,
					secretKey: that.secretKey,
					payload: that.defaultData
				});
				var content = res.Response.Result;

				console.log(content)

				// 显示识别结果
				setTimeout(function() {
					if (content) {
						// // 这里正式应该是调用结束一句话识别，并且将识别内容speechRecognitionString传入该方法
						that.speechRecognitionString = content;
						that.fnContentToMp3(content);
						that.sendMessage('user');
					}
				}, 100)
			},

			reclog(msg, color) {
				var now = new Date();
				var t = ("0" + now.getHours()).substr(-2) +
					":" + ("0" + now.getMinutes()).substr(-2) +
					":" + ("0" + now.getSeconds()).substr(-2);
				var txt = "[" + t + "]" + msg;
				console.log(txt);
				this.reclogs.splice(0, 0, {
					txt: txt,
					color: color
				});
				this.reclogLast = {
					txt: txt,
					color: color
				};
			},

			// END ------ speech 录音（蓝牙耳机收音）


			// 一句话识别》》》》》》》》》》》》》》》》》》》》》》》》》》》》
			stopOtherState() {
				this.donghua = 'donghua_daiji';
				// 重置状态
				this.zhudongClick = false;
				this.playType = '';
			},

			startRecord() {
				var that = this;
				that.start_time = new Date().getTime();
				that.stopOtherState();
				if (that.audioContext) {
					that.audioContext.stop();
				}
				that.recReq()
			},

			stopRecord() {
				var that = this;
				that.recStop()
			},


			// 键盘监听方法Start==========================
			inputChange: function(event) {
				console.log('this.value================', event.target.value)
				var value = event.target.value;
				var that = this;
				if (value.includes(" ")) {
					that.voiceClicked();
					setTimeout(function() {
						that.inputValue = '';
						that.inputValue1 = '';
					}, 50);
				}
			},
			// 全局监听外接键盘点击按下事件
			listenOnKeydown(value) {
				console.log('value==============', value);
				console.log('this.platform===============', this.platform);
				if (this.platform === 'android') {
					var that = this;
					if (value.key == " ") { // 语音识别开始
						this.voiceClicked();
					}
				}
			},
			// enter 点击事件
			enterClicked() {
				uni.hideKeyboard(); //隐藏软键盘	
				if (this.focusState == true) {
					this.focusState = false;
					this.$nextTick(function() {
						this.focusState2 = true;
					});
				} else {
					this.focusState2 = false;
					this.$nextTick(function() {
						this.focusState = true;
					});
				}
			},
			checkAuth() {
				// 首次下载需要先弹窗提示
				const notFirstInTalk = uni.getStorageSync('notFirstInTalk')
				if (!notFirstInTalk) { // 首次登录
					this.msg = '使用该功能需要您开启麦克风权限，是否去开启？';
					this.showTipPromptbox = true;
					this.checkAuthFirst = true
				} else {
					this.getAuthorization()
				}
			},
			getAuthorization() {
				var that = this;
				if (isIos) {
					var avaudiosession = plus.ios.import("AVAudioSession");
					var avaudio = avaudiosession.sharedInstance();
					var permissionStatus = avaudio.recordPermission();
					console.log("permissionStatus:" + permissionStatus);
					if (permissionStatus == 1684369017) {
						that.msg = '使用该功能需要您开启麦克风权限，是否去开启？';
						that.showTipPromptbox = true;
					} else {
						if (permissionStatus == 1970168948) {
							// that.msg = '使用该功能需要您开启麦克风权限，是否去开启？';
							// that.showTipPromptbox = true;
						} else {}
					}
				} else {
					plus.android.requestPermissions(
						["android.permission.RECORD_AUDIO"],
						function(e) {
							console.log('permission========', e)
							if (e.deniedAlways.length > 0) {
								that.msg = '使用该功能需要您开启麦克风权限，是否去开启？';
								that.showTipPromptbox = true;
							}
							if (e.deniedPresent.length > 0) {
								that.msg = '使用该功能需要您开启麦克风权限，是否去开启？';
								that.showTipPromptbox = true;
							}
							if (e.granted.length > 0) {}
						},
						function(e) {
							uni.showToast({
								title: '权限获取失败',
								icon: 'error'
							})
						});
				}
			},
			// 去设置界面开启麦克风权限
			confirmCreate() {
				this.showTipPromptbox = false;
				if (this.checkAuthFirst) {
					uni.setStorageSync('notFirstInTalk', '1')
					this.getAuthorization()
				} else {
					if (isIos) {
						plus.runtime.openURL("app-settings://")
					} else {
						var Intent = plus.android.importClass("android.content.Intent");
						var Settings = plus.android.importClass("android.provider.Settings");
						var Uri = plus.android.importClass("android.net.Uri");
						var mainActivity = plus.android.runtimeMainActivity();
						var intent = new Intent();
						intent.setAction(Settings.ACTION_APPLICATION_DETAILS_SETTINGS);
						var uri = Uri.fromParts("package", mainActivity.getPackageName(), null);
						intent.setData(uri);
						mainActivity.startActivity(intent);
					}
				}
			},
			// Dialog cancel handler: close the prompt and leave the page (voice
			// chat is unusable without microphone access).
			cancelClicked() {
				this.showTipPromptbox = false;
				uni.navigateBack();
			},
			getFileList() {
				uni.getSavedFileList({
					success: (res) => {
						res.fileList.forEach(file => {
							// 使用uni.removeSavedFile删除指定的文件
							uni.removeSavedFile({
								filePath: file.filePath,
								success: function() {
									console.log('文件删除成功');
								},
								fail: function(err) {
									console.error('文件删除失败', err);
								}
							});
						});
					},
					fail: (err) => {}
				})
			},

			// 开始录音
			voiceClicked() {
				var that = this;
				if (this.recognition == true) {
					// if (!that.isCancelSpeech && that.recognition) { // 未取消
					that.end_time = new Date().getTime();
					var diff_time = that.end_time - that.start_time;
					if (diff_time < 1500) {
						that.resetAiLoadClicked();
						uni.showToast({
							title: '录音时长太短',
							icon: 'error'
						})
					} else {
						recorderManager.stop();
						that.beginAiLoad = true;
						that.recognition = false;
						that.showDelete = true;
					}
					// } else { // 取消识别
					// 	that.resetAiLoadClicked();
					// }
				} else {
					this.recognition = true;
					this.start_time = new Date().getTime();
				}
			},
			// Register the recorderManager stop handler: when a tap-to-talk
			// recording ends (and AI processing was requested via beginAiLoad),
			// read the temp file, base64-encode it, run Tencent one-sentence ASR,
			// then feed the transcript into the chat + TTS pipeline.
			async dealStop() {
				var that = this;
				recorderManager.onStop(async path => {
					if (that.beginAiLoad == true) {
						console.log('%c监听录音结束 Line:47 🍖 res', 'color:#465975', path);
						// Temp file path of the finished recording.
						this.voicePath = path.tempFilePath;
						console.log('%c Line:59 🍰 this.voicePath', 'color:#4fff4B', this.voicePath);
						// Temp path → base64 for the ASR payload.
						const base64 = await this.pathToBase64(this.voicePath);
						console.log('%c Line:55 🌮 base64', 'color:#f5ce50', base64);
						// Audio content...
						this.defaultData.Data = base64;
						// ...and its byte length.
						this.defaultData.DataLen = uni.base64ToArrayBuffer(base64).byteLength;
						console.log('%c Line:60 🍺 this.defaultData.DataLen', 'color:#2eafb0', this
							.defaultData
							.DataLen);
						const {
							secretId,
							secretKey,
							defaultData
						} = this;
						// One-sentence recognition result.
						const res = await sendVoice({
							secretId,
							secretKey,
							payload: defaultData
						});
						this.result = res.Response.Result;
						console.log('%c Line:58 🍕 res', 'color:#3f7cff', res);
						// Hand the transcript to TTS/chat.
						this.speechRecognitionString = this.result;
						this.fnContentToMp3(this.result);
						this.sendMessage('user');
					}
				});
			},
			async pathToBase64(path) {
				console.log('读取文件：', path);
				const base64 = await new Promise((resolve, reject) => {
					plus.io.resolveLocalFileSystemURL(
						path,
						function(entry) {
							entry.file(
								function(file) {
									// @ts-ignore
									var fileReader = new plus.io.FileReader();
									fileReader.onload = function(evt) {
										console.log('%c Line:80 🌭 evt', 'color:#3f7cff', evt);
										let result = evt.target.result;
										result = result.replace('data:audio/mpeg;base64,', '');
										resolve(result);
									};
									fileReader.onerror = function(error) {
										console.log('failed: ', error);
										reject(error);
									};
									fileReader.readAsDataURL(file);
								},
								function(error) {
									console.log('failed: ', error);
									reject(error);
								}
							);
						},
						function(error) {
							console.log('failed: ', error);
						}
					);
				});
				return base64;
			},
			// Abort / reset the recognition flow: stop capture and playback, restore
			// the idle button art and hint, and clear all state flags.
			resetAiLoadClicked() {
				recorderManager.stop();
				if (this.audioContext) {
					this.audioContext.stop();
				}
				this.longPressImage = require('../../static/image/ftalk/voice-nomal.png');
				this.voiceStr = 'Tap to start voice recognition.';
				this.speechRecognitionString = "";
				this.beginAiLoad = false;
				this.isCancelSpeech = false;
				this.showDelete = false;
				// Cleared last: the recognition watcher reads beginAiLoad, which
				// should already be false by the time it fires.
				this.recognition = false;
			},

			// 传入文字返回音频和文本
			fnContentToMp3(questionStr) {
				this.$http.contentToMp3({
					content: questionStr,
					is_vip: 1
				}).then(res => {
					if (res.code == 200) {
						if (!res.data.content || res.data.content == "") {
							this.ai_content = 'Time out, please try again';
						} else {
							this.ai_content = res.data.content;
						}
						if (res.data.url) {
							this.voiceurl = res.data.url;
						}
						this.showDelete = false;
						this.resetAiLoadClicked();
						this.fnPlayMp3();
						this.sendMessage('bot');
					} else if (res.code == 5010) {
						this.resetAiLoadClicked();
						this.PromptMsg = '今日训练次数已达上限';
						this.showPromptbox = true;
					} else {
						uni.showToast({
							title: res.msg,
							icon: 'none',
							duration: 2000
						})
					}
				})
			},
			// Download the TTS mp3 and play it through the shared audioContext.
			// Saved files are purged in getFileList() after playback ends.
			fnPlayMp3() {
				var that = this;
				// Download the remote audio file to a temp path.
				uni.downloadFile({
					url: that.voiceurl, // remote mp3 set by fnContentToMp3
					success: (downloadRes) => {
						console.log('downloadRes============', downloadRes);
						if (downloadRes.statusCode === 200) {
							// Persist the temp download, then play the saved copy.
							uni.saveFile({
								tempFilePath: downloadRes.tempFilePath,
								success: (saveRes) => {
									if (that.audioContext) {
										that.audioContext.stop();
										that.audioContext.src = saveRes.savedFilePath;
										// NOTE(review): the 500ms delay before play()
										// looks like a workaround for the source not
										// being ready immediately — confirm it's needed.
										setTimeout(function() {
											that.audioContext.play();
										}, 500)
									}
								},
								fail: (error) => {
									console.error('保存文件失败：', error);
								}
							});
						} else {
							console.error('下载文件失败，状态码：', downloadRes.statusCode);
						}
					},
					fail: (error) => {
						console.error('下载文件失败：', error);
					}
				});
			},
			sendMessage(sender) {
				if (sender == 'user') {
					if (this.speechRecognitionString.trim() === '') return;
					// 添加新消息到消息列表
					this.messageList.push({
						message: this.speechRecognitionString,
						sender: 'user'
					});
					this.$nextTick(() => {
						this.scrollToBottom();
					});
				} else {
					// 模拟接收到的回复
					this.messageList.push({
						message: this.ai_content,
						sender: 'bot'
					});
					this.$nextTick(() => {
						this.scrollToBottom();
					});
				}
			},
			scrollToBottom() {
				var that = this;
				that.$nextTick(() => {
					uni.createSelectorQuery().in(that).select('#chat-container')
						.boundingClientRect((res) => {
							let top = res.height - that.scrollHeight + 200;
							if (top > 0) {
								that.scrollTop = top;
							}
						}).exec()
				})
			},
			// Close the generic prompt dialog.
			hidePromptbox() {
				this.showPromptbox = false;
			}
		}
	}
</script>

<!-- #ifdef APP -->
<script module="testMainVue" lang="renderjs">
	/**============= App中在renderjs中引入RecordApp，这样App中也能使用H5录音、音频可视化 =============**/
	/** 先引入Recorder **/
	import Recorder from 'recorder-core'; //注意如果未引用Recorder变量，可能编译时会被优化删除（如vue3 tree-shaking），请改成 import 'recorder-core'，或随便调用一下 Recorder.a=1 保证强引用

	//按需引入需要的录音格式编码器，用不到的不需要引入，减少程序体积；H5、renderjs中可以把编码器放到static文件夹里面用动态创建script来引入，免得这些文件太大
	import 'recorder-core/src/engine/mp3.js'
	import 'recorder-core/src/engine/mp3-engine.js'
	import 'recorder-core/src/engine/wav.js'
	import 'recorder-core/src/engine/pcm.js'

	//可选引入可视化插件
	import 'recorder-core/src/extensions/waveview.js'
	import 'recorder-core/src/extensions/wavesurfer.view.js'

	import 'recorder-core/src/extensions/frequency.histogram.view.js'
	import 'recorder-core/src/extensions/lib.fft.js'

	/** 引入RecordApp **/
	import RecordApp from 'recorder-core/src/app-support/app.js'
	//【必须引入】uni-app支持文件
	import '../../uni_modules/Recorder-UniCore/app-uni-support.js'

	export default {
		mounted() {
			// Required in App renderjs: register this module with RecordApp so it
			// can drive recording/visualization from the render layer.
			RecordApp.UniRenderjsRegister(this);
		},
		methods: {
			// Methods defined here are callable from the logic layer via
			// RecordApp.UniWebViewVueCall(this,'this.xxxFunc()'); to call back into
			// the logic layer use this.$ownerInstance.callMethod("xxxFunc",{args})
			// (binary data must be base64-encoded for transfer).
		}
	}
</script>
<!-- #endif -->

<style lang="less">
	/* Page background: dark radial gradient behind everything. */
	.backview {
		background: radial-gradient(circle, #373632, #000);
		width: 100%;
		height: 100vh;
		text-align: center;
	}

	/* Main column layout with a vertically repeating texture overlay. */
	.main {
		width: 100%;
		height: 100vh;
		display: flex;
		position: relative;
		flex-direction: column;
		background-image: url(../../static/image/homePage/diwen.png);
		background-size: 100% auto;
		background-position: top left;
		background-repeat: repeat-y;
		box-sizing: border-box;
	}

	/* 1px-tall anchor measured by onReady() to size the scroll area; also hosts
	   the off-screen keyboard-capture inputs (parked above the viewport). */
	.top-menu {
		width: 100%;
		height: 1px;

		.input-content-back {
			width: 100px;
			height: 1px;
			position: absolute;
			top: -200px;

			.input-class {
				width: 100px;
				height: 10px;
			}
		}
	}

	/* Chat history scroll area (height set inline from scrollHeight). */
	.message-scroll {
		width: 100%;
		// background-color: red;
		font-family: Arial, sans-serif;
		padding: 20px 0px;
		box-sizing: border-box;
	}

	.chat-container {
		display: flex;
		width: 100%;
		flex-direction: column;
		gap: 20px;
	}

	/* Bottom control card: bee animation above a gold-bordered black panel. */
	.bottom-menuview {
		width: 88%;
		margin-left: 6%;
		bottom: 10px;
		box-sizing: border-box;
		border-radius: 30px;
		height: 330px;
		position: relative;
		display: flex;
		flex-direction: column;
		align-items: center;
		justify-content: center;

		.mifeng {
			width: 100%;
			height: 180px;
		}

		/* Gold gradient border effect via 3px padding around a black inner panel. */
		.bottom-content-one {
			width: 100%;
			height: 150px;
			overflow: hidden;
			background: linear-gradient(to bottom, #e3b75f, #feef7e);
			padding: 3px;
			border-radius: 30px;

			.bottom-content-two {
				width: 100%;
				height: 100%;
				position: relative;
				background: #000;
				border-radius: 30px;
			}
		}
	}

	/* Chat bubble base style. */
	.message {
		max-width: 70%;
		min-width: 50px;
		padding: 10px;
		min-height: 40px;
		text-align: left;
		border-radius: 20px;
		color: #fff;
	}

	/* Bot bubble: grey on the left. */
	.message-left {
		align-self: flex-start;
		background-color: #e0e0e0;
		color: #000;
	}

	/* User bubble: blue on the right. */
	.message-right {
		align-self: flex-end;
		background-color: #0084ff;
	}

	.avatar {
		width: 40px;
		height: 40px;
		border-radius: 50%;
		background-size: cover;
	}

	.left-bubble {
		display: flex;
		align-items: flex-start;
		gap: 10px;
	}

	.right-bubble {
		display: flex;
		flex-direction: row;
		align-items: flex-start;
		justify-content: flex-end;
		gap: 10px;
	}

	/* Bot avatar (left side). */
	.avatar1 {
		background-image: url(../../static/image/ftalk/ftalk-left-avater.png);
		margin-left: 15px;
	}

	/* User avatar (right side). */
	.avatar2 {
		background-image: url(../../static/image/ftalk/ftalk-right-avater.png);
		margin-right: 15px;
	}


	/* Animated "speaking" gif shown while recording is active. */
	.voicebegin-img {

		width: 100%;
		height: 120px;
		// z-index: -1;
	}

	/* Record button + hint text, centered over the bottom panel. */
	.voice-back {
		width: 100%;
		height: 100%;
		position: absolute;
		top: 0;
		left: 0;
		display: flex;
		flex-direction: column;
		align-items: center;
		justify-content: center;

		image {
			width: 75px;
			height: 75px;
		}

		/* Button art comes from the longPressImage inline background. */
		.voice-content {
			width: 75px;
			height: 76px;
			background-size: 100% 100%;
			background-position: center center;
			background-repeat: no-repeat;
		}

		.voice-str {
			width: 100%;
			font-size: 12px;
			margin-top: 10px;
			font-weight: 800;
			text-align: center;
			color: white;
			// margin: 20px;
		}
	}

	/* "AI loading" layout block.
	   NOTE(review): .aiload-back / .aiload-str / .right-text / .aiLoadgoon are not
	   referenced by this file's template, and .aiLoadClass (used by the template
	   inside .voice-back) is nested here under .aiload-back, so the nested rule
	   may never match — verify before relying on it or removing it. */
	.aiload-back {
		width: 100%;
		height: 200px;
		position: relative;
		display: flex;
		flex-direction: row;
		align-items: center;
		justify-content: center;

		.aiload-str {
			width: 960px;
			position: absolute;
			bottom: 20px;
			left: 0;
			text-align: center;
			font-size: 26px;
			font-weight: 800;
			text-align: center; // NOTE(review): duplicate of the declaration above
			color: white;
		}

		.aileft-content {
			width: 200px;
			height: 200px;
			display: flex;
			flex-direction: column;
			align-items: center;
			justify-content: flex-start;
		}

		.aiLoadClass {
			width: 75px;
			height: 75px;
		}

		.right-text {
			width: 740px;
			position: absolute;
			right: 10px;
			height: 200px;
			font-size: 26px;
			box-sizing: border-box;
			color: white;
		}

		.aiLoadgoon {
			width: 120px;
			height: 40px;
			border-radius: 10px;
			line-height: 40px;
			text-align: center;
			background: linear-gradient(to bottom, #e3b75f, #feef7e);
			color: black;
			font-size: 20px;
		}
	}

	/* Circular cancel button overlaid near the top of the bottom panel. */
	.delete-back {
		width: 30px;
		height: 30px;
		z-index: 100;
		display: flex;
		align-items: center;
		justify-content: center;
		left: 60%;
		top: 10px;
		position: absolute;
		border-radius: 20px;

		.deleteclass {
			width: 20px;
			height: 20px;
			border-radius: 20px;
		}
	}
</style>