<template>
	<div class="recoder">
		<!-- 按钮 -->
		<div class="realSource">
			<audio :src="state.uploadSrc" controls></audio>
		</div>
		<div class="recode">
			<div class="top">
				<div v-if="state.stopStaus">
					<audio :src="state.audioSrc" controls></audio>
				</div>
				<div class="waveSound" v-else>
					<WaveSound></WaveSound>
				</div>
				<el-button @click="upload(recBlob)">上传录音</el-button>
			</div>
			<div class="bottom">
				<div class=" icon" @click="recOpen" v-if="state.stopStaus">
					<span>
						<el-icon><Microphone/></el-icon>
					</span>
					<span class="text">
						（点击开始录音）
					</span>

				</div>
				<div class=" icon" @click="recStop" v-else>
					<span>
						<el-icon><Mic/></el-icon>
					</span>
					<span class="text">
						（点击暂停录音）
					</span>
				</div>
				<div class="tips">
					录音结束后别忘记上传哦
				</div>
			</div>
		</div>
	</div>
</template>
<script setup lang="ts">
//必须引入的核心
import Recorder from 'recorder-core';
import axios from "axios";
import {useRoute} from "vue-router";
//引入mp3格式支持文件；如果需要多个格式支持，把这些格式的编码引擎js文件放到后面统统引入进来即可
import 'recorder-core/src/engine/mp3';
import 'recorder-core/src/engine/mp3-engine';
//录制wav格式的用这一句就行
import 'recorder-core/src/engine/wav';
//可选的插件支持项，这个是波形可视化插件
import 'recorder-core/src/extensions/waveview';
import {defineProps, onUnmounted, reactive, ref, toRefs} from 'vue';
import {Mic, Microphone} from "eleicon";
import WaveSound from "@/components/Recoder/Wave/WaveSound.vue";
import Cookies from "js-cookie";
import {ElMessage} from "element-plus";
import Bus from "@/utils/bus";

import { nanoid } from 'nanoid';
import { uuid } from 'vue-uuid';


// Props passed down from the parent question component.
const props = defineProps({
	// Which question / which answer slot this recorder instance belongs to.
	questionIndex: Number,
	answerIndex: Number,
})

const {questionIndex, answerIndex} = toRefs(props);
// Plain mutable module refs (intentionally not reactive):
let rec: any;     // recorder-core instance while a recording session is open
let recBlob: any; // finished recording blob, set in recStop's success callback
let wave: any;    // Recorder.WaveView instance for the waveform visualizer
const router = useRoute(); // NOTE(review): useRoute returns the current *route*, not the router — name is misleading
const recwave = ref(null); // container element the wave view renders into
// Reactive UI state. `stopStaus` [sic] is true when idle/stopped, false while recording.
const state = reactive<{ stopStaus: boolean, audioSrc: string, uploadSrc: string ,socket: any}>({
	stopStaus: true,
	audioSrc: '',  // local object URL of the last recording (for playback)
	uploadSrc: '', // server URL after a successful upload
	socket: null   // live websocket to Baidu realtime ASR, or null
})

// Handshake frame for Baidu's realtime ASR websocket; must be sent as the
// "START" message before any audio data.
// SECURITY(review): appid/appkey are credentials hard-coded into client-side
// code — anyone can read them from the shipped bundle. Move them server-side.
const BAI_DU_ASR_CONFIG = {
	type: "START",
	data: {
		appid: 27358668,
		appkey: "GGktxTujO4VFRbvHBzVG7um3",
		dev_pid: 15372, // recognition model id — TODO confirm which model/language this selects
		cuid: nanoid(), // per-session client identifier
		format: "pcm",
		sample: 16000, // NOTE(review): recorder captures at 48000 Hz (see recOpen) — rates disagree, confirm
	},
};
// When the component unmounts, stop any audio element that is still playing.
onUnmounted(() => {
	document.querySelectorAll('audio').forEach((audio) => audio.pause());
})


// Open a websocket to Baidu realtime ASR. Each session needs a unique `sn`.
const initSocket = () => {
	const url = `wss://vop.baidu.com/realtime_asr?sn=${uuid.v1()}`;
	const socket = new WebSocket(url);
	state.socket = socket;

	socket.addEventListener("open", () => {
		// Baidu expects the START frame before any audio data arrives.
		socket.send(JSON.stringify(BAI_DU_ASR_CONFIG));
	});

	socket.addEventListener("message", (event: MessageEvent) => {
		// Partial/final recognition results arrive here as JSON strings.
		console.log(event.data);
	});

	socket.addEventListener("error", (event: Event) => {
		// Without this, connection failures were silently dropped.
		console.error('ASR websocket error', event);
	});

	socket.addEventListener("close", () => {
		// Drop the reference so onProcess stops sending frames into a dead
		// socket when the server (not recStop) closed the connection.
		if (state.socket === socket) {
			state.socket = null;
		}
	});
}
// 打开录音
const recOpen = () => {
	initSocket();
	//创建录音对象
	rec = Recorder({
		type: 'wav', //录音格式，可以换成wav等其他格式
		sampleRate: 48000, //录音的采样率，越大细节越丰富越细腻
		bitRate: 16, //录音的比特率，越大音质越好
		onProcess: (
			buffers: any,
			powerLevel: any,
			bufferDuration: any,
			bufferSampleRate: 48000,
			newBufferIdx: any,
			asyncEnd: any,
		) => {
			//录音实时回调，大约1秒调用12次本回调
			const buffer = new ArrayBuffer(buffers[buffers.length - 1].length * 2);
			const view = new DataView(buffer);
			floatTo16BitPCM(view, 0, buffers[buffers.length - 1])
			state.socket && state.socket.send(view);
		},
	});
	if (!rec) {
		alert('当前浏览器不支持录音功能！');
		return;
	}
	//打开录音，获得权限
	rec.open(
		() => {
			console.log('录音已打开');
			if (recwave.value) {
				//创建音频可视化图形绘制对象
				wave = Recorder.WaveView({elem: recwave.value});
			}
			recStart();
		},
		(msg: any, isUserNotAllow: any) => {
			//用户拒绝了录音权限，或者浏览器不支持录音
			console.log((isUserNotAllow ? 'UserNotAllow，' : '') + '无法录音:' + msg);
		},
	);

}
// Start capturing audio on an already-opened recorder and flip the UI
// into its "recording" state.
const recStart = () => {
	if (rec) {
		rec.start();
		console.log('已开始录音');
		state.stopStaus = false;
	} else {
		console.error('未打开录音');
	}
}
// Stop recording: finalize the blob, tell the ASR backend the stream is done,
// and release the recorder resources.
const recStop = () => {
	if (!rec) {
		console.error('未打开录音');
		return;
	}

	rec.stop(
		(blob: any, duration: any) => {
			// `blob` is the finished recording — kept for upload and playback.
			recBlob = blob;
			if (state.socket) {
				// Signal end-of-stream to Baidu ASR, then close the socket.
				state.socket.send(
					JSON.stringify({
						type: "FINISH",
					})
				);
				state.socket.close();
				state.socket = null;
			}
			console.log(recBlob)
			// Object URL is only valid locally, e.g. for audio.src playback or
			// an <a download="xxx.mp3"> link.
			const localUrl = (window.URL || window.webkitURL).createObjectURL(blob);
			console.log('录音成功', blob, localUrl, '时长:' + duration + 'ms');
			// Reuse localUrl instead of creating (and leaking) a second object URL
			// for the same blob.
			state.audioSrc = localUrl;
			rec.close(); // release mic resources; recOpen must run again for the next take
			rec = null;
		},
		(err: any) => {
			console.error('结束录音出错：' + err);
			rec.close();
			rec = null;
		},
	);
	// Removed dead code that pushed recBlob into an unused array here: the stop
	// callback above runs asynchronously, so recBlob was still stale at this point.
	// Flip the UI back to idle immediately.
	state.stopStaus = true;
}
// Upload the recorded blob as multipart/form-data and broadcast the
// resulting server URL to the parent question component via the event bus.
const upload = (blob: Blob) => {
	// Fixed guard: was `!blob && state.audioSrc === ''`, which let an undefined
	// blob through whenever a previous recording's audioSrc was still set,
	// producing a garbage upload instead of this error message.
	if (!blob) {
		ElMessage.error('请先录音');
		return;
	}
	// FormData packages the blob as "key-value" form fields for the HTTP POST.
	const formData = new FormData();
	const file = convertBlobToMP3(blob);
	// Use the object-URL's trailing segment as a unique-ish file name.
	const filename = state.audioSrc.split('/')[state.audioSrc.split('/').length - 1];
	formData.append('file', file, filename + '.mp3');
	axios.post('/api/webstudent.exam_papers/upload', formData, {
		headers: {
			'Content-Type': 'multipart/form-data',
			'q_id': '', // NOTE(review): always empty — confirm whether the API requires a real question id
			'group_id': String(router.query.coursewareId),
			'share_id': String(router.query.shareId),
			'Authorization': Cookies.get('token'),
		}
	}).then(res => {
		state.uploadSrc = location.origin + '/new_uploads/' + res.data.data.url;
		// Notify the parent about the uploaded answer for this question/answer slot.
		Bus.emit('uploadAudio' + questionIndex?.value,{answer: state.uploadSrc , index: answerIndex?.value});
		ElMessage.success('上传成功');

	}).catch(error => {
		// Keep the failure cause visible instead of silently swallowing it.
		console.error('上传失败', error);
		ElMessage.error('上传失败');
	});
}
// NOTE: this does not transcode the audio — it re-wraps the same bytes with an
// `audio/mpeg` MIME type so the upload can be named ".mp3".
const convertBlobToMP3 = (blob: Blob): Blob => new Blob([blob], {type: 'audio/mpeg'});




// NOTE(review): apparent dead code — this handler is never registered anywhere
// in this component, and the PCM view it builds is discarded. Looks like a
// leftover ScriptProcessorNode experiment; confirm and consider removing.
const onAudioProcess = (event: AudioProcessingEvent) =>  {
	const audioBuffer = event.inputBuffer;

	const leftChannelData = audioBuffer.getChannelData(0);
	// Unused; would also throw on mono input (channel 1 absent) — TODO confirm.
	const rightChannelData = audioBuffer.getChannelData(1);

	const buffer = new ArrayBuffer(leftChannelData.length * 2),
		view = new DataView(buffer);
	floatTo16BitPCM(view, 0, leftChannelData);

}


// Convert normalized float samples (clamped to [-1, 1]) into little-endian
// signed 16-bit PCM, writing into `output` starting at byte `offset`.
const floatTo16BitPCM = (output: DataView, offset: number, input: Float32Array) => {
	input.forEach((sample, index) => {
		const clamped = Math.min(1, Math.max(-1, sample));
		const scaled = clamped < 0 ? clamped * 0x8000 : clamped * 0x7fff;
		output.setInt16(offset + index * 2, scaled, true);
	});
}

</script>


<style scoped lang="scss">
.recoder {
	box-sizing: border-box;

	.icon {
		display: flex;
		align-items: center;
		font-size: 50px;

		.text {

			font-size: 15px;
		}
	}

	.realSource {
		height: 200px;
		display: flex;
		justify-content: center;
		align-items: center;
		border: 1px #dcdee2 dotted;
	}

	.recode {
		border: 1px #dcdee2 dotted;
		margin-top: 30px;
		padding: 30px;

		.top {
			display: flex;
			justify-content: space-around;
			align-items: center;
		}

		.bottom {
			display: flex;
			flex-direction: column;
			justify-content: center;
			align-items: center;

			.tips {
				margin-top: 20px;
			}
		}
	}
}

</style>