<template>
	<view>
		<button @click="startRecording">开始录音</button>
		<button @click="stopRecording">停止录音</button>
		<canvas canvas-id="gauge" style="width: 300px; height: 300px;"></canvas>
	</view>
</template>

<script>
	import FFT from 'fft.js';

	/**
	 * Estimate the dominant frequency of a mono PCM frame via FFT peak-picking,
	 * log it together with the matching note name, and return it.
	 *
	 * @param {Float32Array} buffer - audio samples normalized to [-1, 1]
	 * @returns {number|null} detected frequency in Hz, or null when the frame
	 *   is too short to analyze
	 */
	function analyzeFrequency(buffer) {
		const sampleRate = 44100; // must match the recorder's start() options

		// fft.js requires the input length to equal the FFT size (a power of two).
		// The recorder frame is NOT guaranteed to be exactly 128 samples (the
		// `frameSize` recorder option is specified in KB), so derive the size from
		// the actual data and analyze the largest power-of-two prefix instead of
		// hard-coding 128.
		let fftSize = 1;
		while (fftSize * 2 <= buffer.length) {
			fftSize *= 2;
		}
		if (fftSize < 4) {
			return null; // too little data for a meaningful spectrum
		}

		const fft = new FFT(fftSize);
		const spectrum = fft.createComplexArray();
		fft.realTransform(spectrum, buffer.subarray(0, fftSize));

		// Magnitudes of the first half of the spectrum (real input is symmetric).
		const magnitudes = new Float32Array(fftSize / 2);
		for (let i = 0; i < magnitudes.length; i++) {
			magnitudes[i] = Math.sqrt(spectrum[2 * i] ** 2 + spectrum[2 * i + 1] ** 2);
		}

		// Pick the strongest bin, skipping bin 0: the DC component carries no
		// pitch information and often dominates the spectrum, which previously
		// made the detector report 0 Hz for any signal with a DC offset.
		let maxIndex = 1;
		let maxValue = -Infinity;
		for (let i = 1; i < magnitudes.length; i++) {
			if (magnitudes[i] > maxValue) {
				maxValue = magnitudes[i];
				maxIndex = i;
			}
		}

		// Bin index -> Hz: each bin spans sampleRate / fftSize Hz.
		const frequency = maxIndex * (sampleRate / fftSize);
		console.log('Detected Frequency (Hz):', frequency);
		const note = frequencyToNote(frequency);
		console.log('Corresponding Note:', note);
		return frequency;
	}

	/**
	 * Convert a frequency in Hz to a scientific-pitch note name, e.g. 440 -> "A4".
	 *
	 * @param {number} frequency - frequency in Hz; must be positive and finite
	 * @returns {string|null} note name with octave, or null for invalid input
	 */
	function frequencyToNote(frequency) {
		// Math.log2 of a non-positive value is NaN/-Infinity; fail explicitly.
		if (!Number.isFinite(frequency) || frequency <= 0) {
			return null;
		}

		const A4 = 440; // reference pitch, MIDI note number 69
		// MIDI pitch classes satisfy midi % 12 === 0 for C, so the name table
		// must start at C. (The original table started at A, which mislabelled
		// every note: 440 Hz came out as "F#4" instead of "A4".)
		const notes = ['C', 'C#', 'D', 'D#', 'E', 'F', 'F#', 'G', 'G#', 'A', 'A#', 'B'];

		// Round to the nearest semitone relative to A4, then shift to MIDI numbering.
		const midi = Math.round(12 * Math.log2(frequency / A4)) + 69;
		// Double-modulo keeps the index non-negative for very low frequencies.
		const noteIndex = ((midi % 12) + 12) % 12;
		// MIDI octave convention: C4 = 60, so octave = floor(midi / 12) - 1.
		const octave = Math.floor(midi / 12) - 1;

		return notes[noteIndex] + octave;
	}
	export default {
		data() {
			return {
				recorderManager: null, // platform recorder singleton, created lazily
				audioContext: null,
				detector: null,
			};
		},
		methods: {
			/**
			 * Start capturing raw PCM audio frames.
			 *
			 * getRecorderManager() returns a global singleton, so the frame
			 * callback is registered only once; re-registering on every start
			 * would stack duplicate listeners across start/stop cycles.
			 */
			startRecording() {
				if (!this.recorderManager) {
					this.recorderManager = wx.getRecorderManager();
					this.recorderManager.onFrameRecorded(this.processAudioFrame);
				}
				this.recorderManager.start({
					duration: 60000, // maximum recording time (ms)
					sampleRate: 44100, // must match analyzeFrequency()
					numberOfChannels: 1, // mono
					encodeBitRate: 192000, // encoding bit rate
					format: 'pcm', // raw samples, required for FFT analysis
					frameSize: 128, // frame size — NOTE(review): unit is KB per platform docs, not samples
				});
			},

			/**
			 * Stop recording. Safe to call before the first start (no-op).
			 */
			stopRecording() {
				if (this.recorderManager) {
					this.recorderManager.stop();
				}
			},

			/**
			 * Convert one raw PCM frame to normalized floats, run pitch
			 * analysis, and update the gauge when a pitch was detected.
			 *
			 * @param {{frameBuffer: ArrayBuffer}} res - onFrameRecorded payload
			 */
			processAudioFrame(res) {
				const { frameBuffer } = res;
				const int16Array = new Int16Array(frameBuffer);
				const float32Array = new Float32Array(int16Array.length);
				for (let i = 0; i < int16Array.length; i++) {
					// Map Int16 [-32768, 32767] onto [-1.0, 1.0].
					float32Array[i] = int16Array[i] / 32768.0;
				}
				const frequency = analyzeFrequency(float32Array);
				// Guard: only redraw for a finite numeric result, so this also
				// behaves correctly if analyzeFrequency returns nothing.
				if (typeof frequency === 'number' && isFinite(frequency)) {
					this.drawGauge(frequency);
				}
			},

			/**
			 * Draw a circular gauge whose needle angle maps pitch linearly:
			 * 0..1000 Hz covers one full revolution.
			 *
			 * @param {number} pitch - frequency in Hz
			 */
			drawGauge(pitch) {
				const ctx = wx.createCanvasContext('gauge');
				ctx.clearRect(0, 0, 300, 300);

				// Gauge dial.
				ctx.beginPath();
				ctx.arc(150, 150, 100, 0, 2 * Math.PI);
				ctx.stroke();

				// Needle.
				const angle = (pitch / 1000) * 2 * Math.PI;
				ctx.beginPath();
				ctx.moveTo(150, 150);
				ctx.lineTo(150 + 100 * Math.cos(angle), 150 + 100 * Math.sin(angle));
				ctx.stroke();

				ctx.draw();
			}
		},
		mounted() {
			// Intentionally empty: the recorder is created on demand in startRecording().
		}
	};
</script>