'use client';
import { getTxtNode } from '@/lib/nodeUtil';
import { RefObject, useEffect, useRef, useState } from 'react';
import { Prism as SyntaxHighlighter } from 'react-syntax-highlighter';
import * as fabric from 'fabric'; // v6
import WebrtcNav from '@/components/WebrtcNav';
// Module-level audio/visualization state shared by the helpers below.
// NOTE(review): module scope means this state is shared by every instance of
// the page — only one capture/visualization can be active at a time.
let audioContext: AudioContext; // created in startAudioCapture (user gesture)
let analyser: AnalyserNode; // FFT analyser feeding the spectrum canvas
let canvasContext: CanvasRenderingContext2D; // 2D context bound by setupCanvas
let canvas: HTMLCanvasElement; // visualization canvas bound by setupCanvas
let stream: MediaStream; // the stream currently being analysed

// Bind the visualization canvas and sync its drawing-buffer size to its
// rendered (CSS) size so the waveform/spectrum is not drawn blurry.
function setupCanvas(canvasRef: RefObject<HTMLCanvasElement>) {
	const el = canvasRef.current;
	if (!el) return; // ref not attached yet — nothing to bind
	canvas = el;
	// canvas = document.getElementById('waveform') as HTMLCanvasElement;
	canvasContext = canvas.getContext('2d')!;

	// Match the backing-store resolution to the layout size (avoids blur).
	const width = canvas.clientWidth;
	const height = canvas.clientHeight;
	canvas.width = width;
	canvas.height = height;
}
// Render one frame of the frequency spectrum as vertical bars on the shared
// canvas. Refreshes `dataArray` in place from the analyser before drawing.
function drawSpectrum(dataArray: Uint8Array<ArrayBuffer>) {
	analyser.getByteFrequencyData(dataArray);

	// Wipe the previous frame with a light-grey background.
	canvasContext.fillStyle = 'rgb(240, 240, 240)';
	canvasContext.fillRect(0, 0, canvas.width, canvas.height);

	const barWidth = (canvas.width / dataArray.length) * 2.5;

	dataArray.forEach((magnitude, index) => {
		// The green channel tracks the bin's magnitude (0-255).
		canvasContext.fillStyle = `rgb(0, ${magnitude}, 255)`;
		canvasContext.fillRect(
			(barWidth + 1) * index, // bars are barWidth wide with a 1px gap
			canvas.height - magnitude,
			barWidth,
			magnitude,
		);
	});
}
// Render one frame of the time-domain waveform (oscilloscope trace) on the
// shared canvas. `dataArray` holds unsigned byte samples where 128 is the
// centre line (silence). Narrowed from `string | any[] | Uint8Array<any>`:
// only numeric sequences make sense for the per-sample division below.
function drawWaveform(dataArray: ArrayLike<number>) {
	canvasContext.clearRect(0, 0, canvas.width, canvas.height);
	canvasContext.lineWidth = 2;
	canvasContext.strokeStyle = 'rgb(0, 128, 255)';
	canvasContext.beginPath();

	const sliceWidth = canvas.width / dataArray.length;
	let x = 0;

	for (let i = 0; i < dataArray.length; i++) {
		// Normalise the 0-255 byte to 0-2, then scale to canvas height.
		const v = dataArray[i] / 128.0;
		const y = (v * canvas.height) / 2;

		if (i === 0) {
			canvasContext.moveTo(x, y);
		} else {
			canvasContext.lineTo(x, y);
		}

		x += sliceWidth;
	}

	// Finish the trace at the vertical centre of the right edge.
	canvasContext.lineTo(canvas.width, canvas.height / 2);
	canvasContext.stroke();
}

// Start visualizing (and, when isPlay is true, playing back) the given media
// stream. Builds an AnalyserNode pipeline and drives a requestAnimationFrame
// loop that paints the spectrum onto the canvas bound by setupCanvas().
async function startAudioCapture(
	audioStream: MediaStream,
	isPlay: boolean = false,
) {
	try {
		// Tear down any previous capture first so repeated "play" clicks do
		// not leak AudioContexts or stack extra animation loops.
		if (audioContext && audioContext.state !== 'closed') {
			await audioContext.close();
		}

		// 1. Keep a reference to the WebRTC stream (microphone/camera).
		stream = audioStream;

		// 2. AudioContext must be created during a user gesture.
		audioContext = new AudioContext();
		const ownContext = audioContext; // generation marker for the draw loop

		// 3. Analyser: 2048-point FFT → frequencyBinCount byte-sized bins.
		analyser = audioContext.createAnalyser();
		analyser.fftSize = 2048;
		const bufferLength = analyser.frequencyBinCount;
		const dataArray = new Uint8Array(bufferLength);

		// 4. Feed the stream into the analyser.
		const source = audioContext.createMediaStreamSource(stream);
		source.connect(analyser);

		// 5. Route audio to the speakers only when requested (audio-only mode).
		if (isPlay) {
			analyser.connect(audioContext.destination);
		}

		// 6. Visualization loop. Exits when a newer capture has replaced this
		// context or the context has been closed.
		function draw() {
			if (audioContext !== ownContext || ownContext.state === 'closed') {
				return;
			}
			requestAnimationFrame(draw);
			// drawSpectrum refreshes dataArray itself (getByteFrequencyData),
			// so the former getByteTimeDomainData call here was redundant.
			drawSpectrum(dataArray);
			// drawWaveform(dataArray); // alternative: time-domain trace
		}
		draw();

		console.log('✅ 音频采集和播放已启动');
	} catch (err) {
		console.error('❌ 无法访问麦克风:', err);
		// alert('无法访问麦克风: ' + err.message);
	}
}

// Checkbox options for which track kinds to request from getUserMedia.
const plainOptions = ['video', 'audio'];
const defaultCheckedList = ['video', 'audio'];
// getUserMedia constraints. width/height min/ideal are overwritten at mount
// to fit the page layout (see the useEffect in BasicPage).
// NOTE: echoCancellation / noiseSuppression / voiceIsolation are audio-track
// constraints; they were previously (ineffectively) listed under `video` and
// have been moved/removed accordingly.
const mediaStreamContrains = {
	video: {
		frameRate: { min: 20 },
		width: { min: 640, ideal: 1280 },
		height: { min: 360, ideal: 720 },
		aspectRatio: 16 / 9,

		facingMode: 'user', // front camera on mobile devices
		// Advanced/non-standard hints; browsers that don't support a
		// constraint name simply ignore it.
		zoom: { ideal: 1.5 },
		brightness: { ideal: 100 },
	},
	audio: {
		// Voice-call style processing.
		echoCancellation: true,
		noiseSuppression: true,
		autoGainControl: true,
		voiceIsolation: true,

		sampleRate: 48000,
		channelCount: 1,
	},
};
// Demo page: capture camera/microphone via getUserMedia, preview the video,
// visualize the audio spectrum on a canvas, and experiment with painting the
// live video frames onto a second canvas through fabric.js.
const BasicPage = () => {
	// NOTE(review): "vidioRef" is a typo for videoRef (kept as-is here).
	const vidioRef: RefObject<null | HTMLVideoElement> = useRef(null);
	// Canvas used for the live audio-spectrum visualization.
	const canvasRef: RefObject<null | HTMLCanvasElement> = useRef(null);
	// Canvas driven by fabric.js in the "save" experiment.
	const canvasSaveRef: RefObject<null | HTMLCanvasElement> = useRef(null);
	// Wrapper div measured at mount to size the video and canvases.
	const wrapRef: RefObject<null | HTMLDivElement> = useRef(null);
	// All enumerated media devices (not only audio inputs, despite the name).
	const [audios, setAudios] = useState<MediaDeviceInfo[]>([]);
	// NOTE(review): only written by a commented-out line below; stays null.
	const [curVideo, setCurVideo] = useState<any>(null);
	// [width, height] applied to the video element and both canvases (16:9).
	const [wh, setWh] = useState([0, 0]);

	// Which track kinds ('video'/'audio') to request from getUserMedia.
	const [checkedList, setCheckedList] = useState<string[]>(defaultCheckedList);

	// Request the selected tracks, attach the resulting stream to the <video>
	// element, and start the audio visualization.
	const handlePlay = () => {
		// Build a constraints object containing only the checked kinds.
		const media: any = {};
		checkedList.forEach((item: string) => {
			media[item] =
				mediaStreamContrains[item as keyof typeof mediaStreamContrains];
		});
		navigator.mediaDevices
			.getUserMedia(media)
			.then((stream) => {
				console.log(stream);

				// setCurVideo(JSON.stringify(stream));
				// Re-enumerate after permission is granted so device labels
				// become visible.
				getAudioDevices();
				if (vidioRef.current) {
					vidioRef.current.srcObject = stream;
				}
				// Route audio to the speakers only in audio-only mode.
				startAudioCapture(stream, !checkedList.includes('video'));
			})
			.catch(() => {
				alert('请允许访问您的媒体设备');
			});
	};
	// Stop every track of the current stream.
	// NOTE(review): srcObject is not cleared and the visualization keeps
	// running on the last frame of data — confirm whether that is intended.
	const handleStop = () => {
		if (vidioRef.current && vidioRef.current.srcObject) {
			const tracks = (vidioRef.current.srcObject as MediaStream).getTracks();
			tracks.forEach((track) => {
				track.stop();
			});
		}
	};
	// Enumerate all media devices into state; resolves with the video inputs
	// (the resolved value is currently unused by callers in this file).
	const getAudioDevices = () => {
		return navigator.mediaDevices.enumerateDevices().then((devices) => {
			setAudios(devices);
			return devices.filter((device) => device.kind === 'videoinput');
		});
	};

	// Experiment: wrap the save-canvas in fabric.js and continuously draw the
	// live <video> element onto it (rotated 15°).
	// NOTE(review): every click constructs a new fabric.Canvas over the same
	// element and starts another endless requestAnimFrame loop — repeated
	// clicks leak canvases and render loops; consider guarding/disposing.
	const handleSave = () => {
		if (canvasSaveRef.current) {
			const canvas = new fabric.Canvas(canvasSaveRef.current);
			// const rect = new fabric.Rect({
			// 	left: 0,
			// 	top: 0,
			// 	fill: 'red',
			// 	width: 150,
			// 	height: 150
			// });
			// canvas.add(rect);
			// canvas.renderAll();
			// console.log(canvas);
			// console.log(canvasSaveRef.current)
			// Wrap the video element as a fabric image source.
			const video2 = new fabric.FabricImage(vidioRef.current!, {
				// left: 70,
				// top: 200,
				// left: 0,
				// top: 0,
				width: canvasSaveRef.current.clientWidth,
				height: canvasSaveRef.current.clientHeight,
				angle: 15,
				// originX: 'center',
				// originY: 'center',
				objectCaching: true,
				// scaleX: 0.5,
				// scaleY: 0.5
			});
			console.log(video2);
			canvas.add(video2);
			// canvas.renderAll();
			// Re-render every animation frame so the canvas follows the video.
			fabric.util.requestAnimFrame(function render() {
				canvas.renderAll();
				fabric.util.requestAnimFrame(render);
			});
			// canvasSaveRef.current.getContext('2d')!.drawImage(vidioRef.current!, 0, 0, canvas.width, canvas.height);
		}
	};
	// On mount: size the video/canvases to the wrapper at a 16:9 ratio, push
	// that size into the shared getUserMedia constraints, enumerate devices,
	// and bind the visualization canvas.
	useEffect(() => {
		if (wrapRef.current) {
			const tmp = [
				wrapRef.current.clientWidth - 10,
				~~(((wrapRef.current.clientWidth - 10) * 9) / 16), // 16:9, truncated
			];
			// Mutates the module-level constraints so getUserMedia requests
			// exactly the layout size.
			mediaStreamContrains.video.width.min = tmp[0];
			mediaStreamContrains.video.height.min = tmp[1];
			mediaStreamContrains.video.width.ideal = tmp[0];
			mediaStreamContrains.video.height.ideal = tmp[1];
			console.log(tmp);
			setWh(tmp);
		}
		getAudioDevices();
		setupCanvas(canvasRef as RefObject<HTMLCanvasElement>);
	}, []);
	return (
		<div>
			{/* <h1 className='border-b-1 border-gray-300 leading-normal m-0 p-3 pl-10'>Basic Page</h1> */}
			<WebrtcNav text={'Basic Page'}></WebrtcNav>
			<div className="flex">
				<div className="flex-1" ref={wrapRef}>
					<div className="shadow-sm">
						{/* live camera preview (muted to avoid feedback) */}
						<video
							className="border-1 rounded-sm mb-2"
							ref={vidioRef}
							autoPlay
							controls
							muted
							playsInline
							width={wh[0]}
							height={wh[1]}
						></video>
						<div className="flex  space-x-2 md:space-x-4 items-center">
							{/* 自定义复选框组 */}
							<div className="flex space-x-4">
								{plainOptions.map((option) => (
									<label key={option} className="inline-flex items-center">
										<input
											type="checkbox"
											checked={checkedList.includes(option)}
											onChange={(e) => {
												if (e.target.checked) {
													setCheckedList([...checkedList, option]);
												} else {
													setCheckedList(checkedList.filter((item) => item !== option));
												}
											}}
											className="form-checkbox h-4 w-4 text-blue-600"
										/>
										<span className="ml-2 text-gray-700">{option}</span>
									</label>
								))}
							</div>

							<button onClick={handlePlay} className="px-4 py-2 bg-blue-500 text-white rounded hover:bg-blue-600 transition-colors">开始录制</button>
							<button onClick={handleStop} className="px-4 py-2 bg-red-500 text-white rounded hover:bg-red-600 transition-colors">停止录制</button>
							<button onClick={handleSave} className="px-4 py-2 bg-green-500 text-white rounded hover:bg-green-600 transition-colors">保存</button>
						</div>
						{/* audio-spectrum visualization canvas */}
						<div className="my-2">
							<canvas
								className="border-1 rounded-sm bg-gray-400"
								ref={canvasRef}
								width={wh[0]}
								height={wh[1]}
							></canvas>
						</div>
						{/* fabric.js target canvas for the "save" experiment */}
						<div className="mb-2 border-1 rounded-sm z-10 bg-gray-400">
							<canvas
								className=""
								ref={canvasSaveRef}
								width={wh[0]}
								height={wh[1]}
							></canvas>
						</div>

						<div className="m-2">
							<SyntaxHighlighter language="json">
								{JSON.stringify(curVideo, null, 2)}
							</SyntaxHighlighter>
						</div>
					</div>
				</div>
				{/* device list (hidden on small screens) */}
				<div className="flex-1 sm:block hidden">
					<div className="m-2 p-2 shadow-sm">
						<button onClick={getAudioDevices} className="px-4 py-2 bg-blue-500 text-white rounded hover:bg-blue-600 transition-colors mb-4">获得音频设置</button>
						<ul>
							{audios.map((device, index) => (
								<li
									key={`${device.deviceId}-${index}`}
									// className="border-b-1 border-gray-300 mt-2 p-2"
								>
									<SyntaxHighlighter language="json">
										{JSON.stringify(device, null, 2)}
									</SyntaxHighlighter>
									{/* {Object.keys(device).map((key) => (
										<span key={key}>
                      {key}: {String(device[key as keyof MediaDeviceInfo])}
										</span>
									))} */}
								</li>
							))}
						</ul>
					</div>
				</div>
			</div>
		</div>
	);
};

export default BasicPage;
