import React from 'react';
import PropTypes from 'prop-types';
import ReactTooltip from 'react-tooltip';
import classnames from 'classnames';
import Spinner from 'react-spinner';
import clipboardCopy from 'clipboard-copy';
import hark from 'hark';
import * as faceapi from 'face-api.js';
import Logger from '../Logger';
import * as appPropTypes from './appPropTypes';
import Countdown from './Countdown';
import RoomMain from './RoomMain';
import dayjs from 'dayjs';

// Module-scoped logger tagged with this component's name.
const logger = new Logger('PeerView');

// Shared face-api.js detector options. The small 160px input size keeps
// per-frame detection cheap; scoreThreshold 0.5 discards weak detections.
const tinyFaceDetectorOptions = new faceapi.TinyFaceDetectorOptions({
	inputSize: 160,
	scoreThreshold: 0.5,
});

export default class PeerView extends React.Component {
	constructor(props) {
		super(props);

		const urlParams = new URLSearchParams(window.location.search);
		const isMaster = urlParams.get('master') === '1';

		this.state = {
			audioVolume: 0, // Integer from 0 to 10.,
			showInfo: window.SHOW_INFO || false,
			videoResolutionWidth: null,
			videoResolutionHeight: null,
			videoCanPlay: false,
			videoElemPaused: false,
			maxSpatialLayer: null,
			countdownStarted: false,
			countdownTime: null,
			recordData: null, // Add a new state property to store fetched data
			isMaster, // Set isMaster based on the query parameter
		};

		this._audioElemRef = React.createRef();
		this._videoElemRef = React.createRef();
		this._canvasElemRef = React.createRef();

		// Latest received video track.
		// @type {MediaStreamTrack}
		this._audioTrack = null;

		// Latest received video track.
		// @type {MediaStreamTrack}
		this._videoTrack = null;

		// Hark instance.
		// @type {Object}
		this._hark = null;

		// Periodic timer for reading video resolution.
		this._videoResolutionPeriodicTimer = null;

		// requestAnimationFrame for face detection.
		this._faceDetectionRequestAnimationFrame = null;
	}

	render() {
		const {
			isMe,
			audioMuted,
			videoVisible,
			videoScore,
		} = this.props;

		const {
			audioVolume,
			videoElemPaused,
			isMaster // Use isMaster from state
		} = this.state;

		return (
			<div data-component="PeerView" style={{ backgroundColor: !isMaster ? '#F4EADE' : 'white' }}>
				<div data-component="Room">
					<RoomMain countdownStarted={this.state.countdownStarted} countdownTime={this.state.countdownTime} />
				</div>

				<audio
					ref={this._audioElemRef}
					autoPlay
					muted={isMe || audioMuted}
					controls={false}
				/>

				{/* <canvas
					ref={this._canvasElemRef}
					className={classnames('face-detection', { 'is-me': isMe })}
				/> */}

				<div className="volume-container">
					<div className={classnames('bar', `level${audioVolume}`)} />
				</div>

				{videoVisible && videoScore < 5 && (
					<div className="spinner-container">
						<Spinner />
					</div>
				)}

				{videoElemPaused && <div className="video-elem-paused" />}
			</div>
		);
	}

	componentDidMount() {
		const { audioTrack, videoTrack, peer } = this.props;

		// 检查是否是触发倒计时的用户
		const urlParams = new URLSearchParams(window.location.search);
		const isCountdownTrigger = urlParams.has('countdown');

		if (isCountdownTrigger && peer.connected) {
			// 通知其他用户开始倒计时（这部分需要通过您的信令系统实现）
			// 例如：room.notify('startCountdown') 或类似的方法
			this.setState({ countdownStarted: true });
		}

		this._setTracks(audioTrack, videoTrack);

		this.setAudioVolume(0.2);
	}

	componentWillUnmount() {
		if (this._hark) this._hark.stop();

		clearInterval(this._videoResolutionPeriodicTimer);
		cancelAnimationFrame(this._faceDetectionRequestAnimationFrame);

		const videoElem = this._videoElemRef.current;

		if (videoElem) {
			videoElem.oncanplay = null;
			videoElem.onplay = null;
			videoElem.onpause = null;
		}
	}

	UNSAFE_componentWillUpdate(nextProps) {
		const { isMe, audioTrack, videoTrack, videoRtpParameters } = this.props;
		const { maxSpatialLayer } = this.state;

		if (isMe && videoRtpParameters && maxSpatialLayer === null) {
			this.setState({
				maxSpatialLayer: videoRtpParameters.encodings.length - 1,
			});
		} else if (isMe && !videoRtpParameters && maxSpatialLayer !== null) {
			this.setState({ maxSpatialLayer: null });
		}

		// 监听触发用户的连接状态
		const urlParams = new URLSearchParams(window.location.search);
		const isCountdownTrigger = urlParams.has('countdown');
		
		if (isCountdownTrigger && 
			!this.props.peer.connected && 
			nextProps.peer.connected) {
			// 当触发用户连接成功时，开始倒计时
			this.setState({ countdownStarted: true });
			// 通知其他用户开始倒计时
		}

		this._setTracks(audioTrack, videoTrack);
	}

	componentDidUpdate(prevProps) {
		const { isOnline } = this.props;

	}

	startCountdown(seconds) {
		// Convert seconds to mm:ss format
		const formattedTime = dayjs().startOf('day').second(seconds).format('mm:ss');
		console.log('Countdown time:', formattedTime);

		// Start the countdown with the calculated time
		this.setState({ countdownTime: formattedTime });
	}

	_setTracks(audioTrack, videoTrack) {
		const { faceDetection } = this.props;

		if (this._audioTrack === audioTrack && this._videoTrack === videoTrack)
			return;

		this._audioTrack = audioTrack;
		this._videoTrack = videoTrack;

		if (this._hark) this._hark.stop();

		this._stopVideoResolution();

		if (faceDetection) this._stopFaceDetection();

		const audioElem = this._audioElemRef.current;

		if (audioTrack) {
			const stream = new MediaStream();

			stream.addTrack(audioTrack);
			audioElem.srcObject = stream;

			audioElem
				.play()
				.catch(error => logger.warn('audioElem.play() failed:%o', error));

			this._runHark(stream);
		} else {
			audioElem.srcObject = null;
		}
	}

	_runHark(stream) {
		if (!stream.getAudioTracks()[0])
			throw new Error('_runHark() | given stream has no audio track');

		this._hark = hark(stream, { play: false });

		// eslint-disable-next-line no-unused-vars
		this._hark.on('volume_change', (dBs, threshold) => {
			// The exact formula to convert from dBs (-100..0) to linear (0..1) is:
			//   Math.pow(10, dBs / 20)
			// However it does not produce a visually useful output, so let exagerate
			// it a bit. Also, let convert it from 0..1 to 0..10 and avoid value 1 to
			// minimize component renderings.
			let audioVolume = Math.round(Math.pow(10, dBs / 85) * 10);

			if (audioVolume === 1) audioVolume = 0;

			if (audioVolume !== this.state.audioVolume)
				this.setState({ audioVolume });
		});
	}

	_startVideoResolution() {
		this._videoResolutionPeriodicTimer = setInterval(() => {
			const { videoResolutionWidth, videoResolutionHeight } = this.state;
			const videoElem = this._videoElemRef.current;

			if (
				videoElem.videoWidth !== videoResolutionWidth ||
				videoElem.videoHeight !== videoResolutionHeight
			) {
				this.setState({
					videoResolutionWidth: videoElem.videoWidth,
					videoResolutionHeight: videoElem.videoHeight,
				});
			}
		}, 500);
	}

	_stopVideoResolution() {
		clearInterval(this._videoResolutionPeriodicTimer);

		this.setState({
			videoResolutionWidth: null,
			videoResolutionHeight: null,
		});
	}

	_startFaceDetection() {
		const videoElem = this._videoElemRef.current;
		const canvasElem = this._canvasElemRef.current;

		const step = async () => {
			// NOTE: Somehow this is critical. Otherwise the Promise returned by
			// faceapi.detectSingleFace() never resolves or rejects.
			if (!this._videoTrack || videoElem.readyState < 2) {
				this._faceDetectionRequestAnimationFrame = requestAnimationFrame(step);

				return;
			}

			const detection = await faceapi.detectSingleFace(
				videoElem,
				tinyFaceDetectorOptions
			);

			if (detection) {
				const width = videoElem.offsetWidth;
				const height = videoElem.offsetHeight;

				canvasElem.width = width;
				canvasElem.height = height;

				// const resizedDetection = detection.forSize(width, height);
				const resizedDetections = faceapi.resizeResults(detection, {
					width,
					height,
				});

				faceapi.draw.drawDetections(canvasElem, resizedDetections);
			} else {
				// Trick to hide the canvas rectangle.
				canvasElem.width = 0;
				canvasElem.height = 0;
			}

			this._faceDetectionRequestAnimationFrame = requestAnimationFrame(() =>
				setTimeout(step, 100)
			);
		};

		step();
	}

	_stopFaceDetection() {
		cancelAnimationFrame(this._faceDetectionRequestAnimationFrame);

		const canvasElem = this._canvasElemRef.current;

		canvasElem.width = 0;
		canvasElem.height = 0;
	}

	_printProducerScore(id, score) {
		const scores = Array.isArray(score) ? score : [score];

		return (
			<React.Fragment key={id}>
				<p>streams:</p>

				{scores
					.filter(v => v)
					.sort((a, b) => {
						if (a.rid) return a.rid > b.rid ? 1 : -1;
						else return a.ssrc > b.ssrc ? 1 : -1;
					})
					.map(
						(
							{ ssrc, rid, score },
							idx // eslint-disable-line no-shadow
						) => (
							<p key={idx} className="indent">
								{rid !== undefined
									? `rid:${rid}, ssrc:${ssrc}, score:${score}`
									: `ssrc:${ssrc}, score:${score}`}
							</p>
						)
					)}
			</React.Fragment>
		);
	}

	_printConsumerScore(id, score) {
		return (
			<p key={id}>
				{`score:${score.score}, producerScore:${score.producerScore}, producerScores:[${score.producerScores}]`}
			</p>
		);
	}

	handleRecordData(data) {
		const closeTime = data.closeTime; // Assuming 'closeTime' is the field name
		const currentTime = dayjs();
		const countdownTime = dayjs(closeTime).diff(currentTime, 'second');

		if (countdownTime > 600) { // 600 seconds = 10 minutes
			alert('The event is more than 10 minutes away. Please check back later.');
		} else if (countdownTime > 0) {
			this.setState({ countdownStarted: true });
			this.startCountdown(countdownTime);
		}
	}

	// updateAudioVolume(volume = 0.5) {
	// 	console.log('updateAudioVolume', volume)
	// 	const audioElem = this._audioElemRef.current;
	// 	if (audioElem) {
	// 		audioElem.volume = volume;
	// 	} else {
	// 		console.warn('Audio element not found'); // 调试日志
	// 	}
	// }

	setAudioVolume(volume) {
		console.log('Setting all audio volumes to', volume);
		const audioElements = document.querySelectorAll('audio');
		audioElements.forEach(audioElem => {
			audioElem.volume = volume;
		});
	}
}

// Prop contract for PeerView, grouped by concern.
PeerView.propTypes = {
	// Identity.
	isMe: PropTypes.bool,
	peer: PropTypes.oneOfType([appPropTypes.Me, appPropTypes.Peer]).isRequired,
	isOnline: PropTypes.bool,

	// Producer / consumer ids.
	audioProducerId: PropTypes.string,
	videoProducerId: PropTypes.string,
	audioConsumerId: PropTypes.string,
	videoConsumerId: PropTypes.string,

	// RTP parameters and simulcast/SVC layer settings.
	audioRtpParameters: PropTypes.object,
	videoRtpParameters: PropTypes.object,
	consumerSpatialLayers: PropTypes.number,
	consumerTemporalLayers: PropTypes.number,
	consumerCurrentSpatialLayer: PropTypes.number,
	consumerCurrentTemporalLayer: PropTypes.number,
	consumerPreferredSpatialLayer: PropTypes.number,
	consumerPreferredTemporalLayer: PropTypes.number,
	consumerPriority: PropTypes.number,

	// Media tracks, codecs and display flags.
	audioTrack: PropTypes.any,
	videoTrack: PropTypes.any,
	audioMuted: PropTypes.bool,
	videoVisible: PropTypes.bool.isRequired,
	videoMultiLayer: PropTypes.bool,
	audioCodec: PropTypes.string,
	videoCodec: PropTypes.string,
	audioScore: PropTypes.any,
	videoScore: PropTypes.any,
	faceDetection: PropTypes.bool.isRequired,

	// Callbacks.
	onChangeDisplayName: PropTypes.func,
	onChangeMaxSendingSpatialLayer: PropTypes.func,
	onChangeVideoPreferredLayers: PropTypes.func,
	onChangeVideoPriority: PropTypes.func,
	onRequestKeyFrame: PropTypes.func,
	onStatsClick: PropTypes.func.isRequired,
};
