import React, { useEffect } from 'react';
import DescMask from '@/components/DescMask';
import AudioMedia from '@/components/AudioMedia';
import styled from '@emotion/styled';
import useSyncState from '../hooks/useSyncState';
import VideoTool, { ToolPrpos } from '../VideoContainer/VideoTool';
import { cancelFullScreen, fullscreenEnabled, launchFullScreen } from '@/utils';
import Message from '@/utils/message';
import { useTranslation } from 'react-i18next';

// Props for a single remote-stream video tile.
// NOTE(review): several fields are `any` — their shapes below are inferred
// from usage in this file and should be confirmed against the irtc SDK types.
interface VideoProps {
	attributes: any; // stream attributes; read for `type` and a (possibly JSON-string) `userInfo`
	audioOnly: boolean; // when true, render the AudioMedia overlay on top of the video
	streamId: string; // stream id; also used as the <video> element's DOM id
	irtcClient: any; // irtc client; only `subscribe(stream, options)` is used here
	setRenderStream: (stream: any) => void; // setter for the list of rendered streams
	renderStream: any[]; // streams already subscribed/rendered (matched by `.id`)
	stream: any; // stream to subscribe when not yet present in renderStream
}

// Full-size video surface. pointer-events are disabled so clicks fall through
// to the surrounding tool overlay; the -webkit pseudo-element rules hide the
// native fullscreen / volume / playback controls so only <VideoTool> is used.
const Video = styled.video`
	width: 100%;
	height: 100%;
	pointer-events: none;
	background-color: #000000;
	/*video隐藏全屏按钮*/
	::-webkit-media-controls-fullscreen-button {
		display: none !important;
	}
	/*video隐藏音量控制*/
	::-webkit-media-controls-volume-control-container {
		display: none !important;
	}
	::-webkit-media-controls-start-playback-button {
		display: none;
	}
`;

/**
 * Video tile for a single remote stream.
 *
 * Subscribes the stream through the irtc client (unless it is already in
 * `renderStream`), attaches its MediaStream to the <video> element, meters
 * its audio level with a Web Audio ScriptProcessor, and exposes mute /
 * fullscreen buttons through <VideoTool>.
 *
 * NOTE(review): `attributes`, `irtcClient` and `stream` are untyped (`any`);
 * the shapes assumed here should be confirmed against the irtc SDK.
 */
const VideoStream = (props: VideoProps) => {
	const { t } = useTranslation();
	const { attributes, audioOnly, streamId, irtcClient, setRenderStream, renderStream, stream } = props;
	const [audioLevel, setAudioLevel] = useSyncState<number>(0);
	const [isMuteAudio, setIsMuteAudio] = useSyncState<boolean>(false);
	// AudioContext used for level metering; stored so it can be closed later.
	const [audioContent, setAudioContent] = useSyncState<any>('');
	const [tools, setTools] = useSyncState<ToolPrpos[]>([
		{
			id: 1,
			icon: 'icon-widget-shengyinkai',
			text: '',
			isActive: false,
			activeIcon: 'icon-widget-shengyinguan',
			mode: 'unmute'
		},
		{
			id: 2,
			icon: 'icon-widget-quanping3',
			text: '',
			isActive: false,
			activeIcon: 'icon-widget-suoxiao',
			mode: 'scale'
		}
	]);

	useEffect(() => {
		// Attach fullscreen listeners and keep the detach callback so they are
		// removed on unmount (the previous version leaked these listeners).
		const detachScreenListeners = onScreenChange();
		return () => {
			detachScreenListeners();
			// Close the metering AudioContext, if one was created.
			audioContent() && audioContent().close();
		};
	}, []);

	useEffect(() => {
		// Stream already subscribed? Just (re)attach it; otherwise subscribe.
		if (renderStream.find((ele: any) => ele.id === streamId)) {
			renderStream.forEach((item: any) => {
				if (item.id === streamId) {
					const video = document.getElementById(streamId) as HTMLVideoElement;
					if (video) {
						video.srcObject = item.mediaStream;
						// play() returns a promise that can reject (e.g. autoplay
						// policy); catch it so the rejection is not left unhandled.
						const played = video.play();
						played && played.catch((err: any) => console.log('video play failed', err));
						getAudioLevelFunction();
					}
				}
			});
			return;
		}
		substream(stream);
	}, [streamId]);

	// Dispatch a toolbar click, then toggle the clicked tool's active flag.
	const videoToolClick = (data: ToolPrpos) => {
		console.log(data, 'ToolPrpos', streamId);
		switch (data.mode) {
			case 'unmute':
				setMute(data);
				break;
			case 'scale':
				setScreenScale(data);
				break;
			default:
				break;
		}
		const list = [...tools()];
		list.forEach((item: ToolPrpos) => {
			if (item.id === data.id) {
				item.isActive = !item.isActive;
			}
		});
		setTools([...list]);
	};

	// Mute/unmute by forcing volume to 0/1. `data.isActive` is the state
	// BEFORE the toggle: active (muted) -> restore volume, inactive -> mute.
	const setMute = (data: ToolPrpos) => {
		const video: HTMLVideoElement = document.getElementById(streamId) as HTMLVideoElement;
		video.volume = data.isActive ? 1 : 0;
		setIsMuteAudio(video.volume !== 1);
	};

	// Set the isActive flag of the 'scale' (fullscreen) tool button.
	const setScaleActive = (active: boolean) => {
		const list = [...tools()];
		list.forEach((item: ToolPrpos) => {
			if (item.mode === 'scale') {
				item.isActive = active;
			}
		});
		setTools([...list]);
	};

	// Keep the 'scale' button in sync with the document's fullscreen state.
	const onFullscreenchange = () => {
		setScaleActive(checkIsFullScreen());
	};

	const checkIsFullScreen = () => {
		const doc = document as any;
		// Standard property first, then legacy vendor-prefixed fallbacks.
		const isFullScreen = document.fullscreenElement != null || doc.fullscreen || doc.mozFullScreen || doc.webkitIsFullScreen;
		return isFullScreen == undefined ? false : isFullScreen;
	};

	// Listen for entering/leaving fullscreen across vendor-prefixed events.
	// Returns a function that removes every listener it added.
	const onScreenChange = (): (() => void) => {
		const box = document.getElementById(`video-box-${streamId}`);
		if (!box) {
			return () => undefined;
		}
		const syncFromDocument = () => onFullscreenchange();
		// iOS Safari fires begin/end events on the element instead of
		// a fullscreenchange-style event.
		const markActive = () => setScaleActive(true);
		const markInactive = () => setScaleActive(false);
		const listeners: Array<[string, EventListener]> = [
			['fullscreenchange', syncFromDocument],
			/* Firefox */
			['mozfullscreenchange', syncFromDocument],
			/* Chrome, Safari and Opera */
			['webkitfullscreenchange', syncFromDocument],
			/* IE / Edge */
			['msfullscreenchange', syncFromDocument],
			['webkitbeginfullscreen', markActive],
			['webkitendfullscreen', markInactive]
		];
		listeners.forEach(([name, handler]) => box.addEventListener(name, handler));
		return () => {
			listeners.forEach(([name, handler]) => box.removeEventListener(name, handler));
		};
	};

	// Enter/exit fullscreen on the tile container.
	const setScreenScale = (data: ToolPrpos) => {
		const ele = document.getElementById(`video-box-${streamId}`);
		if (fullscreenEnabled() && ele) {
			if (data.isActive) {
				cancelFullScreen();
			} else {
				launchFullScreen(ele);
			}
		}
	};

	// userInfo may arrive either as a JSON string or as an already-parsed object.
	const getIsJsonStringfy = (userInfo: any) => {
		if (userInfo && typeof userInfo === 'string') {
			return JSON.parse(userInfo);
		}
		return userInfo;
	};

	// Label shown over the tile: "<role><userName>". Unknown/missing types
	// fall back to the agent label, matching the original branch order
	// ('agent' match, then 'customer' match, then agent fallback).
	const getStreamName = (attributes: any) => {
		const client = t('client');
		const agent = t('agent');
		const userName = getIsJsonStringfy(attributes.userInfo).userName;
		if (attributes && attributes.type && attributes.type.indexOf('agent') > -1) {
			return `${agent}<${userName}>`;
		}
		if (attributes && attributes.type && attributes.type.indexOf('customer') > -1) {
			return `${client}<${userName}>`;
		}
		return `${agent}<${userName}>`;
	};

	/** Subscribe to a stream in the current room and attach it on success. */
	const substream = (stream: any) => {
		console.log('开始订阅', stream, stream.settings);
		// Only request the tracks the stream actually carries.
		const options = {
			audio: Boolean(stream.settings.audio),
			video: Boolean(stream.settings.video)
		};
		console.log(irtcClient, options, stream);
		irtcClient.subscribe(stream, options).then(
			(subscription: any) => {
				console.log(subscription, '订阅成功', stream.id, stream);
				setRenderStream([...renderStream, stream]);
				const video = document.getElementById(stream.id) as HTMLVideoElement;
				if (video) {
					video.srcObject = stream.mediaStream;
				}
				getAudioLevelFunction();
			},
			(err: any) => {
				const subscription = t('subscription');
				const failed = t('failed');
				Message.error(`${subscription}${stream.id}${failed}`);
				console.log('subscribe failed');
				console.log(`subscribe ${stream.id} failed: ${err}`);
			}
		);
	};

	// Start metering this tile's stream and feed the level into state.
	const getAudioLevelFunction = () => {
		getAudioLevel(
			streamId,
			success => {
				// console.log('getAudioLevel:', success);
			},
			error => {
				// console.log('getAudioLevel:', error);
			},
			volume => {
				setAudioLevel(volume);
			}
		);
	};

	/**
	 * Meter the audio level of a media element's stream via Web Audio.
	 * @param videoId   DOM id of the <audio>/<video> element
	 * @param onSuccess invoked with 'ok' for each processed audio buffer
	 * @param onFailure invoked with {code,message} — '500': element missing,
	 *                  '501': element has no srcObject, '502': missing callback
	 * @param volume    receives the peak level as an integer 0-100
	 */
	const getAudioLevel = (videoId: string, onSuccess: (success: any) => void, onFailure: (error: { code: string; message: string }) => void, volume: (value: number) => void) => {
		const videoElement = document.getElementById(videoId) as HTMLVideoElement;
		if (!videoElement) {
			console.error(`${videoId} dom not exist`);
			onFailure({ code: '500', message: `${videoId} dom not exist` });
			return;
		}
		if (!videoElement.srcObject) {
			console.error(`${videoId} dom srcObject is null`);
			onFailure({ code: '501', message: `${videoId} dom srcObject is null` });
			return;
		}
		if (!onSuccess || !onFailure || !volume) {
			console.error(`onSuccess or onFailure, or cb, is undefined`);
			onFailure({ code: '502', message: `onSuccess or onFailure, or cb, is undefined` });
			return;
		}
		// Guards against: DOMException: Failed to execute 'createMediaStreamSource'
		// on 'AudioContext': MediaStream has no audio track
		try {
			// Close the previous metering context first — the original created a
			// fresh AudioContext on every call and only ever closed the last one.
			if (audioContent()) {
				audioContent().close();
			}
			const audioContext = new (window.AudioContext || (window as any).webkitAudioContext)();
			// Store once here instead of re-storing on every audio frame.
			setAudioContent(audioContext);
			// Feed the element's stream into the context.
			const mediaStreamSource = audioContext.createMediaStreamSource(videoElement.srcObject as MediaStream);
			// 4096-sample buffer, mono in / mono out.
			const scriptProcessor = audioContext.createScriptProcessor(4096, 1, 1);
			mediaStreamSource.connect(scriptProcessor);
			// Connect to the destination so onaudioprocess keeps firing.
			// https://developer.mozilla.org/zh-CN/docs/Web/API/BaseAudioContext
			scriptProcessor.connect(audioContext.destination);
			scriptProcessor.onaudioprocess = (e: any) => {
				// PCM channel data as 32-bit floats; the peak drives the meter.
				const buffer = e && e.inputBuffer && e.inputBuffer.getChannelData(0);
				const maxVal = Math.max.apply(Math, buffer);
				maxVal ? volume(Math.round(maxVal * 100)) : volume(0);
				onSuccess('ok');
			};
		} catch (error) {
			console.error('get audio level error : ', error);
		}
	};

	return (
		<>
			<DescMask desc={getStreamName(attributes)}></DescMask>
			<Video x5-video-player-type="h5" controlsList="nodownload nofullscreen" playsInline autoPlay id={streamId}></Video>
			{audioOnly ? (
				<AudioMedia
					classname="audio"
					iconClassName={checkIsFullScreen() ? 'big-icon' : 'small-icon'}
					iconWidth={checkIsFullScreen() ? '64px' : '32px'}
					iconHeight={checkIsFullScreen() ? '64px' : '32px'}
					audioLevel={audioLevel()}
					isMuteAudio={isMuteAudio()}
				/>
			) : undefined}
			<VideoTool tools={tools()} onclick={videoToolClick}></VideoTool>
		</>
	);
};

export default VideoStream;
