/* eslint-disable no-mixed-spaces-and-tabs */
import Logger from '../base/logger';
import { addEventListener, dispatchEvent, removeEventListener, systemErrorCode, systemErrorName } from '../base/event';
import { deepCopy, isJsonString, removeObjectWithId } from './tool';
import { IvcsError } from '../base/error';
// Custom event names dispatched through base/event's dispatchEvent/addEventListener.
// `const` instead of `var`: the binding is never reassigned.
const IRTC_CUSTOMEREVENT = {
	PUBLICATIONEND: 'irtc_publicationend',
	// NOTE(review): key name is missing an "N" (PUBLICATIO[N]UNMUTE) — kept as-is
	// because other code in this file references it by this exact name.
	PUBLICATIOUNMUTE: 'irtc_publicationunmute',
	PUBLICATIONMUTE: 'irtc_publicationmute',
	SUBSCRIPTIONEND: 'irtc_subscriptionend',
	SUBSCRIPTIONUNMUTE: 'irtc_subscriptionunmute',
	SUBSCRIPTIONMUTE: 'irtc_subscriptionmute',
	STREAMADD: 'irtc_streamadd',
	STREAMENDED: 'irtc_streamended',
	STREAMUPDATED: 'irtc_streamUpdated',
	PARTICIPANTADD: 'irtc_participantadd',
	PARTICIPANTLEFT: 'irtc_participantleft',
	CHATMESSAGE: 'irtc_chatmessage',
	SERVERDISCONNECTED: 'serverDisconnected',
	PUBLISHSUCCESS: 'publishSuccess',
	PUBLISHFAILED: 'publishFailed',
	SUBSCRIBESUCCESS: 'subscribeSuccess',
	SUBSCRIBEFAILED: 'subscribeFailed',
	LEAVE: 'leaveroom',
	RoomDeleted: 'roomDeleted',
	ASR: 'ASREVENT',
	ttsStreamSpeaking: 'tts_stream_speaking',
	ttsStreamSpeakDone: 'tts_stream_speakDone',
	subscribeStreamStatus: 'subscribe_stream_status',
	publishStreamStatus: 'publish_stream_status',
	remoteStreamStatus: 'remote_stream_status',
	activeAudioInputStream: 'active_audio_input_stream',
	// sessionRecordCrashEvent: 'session_record_crash_event',
	sessionOnError: 'session_on_error',
	sessionOnEnd: 'session_on_end',
};
import { ivcsErrorCodeEnum } from '../base/const';
import { publicationGetStats, subscriptionGetStats } from '../base/webrtcStats';
import { setSessionStreamQuality, setSesssionCallQuality, updateRecordStatus } from './api';
const IRtc = require('./irtc');

class IRtcClient {
	/**
	 * 构造函数，初始化
	 * @param {string} turnServerUrl turn server url
	 * @param {string} turnServerCredential  turn server password
	 * @param {string} turnServerUsername  turn server username
	 */
	constructor(turnServerUrl, turnServerCredential, turnServerUsername) {
		let { ConferenceClient } = IRtc.Conference;
		if (turnServerUrl) {
			let ccOptions = {
				rtcConfiguration: {
					iceServers: [
						{
							urls: [turnServerUrl],
							credential: turnServerCredential,
							username: turnServerUsername,
						},
					],
				},
			};
			this.irtcClient = new ConferenceClient(ccOptions);
		} else {
			this.irtcClient = new ConferenceClient();
		}
		this.subscriptionArr = [];
		this.participants = [];
		this.publicationArr = [];
		this.sessions = []; // 加入房间后监听房间的session变化，目前主要处理record录像的变化
		this.createStreams = [];
		this.remoteStreams = [];
		this.localRecordList = new Map();
		// 获取音量大小的context集合
		this.audioContextList = new Map();
		// 获取音量大小的定时器集合
		this.audioContextIntervalList = new Map();
		// irtc对应的session用户信息
		this.clientInfo = { clientId: '', userName: '', role: '' };
		// irtc对应的sessionId
		this.sessionId = '';
		// irtc对应的api请求的地址
		this.backendUrl = '';
		// irtc对应的api请求鉴权的token
		this.accessToken = '';
		// 计算开画时间，升降级下的video集合
		this.calcOpenTimeVideo = [];
	}

	/**
	 * 创建一个流
	 * @param {object} audioConstraints 音频约束参数
	 * @param {object} videoConstraints 视频约束参数
	 * @param {boolean} haveAudio 是否开启声音
	 * @param {boolean} haveVideo 是否开启视频
	 * @param {object} attributes 流的自定义属性参数，可用于业务方判断流的类型。如{type:"screen"}
	 * @returns IRtc.Base.LocalStream
	 */
	createCameraMicStream(audioConstraints, videoConstraints, haveAudio = true, haveVideo = true, attributes = {}) {
		let { StreamSourceInfo } = IRtc.Base;
		let videoSource = haveVideo ? 'camera' : undefined;
		let audioSource = haveAudio ? 'mic' : undefined;
		let streamSourceInfo = new StreamSourceInfo(audioSource, videoSource);
		let mediaStreamDeviceConstraints = {
			audio: audioConstraints,
			video: haveVideo ? videoConstraints : false,
		};
		Logger.info('ivcs-js-sdk:stream constraints is', mediaStreamDeviceConstraints);
		return this.createWebRtcMediaStream(mediaStreamDeviceConstraints, streamSourceInfo, attributes);
	}

	/**
	 * 创建屏幕共享流
	 * @param {object} audioConstraints 音频约束参数
	 * @param {object} videoConstraints 视频约束参数
	 * @param {object} attributes 流的自定义属性参数
	 * @returns IRtc.Base.LocalStream
	 */
	createScreenStream(audioConstraints, videoConstraints, attributes = {}) {
		let { StreamSourceInfo } = IRtc.Base;
		let videoSource = 'screen-cast';
		let audioSource = 'screen-cast';
		let streamSourceInfo = new StreamSourceInfo(audioSource, videoSource);
		let mediaStreamDeviceConstraints = {
			audio: audioConstraints,
			video: videoConstraints,
		};
		Logger.info('ivcs-js-sdk:screen stream constraints is', mediaStreamDeviceConstraints);
		return this.createWebRtcMediaStream(mediaStreamDeviceConstraints, streamSourceInfo, attributes);
	}

	/**
	 * 创建一个mediaStream
	 * @param {IRtc.Base.StreamConstraints} mediaStreamConstraints 根据指定的配置值创建MediaStream媒体流。如果你需要创建一个投屏的媒体流，请确保音频和视频都选用"screen-cast"作为他们的媒体源。
	 * @param {IRtc.Base.StreamSourceInfo} streamSourceInfo 音频源信息和视频源信息
	 * @param {object} attributes 媒体流的自定义属性
	 * @param {boolean} upgrade 是否允许升级，切换音视频的时候创建的流，需要设置成false。
	 * @param {number} maxRetries 最大尝试次数
	 * @returns Promise.<IRtc.Base.LocalStream, Error>
	 */
	createWebRtcMediaStream(mediaStreamConstraints, streamSourceInfo, attributes, upgrade = true, maxRetries = 3) {
		let { LocalStream, MediaStreamFactory } = IRtc.Base;
		function modifyConstraintsForRetries(constraints) {
			const predefinedResolutions = {
				vga: { width: 640, height: 480 },
				svga: { width: 800, height: 600 },
				xga: { width: 1024, height: 768 },
				r480x360: { width: 480, height: 360 },
				sif: { width: 320, height: 240 },
				hd720p: { width: 1280, height: 720 },
				hd1080p: { width: 1920, height: 1080 },
			};
			const targetWidths = [1920, 1280, 1024, 800, 640, 480, 320];

			if (constraints.video && constraints.video.resolution) {
				// 确保 resolution 是一个对象
				if (typeof constraints.video.resolution !== 'object') {
					throw new Error('Resolution must be an object.');
				}

				// 查找下一个较小的预定义分辨率
				let newResolution = null;
				for (let i = 0; i < targetWidths.length; i++) {
					const targetWidth = targetWidths[i];
					if (targetWidth < constraints.video.resolution.width) {
						const resolutionKey = Object.keys(predefinedResolutions).find(key => predefinedResolutions[key].width === targetWidth);
						if (resolutionKey) {
							newResolution = predefinedResolutions[resolutionKey];
							break; // 找到后立即退出循环
						}
					}
				}

				// 如果没有找到，则使用列表中的最小宽度对应的预定义分辨率
				if (!newResolution) {
					const smallestWidth = targetWidths[targetWidths.length - 1];
					const resolutionKey = Object.keys(predefinedResolutions).find(key => predefinedResolutions[key].width === smallestWidth);
					newResolution = predefinedResolutions[resolutionKey];
				}

				// 更新 constraints 对象
				constraints.video.resolution = newResolution;
			}
			return constraints;
		}

		// fix 不同平台下attributes value必须是字符串
		function convertAttributesToString(attr) {
			if (typeof attr !== 'string') {
				for (let key in attr) {
					if (typeof attr[key] === 'object') {
						attr[key] = JSON.stringify(attr[key]);
					}
				}
				return attr;
			}
			return attr;
		}
		return new Promise((resolve, reject) => {
			let retries = 0;
			let self = this;
			function attemptCreateStream() {
				MediaStreamFactory.createMediaStream(mediaStreamConstraints)
					.then(mediaStream => {
						let stream = new LocalStream(mediaStream, streamSourceInfo, attributes, upgrade);
						if (stream && stream.id) {
							self.createStreams.push(stream);
						}
						resolve(stream);
					})
					.catch(error => {
						let exceptionMsg = error.name || error.message || error;
						if (exceptionMsg.toLowerCase().indexOf('overconstrainederror') != -1 && retries < maxRetries) {
							// Modify mediaStreamConstraints and retry
							mediaStreamConstraints = modifyConstraintsForRetries(mediaStreamConstraints);
							retries++;
							Logger.warning('ivcs-js-sdk:attemptCreateStream retries', retries, mediaStreamConstraints);
							attemptCreateStream();
						} else {
							Logger.warning(`ivcs-js-sdk:create Local stream error,${error},${new Date().toISOString()},${exceptionMsg}`);
							let errorCode = ivcsErrorCodeEnum.ERROR_ZMS_CREATESTREAM_FAILED;
							if (exceptionMsg.toLowerCase().indexOf('aborterror') != -1) {
								errorCode = ivcsErrorCodeEnum.ERROR_ZMS_ABORTERROR;
							} else if (exceptionMsg.toLowerCase().indexOf('notallowederror') != -1) {
								errorCode = ivcsErrorCodeEnum.ERROR_ZMS_NOTALLOWEDERROR;
							} else if (exceptionMsg.toLowerCase().indexOf('notfounderror') != -1) {
								errorCode = ivcsErrorCodeEnum.ERROR_ZMS_NOTFOUNDERROR;
							} else if (exceptionMsg.toLowerCase().indexOf('notreadableerror') != -1) {
								errorCode = ivcsErrorCodeEnum.ERROR_ZMS_NOTREADABLEERROR;
							} else if (exceptionMsg.toLowerCase().indexOf('securityerror') != -1) {
								errorCode = ivcsErrorCodeEnum.ERROR_ZMS_SECURITYERROR;
							} else if (exceptionMsg.toLowerCase().indexOf('typeerror') != -1) {
								errorCode = ivcsErrorCodeEnum.ERROR_ZMS_TYPEERROR;
							} else if (exceptionMsg.includes('only support current tab')) {
								// 屏幕区域录制不支持非标签页的共享
								errorCode = ivcsErrorCodeEnum.ERROR_ZMS_SCREEN_NOT_SUPPORT_TAB;
							}
							let response = new IvcsError(errorCode, `create stream failed,${error}`);
							reject(response);
						}
					});
			}
			attributes = convertAttributesToString(attributes);
			attemptCreateStream();
		});
	}

	/**
	 * 创建一个语音流
	 * @param {object} mediaStreamConstraints  @param {IRtc.Base.StreamConstraints} mediaStreamConstraints 根据指定的配置值创建MediaStream媒体流
	 * @param {object} attributes 媒体流的自定义属性
	 * @param {boolean} upgrade 是否允许升级，切换音视频的时候创建的流，需要设置成false。
	 */
	createAudioMediaStream(mediaStreamConstraints, attributes, upgrade = false) {
		let { StreamSourceInfo } = IRtc.Base;
		let videoSource = undefined;
		let audioSource = 'mic';
		let streamSourceInfo = new StreamSourceInfo(audioSource, videoSource);
		return this.createWebRtcMediaStream(mediaStreamConstraints, streamSourceInfo, attributes, upgrade);
	}

	/**
	 * 创建一个视频流
	 * @param {object} mediaStreamConstraints  @param {IRtc.Base.StreamConstraints} mediaStreamConstraints 根据指定的配置值创建MediaStream媒体流
	 * @param {object} attributes 媒体流的自定义属性
	 * @param {boolean} upgrade 是否允许升级，切换音视频的时候创建的流，需要设置成false。
	 */
	createVideoMediaStream(mediaStreamConstraints, attributes, upgrade = false) {
		let { StreamSourceInfo } = IRtc.Base;
		let videoSource = 'camera';
		let audioSource = undefined;
		let streamSourceInfo = new StreamSourceInfo(audioSource, videoSource);
		return this.createWebRtcMediaStream(mediaStreamConstraints, streamSourceInfo, attributes, upgrade);
	}

	/**
	 * 加入房间
	 * @param {string} roomToken 加入房间的鉴权token
	 * @returns Promise.<object, Error>
	 */
	joinRoom(roomToken) {
		return new Promise((resolve, reject) => {
			var self = this;
			this.irtcClient
				.join(roomToken)
				.then(function (resp) {
					self.setRoomInfo(resp);
					self.clientAddListener();
					resolve(resp);
				})
				.catch(function (error) {
					reject(error);
				});
		});
	}

	/**
	 * 加入房间后，通过房间信息设置本地变量，如房间人数，已经存在的远端流等
	 * @param {object} data joinRoom返回的数据
	 */
	setRoomInfo(data) {
		this.participants = data.participants;
		this.self = data.self;
		this.id = data.id;
		this.remoteStreams = data.remoteStreams;
		this.whiteboards = data.whiteboards;
		this.mixStreamId = this.remoteStreams[0] ? this.remoteStreams[0].id : undefined;
		for (let i = 0; i < this.participants.length; i++) {
			let participant = this.participants[i];
			this.participantAddListener(participant);
		}
		for (let i = 0; i < this.remoteStreams.length; i++) {
			let stream = this.remoteStreams[i];
			this.streamAddListener(stream);
		}
	}

	/**
	 * 获取流信息
	 * @param {MediaStream} stream
	 * @return { id,streamLabel,audioOnly,isAudioMute,isVideoMute,userInfo,attributes }
	 */
	getStreamInfo(stream) {
		let id = stream.id;
		// 处理安卓/ios/web各端的attribute为json字符串问题
		if (stream.attributes && typeof stream.attributes === 'string') {
			if (isJsonString(stream.attributes)) {
				stream.attributes = JSON.parse(stream.attributes);
				if (stream.attributes && stream.attributes.userInfo && isJsonString(stream.attributes.userInfo)) {
					stream.attributes.userInfo = JSON.parse(stream.attributes.userInfo);
				}
			}
		}
		let streamLabel = stream.attributes && stream.attributes.type ? stream.attributes.type : stream.id;
		// 是否是纯语音流
		let audioOnly = false;
		// media:{audio:{source:'mic',status:'active || inactive'},video:{source:'camera',status:'active || inactive' || boolean}}
		if (stream.media && stream.media.video && !stream.media.video.status) {
			audioOnly = true;
		} else if (stream.settings && !stream.settings.video) {
			audioOnly = true;
		}
		let isVideoMute = undefined;
		if (stream.media && stream.media.video && stream.media.video.status === 'active') {
			isVideoMute = false;
		} else if (stream.media && stream.media.video && stream.media.video.status === 'inactive') {
			isVideoMute = true;
		} else if (stream.settings.video) {
			isVideoMute = false;
		}
		let isAudioMute = undefined;
		if (stream.media && stream.media.audio && stream.media.audio.status === 'active') {
			isAudioMute = false;
		} else if (stream.media && stream.media.audio && stream.media.audio.status === 'inactive') {
			isAudioMute = true;
		} else if (stream.settings.audio) {
			isAudioMute = false;
		}
		// 处理安卓/ios/web 的attribute下的userInfo为json字符串问题
		if (stream.attributes && stream.attributes.userInfo && isJsonString(stream.attributes.userInfo)) {
			stream.attributes.userInfo = JSON.parse(stream.attributes.userInfo);
		}
		let userInfo = stream.attributes && stream.attributes.userInfo ? stream.attributes.userInfo : {};
		let attributes = stream.attributes;
		return { id, streamLabel, audioOnly, isAudioMute, isVideoMute, userInfo, attributes };
	}

	/**
	 * Register event listeners on the ConferenceClient: stream/participant
	 * lifecycle, server connection state, incoming messages, sessions and
	 * recordings. Each handler is stored on `this` so it can be detached
	 * later (see clientRemoveListener, used by leaveRoom).
	 */
	clientAddListener() {
		// A remote stream was added: track it, attach its listeners, and
		// re-dispatch a STREAMADD event enriched with parsed stream info.
		this.streamAddImplementation = eve => {
			let remoteStream = eve.stream;
			this.remoteStreams.push(remoteStream);
			this.streamAddListener(remoteStream);
			let result = this.getStreamInfo(remoteStream);
			dispatchEvent(IRTC_CUSTOMEREVENT.STREAMADD, null, {
				id: result.id,
				streamLabel: result.streamLabel,
				audioOnly: result.audioOnly,
				userInfo: result.userInfo,
				isAudioMute: result.isAudioMute,
				isVideoMute: result.isVideoMute,
			});
			Logger.info('ivcs-js-sdk:new stream add', remoteStream, new Date().toISOString());
		};
		// A participant joined: track them, attach listeners, re-dispatch.
		this.participantJoinedImplementation = eve => {
			let participant = eve.participant;
			this.participants.push(participant);
			this.participantAddListener(participant);
			dispatchEvent(IRTC_CUSTOMEREVENT.PARTICIPANTADD, null, participant);
			Logger.info('ivcs-js-sdk:new participant joined', participant, new Date().toISOString());
		};
		// Server dropped the connection: notify subscribers of this SDK.
		this.serverDisconnectedImplementation = () => {
			dispatchEvent(IRTC_CUSTOMEREVENT.SERVERDISCONNECTED, null, null);
			Logger.info('ivcs-js-sdk:irtc server disconnected', new Date().toISOString());
		};
		// Reconnection is only logged; no SDK event is dispatched for it.
		this.serverReconnectedImplementation = () => {
			Logger.info('ivcs-js-sdk:irtc server reconnected', new Date().toISOString());
		};
		// Incoming messages are only logged here.
		this.messageReceivedImplementation = eve => {
			Logger.info('ivcs-js-sdk:receive message', eve, new Date().toISOString());
		};
		// A session was added; currently only logged.
		this.sessionAddImplementation = eve => {
			Logger.info('ivcs-js-sdk:new session add', eve, new Date().toISOString());
			let session = eve.session;
			Logger.info('ivcs-js-sdk:new session session', session, new Date().toISOString());
		};
		// Session errored: surface the session id via sessionOnError.
		this.sessionOnErrorEvent = eve => {
			let session = eve.session;
			Logger.info(`ivcs-js-sdk:zms session ${session.id} error,time:${new Date().toISOString()}`);
			dispatchEvent(IRTC_CUSTOMEREVENT.sessionOnError, null, {
				sessionId: session.id,
			});
		};
		// Session ended: surface the session id via sessionOnEnd.
		this.sessionOnEndEvent = eve => {
			let session = eve.session;
			Logger.info(`ivcs-js-sdk:zms session ${session.id} end,time:${new Date().toISOString()}`);
			dispatchEvent(IRTC_CUSTOMEREVENT.sessionOnEnd, null, {
				sessionId: session.id,
			});
		};
		// Recording added: only logged.
		this.recordingAddedListener = eve => {
			let recording = eve.recording;
			Logger.info(`ivcs-js-sdk:zms recordingAddedListener ${recording.id} error,time:${new Date().toISOString()}`);
		};
		// Recording errors/ends are mapped onto the same session events,
		// using the recording id as the session id.
		this.recordingErrorListener = eve => {
			let recording = eve.recording;
			Logger.info(`ivcs-js-sdk:zms recordingErrorListener ${recording.id} error,time:${new Date().toISOString()}`);
			dispatchEvent(IRTC_CUSTOMEREVENT.sessionOnError, null, {
				sessionId: recording.id,
			});
		};
		this.recordingEndListener = eve => {
			let recording = eve.recording;
			Logger.info(`ivcs-js-sdk:zms recordingEndListener ${recording.id} end,time:${new Date().toISOString()}`);
			dispatchEvent(IRTC_CUSTOMEREVENT.sessionOnEnd, null, {
				sessionId: recording.id,
			});
		};
		// Wire everything up on the underlying client.
		this.irtcClient.addEventListener('recordingadded', this.recordingAddedListener);
		this.irtcClient.addEventListener('recordingerror', this.recordingErrorListener);
		this.irtcClient.addEventListener('recordingend', this.recordingEndListener);
		this.irtcClient.addEventListener('streamadded', this.streamAddImplementation);
		this.irtcClient.addEventListener('participantjoined', this.participantJoinedImplementation);
		this.irtcClient.addEventListener('serverdisconnected', this.serverDisconnectedImplementation);
		this.irtcClient.addEventListener('serverreconnected', this.serverReconnectedImplementation);
		this.irtcClient.addEventListener('messagereceived', this.messageReceivedImplementation);
		this.irtcClient.addEventListener('sessionadded', this.sessionAddImplementation);
		this.irtcClient.addEventListener('sessionerror', this.sessionOnErrorEvent);
		this.irtcClient.addEventListener('sessionend', this.sessionOnEndEvent);
	}

	/**
	 * 发送文字消息
	 * @param {string} message 文字消息
	 * @param {string} participantId  对方的id，'all'表示发给所有人
	 * @returns Promise.<void, Error>
	 */
	sendMessage(message, participantId) {
		return new Promise((resolve, reject) => {
			this.irtcClient
				.send(message, participantId)
				.then(function (resp) {
					resolve(resp);
				})
				.catch(function (error) {
					Logger.warning('ivcs-js-sdk:send message error:' + error + ' time:', new Date().toISOString());
					reject(error);
				});
		});
	}

	/**
	 * 离开房间
	 */
	leaveRoom() {
		for (let i = 0; i < this.participants.length; i++) {
			let participant = this.participants[i];
			this.participantRemoveListener(participant);
		}
		for (let i = 0; i < this.sessions.length; i++) {
			let session = this.sessions[i];
			this.sessionRemoveListener(session);
		}
		for (let i = 0; i < this.remoteStreams.length; i++) {
			let stream = this.remoteStreams[i];
			this.streamRemoveListener(stream);
		}
		for (let i = 0; i < this.publicationArr.length; i++) {
			let publication = this.publicationArr[i];
			publication && publication.stop();
			this.publicationRemoveListener(publication);
			//check whether there is a scheduled task to get publication stats. If yes, cancel the scheduled task
			if (publication.getStatsIntervalId) {
				clearInterval(publication.getStatsIntervalId);
				publication.getStatsIntervalId = null;
			}
		}
		this.calcOpenTimeVideo.forEach(video => {
			video.srcObject = null; // 断开视频流的绑定
			video.pause(); // 暂停播放
			URL.revokeObjectURL(video.src); // 释放使用的 URL 资源
		});
		this.calcOpenTimeVideo = [];

		this.setAllSubscribeCallQuality();
		for (let i = 0; i < this.subscriptionArr.length; i++) {
			let subscription = this.subscriptionArr[i];
			this.subscriptionRemoveListener(subscription);
		}
		this.subscriptionArr = [];
		for (let i = 0; i < this.createStreams.length; i++) {
			let stream = this.createStreams[i];
			stopStream(stream);
		}
		// 移除音量大小监听
		for (const [key, value] of this.audioContextList) {
			let audioContext = value;
			// 关闭一个音频环境，释放任何正在使用系统资源的音频。
			audioContext.close();
			this.audioContextList.delete(key);
		}
		// 移除音量大小监听定时器
		for (const [key, value] of this.audioContextIntervalList) {
			let interval = value;
			clearInterval(interval);
			console.log('clearInterval audioText', value);
			this.audioContextIntervalList.delete(key);
		}
		this.clientRemoveListener();
		return new Promise((resolve, reject) => {
			this.irtcClient
				.leave()
				.then(function (resp) {
					Logger.info(`ivcs-js-sdk:leave room success',${resp},${new Date().toISOString()}`);
					resolve(resp);
				})
				.catch(function (error) {
					Logger.info(`ivcs-js-sdk:leave room error',${error},${new Date().toISOString()}`);
					reject(error);
				});
		});
	}

	/**
	 * 订阅流
	 * @param {IRtc.Base.RemoteStream} stream 要订阅的媒体流
	 * @returns Promise.<IRtc.Conference.Subscription, Error>
	 */
	subscribeStream(stream) {
		let [audio, video] = [stream.settings.audio !== undefined, stream.settings.video !== undefined];
		let option = {
			audio: audio,
			video: video,
		};
		console.log(`ivcs-js-sdk-log:subscribe stream,streamId:${stream.id} option:${JSON.stringify(option)}}`);
		return new Promise((resolve, reject) => {
			this.irtcClient
				.subscribe(stream)
				.then(subscription => {
					console.log(`ivcs-js-sdk-log:subscribe stream,streamId:${stream.id} success`, subscription);
					subscription.originId = stream.id;
					subscription.toclientId = this.clientInfo.clientId; // 接收端clientId
					subscription.toclientName = this.clientInfo.userName; // 接收端clientName
					subscription.toclientRole = this.clientInfo.role; // 接收端clientRole
					const result = this.getStreamInfo(stream);
					subscription.fromClientId = result.userInfo.clientId; // 发送端clientId
					subscription.fromClientName = result.userInfo.userName; // 发送端clientName
					subscription.fromClientRole = result.userInfo.role; // 发送端clientRole
					subscription.fromInfo = {
						fromClientId: result.userInfo.clientId,
						fromClientName: result.userInfo.userName,
						fromClientRole: result.userInfo.role,
					};
					this.subscriptionArr.push(subscription);
					this.subscriptionAddListener(subscription);
					resolve(subscription);
				})
				.catch(error => {
					console.log(`ivcs-js-sdk-log:subscribe stream,streamId:${stream.id} error`, error);
					const result = this.getStreamInfo(stream);
					dispatchEvent(IRTC_CUSTOMEREVENT.SUBSCRIBEFAILED, null, {
						value: stream.id,
						streamInfo: result,
					});
					reject(error);
				});
		});
	}

	/**
	 * 获取订阅的通话质量
	 * @param {subscription} subscription
	 * @returns {any} or null averageDelay: 61;frozenPercentage:0
	 */
	subscriptionGetConnectionSummary(subscription) {
		// let subscription = this.subscriptionArr.find(item => {
		// 	return item.id === subscribeId;
		// });
		if (subscription) {
			const result = subscription.getConnectionSummary();
			console.log(`ivcs-js-sdk:getConnectionSummary subscription,${JSON.stringify(subscription)},result,${JSON.stringify(result)}`);
			return result;
		} else {
			return null;
		}
	}

	/**
	 * 设置会话中所有的订阅对象的通话质量，并且是通过video渲染的。
	 * 一般用于session场景转接 or 挂断的时候离开会话前入库
	 * @param {string} sessionId 场景转接前的sessionId ，或者挂断时候的sessionId可以不传
	 */
	setAllSubscribeCallQuality(sessionId) {
		this.subscriptionArr.forEach(subscription => {
			if (subscription && subscription.videoId) {
				const result = this.subscriptionGetConnectionSummary(subscription);
				if (result) {
					this.setSessionStreamCallQuality(result.averageDelay, result.averageDelay, result.frozenPercentage, subscription.id, sessionId);
				}
			}
		});
	}

	/**
	 * Attach event listeners to a subscription: end/mute/unmute lifecycle,
	 * ICE connection state changes, and quality reports. Each state change is
	 * re-dispatched as an SDK event carrying the originating stream's info.
	 * @param {IRtc.Conference.Subscription} subscription
	 */
	subscriptionAddListener(subscription) {
		// Resolve the remote stream this subscription came from, to enrich events.
		let subscribe_stream = this.remoteStreams.find(item => {
			return item.id == subscription.originId;
		});
		let streamInfo = subscribe_stream ? this.getStreamInfo(subscribe_stream) : { streamLabel: 'unknown' };
		this.subscriptionEndedImplementation = event => {
			// fix stream is normal end,don't dispatch event
			// (the delay gives a normal teardown a chance to complete first)
			setTimeout(() => {
				dispatchEvent(IRTC_CUSTOMEREVENT.SUBSCRIPTIONEND, null, {
					subscriptionId: subscription.id,
					streamLabel: streamInfo.streamLabel,
					streamInfo: streamInfo,
				});
			}, 500);
			Logger.info(`ivcs-js-sdk:subscription is end with ${JSON.stringify(subscription)},subscriptionId,${subscription.id},originStreamId,${subscription.originId},event kind,${event.kind},time:${new Date().toISOString()}`);
			// On end, record final call quality for video-rendered subscriptions.
			if (subscription && subscription.videoId) {
				const result = this.subscriptionGetConnectionSummary(subscription);
				if (result) {
					this.setSessionStreamCallQuality(result.averageDelay, result.averageDelay, result.frozenPercentage, subscription.id);
				}
			}
			removeObjectWithId(this.subscriptionArr, subscription.id);
		};
		this.subscriptionMuteImplementation = event => {
			dispatchEvent(IRTC_CUSTOMEREVENT.SUBSCRIPTIONMUTE, null, {
				value: subscription,
				kind: event.kind,
			});
			Logger.info(`ivcs-js-sdk:subscription is muted with subscriptionId,${subscription.id},originStreamId,${subscription.originId},event kind,${event.kind},time:${new Date().toISOString()}`);
		};
		this.subscriptionUnmuteImplementation = event => {
			dispatchEvent(IRTC_CUSTOMEREVENT.SUBSCRIPTIONUNMUTE, null, {
				value: subscription,
				kind: event.kind,
			});
			Logger.info(`ivcs-js-sdk:subscription is unmuted with subscriptionId:${subscription.id},originStreamId:${subscription.originId},event kind:${event.kind},time:${new Date().toISOString()}`);
		};
		// The four ICE handlers below map connection-state transitions onto
		// subscribeStreamStatus events with a renderStatus payload.
		this.subscriptionIceConnectingImplementation = event => {
			// Logger.info(`subscription ice connecting subscriptionId:${subscription.id},originStreamId:${subscription.originId},event: ${JSON.stringify(event)},time:${new Date().toISOString()}`);
			dispatchEvent(IRTC_CUSTOMEREVENT.subscribeStreamStatus, null, {
				streamLabel: streamInfo.streamLabel,
				userInfo: streamInfo.userInfo,
				renderStatus: {
					status: 'connecting',
				},
			});
		};

		this.subscriptionIceConnectedImplementation = event => {
			// Logger.info(`subscription ice connected subscriptionId:${subscription.id},originStreamId:${subscription.originId},event: ${JSON.stringify(event)},time:${new Date().toISOString()}`);
			dispatchEvent(IRTC_CUSTOMEREVENT.subscribeStreamStatus, null, {
				streamLabel: streamInfo.streamLabel,
				userInfo: streamInfo.userInfo,
				renderStatus: {
					status: 'connected',
				},
			});
		};

		this.subscriptionIceDisconnectedImplementation = event => {
			// Logger.info(`subscription ice disconnected subscriptionId:${subscription.id},originStreamId:${subscription.originId},event: ${JSON.stringify(event)},time:${new Date().toISOString()}`);
			dispatchEvent(IRTC_CUSTOMEREVENT.subscribeStreamStatus, null, {
				streamLabel: streamInfo.streamLabel,
				userInfo: streamInfo.userInfo,
				renderStatus: {
					status: 'disconnected',
				},
			});
		};

		this.subscriptionIceFailedImplementation = event => {
			// Logger.info(`subscription ice failed subscriptionId:${subscription.id},originStreamId:${subscription.originId},event: ${JSON.stringify(event)},time:${new Date().toISOString()}`);
			dispatchEvent(IRTC_CUSTOMEREVENT.subscribeStreamStatus, null, {
				streamLabel: streamInfo.streamLabel,
				userInfo: streamInfo.userInfo,
				renderStatus: {
					status: 'failed',
				},
			});
		};

		this.subscriptionQualityReportImplementation = event => {
			// event.report = {status:'good',quality:100,info:{packetloss:0},criterion:{packetloss:1}}
			// Logger.info(`subscription quality-report subscriptionId:${subscription.id},originStreamId:${subscription.originId},event: ${JSON.stringify(event)},data:${event.report},time:${new Date().toISOString()}`);
			dispatchEvent(IRTC_CUSTOMEREVENT.subscribeStreamStatus, null, {
				streamLabel: streamInfo.streamLabel,
				userInfo: streamInfo.userInfo,
				renderStatus: {
					status: 'quality-report',
					data: event.report,
				},
			});
		};
		subscription.addEventListener('ended', this.subscriptionEndedImplementation);
		subscription.addEventListener('mute', this.subscriptionMuteImplementation);
		subscription.addEventListener('unmute', this.subscriptionUnmuteImplementation);
		subscription.addEventListener('connecting', this.subscriptionIceConnectingImplementation);
		subscription.addEventListener('connected', this.subscriptionIceConnectedImplementation);
		subscription.addEventListener('disconnected', this.subscriptionIceDisconnectedImplementation);
		subscription.addEventListener('failed', this.subscriptionIceFailedImplementation);
		subscription.addEventListener('quality-report', this.subscriptionQualityReportImplementation);
	}

	/**
	 * 移除订阅流的事件监听
	 * @param {IRtc.Conference.Subscription} subscription
	 */
	subscriptionRemoveListener(subscription) {
		if (subscription) {
			subscription.clearEventListener('ended');
			subscription.clearEventListener('mute');
			subscription.clearEventListener('unmute');
			subscription.clearEventListener('connecting');
			subscription.clearEventListener('connected');
			subscription.clearEventListener('disconnected');
			subscription.clearEventListener('failed');
			subscription.clearEventListener('quality-report');
			subscription.removeEventListener('ended', this.subscriptionEndedImplementation);
			subscription.removeEventListener('mute', this.subscriptionMuteImplementation);
			subscription.removeEventListener('unmute', this.subscriptionUnmuteImplementation);
			subscription.addEventListener('connecting', this.subscriptionIceConnectingImplementation);
			subscription.addEventListener('connected', this.subscriptionIceConnectedImplementation);
			subscription.addEventListener('disconnected', this.subscriptionIceDisconnectedImplementation);
			subscription.addEventListener('failed', this.subscriptionIceFailedImplementation);
			subscription.addEventListener('quality-report', this.subscriptionQualityReportImplementation);
			Logger.info(`ivcs-js-sdk:subscription  subscriptionId:${subscription.id},originStreamId:${subscription.originId}, remove all listener,time:${new Date().toISOString()}`);
		}
	}

	/**
	 * 发布一条本地媒体流到会议服务器。当发布成功后其他与会者可以订阅这条媒体流。
	 * @param {IRtc.Base.LocalStream} stream 要发布的媒体流
	 * @param {IRtc.Base.PublishOptions} option 发布时的参数。
	 * @returns Promise.<IRtc.Base.Publication, Error>
	 */
	publishStream(stream, option) {
		return new Promise((resolve, reject) => {
			var self = this;
			var audioTracks = stream.mediaStream ? stream.mediaStream.getAudioTracks() : stream.getAudioTracks();
			var videoTracks = stream.mediaStream ? stream.mediaStream.getVideoTracks() : stream.getVideoTracks();
			if (!audioTracks || audioTracks.length <= 0) {
				option.audio = false;
			}
			if (!videoTracks || videoTracks.length <= 0) {
				option.video = false;
			}
			this.irtcClient
				.publish(stream)
				.then(function (publication) {
					publication.originId = stream.id;
					self.publicationArr.push(publication);
					self.publicationAddListener(publication);
					resolve(publication);
				})
				.catch(function (error) {
					dispatchEvent(IRTC_CUSTOMEREVENT.PUBLISHFAILED, null, {
						value: stream.id,
					});
					reject(error);
				});
		});
	}

	/**
	 * 流的视频操作
	 * @param {string} type 'audio'声音 ‘video’ 视频
	 * @param {boolean} status true = 打开 false = 关闭
	 * @param {IRtc.Base.Publication} publication 发布表示一个流发送者发布一条音视频流。它处理了将一条本地音视频流发布到一个视频会议的各个动作
	 * @returns Promise.<void, Error>
	 */
	publicationMediaOperate(type, status, publication) {
		if (status) {
			return new Promise((resolve, reject) => {
				if (type != 'audio' || type != 'video') {
					reject('The parameter type is invalid. Only Audio and video are supported');
					return;
				}
				publication
					.mute(type)
					.then(function (resp) {
						resolve(resp);
					})
					.catch(function (error) {
						reject(error);
					});
			});
		} else {
			return new Promise((resolve, reject) => {
				if (type != 'audio' || type != 'video') {
					reject('The parameter type is invalid. Only Audio and video are supported');
					return;
				}
				publication
					.unmute(type)
					.then(function (resp) {
						resolve(resp);
					})
					.catch(function (error) {
						reject(error);
					});
			});
		}
	}

	/**
	 * Registers lifecycle / mute / ICE-state / quality listeners on a publication.
	 * Each handler is stored on `this` so publicationRemoveListener can detach it.
	 * NOTE(review): the handler fields are shared across publications — adding a
	 * second publication overwrites the first one's references, so remove-listener
	 * only detaches the most recently attached handlers. Confirm this is intended.
	 * @param {IRtc.Base.Publication} publication
	 */
	publicationAddListener(publication) {
		// Look up the remote-stream record for this publication to get label/user info.
		let publish_stream = this.remoteStreams.find(item => {
			return item.id == publication.id;
		});
		// fix publish success while stream not Add
		let streamInfo = publish_stream ? this.getStreamInfo(publish_stream) : { streamLabel: 'unknown' };
		if (!publish_stream) {
			// The stream record can lag behind publish success; retry the lookup once
			// after 1s. The handlers below close over `streamInfo`, so they pick up
			// the refreshed value.
			setTimeout(() => {
				let publish_stream = this.remoteStreams.find(item => {
					return item.id == publication.id;
				});
				streamInfo = publish_stream ? this.getStreamInfo(publish_stream) : { streamLabel: 'unknown' };
			}, 1000);
		}
		// 'ended': drop the publication and its originating local stream from bookkeeping.
		this.publicationEndImplementation = event => {
			removeObjectWithId(this.publicationArr, publication.id);
			dispatchEvent(IRTC_CUSTOMEREVENT.PUBLICATIONEND, null, {
				publicationId: publication.id,
				streamLabel: streamInfo.streamLabel,
				streamInfo: streamInfo,
			});
			let streamId = '';
			for (let i = 0; i < this.createStreams.length; i++) {
				let stream = this.createStreams[i];
				if (stream.id === publication.originId) {
					/** republish must not stop the MediaStream tracks */
					// stopStream(stream);
					streamId = stream.id;
				}
			}
			removeObjectWithId(this.createStreams, streamId);
			Logger.info(`ivcs-js-sdk:publication end,publicationId:${publication.id},originStreamId:${publication.originId},event ${JSON.stringify(event)},time:${new Date().toISOString()}`);
		};

		// 'mute': forward which media kind (audio/video) was muted.
		this.publicationMuteImplementation = event => {
			dispatchEvent(IRTC_CUSTOMEREVENT.PUBLICATIONMUTE, null, {
				value: publication,
				kind: event.kind,
			});
			Logger.info(`ivcs-js-sdk:publication muted publicationId:${publication.id},originStreamId:${publication.originId},event:${event.kind},time:${new Date().toISOString()}`);
		};

		// 'unmute': forward which media kind (audio/video) was unmuted.
		this.publicationUnmuteImplementation = event => {
			dispatchEvent(IRTC_CUSTOMEREVENT.PUBLICATIOUNMUTE, null, {
				value: publication,
				kind: event.kind,
			});
			Logger.info(`ivcs-js-sdk:publication unmute publicationId:${publication.id},originStreamId:${publication.originId},event:${event.kind},time:${new Date().toISOString()}`);
		};

		// ICE state handlers: each one re-broadcasts the state as a
		// publishStreamStatus event tagged with the stream's label and user info.
		this.publicationIceConnectingImplementation = event => {
			Logger.info(`ivcs-js-sdk:publication ice connecting publicationId:${publication.id},originStreamId:${publication.originId},event: ${JSON.stringify(event)},time:${new Date().toISOString()}`);
			dispatchEvent(IRTC_CUSTOMEREVENT.publishStreamStatus, null, {
				streamLabel: streamInfo.streamLabel,
				userInfo: streamInfo.userInfo,
				publishStatus: {
					status: 'connecting',
				},
			});
		};

		this.publicationIceConnectedImplementation = event => {
			Logger.info(`ivcs-js-sdk:publication ice connected publicationId:${publication.id},originStreamId:${publication.originId},event: ${JSON.stringify(event)},time:${new Date().toISOString()}`);
			dispatchEvent(IRTC_CUSTOMEREVENT.publishStreamStatus, null, {
				streamLabel: streamInfo.streamLabel,
				userInfo: streamInfo.userInfo,
				publishStatus: {
					status: 'connected',
				},
			});
		};

		this.publicationIceDisconnectedImplementation = event => {
			Logger.info(`ivcs-js-sdk:publication ice disconnected publicationId:${publication.id},originStreamId:${publication.originId},event: ${JSON.stringify(event)},time:${new Date().toISOString()}`);
			dispatchEvent(IRTC_CUSTOMEREVENT.publishStreamStatus, null, {
				streamLabel: streamInfo.streamLabel,
				userInfo: streamInfo.userInfo,
				publishStatus: {
					status: 'disconnected',
				},
			});
		};

		this.publicationIceFailedImplementation = event => {
			Logger.info(`ivcs-js-sdk:publication ice failed publicationId:${publication.id},originStreamId:${publication.originId},event: ${JSON.stringify(event)},time:${new Date().toISOString()}`);
			dispatchEvent(IRTC_CUSTOMEREVENT.publishStreamStatus, null, {
				streamLabel: streamInfo.streamLabel,
				userInfo: streamInfo.userInfo,
				publishStatus: {
					status: 'failed',
				},
			});
		};

		// 'quality-report': forward the report payload untouched.
		this.publicationQualityReportImplementation = event => {
			// event.report = {status:'good',quality:100,info:{packetloss:0},criterion:{packetloss:1}}
			// Logger.info(`publication quality-report publicationId:${publication.id},originStreamId:${publication.originId},event: ${JSON.stringify(event)},data:${event.report},time:${new Date().toISOString()}`);
			dispatchEvent(IRTC_CUSTOMEREVENT.publishStreamStatus, null, {
				streamLabel: streamInfo.streamLabel,
				userInfo: streamInfo.userInfo,
				publishStatus: {
					status: 'quality-report',
					data: event.report,
				},
			});
		};
		// Attach every handler defined above.
		publication.addEventListener('connecting', this.publicationIceConnectingImplementation);
		publication.addEventListener('connected', this.publicationIceConnectedImplementation);
		publication.addEventListener('disconnected', this.publicationIceDisconnectedImplementation);
		publication.addEventListener('failed', this.publicationIceFailedImplementation);
		publication.addEventListener('mute', this.publicationMuteImplementation);
		publication.addEventListener('unmute', this.publicationUnmuteImplementation);
		publication.addEventListener('ended', this.publicationEndImplementation);
		publication.addEventListener('quality-report', this.publicationQualityReportImplementation);
	}

	/**
	 * 移除发布流的事件监听
	 * @param {IRtc.Base.Publication} publication
	 */
	publicationRemoveListener(publication) {
		if (publication) {
			publication.clearEventListener('ended');
			publication.clearEventListener('unmute');
			publication.clearEventListener('mute');
			publication.clearEventListener('failed');
			publication.clearEventListener('connecting');
			publication.clearEventListener('connected');
			publication.clearEventListener('disconnected');
			publication.clearEventListener('quality-report');
			publication.removeEventListener('ended', this.publicationEndImplementation);
			publication.removeEventListener('mute', this.publicationMuteImplementation);
			publication.removeEventListener('unmute', this.publicationUnmuteImplementation);
			publication.removeEventListener('connecting', this.publicationIceConnectingImplementation);
			publication.removeEventListener('connected', this.publicationIceConnectedImplementation);
			publication.removeEventListener('disconnected', this.publicationIceDisconnectedImplementation);
			publication.removeEventListener('failed', this.publicationIceFailedImplementation);
			publication.removeEventListener('quality-report', this.publicationQualityReportImplementation);
			Logger.info(`ivcs-js-sdk:publicationId:${publication.id},originStreamId:${publication.originId} remove all listener,time:${new Date().toISOString()}`);
		}
	}

	/**
	 * Registers listeners on a remote stream: ended, ASR, updated (up/downgrade),
	 * TTS speaking, quality report, connection state and active-audio-input.
	 * Handlers are stored on `this` so streamRemoveListener can detach them.
	 * NOTE(review): handler fields are shared across streams — a later stream
	 * overwrites the references of an earlier one; confirm intended.
	 * @param {IRtc.Base.RemoteStream} stream remote media stream
	 */
	streamAddListener(stream) {
		let streamInfo = this.getStreamInfo(stream);
		// 'ended': drop the stream from bookkeeping and broadcast the end; also
		// emit a final ttsStreamSpeakDone so pending TTS UI can settle.
		this.streamEndedImplementation = event => {
			removeObjectWithId(this.remoteStreams, stream.id);
			dispatchEvent(IRTC_CUSTOMEREVENT.STREAMENDED, null, {
				streamId: stream.id,
				streamInfo: streamInfo,
			});
			Logger.info(`ivcs-js-sdk:stream end,streamId: ${stream.id},event:${JSON.stringify(event)},time:${new Date().toISOString()}`);
			// stream end stop ttsstreamspeakDone event
			let result = deepCopy(this.getStreamInfo(stream));
			dispatchEvent(IRTC_CUSTOMEREVENT.ttsStreamSpeakDone, null, {
				streamId: stream.id,
				streamLabel: result.streamLabel,
				userInfo: result.userInfo,
				content: 'ok',
			});
		};
		// 'asr': forward recognized text plus identifying attributes.
		this.streamAsrEvent = event => {
			// let streamLabel = stream.attributes && stream.attributes.type ? stream.attributes.type : stream.id;
			dispatchEvent(IRTC_CUSTOMEREVENT.ASR, null, {
				id: stream.id,
				attributes: streamInfo.attributes,
				content: event && event.info && event.info.content ? event.info.content : '',
				streamLabel: streamInfo.streamLabel,
				clientId: streamInfo.attributes && streamInfo.attributes.clientId ? streamInfo.attributes.clientId : '',
				owner: streamInfo.attributes && streamInfo.attributes.owner ? streamInfo.attributes.owner : '',
			});
			Logger.info('ivcs-js-sdk:stream asr event triggered: ', event, stream.id, new Date().toISOString());
		};
		// 'updated': re-broadcast the update; when video becomes active (an
		// upgrade), probe the rendered element to measure time-to-first-frame.
		this.streamUpdatedEvent = async event => {
			// Logger.info('stream update eventInfo:', event, 'with streamId:', stream.id, '&streamInfo:', stream, 'time:', new Date().toISOString());
			if (event && event.type === 'updated' && event.info && event.info.length > 0) {
				// info:{field:'media.video.status',value:boolean}
				// let streamLabel = stream.attributes && stream.attributes.type ? stream.attributes.type : stream.id;
				// let userInfo = stream.attributes && stream.attributes.userInfo ? stream.attributes.userInfo : {};
				dispatchEvent(IRTC_CUSTOMEREVENT.STREAMUPDATED, null, {
					id: stream.id,
					streamLabel: streamInfo.streamLabel,
					userInfo: streamInfo.userInfo,
					info: event.info,
				});
				// Up/downgrade: on upgrade, record whether the first frame renders and how long it takes.
				const info = event.info[0];
				if (info.field === 'media.video.status' && info.value === 'active') {
					let subscription = this.subscriptionArr.find(item => {
						return item.originId === stream.id;
					});
					if (subscription) {
						if (subscription.videoId) {
							try {
								const videoId = subscription.videoId;
								subscription.fromClientRole && (await this.setSessionStreamVideoQuality(undefined, undefined, true, subscription.id, subscription.fromInfo));
								// const track = stream.getVideoTracks()[0];
								// Hidden probe <video> fed from the rendered element's captureStream;
								// its metadata event gives the first-frame latency.
								const video = document.createElement('video');
								const renderVideo = document.getElementById(videoId);
								const startTimestamp = Date.now(); // measure time-to-first-frame from here
								try {
									const track = renderVideo?.captureStream().getVideoTracks()[0];
									video.srcObject = new MediaStream([track]);
									video.muted = true; // mute the probe
									video.setAttribute('muted', true);
									video.autoplay = true;
									video.setAttribute('autoplay', true);
									video.load();
									video.play();
									this.calcOpenTimeVideo.push(video);
									video.onloadedmetadata = e => {
										console.log('audio-to-video onloadedmetadata', e);
										if (video.videoWidth > 0 && video.videoHeight > 0) {
											const onCanplayTimestamp = Date.now();
											const waitTime = onCanplayTimestamp - startTimestamp;
											Logger.info(`ivcs-js-sdk:voice to video get opentime videoId ${videoId} is onloadmetadata,${waitTime} ms`, e);
											subscription.fromClientRole && this.setVideoStreamCallQuality(true, waitTime, subscription.id);
											// Release the probe element's resources once measured.
											video.srcObject = null; // detach the stream
											video.pause(); // stop playback
											URL.revokeObjectURL(video.src); // release any object URL
											this.calcOpenTimeVideo = this.calcOpenTimeVideo.filter(item => item !== video);
										}
									};
									video.onloadeddata = e => {
										console.log('ivcs-js-sdk:audio-to-video onloadeddata', e);
									};
									video.onwaiting = e => {
										console.log('ivcs-js-sdk:audio-to-video onwaiting', e);
									};
									video.onplaying = e => {
										console.log('ivcs-js-sdk:audio-to-video onplaying', e);
									};
									video.onplay = e => {
										console.log('ivcs-js-sdk:audio-to-video onplay', e);
									};
									video.onloadstart = e => {
										console.log('ivcs-js-sdk:audio-to-video onloadstart', e);
									};
									video.oncanplay = e => {
										console.log('ivcs-js-sdk:audio-to-video oncanplay', e);
									};
									video.onratechange = e => {
										console.log('ivcs-js-sdk:audio-to-video onratechange', e);
									};
									video.onseeked = e => {
										console.log('ivcs-js-sdk:-to-video onseeked', e);
									};
								} catch (error) {
									// ios safari wechat error
									// captureStream is unavailable here; fall back to measuring
									// on the rendered element itself.
									renderVideo.pause();
									renderVideo.play();
									renderVideo.onplaying = e => {
										console.log('ivcs-js-sdk: audio-to-video onplaying', e);
									};
									renderVideo.onplay = e => {
										console.log('ivcs-js-sdk:renderVideo audio-to-video onplay', e, renderVideo.videoHeight, renderVideo.videoWidth);
										if (renderVideo.videoWidth > 0 && renderVideo.videoHeight > 0) {
											const onCanplayTimestamp = Date.now();
											const waitTime = onCanplayTimestamp - startTimestamp;
											Logger.info(`ivcs-js-sdk:voice to video get opentime videoId ${videoId} is onloadmetadata,${waitTime} ms`, e);
											subscription.fromClientRole && this.setVideoStreamCallQuality(true, waitTime, subscription.id);
										}
									};
									renderVideo.onpause = e => {
										console.log('ivcs-js-sdk:renderVideo audio-to-video onpause', e);
									};
								}
							} catch (error) {
								Logger.info(`ivcs-js-sdk:voice to video push sessionStreamVideoQuality error`, error, new Date().toISOString());
							}
						}
					}
				}
			}
		};
		// 'speaking': translate TTS start/done notifications into custom events.
		this.streamSpeakEvent = event => {
			// info:{data,status,streamId,type:'tts'},type:'speaking'
			Logger.info(`ivcs-js-sdk:stream speaking,${JSON.stringify(event)},streamId,${stream.id},time:${new Date().toISOString()}`);
			let result = deepCopy(this.getStreamInfo(stream));
			if (event && event.type === 'speaking' && event.info && event.info.type === 'tts' && event.info.status === 'start') {
				dispatchEvent(IRTC_CUSTOMEREVENT.ttsStreamSpeaking, null, {
					streamId: stream.id,
					streamLabel: result.streamLabel,
					userInfo: result.userInfo,
					content: event.info.data,
				});
			} else if (event && event.type === 'speaking' && event.info && event.info.type === 'tts' && event.info.status === 'done') {
				dispatchEvent(IRTC_CUSTOMEREVENT.ttsStreamSpeakDone, null, {
					streamId: stream.id,
					streamLabel: result.streamLabel,
					userInfo: result.userInfo,
					content: event.info.data,
				});
			}
		};
		// 'quality-report': forward the report payload untouched.
		this.streamQualityReportEvent = event => {
			// event.report = {status:'good',quality:NAN,info:{packetloss:0},criterion:{packetloss:1}} quality of NaN means the stream has dropped
			// Logger.info(`remote stream quality-report,streamId:${stream.id},event:${JSON.stringify(event.report)},time:${new Date().toISOString()}`);
			dispatchEvent(IRTC_CUSTOMEREVENT.remoteStreamStatus, null, {
				streamLabel: streamInfo.streamLabel,
				userInfo: streamInfo.userInfo,
				renderStatus: {
					status: 'quality-report',
					data: event.report,
				},
			});
		};
		// 'connectionstate': forward the transport state as a render status.
		this.streamConnectStatusEvent = event => {
			// event:{type:'connectionstate',state:'disconnected'}
			// Logger.info(`remote stream connectionstate,streamId:${stream.id},event:${JSON.stringify(event)},time:${new Date().toISOString()}`);
			dispatchEvent(IRTC_CUSTOMEREVENT.remoteStreamStatus, null, {
				streamLabel: streamInfo.streamLabel,
				userInfo: streamInfo.userInfo,
				renderStatus: {
					status: event.state,
				},
			});
		};
		// Only fired for the mixed stream: reports which input stream is currently loudest.
		this.streamActiveAudioInputEvent = event => {
			// activeAudioInputStreamId: "368293474855211140",type: "activeaudioinputchange"
			// Logger.info('activeaudioinputchange event triggered: ', event);
			const resultStream = this.remoteStreams.filter(stream => {
				return event && stream.id === event.activeAudioInputStreamId;
			});
			if (resultStream && resultStream.length > 0) {
				let activeStream = resultStream[0];
				// Logger.info('activeaudioinputchange activeStream: ', activeStream);
				let streamInfo = this.getStreamInfo(activeStream);
				dispatchEvent(IRTC_CUSTOMEREVENT.activeAudioInputStream, null, {
					streamId: streamInfo.id,
					streamLabel: streamInfo.streamLabel,
					userInfo: streamInfo.userInfo,
					audioOnly: streamInfo.audioOnly,
				});
			}
		};
		// Attach every handler defined above.
		stream.addEventListener('ended', this.streamEndedImplementation);
		stream.addEventListener('asr', this.streamAsrEvent);
		stream.addEventListener('updated', this.streamUpdatedEvent);
		stream.addEventListener('speaking', this.streamSpeakEvent);
		stream.addEventListener('quality-report', this.streamQualityReportEvent);
		stream.addEventListener('connectionstate', this.streamConnectStatusEvent);
		stream.addEventListener('activeaudioinputchange', this.streamActiveAudioInputEvent);
	}

	/**
	 * 移除远端流的监听
	 * @param {IRtc.Base.RemoteStream} stream 远端媒体流
	 */
	streamRemoveListener(stream) {
		if (stream) {
			stream.removeEventListener('ended', this.streamEndedImplementation);
			stream.clearEventListener('ended');
			stream.removeEventListener('asr', this.streamAsrEvent);
			stream.clearEventListener('asr');
			stream.removeEventListener('updated', this.streamUpdatedEvent);
			stream.clearEventListener('updated');
			stream.removeEventListener('speaking', this.streamSpeakEvent);
			stream.clearEventListener('speaking');
			stream.removeEventListener('quality-report', this.streamQualityReportEvent);
			stream.clearEventListener('quality-report');
			stream.removeEventListener('connectionstate', this.streamConnectStatusEvent);
			stream.clearEventListener('connectionstate');
			stream.removeEventListener('activeaudioinputchange', this.streamActiveAudioInputEvent);
			stream.clearEventListener('activeaudioinputchange');
			Logger.info(`ivcs-js-sdk:stream streamId:${stream.id} remove all listener,time:${new Date().toISOString()}`);
		}
	}

	/**
	 * 对房间里面用户离开事件监听
	 * @param {IRtc.Conference.Participant} participant 房间中的一个与会者。
	 */
	participantAddListener(participant) {
		this.participantLeftImplementation = event => {
			removeObjectWithId(this.participants, participant.id);
			dispatchEvent(IRTC_CUSTOMEREVENT.PARTICIPANTLEFT, null, participant);
			Logger.info(`ivcs-js-sdk:participant:${participant.id} is left,${JSON.stringify(event)},:${new Date().toISOString()}`);
		};
		participant.addEventListener('left', this.participantLeftImplementation);
	}

	/**
	 * 移除用户监听
	 * @param {IRtc.Conference.Participant} participant 房间中的一个与会者。
	 */
	participantRemoveListener(participant) {
		if (participant) {
			participant.removeEventListener('left', this.participantLeftImplementation);
			participant.clearEventListener('left');
			Logger.info(`ivcs-js-sdk:${JSON.stringify(participant)} remove left listener,:${new Date().toISOString()}`);
		}
	}

	/**
	 * 对session增加监听
	 * @param {any} session
	 */
	sessionAddListener(session) {
		this.sessionErrorEvent = () => {
			Logger.info(`ivcs-js-sdk:session ${session.id} error,time:${new Date().toISOString()}`);
			dispatchEvent(IRTC_CUSTOMEREVENT.sessionOnError, null, {
				sessionId: session.id,
			});
		};
		session.addEventListener('error', this.sessionErrorEvent);
		this.sessionEndEvent = () => {
			Logger.info(`ivcs-js-sdk:session ${session.id} end,time:${new Date().toISOString()}`);
			dispatchEvent(IRTC_CUSTOMEREVENT.sessionOnEnd, null, {
				sessionId: session.id,
			});
		};
		session.addEventListener('end', this.sessionEndEvent);
	}

	/**
	 * 移除session监听
	 * @param {any} session
	 */
	sessionRemoveListener(session) {
		if (session) {
			session.removeEventListener('error', this.sessionErrorEvent);
			session.clearEventListener('error');
			session.removeEventListener('end', this.sessionEndEvent);
			session.clearEventListener('end');
			Logger.info(`ivcs-js-sdk:session ${session.id} remove all listener,time:${new Date().toISOString()}`);
		}
	}

	/**
	 * 移除conferenceClient的事件监听
	 */
	clientRemoveListener() {
		this.irtcClient.clearEventListener('streamadded');
		this.irtcClient.clearEventListener('serverdisconnected');
		this.irtcClient.clearEventListener('participantjoined');
		this.irtcClient.clearEventListener('messagereceived');
		this.irtcClient.clearEventListener('sessionadded');
		this.irtcClient.clearEventListener('sessionerror');
		this.irtcClient.clearEventListener('sessionend');
		this.irtcClient.clearEventListener('recordingadded');
		this.irtcClient.clearEventListener('recordingerror');
		this.irtcClient.clearEventListener('recordingend');
		this.irtcClient.removeEventListener('recordingadded', this.recordingAddedListener);
		this.irtcClient.removeEventListener('recordingerror', this.recordingErrorListener);
		this.irtcClient.removeEventListener('recordingend', this.recordingEndListener);
		this.irtcClient.removeEventListener('streamadded', this.streamAddImplementation);
		this.irtcClient.removeEventListener('participantjoined', this.participantJoinedImplementation);
		this.irtcClient.removeEventListener('serverdisconnected', this.serverDisconnectedImplementation);
		this.irtcClient.removeEventListener('serverreconnected', this.serverReconnectedImplementation);
		this.irtcClient.removeEventListener('messagereceived', this.messageReceivedImplementation);
		this.irtcClient.removeEventListener('sessionadded', this.sessionAddImplementation);
		this.irtcClient.removeEventListener('sessionerror', this.sessionOnErrorEvent);
		this.irtcClient.removeEventListener('sessionend', this.sessionOnEndEvent);
	}

	/**
	 * 判断是否是自己推送的流
	 * @param {IRtc.Base.RemoteStream} stream
	 * @returns {boolean}
	 */
	isMePublishStream(stream) {
		return stream.origin === this.self.id;
	}

	/**
	 * 开始录制
	 * @param {MediaStream} stream
	 * @param {string} options  Array["video/webm", "audio/webm", "video/webm;codecs=vp8", "video/webm;codecs=vp9", "video/webm;codecs=h264", "video/webm;codecs=avc1", "audio/webm;codecs=opus", "video/webm;codecs=vp8,opus", "video/webm;codecs=vp9,opus", "video/webm;codecs=vp8,vp9,opus", "video/webm;codecs=h264,opus", "video/webm;codecs=h264,vp9,opus", "video/x-matroska;codecs=avc1"]
	 * @returns {IRtc.Base.LocalRecorder} localRecord,可用于停止和下载的调用
	 */
	startLocalRecord(stream, options, onSuccess) {
		const { LocalRecorder, RecordOptions } = IRtc.Base;
		let recordOptions = new RecordOptions(options);
		let localRecord = new LocalRecorder(stream, recordOptions);
		localRecord.addEventListener('onstart', () => {
			Logger.info('ivcs-js-sdk:start record stream:', stream.id);
			onSuccess(stream.id);
		});
		localRecord.addEventListener('onpause', () => {
			Logger.info('ivcs-js-sdk:pause record stream:', stream.id);
		});
		localRecord.addEventListener('onresume', () => {
			Logger.info('ivcs-js-sdk:resume record stream:', stream.id);
		});
		localRecord.start();
		return localRecord;
	}

	/**
	 * Stops a local recording.
	 * NOTE(review): the method name contains a typo ("Lcoal"); it is kept as-is
	 * because it is part of the public interface and callers depend on it.
	 * @param {IRtc.Base.LocalRecorder} localRecord recorder returned by startLocalRecord
	 */
	stopLcoalRecord(localRecord) {
		localRecord.stop();
	}

	/**
	 * Downloads a finished local recording to a file.
	 * @param {IRtc.Base.LocalRecorder} localRecord recorder returned by startLocalRecord
	 * @param {string} fileName name for the downloaded file
	 */
	downloadLocalRecord(localRecord, fileName) {
		localRecord.download(fileName);
	}

	/**
	 * 获取本地录制支持的类型
	 */
	getLocalRecordSupportType() {
		let { LocalRecorder } = IRtc.Base;
		let result = [];
		LocalRecorder.getSupportedType().forEach(type => {
			result.push(type);
		});
		return result;
	}
	/**
	 * 返回访客推流的状态数据，可根据2次间隔之间的指标参数变化判断网络状态
	 * @param {number} milliseconds 返回的时间间隔，单位(毫秒)，默认3000毫秒。即每隔3秒返回实时流的相关状态数据
	 * @param {function} onSuccess {actualBitrate:实际码率(单位kbps),actualHeight:发送的视频分辨率的高,actualWidth:发送的视频分辨率的宽,bytesSent:一共发送的的字节数,packetsSent:一共发送的数据包数,packetsLost:一共丢的数据包数,rate:实时的视频帧率,packetsLostRate:丢包率}
	 * @param {function} onFailure code 错误说明{401:publication无效,402:getPublicationStats任务已经在进行中,无须再次调用,403:publication.getStats失败}
	 * @returns {number} intervalId 返回定时任务id,作为移除该任务的参数。
	 */
	getPublicationStats(publication, milliseconds = 3000, onSuccess, onFailure) {
		if (!publication) {
			onFailure(new IvcsError('401', 'publication is null'));
			return -1;
		}
		if (publication.getStatsIntervalId && publication.getStatsIntervalId != -1) {
			onFailure(new IvcsError('402', 'getPublicationStats is in progress'));
			return publication.getStatsIntervalId;
		}
		publication.publishPacketsSent ? publication.publishPacketsSent : (publication.publishPacketsSent = 0);
		publication.publishPacketsLost ? publication.publishPacketsLost : (publication.publishPacketsLost = 0);
		Logger.debug('ivcs-js-sdk:PacketsSent', publication.publishPacketsSent, 'PacketsLost', publication.publishPacketsLost);
		publication.getStatsIntervalId = publicationGetStats(
			publication,
			milliseconds,
			data => {
				let lostPackets = Number(data.packetsLost) - Number(publication.publishPacketsLost);
				let sendPakcets = Number(data.packetsSent) - Number(publication.publishPacketsSent);
				data.packetsLostRate = (lostPackets / (lostPackets + sendPakcets)).toFixed(2);
				//fix the number of lost packets is smaller than the previous value
				publication.publishPacketsLost = Number(data.packetsLost) < Number(publication.publishPacketsLost) ? Number(publication.publishPacketsLost) : Number(data.packetsLost);
				publication.publishPacketsSent = Number(data.packetsSent);
				onSuccess(data);
			},
			e => {
				onFailure(new IvcsError('403', e));
			}
		);
		return publication.getStatsIntervalId;
	}

	/**
	 * 停止获取推流的状态数据
	 * @param {number} intervalId 开启时定时任务返回的id
	 * @param {function} onSuccess 'ok'
	 * @param {function} onFailure code 错误说明{401:publication无效，402:intervalId无效}
	 */
	stopPublicationStats(publication, intervalId, onSuccess, onFailure) {
		if (!publication) {
			onFailure(new IvcsError('401', 'publication is null'));
			return -1;
		}
		if (intervalId !== publication.getStatsIntervalId) {
			onFailure(new IvcsError('402', 'intervalId is error'));
			return -1;
		}
		if (intervalId != -1) {
			clearInterval(intervalId);
			publication.getStatsIntervalId = null;
			onSuccess('ok');
		}
	}

	/**
	 * 返回订阅流的状态数据，可根据2次间隔之间的指标参数变化判断网络状态
	 * @param {number} milliseconds 返回的时间间隔，单位(毫秒)，默认3000毫秒。即每隔3秒返回实时流的相关状态数据
	 * @param {function} onSuccess {actualBitrate:实际码率(单位kbps),actualHeight:发送的视频分辨率的高,actualWidth:发送的视频分辨率的宽,bytesReceived:一共接受的的字节数,packetsSent:一共发送的数据包数,packetsLost:一共丢的数据包数,rate:实时的视频帧率,packetsLostRate:丢包率}
	 * @param {function} onFailure code 错误说明{401:publication无效,402:getPublicationStats任务已经在进行中,无须再次调用,403:publication.getStats失败}
	 * @returns {number} intervalId 返回定时任务id,作为移除该任务的参数。
	 */
	getSubscriptionStats(subscription, milliseconds = 3000, onSuccess, onFailure) {
		if (!subscription) {
			onFailure(new IvcsError('401', 'subscription is null'));
			return -1;
		}
		if (subscription.getStatsIntervalId && subscription.getStatsIntervalId != -1) {
			onFailure(new IvcsError('402', 'getSubscriptionStats is in progress'));
			return subscription.getStatsIntervalId;
		}
		subscription.bytesReceived ? subscription.bytesReceived : (subscription.bytesReceived = 0);
		subscription.getStatsIntervalId = subscriptionGetStats(
			subscription,
			milliseconds,
			data => {
				data.actualBitrate = ((Number(data.bytesReceived) - Number(subscription.bytesReceived)) / milliseconds) * 1000;
				subscription.bytesReceived = data.bytesReceived;
				onSuccess(data);
			},
			e => {
				onFailure(new IvcsError('403', e));
			}
		);
		return subscription.getStatsIntervalId;
	}

	/**
	 * 停止获取订阅流的状态数据
	 * @param {number} intervalId 开启时定时任务返回的id
	 * @param {function} onSuccess 'ok'
	 * @param {function} onFailure code 错误说明{401:publication无效，402:intervalId无效}
	 */
	stopSubscriptionStats(subscription, intervalId, onSuccess, onFailure) {
		if (!subscription) {
			onFailure(new IvcsError('401', 'subscription is null'));
			return -1;
		}
		if (intervalId !== subscription.getStatsIntervalId) {
			onFailure(new IvcsError('402', 'intervalId is error'));
			return -1;
		}
		if (intervalId != -1) {
			clearInterval(intervalId);
			subscription.getStatsIntervalId = null;
			onSuccess('ok');
		}
	}

	/**
	 * 获取流音量大小，根据audio
	 * @param {string} videoId audio&video标签
	 * @param {function} onSuccess 请求成功
	 * @param {function} onFailure {code,message} '500':'videoId不存在元素','501':'videoId元素无srcObject'，‘502’:'缺少参数，onSuccess或者onFailure或者cb'
	 * @param {function} cb  返回音量大小 (0-200);
	 */
	getAudioLevel(videoId, onSuccess, onFailure, cb) {
		let videoElement = document.getElementById(videoId);
		if (!videoElement) {
			console.error(`${videoId} dom not exist`);
			onFailure('500', `${videoId} dom not exist`);
			return;
		}
		if (!videoElement.srcObject) {
			console.error(`${videoId} dom srcObject is null`);
			onFailure('501', `${videoId} dom srcObject is null`);
			return;
		}
		if (!onSuccess || !onFailure || !cb) {
			console.error(`onSuccess or onFailure, or cb, is undefined`);
			onFailure('502', `onSuccess or onFailure, or cb, is undefined`);
			return;
		}
		try {
			if (this.audioContextIntervalList.has(videoId)) {
				clearInterval(this.audioContextIntervalList.get(videoId));
				this.audioContextIntervalList.delete(videoId);
			}
			if (this.audioContextList.has(videoId)) {
				this.audioContextList.delete(videoId);
			}
			// 创建AudioContext对象
			const audioContext = new AudioContext();
			// 创建MediaElementAudioSourceNode对象
			const sourceNode = audioContext.createMediaStreamSource(videoElement.srcObject);
			// 创建AnalyserNode对象
			const analyserNode = audioContext.createAnalyser();
			analyserNode.fftSize = 32;
			// 将MediaElementAudioSourceNode对象连接到AnalyserNode对象
			sourceNode.connect(analyserNode);
			// 将AnalyserNode对象连接到AudioContext.destination
			// analyserNode.connect(audioContext.destination);
			// 获取音频数据并计算音频级别
			const dataArray = new Uint8Array(analyserNode.frequencyBinCount);
			function getAudioLevel() {
				analyserNode.getByteFrequencyData(dataArray);
				const average = dataArray.reduce((acc, curr) => acc + curr, 0) / dataArray.length;
				return average;
			}
			// 每100毫秒获取一次音频级别
			let interval = setInterval(() => {
				const audioLevel = getAudioLevel();
				// console.log('audioLevel audioLevel', audioLevel);
				cb(audioLevel);
			}, 100);
			this.audioContextList.set(videoId, audioContext);
			this.audioContextIntervalList.set(videoId, interval);
			onSuccess('ok');
		} catch (error) {
			console.error(`getAudioLevel videoId: ${videoId} catch error,${error},time:${new Date().toISOString()}`);
			onFailure(error);
		}
	}

	/**
	 * 用于一般静音某个音视频流后，暂停音频上下文中的时间进程，暂停音频硬件访问并减少进程中的 CPU/电池使用。ps:请结合getAudioLevel使用
	 * @param {string} videoId audio&video标签
	 * @param {function} onSuccess 请求成功
	 * @param {function} onFailure {code,message} '500':'videoId不存在元素','501':'未监听该videoId的音量大小，未执行getAudioLevel'，‘502’:'操作失败'
	 */
	suspendGetAudioLevel(videoId, onSuccess, onFailure) {
		let videoElement = document.getElementById(videoId);
		if (!videoElement) {
			console.error(`${videoId} dom not exist`);
			onFailure('500', `${videoId} dom not exist`);
			return;
		}
		let audioContext = this.audioContextList.get(videoId);
		if (!audioContext) {
			onFailure('501', `${videoId} not exist audioContext,not getAudioLevel`);
			return;
		}
		audioContext
			.suspend()
			.then(s => {
				console.log(`audioContext:${audioContext} suspend success with videoId:${videoId},result:${s}`);
				onSuccess(s);
			})
			.catch(e => {
				console.log(`audioContext:${audioContext} suspend error with videoId:${videoId},errorInfo:${e}`);
				onFailure('502', e);
			});
	}

	/**
	 * 用于一般恢复某个音视频流声音《取消静音》后，恢复之前被暂停的音频上下文中的时间进程。ps:请结合getAudioLevel使用
	 * @param {string} videoId audio&video标签
	 * @param {function} onSuccess 请求成功
	 * @param {function} onFailure {code,message} '500':'videoId不存在元素','501':'未监听该videoId的音量大小，未执行getAudioLevel'，‘502’:'操作失败'
	 */
	resumeGetAudioLevel(videoId, onSuccess, onFailure) {
		let videoElement = document.getElementById(videoId);
		if (!videoElement) {
			console.error(`${videoId} dom not exist`);
			onFailure('500', `${videoId} dom not exist`);
			return;
		}
		let audioContext = this.audioContextList.get(videoId);
		if (!audioContext) {
			onFailure('501', `${videoId} not exist audioContext,not getAudioLevel`);
			return;
		}
		audioContext
			.resume()
			.then(s => {
				console.log(`audioContext:${audioContext} resume success with videoId:${videoId},result:${s}`);
				onSuccess(s);
			})
			.catch(e => {
				console.log(`audioContext:${audioContext} resume error with videoId:${videoId},errorInfo:${e}`);
				onFailure('502', e);
			});
	}

	/**
	 * 返回video标签首帧画面的状态，如黑屏，白屏，正常等,一般用于设置video.srcObject后去判断视频画面状态
	 * @param {string} videoId video标签id
	 * @param {function} cb {code=[404|200|405|406],status} 404:videoId无效，200:正常，405:黑屏，406:白屏
	 */
	getVideoFirstFrameStatus(videoId, cb) {
		const videoElement = document.getElementById(videoId);
		if (!videoElement) {
			cb({ code: 404, status: 'videoId is invalid' });
			return;
		}
		videoElement.load();
		const playPromise = videoElement.play();
		if (playPromise !== undefined) {
			playPromise
				.then(s => {
					console.log(`getVideoFirstFrameStatus videoId:${videoId} playPromise result:${s}`);
				})
				.catch(error => {
					console.log(`getVideoFirstFrameStatus videoId:${videoId} playPromise error:${error}`);
				});
		}
		// 已加载元数据。一般触发这个则表明视频画面正常
		videoElement.onloadedmetadata = () => {
			console.log(`getVideoFirstFrameStatus videoId:${videoId} onloadedmetadata,time:${new Date().toISOString()}`);
			cb({ code: 200, status: 'ok' });
			return;
		};
		(videoElement.onplay = () => {
			console.log(`getVideoFirstFrameStatus videoId:${videoId} onplay,time:${new Date().toISOString()}`);
			const canvas = document.createElement('canvas');
			canvas.width = videoElement.width;
			canvas.height = videoElement.height;
			const ctx = canvas.getContext('2d');
			setTimeout(() => {
				ctx.drawImage(customerVideo, 0, 0, canvas.width, canvas.height);
				const imageDate = ctx.getImageData(0, 0, canvas.width, canvas.height);
				const arr = imageDate.data;
				let isWhiteScreen = false; // 白屏
				let isBlackScreen = false; // 黑屏
				for (let a = 0; a < arr.length; a++) {
					const item = arr[a];
					if (item > 0 && item < 200) {
						isBlackScreen = false;
						isWhiteScreen = false;
						canvas = null;
						cb({ code: 200, status: 'imageDate is valid' });
						return;
					} else if (item === 0) {
						isWhiteScreen = true;
						isBlackScreen = false;
					} else if (item === 255) {
						isBlackScreen = true;
						isWhiteScreen = false;
					}
				}
				if (isBlackScreen) {
					canvas = null;
					cb({ code: 405, status: 'black screen' });
				}
				if (isWhiteScreen) {
					canvas = null;
					cb({ code: 406, status: 'white screen' });
				}
			}, 100);
		}),
			(videoElement.oncanplay = () => {
				console.log(`getVideoFirstFrameStatus videoId:${videoId} oncanplay,time:${new Date().toISOString()}`);
			}),
			(videoElement.onloadeddata = () => {
				console.log(`getVideoFirstFrameStatus videoId:${videoId} onloadeddata,time:${new Date().toISOString()}`);
			}),
			(videoElement.onabort = () => {
				console.log(`getVideoFirstFrameStatus videoId:${videoId} onabort,time:${new Date().toISOString()}`);
			});
	}

	/**
	 * Report whether the video has a frame and its opening time after rendering.
	 * @param {boolean} hasFrame whether a frame was shown (deprecated; use the update API instead)
	 * @param {number} openingTime opening time in ms (deprecated; use the update API instead)
	 * @param {boolean} isVideo whether this is a video stream
	 * @param {string} subscribeId subscription id
	 * @param {string} fromInfo info of the subscribed stream {fromClientId,fromClientName,fromClientRole}
	 * @returns {Promise} resolves/rejects with the backend response
	 */
	setSessionStreamVideoQuality(hasFrame = undefined, openingTime = undefined, isVideo = undefined, subscribeId, fromInfo) {
		const sessionId = this.sessionId;
		const data = {
			sessionId: sessionId,
			subscribeId: subscribeId,
			fromClientId: fromInfo.fromClientId,
			fromClientName: fromInfo.fromClientName,
			fromClientRole: fromInfo.fromClientRole,
			toClientId: this.clientInfo.clientId,
			toClientName: this.clientInfo.userName,
			toClientRole: this.clientInfo.role,
			hasFrame: hasFrame,
			openingTime: openingTime,
			isVideo: isVideo,
		};
		// Return the API promise directly instead of wrapping it in a redundant
		// `new Promise` (explicit-construction anti-pattern); behavior is identical.
		return setSessionStreamQuality(this.backendUrl, data, sessionId, this.accessToken);
	}

	/**
	 * 录制中断，上报中断结果
	 * @param {string} recordId 系统录制的录像id
	 * @returns
	 */
	updateRecordCrashEvent(recordId) {
		let url = this.backendUrl;
		return new Promise((resolve, reject) => {
			updateRecordStatus(url, recordId, this.accessToken)
				.then(s => {
					resolve(s);
				})
				.catch(e => {
					reject(e);
				});
		});
	}

	/**
	 * 设置订阅的卡顿，平均延时
	 * @param {number} averageSoundDelay 平均声音延迟 （单位：毫秒）
	 * @param {number} averageFrameDelay 平均画面延迟 （单位：毫秒）
	 * @param {string} screenStuckRatio 卡顿率（百分值）
	 * @param {string} subscribeId 订阅id
	 * @param {string} setSessionId sessionId 设置数据的sesionId
	 */
	setSessionStreamCallQuality(averageSoundDelay, averageFrameDelay, screenStuckRatio, subscribeId, setSessionId) {
		let sessionId = setSessionId ? setSessionId : this.sessionId;
		let url = this.backendUrl;
		let data = {
			sessionId: sessionId,
			averageSoundDelay: averageSoundDelay,
			averageFrameDelay: averageFrameDelay,
			screenStuckRatio: screenStuckRatio,
		};
		return new Promise((resolve, reject) => {
			setSesssionCallQuality(url, data, subscribeId, sessionId, this.accessToken)
				.then(s => {
					resolve(s);
				})
				.catch(e => {
					reject(e);
				});
		});
	}

	/**
	 * 设置订阅的开画时间和是否有画面
	 * @param {boolean} hasFrame 是否有画面
	 * @param {number} openingTime 开画时间 单位毫秒
	 * @param {string} subscribeId 订阅id
	 */
	setVideoStreamCallQuality(hasFrame, openingTime, subscribeId) {
		let sessionId = this.sessionId;
		let url = this.backendUrl;
		let data = {
			sessionId: sessionId,
			hasFrame: hasFrame,
			openingTime: openingTime,
		};
		return new Promise((resolve, reject) => {
			setSesssionCallQuality(url, data, subscribeId, sessionId, this.accessToken)
				.then(s => {
					resolve(s);
				})
				.catch(e => {
					reject(e);
				});
		});
	}

	/**
	 * 获取一个video标签的开画时间
	 * @param {string} videoId
	 */
	getVideoOpenTime(videoId, cb) {
		const video = document.getElementById(videoId);
		video.play();
		const startTimestamp = Date.now();
		video.addEventListener('loadedmetadata', () => {
			if (video.videoHeight > 0 && video.videoWidth > 0) {
				const onCanplayTimestamp = Date.now();
				console.log(`onloadedmetadata to getVideoOpenTime: ${onCanplayTimestamp - startTimestamp} ms`);
				cb(onCanplayTimestamp - startTimestamp);
			}
		});
	}

	/**
	 * 设置订阅的是否禁止订阅or恢复订阅
	 * @param {boolean} isMute 停止or恢复
	 * @param {string} trackKind  video/audio
	 */
	setSubscriptionMute(isMute = true, trackKind = 'video') {
		this.subscriptionArr.forEach(subscription => {
			if (subscription && isMute) {
				subscription.mute(trackKind).then(
					() => {
						console.log(`${subscription.id} mute ${trackKind} success, ${new Date().toISOString()}`);
					},
					err => {
						console.log(`${subscription.id} mute ${trackKind} failed, ${new Date().toISOString()}`, err);
					}
				);
			} else if (subscription && !isMute) {
				subscription.unmute(trackKind).then(
					() => {
						console.log(`${subscription.id} unmute ${trackKind} success, ${new Date().toISOString()}`);
					},
					err => {
						console.log(`${subscription.id} unmute ${trackKind} failed, ${new Date().toISOString()}`, err);
					}
				);
			}
		});
	}

	switchCamera(publication, stream, onSuccess, onFailure) {
		publication
			.updateMediaTrack(stream, IRtc.Base.TrackKind.AUDIO_AND_VIDEO)
			.then(resp => {
				Logger.info('ivcs-js-sdk: switchCamera updateMediaTrack success', resp, new Date().toISOString());
				onSuccess(stream);
			})
			.catch(error => {
				Logger.error('ivcs-js-sdk: switchCamera updateMediaTrack error', error, new Date().toISOString());
				let err = new IvcsError('1023', error.message || error.name || error);
				onFailure(err);
			});
	}
}

/**
 * Stop every track of a stream. Accepts either a wrapped stream (with a
 * `mediaStream` property) or a raw MediaStream; no-op for falsy input.
 * @param {object} stream
 */
export function stopStream(stream) {
	if (!stream) {
		return;
	}
	const target = stream.mediaStream ? stream.mediaStream : stream;
	for (const track of target.getTracks()) {
		track.stop();
	}
}

/**
 * Stop a publication if one is given; no-op for falsy input.
 * @param {object} publication
 */
export function stopPublish(publication) {
	if (publication) {
		publication.stop();
	}
}

/**
 * Join a room via the irtc client.
 * @param {object} irtcClient
 * @param {string} roomId room id (used for logging only)
 * @param {string} roomToken token used to join
 * @returns {Promise<object>} resolves with the room info
 * @throws {IvcsError} ERROR_ZMS_JOINROOM_FAILED when joining fails
 */
export async function joinRoom(irtcClient, roomId, roomToken) {
	// async/await instead of wrapping the client's promise in `new Promise`
	// (explicit-construction anti-pattern); resolution/rejection is unchanged.
	try {
		const roomInfo = await irtcClient.joinRoom(roomToken);
		Logger.info(`ivcs-js-sdk:join roomId,${roomId} success,${JSON.stringify(roomInfo)},at:${new Date().toISOString()}`);
		return roomInfo;
	} catch (error) {
		Logger.error(`ivcs-js-sdk:join roomId ${roomId} failed,${error},at:${new Date().toISOString()}`);
		throw new IvcsError(ivcsErrorCodeEnum.ERROR_ZMS_JOINROOM_FAILED, `join roomId,${roomId} failed,${error}`);
	}
}

/**
 * Create an audio/video stream from camera and microphone.
 * @param {object} irtcClient
 * @param {object} streamOptions stream configuration (audioConstraints/videoConstraints)
 * @param {string} videoLabel stream label
 * @param {boolean} haveAudio whether to capture audio
 * @param {boolean} haveVideo whether to capture video
 * @param {object} streamInfo stream info: participantId, clientId, audioOnly
 * @param {object} userInfo user info of the stream: from/to (origin and recipient)
 * @returns {Promise<object>} resolves with the created stream
 * @throws {IvcsError} carrying the underlying error code on failure
 */
export async function createCameraMicStream(irtcClient, streamOptions, videoLabel, haveAudio, haveVideo, streamInfo, userInfo = {}) {
	// Stream attributes attached to the publication; empty strings when streamInfo is absent.
	const attributes = {
		type: videoLabel,
		owner: streamInfo && streamInfo.participantId ? streamInfo.participantId : '',
		clientId: streamInfo && streamInfo.clientId ? streamInfo.clientId : '',
		audioOnly: streamInfo && streamInfo.audioOnly ? '1' : '0',
		userInfo,
	};
	// async/await instead of the redundant `new Promise` wrapper of the original.
	try {
		const stream = await irtcClient.createCameraMicStream(streamOptions.audioConstraints, streamOptions.videoConstraints, haveAudio, haveVideo, attributes);
		Logger.info(`ivcs-js-sdk:create Camera Mic stream success,${JSON.stringify(streamOptions)},streamId:${stream.id},${new Date().toISOString()}`);
		return stream;
	} catch (error) {
		Logger.info(`ivcs-js-sdk:createCameraMicStream failed,${JSON.stringify(error)},${new Date().toISOString()},${error.message},${error.code}`);
		throw new IvcsError(error.code, `createCameraMicStream failed,error,${JSON.stringify(error)},time:${new Date().toISOString()}`);
	}
}

/**
 * Create a screen-sharing stream.
 * @param {object} irtcClient
 * @param {object} streamOptions stream configuration (audioConstraints/videoConstraints)
 * @param {string} videoLabel stream label
 * @param {object} userInfo user info of the stream, e.g. {from,to}: source of the share and the recipient
 * @returns {Promise<object>} resolves with the created stream
 * @throws {IvcsError} carrying the underlying error code on failure
 */
export async function createScreenStream(irtcClient, streamOptions, videoLabel, userInfo = {}) {
	// async/await instead of the redundant `new Promise` wrapper of the original.
	try {
		const stream = await irtcClient.createScreenStream(streamOptions.audioConstraints, streamOptions.videoConstraints, {
			type: videoLabel,
			userInfo,
		});
		// Bug fix: the original interpolated the options object directly, which
		// logs "[object Object]"; stringify it so the log is useful.
		Logger.info(`ivcs-js-sdk:create screen stream success,streamOptions,${JSON.stringify(streamOptions)},videoLabel:${videoLabel},streamId:${stream.id},${new Date().toISOString()}`);
		return stream;
	} catch (error) {
		Logger.info(`ivcs-js-sdk:create screen stream failed,${JSON.stringify(error)},${new Date().toISOString()}`);
		throw new IvcsError(error.code, `create screen stream failed,${error.message || error}`);
	}
}

/**
 * Publish a stream. The stream's tracks are stopped if publishing fails so the
 * camera/microphone are not left busy.
 * @param {object} irtcClient
 * @param {object} stream stream to publish
 * @param {object} publishOptions publish options
 * @returns {Promise<object>} resolves with the publication
 * @throws {IvcsError} ERROR_ZMS_PUBLISH_FAILED on failure
 */
export async function publishStream(irtcClient, stream, publishOptions) {
	// async/await instead of the redundant `new Promise` wrapper of the original.
	try {
		const publication = await irtcClient.publishStream(stream, publishOptions);
		Logger.info(`ivcs-js-sdk:publish stream ${stream.id} success,publicationId:${publication.id},publishOptions:${JSON.stringify(publishOptions)},${new Date().toISOString()}`);
		return publication;
	} catch (error) {
		// Release the capture tracks before propagating the failure.
		stopStream(stream);
		Logger.info(`ivcs-js-sdk:publish stream failed,${stream.id},${error},${new Date().toISOString()}`);
		throw new IvcsError(ivcsErrorCodeEnum.ERROR_ZMS_PUBLISH_FAILED, `publish stream,${stream.id},failed,${error}`);
	}
}

/**
 * Stop an existing publication (if any) and publish the stream again.
 * @param {object} publication previous publication to stop; may be null
 * @param {object} irtcClient
 * @param {object} stream stream to publish
 * @param {object} publishOptions publish options
 * @returns {Promise<object>} resolves with the new publication
 * @throws {IvcsError} ERROR_ZMS_PUBLISH_FAILED on failure
 */
export async function republishStream(publication, irtcClient, stream, publishOptions) {
	// Plain async/await replaces the `no-async-promise-executor` anti-pattern
	// the original needed an eslint-disable for; behavior is unchanged.
	try {
		publication && publication.stop();
		const newPublication = await publishStream(irtcClient, stream, publishOptions);
		console.log('ivcs-js-sdk:newPublication:', newPublication, stream);
		return newPublication;
	} catch (error) {
		Logger.error(`ivcs-js-sdk:republish  stream ${stream.id} error,${new Date().toISOString()}`);
		throw new IvcsError(ivcsErrorCodeEnum.ERROR_ZMS_PUBLISH_FAILED, `republish stream,${stream.id},failed,${error}`);
	}
}

/**
 * Subscribe to a remote stream.
 * @param {object} irtcClient
 * @param {object} stream remote stream to subscribe to
 * @returns {Promise<object>} resolves with the subscription
 * @throws {IvcsError} ERROR_ZMS_SUBSCRIBE_FAILED on failure
 */
export async function subscribeStream(irtcClient, stream) {
	// async/await instead of the redundant `new Promise` wrapper of the original.
	try {
		const subscription = await irtcClient.subscribeStream(stream);
		Logger.info(`ivcs-js-sdk:subscribe stream,${stream.id} success,${new Date().toISOString()}`);
		return subscription;
	} catch (error) {
		Logger.info(`ivcs-js-sdk:subscribe stream ${stream.id},error ,${error},${new Date().toISOString()}`);
		throw new IvcsError(ivcsErrorCodeEnum.ERROR_ZMS_SUBSCRIBE_FAILED, `subscribe stream ${stream.id},error ,${error}`);
	}
}

/**
 * Render a stream into the media element identified by videoId.
 * @param {MediaStream} stream wrapped stream ({mediaStream}) or raw MediaStream
 * @param {string} videoId id of the <video>/<audio> element
 * @param {boolean} isMe whether this is the local stream (default false); the local stream is muted to prevent audio feedback (howling)
 * @param {function} playError called when autoplay fails or the video has not started within the watchdog timeout
 * @param {function} onEnded called with videoId when the video track ends
 * @param {function} loadVideo called once with the opening time (ms) when a frame is rendered; if it is never called, assume there is no picture
 * @returns Promise resolves on loadeddata; rejects when the element is missing or play() throws synchronously
 */
export async function renderStreamWithVideoLabel(stream, videoId, isMe = false, playError = () => {}, onEnded, loadVideo) {
	console.log(`ivcs-js-sdk-log:start renderStreamWithVideoLabel with streamId: ${stream.id}, videoId:${videoId}`);
	return await new Promise((resolve, reject) => {
		let mediaDom = document.getElementById(videoId);
		if (mediaDom) {
			mediaDom.srcObject = stream.mediaStream ? stream.mediaStream : stream;
			// Mute our own stream so the microphone does not feed back.
			if (isMe) {
				mediaDom.setAttribute('muted', 'true');
			} else {
				mediaDom.setAttribute('muted', 'false');
			}
			// Listen for the end of the first video track so the caller can react.
			if (stream && stream.mediaStream && stream.mediaStream.getVideoTracks()) {
				stream.mediaStream.getVideoTracks()[0]
					? (stream.mediaStream.getVideoTracks()[0].onended = () => {
							onEnded(videoId);
					  })
					: undefined;
			} else if (stream && stream.getVideoTracks()) {
				stream.getVideoTracks()[0]
					? (stream.getVideoTracks()[0].onended = () => {
							onEnded(videoId);
					  })
					: undefined;
			}
			try {
				mediaDom.play(); // fix: re-rendering the same video element otherwise never re-fires onplay
			} catch (error) {
				reject(new IvcsError(ivcsErrorCodeEnum.ERROR_ZMS_VIDEOPLAY_FAILED, error.name || error.message || error));
			}
			// Watchdog: report playError if the video has not started in time.
			// NOTE(review): waitTime is 5000 ms but the error message says
			// "3 seconds" — confirm the intended timeout.
			let isPlay = false;
			let isLoadMetadata = false;
			const waitTime = 5000;
			let taskInterval = setTimeout(() => {
				!isPlay && !mediaDom.isPlay && playError(new IvcsError(ivcsErrorCodeEnum.ERROR_ZMS_VIDEOPLAY_FAILED, `video id ${videoId} is not play in 3 seconds`));
			}, waitTime);
			// Playback has started.
			mediaDom.onplay = function () {
				Logger.info(`ivcs-js-sdk:renderStreamWithVideoLabel video id ${videoId} is onplay,time:${new Date().toISOString()}`);
				isPlay = true;
				mediaDom.isPlay = true;
				clearTimeout(taskInterval);
				taskInterval = null;
			};
			// Metadata has loaded.
			const startTimestamp = Date.now(); // reference point for the opening time
			mediaDom.onloadedmetadata = e => {
				Logger.info(`ivcs-js-sdk:renderStreamWithVideoLabel video id ${videoId} is onloadmetadata,videoWidth:${mediaDom.videoWidth},videoHeight:${mediaDom.videoHeight},time:${new Date().toISOString()}`);
				isPlay = true;
				mediaDom.isPlay = true;
				isLoadMetadata = true;
				clearTimeout(taskInterval);
				taskInterval = null;
				if (mediaDom.videoWidth > 0 && mediaDom.videoHeight > 0) {
					const onCanplayTimestamp = Date.now();
					const waitTime = onCanplayTimestamp - startTimestamp;
					Logger.info(`ivcs-js-sdk:renderStreamWithVideoLabel video id ${videoId} is onloadmetadata,${waitTime} ms`);
					loadVideo && loadVideo(waitTime);
					loadVideo = null; // prevent the callback from firing repeatedly
				}
				let startPlayPromise = mediaDom.play();
				if (startPlayPromise !== undefined) {
					startPlayPromise
						.catch(error => {
							Logger.error(`ivcs-js-sdk:${videoId} play error,${error}`);
							playError(new IvcsError(ivcsErrorCodeEnum.ERROR_ZMS_VIDEOPLAY_FAILED, error.name || error.message || error));
							// if (error.name === "NotAllowedError") {
							// } else {
							//     // Handle a load or playback error
							// }
						})
						.then(() => {
							// Start whatever you need to do only after playback
						});
				}
			};
			// Fired when an error occurs while loading the element.
			mediaDom.onerror = function (e) {
				Logger.info(`ivcs-js-sdk:renderStreamWithVideoLabel video id ${videoId} is onerror,time:${new Date().toISOString()}`);
				mediaDom.isPlay = false;
			};
			// Media became empty, e.g. after the media was (partially) loaded and load() is called to reload it.
			mediaDom.onemptied = function (e) {
				Logger.info(`ivcs-js-sdk:renderStreamWithVideoLabel video id ${videoId} is onemptied,time:${new Date().toISOString()}`);
			};
			// Playback is ready to start after having been paused or delayed due to lack of data.
			mediaDom.onplaying = function (e) {
				Logger.info(`ivcs-js-sdk:renderStreamWithVideoLabel video id ${videoId} is onplaying,time:${new Date().toISOString()}`);
			};
			// The first frame of the media has finished loading — the promise resolves here.
			mediaDom.onloadeddata = function (e) {
				resolve();
				Logger.info(`ivcs-js-sdk:renderStreamWithVideoLabel video id ${videoId} is onloadeddata,time:${new Date().toISOString()}`);
			};
			// Volume changed (including being muted).
			mediaDom.onvolumechange = function (e) {
				Logger.info(`ivcs-js-sdk:renderStreamWithVideoLabel video id ${videoId} is onvolumechange,time:${new Date().toISOString()}`);
			};
			// Playback stopped because of a temporary lack of data; re-render once if
			// metadata never arrived (recovers a stalled first render).
			mediaDom.onwaiting = e => {
				Logger.info(`ivcs-js-sdk:renderStreamWithVideoLabel video id ${videoId} is onwaiting,time:${new Date().toISOString()}`);
				setTimeout(() => {
					if (!isLoadMetadata) {
						Logger.info(`ivcs-js-sdk:renderStreamWithVideoLabel video id ${videoId} is onwaiting, not LoadMetadata`);
						renderStreamWithVideoLabel(stream, videoId, isMe, playError, onEnded, loadVideo);
					}
				}, 3000);
			};
			// Media data loading has been suspended (fetch stopped before the whole file was retrieved).
			mediaDom.onsuspend = function (e) {
				Logger.info(`ivcs-js-sdk:renderStreamWithVideoLabel video id ${videoId} is onsuspend,time:${new Date().toISOString()}`);
			};
			// Playback stopped because the media reached its end point.
			mediaDom.onended = function (e) {
				Logger.info(`ivcs-js-sdk:renderStreamWithVideoLabel video id ${videoId} is onended,time:${new Date().toISOString()}`);
				mediaDom.isPlay = false;
			};
			// Playback rate changed.
			mediaDom.onratechange = function (e) {
				Logger.info(`ivcs-js-sdk:renderStreamWithVideoLabel video id ${videoId} is onratechange,time:${new Date().toISOString()}`);
			};
			// The browser estimates it can play through to the end without stopping to buffer.
			mediaDom.oncanplaythrough = function (e) {
				Logger.info(`ivcs-js-sdk:renderStreamWithVideoLabel video id ${videoId} is oncanplaythrough,time:${new Date().toISOString()}`);
			};
			// The media can start playing, but may still need to pause for buffering.
			mediaDom.oncanplay = function (e) {
				Logger.info(`ivcs-js-sdk:renderStreamWithVideoLabel video id ${videoId} is oncanplay,time:${new Date().toISOString()}`);
			};
			// The browser has started loading the media data.
			mediaDom.onloadstart = function (e) {
				Logger.info(`ivcs-js-sdk:renderStreamWithVideoLabel video id ${videoId} is onloadstart,time:${new Date().toISOString()}`);
			};
			// Media data fetch stalled (data unexpectedly not forthcoming).
			mediaDom.onstalled = function (e) {
				Logger.info(`ivcs-js-sdk:renderStreamWithVideoLabel video id ${videoId} is onstalled,time:${new Date().toISOString()}`);
			};
			// A seek operation completed.
			mediaDom.onseeked = function (e) {
				Logger.info(`ivcs-js-sdk:renderStreamWithVideoLabel video id ${videoId} is onseeked,time:${new Date().toISOString()}`);
			};
			// Playback was paused.
			mediaDom.onpause = function (e) {
				Logger.info(`ivcs-js-sdk:renderStreamWithVideoLabel video id ${videoId} is onpause,time:${new Date().toISOString()}`);
				// fix: WeChat backgrounding/foregrounding pauses the video and audio is lost — resume.
				mediaDom.play();
			};
		} else {
			Logger.info(`ivcs-js-sdk:renderStreamWithVideoLabel video id ${videoId} dom not exist,time:${new Date().toISOString()}`);
			let error = new IvcsError(ivcsErrorCodeEnum.ERROR_ZMS_RENDERVIDEO_NOTEXIST, `dom not exist ,${videoId}`);
			reject(error);
		}
	});
}

/**
 * Leave the current room.
 * @param {object} irtcClient
 * @returns {Promise<object>} resolves with the leave response
 * @throws {IvcsError} ERROR_ZMS_IRTC_UNDEFINED when the client is missing;
 *   ERROR_ZMS_JOINROOM_FAILED when leaving fails (code kept from the original)
 */
export async function leaveRoom(irtcClient) {
	// async/await instead of the redundant `new Promise` wrapper of the original.
	if (!irtcClient) {
		throw new IvcsError(ivcsErrorCodeEnum.ERROR_ZMS_IRTC_UNDEFINED, 'irtc Client is null');
	}
	try {
		return await irtcClient.leaveRoom();
	} catch (error) {
		// NOTE(review): the JOINROOM error code is reused here, as in the original — confirm intended.
		throw new IvcsError(ivcsErrorCodeEnum.ERROR_ZMS_JOINROOM_FAILED, error);
	}
}

/**
 * Find a publication by id.
 * @param {Array<object>} publicationArr publications to search
 * @param {string} publicationId id to look for
 * @returns {object|null} the matching publication, or null when absent
 */
function queryPublication(publicationArr, publicationId) {
	// find() stops at the first hit instead of scanning the whole array like filter().
	const match = publicationArr.find(item => item.id === publicationId);
	return match !== undefined ? match : null;
}

// A track type is valid when it names one of the two supported kinds.
const checkTypeParameter = type => ['audio', 'video'].includes(type);

// True when some publication in the array carries the given id.
const checkPublicationExist = (publicationArr, publicationId) => publicationArr.some(item => item.id === publicationId);

/**
 * Mute the audio or video track of a publication.
 * @param {string} publicationId id of the publication to mute
 * @param {string} type 'audio' or 'video'
 * @param {object} irtcClient client holding publicationArr
 * @returns {Promise} resolves on success; rejects with a plain string for
 *   invalid arguments (kept for backward compatibility) or an IvcsError when
 *   the underlying mute() fails
 */
export async function mute(publicationId, type, irtcClient) {
	// Validation failures keep rejecting with plain strings, matching the original contract.
	if (!checkTypeParameter(type)) {
		throw `type is invalid,only support 'audio' & 'video'`;
	}
	if (!checkPublicationExist(irtcClient.publicationArr, publicationId)) {
		throw 'stream source is invalid,publication id is invalid';
	}
	// async/await instead of the redundant `new Promise` wrapper of the original.
	try {
		await queryPublication(irtcClient.publicationArr, publicationId).mute(type);
	} catch (error) {
		const code = type === 'audio' ? ivcsErrorCodeEnum.ERROR_ZMS_MUTE_AUDIO_FAILED : ivcsErrorCodeEnum.ERROR_ZMS_MUTE_VIDEO_FAILED;
		throw new IvcsError(code, error);
	}
}

/**
 * Unmute the audio or video track of a publication.
 * @param {string} publicationId id of the publication to unmute
 * @param {string} type 'audio' or 'video'
 * @param {object} irtcClient client holding publicationArr
 * @returns {Promise} resolves on success; rejects with a plain string for
 *   invalid arguments (kept for backward compatibility) or an IvcsError when
 *   the underlying unmute() fails
 */
export async function unmute(publicationId, type, irtcClient) {
	// Validation failures keep rejecting with plain strings, matching the original contract.
	if (!checkTypeParameter(type)) {
		throw `type is invalid,only support 'audio' & 'video'`;
	}
	if (!checkPublicationExist(irtcClient.publicationArr, publicationId)) {
		throw 'stream source is invalid,publication id is invalid';
	}
	// async/await instead of the redundant `new Promise` wrapper of the original.
	try {
		await queryPublication(irtcClient.publicationArr, publicationId).unmute(type);
	} catch (error) {
		const code = type === 'audio' ? ivcsErrorCodeEnum.ERROR_ZMS_UNMUTE_AUDIO_FAILED : ivcsErrorCodeEnum.ERROR_ZMS_UNMUTE_VIDEO_FAILED;
		throw new IvcsError(code, error);
	}
}

/**
 * Check that a stream exposes a usable getTracks() returning at least one track.
 * Accepts a wrapped stream ({mediaStream}) or a raw MediaStream.
 * @param {object} stream
 * @returns {boolean} true when at least one track is available
 */
export function isEffectiveMediaStreams(stream) {
	// Robustness fix: a null/undefined stream is simply ineffective instead of throwing.
	if (!stream) {
		return false;
	}
	// Both branches of the original did identical work; pick the source once.
	const source = stream.mediaStream ? stream.mediaStream : stream;
	if (typeof source.getTracks != 'function') {
		return false;
	}
	const tracks = source.getTracks();
	if (tracks && tracks.length == 0) {
		return false;
	}
	return true;
}

/**
 * Render a remote stream once more; works around iPhone (iOS 17+) browsers
 * failing to maximize the view.
 * @param {irtcClient} irtcClient
 * @param {string} streamLabel stream label
 * @param {string} videoId id of the <video> element
 * @param {function} onSuccess called with the stream info of the re-rendered stream
 * @param {function} onFailure called with the error
 * @param {function} playError autoplay failed
 * @param {function} onEnded stream ended; a remote stream needs re-rendering / a local stream may switch camera
 */
export function renderStreamAgain(irtcClient, streamLabel, videoId, onSuccess, onFailure, playError, onEnded) {
	// Re-render the first remote stream whose attribute type matches streamLabel.
	for (let i = 0; i < irtcClient.remoteStreams.length; i++) {
		let stream = irtcClient.remoteStreams[i];
		const streamInfo = irtcClient.getStreamInfo(stream);
		if (streamInfo.attributes && streamInfo.attributes.type === streamLabel) {
			try {
				let isMe = false;
				// NOTE(review): renderStreamWithVideoLabel is async and its promise is
				// not awaited, so this try/catch only sees synchronous throws and
				// onSuccess fires before rendering completes — confirm this is intended.
				renderStreamWithVideoLabel(stream, videoId, isMe, playError, onEnded, () => {});
				onSuccess(streamInfo);
			} catch (error) {
				onFailure(error);
			}
			return;
		}
	}
}

/**
 * Subscribe to and render a remote stream, reporting call quality metrics
 * (opening time & whether a frame was shown).
 * @param {irtcClient} irtcClient
 * @param {string} streamLabel stream label (or a remote stream id)
 * @param {string} videoId id of the <video> element
 * @param {function} onSuccess success callback; for remote streams receives (subscriptionId, subscription, streamInfo)
 * @param {function} onFailure failure callback
 * @param {function} playError autoplay failed
 * @param {function} onEnded stream ended; a remote stream needs re-rendering / a local stream may switch camera
 * @param {function} subscribeSuccess called with the stream once subscribed so the app layer may render it directly; defaults to undefined (the SDK renders internally)
 */
export async function renderRemoteStream(irtcClient, streamLabel, videoId, onSuccess, onFailure, playError, onEnded, subscribeSuccess = undefined) {
	if (!irtcClient) {
		onFailure(new IvcsError(ivcsErrorCodeEnum.ERROR_ZMS_IRTC_UNDEFINED, 'irtc Client is null'));
		return;
	}
	// Shared subscribe → report-quality → render pipeline. The original
	// duplicated this code verbatim for the id match and the label match.
	const subscribeAndRender = async (stream, streamInfo) => {
		const isMe = irtcClient.isMePublishStream(stream);
		const subscription = await subscribeStream(irtcClient, stream);
		subscribeSuccess && subscribeSuccess(stream);
		stream.videoId = videoId; // associate remoteStream with videoId
		stream.subscribeId = subscription.id; // associate stream with its subscription
		subscription.videoId = videoId; // associate subscription with videoId
		try {
			subscription.fromClientRole && (await irtcClient.setSessionStreamVideoQuality(undefined, undefined, !streamInfo.audioOnly, subscription.id, subscription.fromInfo));
		} catch (error) {
			console.log('pushSessionStreamVideoQuality error', error);
		}
		await renderStreamWithVideoLabel(stream, videoId, isMe, playError, onEnded, openingTime => {
			subscription.fromClientRole && !streamInfo.audioOnly && irtcClient.setVideoStreamCallQuality(true, openingTime, subscription.id);
		});
		onSuccess(subscription.id, subscription, streamInfo);
	};
	let streamLableIsEffective = false;
	// 1) Render a local (self-created) stream matched by attribute type.
	for (const stream of irtcClient.createStreams) {
		if (stream.attributes && stream.attributes.type === streamLabel) {
			streamLableIsEffective = true;
			try {
				await renderStreamWithVideoLabel(stream, videoId, true, playError, onEnded);
				onSuccess();
			} catch (error) {
				onFailure(error);
			}
			return;
		}
	}
	// 2) Render a remote stream matched by stream id or by attribute type.
	for (const stream of irtcClient.remoteStreams) {
		const streamInfo = irtcClient.getStreamInfo(stream);
		// Loose equality on the id kept from the original.
		if (streamLabel == stream.id || (streamInfo.attributes && streamInfo.attributes.type === streamLabel)) {
			streamLableIsEffective = true;
			try {
				await subscribeAndRender(stream, streamInfo);
			} catch (error) {
				onFailure(error);
			}
			return;
		}
	}
	// 3) Fall back to an unlabeled MCU stream (e.g. mini streamIn streams).
	for (const stream of irtcClient.remoteStreams) {
		if (stream.origin === 'admin' && !stream.attributes) {
			try {
				const subscription = await subscribeStream(irtcClient, stream);
				subscribeSuccess && subscribeSuccess(stream);
				await renderStreamWithVideoLabel(stream, videoId, false, playError, onEnded);
				onSuccess(subscription.id, subscription);
			} catch (error) {
				onFailure(error);
			}
			return;
		}
	}
	if (!streamLableIsEffective) {
		onFailure(new IvcsError(ivcsErrorCodeEnum.ERROR_ZMS_STREAMLABEL_INVALID, 'streamLabel is invalid'));
	}
}

/**
 * Attach conferenceClient listeners and re-dispatch SDK events to the app layer.
 * @param {*} irtcClient listener handles are stored on the client so
 *   zmsRemoveEventListener can detach them later
 */
export function zmsAddEventListener(irtcClient) {
	// Single builder for the stream payload dispatched to the app layer
	// (the original duplicated this object literal three times).
	const streamPayload = info => ({
		id: info.id,
		streamLabel: info.streamLabel,
		audioOnly: info.audioOnly,
		userInfo: info.userInfo,
		isAudioMute: info.isAudioMute,
		isVideoMute: info.isVideoMute,
	});
	// Announce the streams that already exist when the listener is attached
	// (except the mixed stream).
	irtcClient.remoteStreams.forEach(stream => {
		if (stream.id !== irtcClient.mixStreamId) {
			const result = irtcClient.getStreamInfo(stream);
			dispatchEvent(IRTC_CUSTOMEREVENT.STREAMADD, null, streamPayload(result));
			dispatchEvent('onStreamAdd', null, streamPayload(result));
		}
	});
	irtcClient.streamAddEvent = addEventListener(IRTC_CUSTOMEREVENT.STREAMADD, info => {
		dispatchEvent('onStreamAdd', null, streamPayload(info.data));
		Logger.info(`ivcs-js-sdk:stream add ${JSON.stringify(info)},time:${new Date().toISOString()}`);
	});
	irtcClient.streamEndEvent = addEventListener(IRTC_CUSTOMEREVENT.STREAMENDED, info => {
		const endInfo = info.data;
		dispatchEvent('onStreamEnd', null, {
			streamLabel: endInfo.streamInfo.streamLabel,
			streamInfo: endInfo.streamInfo,
			streamId: endInfo.streamId,
		});
		Logger.info(`ivcs-js-sdk:stream end ${JSON.stringify(info)},time:${new Date().toISOString()}`);
	});
	// Stream changed: audio/video state updated.
	irtcClient.streamUpdateEvent = addEventListener(IRTC_CUSTOMEREVENT.STREAMUPDATED, info => {
		// id:string{streamId},streamLabel: string(streamLabel),userInfo: object(userInfo),info:[{field:'media.video.status',value:boolean}]
		dispatchEvent('onStreamUpdated', null, info.data);
	});
	// eslint-disable-next-line no-unused-vars
	irtcClient.serverDisConnectEvent = addEventListener(IRTC_CUSTOMEREVENT.SERVERDISCONNECTED, info => {
		dispatchEvent(systemErrorName, systemErrorCode.zmsServerDisconnected, 'mcu server disconnected');
	});
	irtcClient.subscribeEndEvent = addEventListener(IRTC_CUSTOMEREVENT.SUBSCRIPTIONEND, info => {
		dispatchEvent(systemErrorName, systemErrorCode.subscribeStreamEnd, info.data);
	});
}

/**
 * Attach an end-of-publication listener that raises a localStreamEnd system
 * error when the given publication ends.
 * @param {*} irtcClient listener handle is stored on irtcClient.publishEndEvent
 * @param {*} publication publication to watch
 */
export function publicationAddEventListener(irtcClient, publication) {
	irtcClient.publishEndEvent = addEventListener(IRTC_CUSTOMEREVENT.PUBLICATIONEND, info => {
		const endedPublicationId = info.data.publicationId;
		// todo distinguish localStream or screen-share
		if (endedPublicationId !== publication.id) {
			return;
		}
		dispatchEvent(systemErrorName, systemErrorCode.localStreamEnd, info.data);
	});
}

/**
 * Detach every listener registered by zmsAddEventListener /
 * publicationAddEventListener. Safe to call with a null client.
 * @param {*} irtcClient
 */
export function zmsRemoveEventListener(irtcClient) {
	if (irtcClient) {
		removeEventListener(IRTC_CUSTOMEREVENT.SERVERDISCONNECTED, irtcClient.serverDisConnectEvent);
		removeEventListener(IRTC_CUSTOMEREVENT.PUBLICATIONEND, irtcClient.publishEndEvent);
		removeEventListener(IRTC_CUSTOMEREVENT.SUBSCRIPTIONEND, irtcClient.subscribeEndEvent);
		removeEventListener(IRTC_CUSTOMEREVENT.STREAMADD, irtcClient.streamAddEvent);
		removeEventListener(IRTC_CUSTOMEREVENT.STREAMENDED, irtcClient.streamEndEvent);
		// Bug fix: the STREAMUPDATED listener registered in zmsAddEventListener was
		// never removed, leaking a handler on every add/remove cycle.
		removeEventListener(IRTC_CUSTOMEREVENT.STREAMUPDATED, irtcClient.streamUpdateEvent);
	}
}

/**
 * Check whether a media stream is usable for video. Accepts a wrapped stream
 * ({mediaStream}) or a raw MediaStream.
 * @param {object} stream
 * @returns {boolean}
 */
export function medisStreamIsValid(stream) {
	if (!stream) {
		return false;
	}
	const source = stream.mediaStream ? stream.mediaStream : stream;
	// NOTE(review): getVideoTracks() returns an array, which is truthy even when
	// empty, so this only fails when the call yields null/undefined — confirm
	// whether an empty-track stream should instead be invalid.
	if (!source.getVideoTracks()) {
		return false;
	}
	return true;
}

/**
 * Stop every audio and video track of a local stream. Accepts a wrapped
 * stream ({mediaStream}) or a raw MediaStream; no-op for falsy input.
 * @param {object} stream
 */
export function stopMediaStream(stream) {
	if (!stream) {
		return;
	}
	// Both branches of the original did identical work; pick the source once.
	const source = stream.mediaStream ? stream.mediaStream : stream;
	for (const track of source.getVideoTracks()) {
		track.stop();
	}
	for (const track of source.getAudioTracks()) {
		track.stop();
	}
}

/**
 * 替换视频画面 — swap the tracks currently being published for the tracks of a new stream.
 * @param {object} oldMediaStream 被替换(原先)的流
 * @param {object} newMediaStream 替换的流
 * @param {object} publication  推流的对象
 * @param {function} onSuccess  替换成功回调
 * @param {function} onFailure  替换失败回调
 * @param {boolean} isStopStream 是否需要释放oldMediaStream (default true)
 */
export function replaceMediaStream(oldMediaStream, newMediaStream, publication, onSuccess, onFailure, isStopStream = true) {
	// Validate all inputs up front; bail out through onFailure on the first problem.
	if (!publication || !publication.getPCSenders) {
		onFailure(new IvcsError('1000', 'publication is invalid'));
		return;
	}
	if (!medisStreamIsValid(oldMediaStream)) {
		onFailure(new IvcsError('1000', 'oldMediaStream is invalid'));
		return;
	}
	if (!medisStreamIsValid(newMediaStream)) {
		onFailure(new IvcsError('1000', 'newMediaStream is invalid'));
		return;
	}
	if (isStopStream) {
		stopMediaStream(oldMediaStream);
	}
	const unwrap = stream => (stream.mediaStream ? stream.mediaStream : stream);
	const replacement = unwrap(newMediaStream);
	const newVideoTrack = replacement.getVideoTracks()[0];
	const newAudioTrack = replacement.getAudioTracks()[0];
	const senders = publication.getPCSenders();
	if (senders) {
		senders.forEach(sender => {
			// A sender carries at most one track; swap it for the matching kind.
			const kind = sender.track && sender.track.kind;
			if (kind == 'video') {
				sender.replaceTrack(newVideoTrack);
			} else if (kind == 'audio') {
				sender.replaceTrack(newAudioTrack);
			}
		});
	} else {
		// publication 没内容后，则切换摄像头会失败，则停止newMediaStream
		// No senders on the publication: the swap cannot happen, release the new stream.
		stopMediaStream(newMediaStream);
	}
	onSuccess('ok');
}

/**
 * 开启本地录制根据流标签 — find the stream whose label matches and start a local recording.
 * Searches createStreams first, then remoteStreams; when several streams share the
 * label, the LAST match wins (same as the original scan order).
 * @param {IRtcClient} irtcClient
 * @param {string} streamLabel label to match (stream attributes.type, falling back to stream id)
 * @param {function} onSuccess
 * @param {function} onFailure
 * @returns {IRtc.Base.LocalRecorder} localRecord,可用于停止和下载的调用方
 */
export function startLocalRecordWithStreamLabel(irtcClient, streamLabel, onSuccess, onFailure) {
	if (!irtcClient) {
		onFailure('irtcClient is null');
		return;
	}
	// A stream's label is its declared attributes.type, or its id as fallback.
	const labelOf = stream => {
		const streamInfo = irtcClient.getStreamInfo(stream);
		return streamInfo.attributes && streamInfo.attributes.type ? streamInfo.attributes.type : stream.id;
	};
	// Return the LAST stream in the list whose label matches (preserves original semantics).
	const lastMatch = streams => {
		let found = null;
		for (const stream of streams) {
			if (labelOf(stream) == streamLabel) {
				found = stream;
			}
		}
		return found;
	};
	let remoteStream = lastMatch(irtcClient.createStreams);
	if (!remoteStream) {
		remoteStream = lastMatch(irtcClient.remoteStreams);
	}
	if (!remoteStream) {
		onFailure('streamLabel is error');
		return;
	}
	const recordOption = 'video/webm;codecs=h264,opus';
	const localRecord = irtcClient.startLocalRecord(remoteStream, recordOption, onSuccess);
	localRecord.streamLabel = streamLabel;
	irtcClient.localRecordList.set(streamLabel, localRecord);
	return localRecord;
}

/**
 * 停止录制 — stop the local recording previously started for this stream label.
 * onSuccess fires once the recorder actually emits its 'onstop' event.
 * @param {IRtcClient} irtcClient
 * @param {string} streamLabel
 * @param {function} onSuccess
 * @param {function} onFailure
 */
export function stopLcoalRecordWithStreamLabel(irtcClient, streamLabel, onSuccess, onFailure) {
	if (!irtcClient) {
		onFailure('irtcClient is null');
		return;
	}
	const localRecord = irtcClient.localRecordList.get(streamLabel);
	if (!localRecord) {
		onFailure('streamLabel is invalid');
		return;
	}
	// Report success only when the recorder confirms it has stopped.
	localRecord.addEventListener('onstop', () => onSuccess());
	localRecord.stop();
}

/**
 * 下载录制 — trigger a download of the local recording made for this stream label.
 * (The original header said "停止录制", a copy-paste slip; this function downloads.)
 * @param {IRtcClient} irtcClient
 * @param {string} streamLabel
 * @param {string} fileName name for the downloaded file
 * @param {function} onSuccess
 * @param {function} onFailure
 */
export function downloadLcoalRecordWithStreamLabel(irtcClient, streamLabel, fileName, onSuccess, onFailure) {
	if (!irtcClient) {
		onFailure('irtcClient is null');
		return;
	}
	const record = irtcClient.localRecordList.get(streamLabel);
	if (record) {
		irtcClient.downloadLocalRecord(record, fileName);
		onSuccess();
	} else {
		onFailure('streamLabel is invalid');
	}
}

/**
 * 判断是否有摄像头 — check whether at least one video input device is present.
 * The rejection shape ({ code, message }) is part of the public contract and is kept:
 *   '100001' — enumerateDevices failed; '100002' — API not supported in this environment.
 * Rewritten as async/await to drop the explicit-Promise-construction anti-pattern
 * (enumerateDevices already returns a Promise) and the redundant resolve(true/false) branch.
 * @returns {Promise<boolean>} resolves true when a 'videoinput' device exists
 */
export async function TestVideoDevice() {
	// Feature-detect first: older browsers / insecure contexts lack mediaDevices.
	if (!(navigator.mediaDevices && navigator.mediaDevices.enumerateDevices)) {
		// eslint-disable-next-line no-throw-literal -- plain-object rejection kept for callers checking .code
		throw { code: '100002', message: 'not support enumerateDevices' };
	}
	try {
		const devices = await navigator.mediaDevices.enumerateDevices();
		return devices.some(device => device.kind === 'videoinput');
	} catch (err) {
		// eslint-disable-next-line no-throw-literal -- same contract as before
		throw { code: '100001', message: err };
	}
}

export { IRtcClient, IRTC_CUSTOMEREVENT };
