package com.lonbon.threecameramaster

import android.Manifest
import android.content.Context
import android.content.Intent
import android.content.pm.PackageManager
import android.hardware.Camera
import android.os.Build
import android.os.Bundle
import android.text.TextUtils
import android.util.Config.DEBUG
import android.util.Log
import android.view.SurfaceHolder
import android.view.SurfaceView
import android.view.TextureView
import android.view.View
import android.view.ViewGroup
import android.view.inputmethod.InputMethodManager
import android.widget.Button
import android.widget.EditText
import android.widget.FrameLayout
import android.widget.ImageView
import android.widget.Toast
import androidx.core.app.ActivityCompat
import androidx.core.content.ContextCompat
import com.lonbon.threecameramaster.test.ChuangbaiTestActivity
import com.lonbon.threecameramaster.track.VideoReportLayout
import com.lonbon.threecameramaster.track.YuvFboProgram
import io.agora.base.NV21Buffer
import io.agora.base.VideoFrame
import io.agora.rtc2.ChannelMediaOptions
import io.agora.rtc2.Constants
import io.agora.rtc2.IRtcEngineEventHandler
import io.agora.rtc2.RtcConnection
import io.agora.rtc2.RtcEngine
import io.agora.rtc2.RtcEngineConfig
import io.agora.rtc2.RtcEngineEx
import io.agora.rtc2.SimulcastStreamConfig
import io.agora.rtc2.video.VideoCanvas
import io.agora.rtc2.video.VideoEncoderConfiguration
import io.agora.rtc2.video.VideoEncoderConfiguration.AdvanceOptions
import io.agora.rtc2.video.VideoEncoderConfiguration.FRAME_RATE
import io.agora.rtc2.video.VideoEncoderConfiguration.ORIENTATION_MODE
import io.agora.rtc2.video.VideoEncoderConfiguration.VideoDimensions
import java.util.Random

/**
 * Created by djj on 2023/11/11
 */
class MainActivity : BaseActivity(), View.OnClickListener {
    //    var redCamera: Camera? = null
//    var colorCamera: Camera? = null
//    var thirdCamera: Camera? = null
    // Preview surfaces/holders for the (up to) three local cameras.
    var surfaceViewRed: SurfaceView? = null
    var surfaceViewColor: SurfaceView? = null
    var surfaceViewThird: SurfaceView? = null
    var surfaceHolderRed: SurfaceHolder? = null
    var surfaceHolderColor: SurfaceHolder? = null
    var surfaceHolderThird: SurfaceHolder? = null

    //agora
    private var etChannelName: EditText? = null
    private var btnAdd: Button? = null
    private var btnTrack: Button? = null
    private var mRtcEngine: RtcEngineEx? = null
    // Local uid assigned by the SDK in onJoinChannelSuccess.
    private var myUid = 0

    // Set true after joining (also set directly by the join button handler).
    private var joined = false
    private lateinit var flLocalVideo:VideoReportLayout
    // Containers available for remote video streams (bound/released via reportUid).
    private lateinit var flVideoContainer: Array<VideoReportLayout>
    private val videoTrackIds: MutableList<Int> = ArrayList()
    private val connections: MutableList<RtcConnection> = ArrayList()
    // Latest NV21 preview frame + dimensions, one set per camera.
    // NOTE(review): lateinit — reading mData/mData1/mData2 before the first
    // preview frame arrives throws UninitializedPropertyAccessException; confirm ordering.
    private lateinit var mData: ByteArray
    private var mWidth = 0
    private var mHeight = 0
    private lateinit var mData1: ByteArray
    private var mWidth1 = 0
    private var mHeight1 = 0
    private lateinit var mData2: ByteArray
    private var mWidth2 = 0
    private var mHeight2 = 0
    // First-push flags, custom video track ids and RTC connections, one per camera.
    private var isFirst = true
    private var isFirst1 = true
    private var isFirst2 = true
    private var videoTrack = 0
    private var videoTrack1 = 0
    private var videoTrack2 = 0
    private var connection: RtcConnection? = null
    private var connection1: RtcConnection? = null
    private var connection2: RtcConnection? = null

    // Temporary token generated in the Agora console (empty = project without token auth).
    private val token = ""
    // NOTE(review): declared `val ... = null`, so this is never non-null in this
    // class — looks like dead/leftover state.
    private val yuvFboProgram: YuvFboProgram? = null
    private var imgSwitch: ImageView? = null
    // Maps remote uid -> the container that was idle when the user joined.
    private val uidMap: MutableMap<Int, VideoReportLayout?> = HashMap<Int, VideoReportLayout?>()
    private val isSwitch = true
    private lateinit var cameraHelper: CameraHelper
    private lateinit var cameraHelper1: CameraHelper
    private lateinit var cameraHelper2: CameraHelper
//    private var mSurfaceView: GLTextureView? = null
//    private var lastI420Frame: VideoFrame? = null
//    private val yuvUploader: YuvUploader = YuvUploader()
//    private var viewportWidth = 0
//    private var viewportHeight = 0
//    private var textureBufferHelper: TextureBufferHelper? = null
//    private val drawer = GlRectDrawer()
//    private val renderMatrix = Matrix()


    /**
     * Returns true when [permission] is already granted; otherwise triggers the
     * runtime permission dialog and returns false.
     *
     * NOTE(review): the request deliberately passes the whole REQUESTED_PERMISSIONS
     * array rather than just [permission] — matches the original behavior.
     */
    private fun checkSelfPermission(permission: String, requestCode: Int): Boolean {
        val granted = ContextCompat.checkSelfPermission(this, permission) ==
                PackageManager.PERMISSION_GRANTED
        if (!granted) {
            ActivityCompat.requestPermissions(this, REQUESTED_PERMISSIONS, requestCode)
        }
        return granted
    }

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_main)

        // Bind the three camera preview surfaces and keep their holders.
        surfaceViewRed = findViewById(R.id.redsurface)
        surfaceViewColor = findViewById(R.id.colorsurface)
        surfaceViewThird = findViewById(R.id.thirdsurface)
        surfaceHolderRed = surfaceViewRed!!.holder
        surfaceHolderColor = surfaceViewColor!!.holder
        surfaceHolderThird = surfaceViewThird!!.holder
        initView()

        // Ask for the camera permission at runtime if it is not granted yet.
        val cameraGranted = ContextCompat.checkSelfPermission(
            this,
            Manifest.permission.CAMERA
        ) == PackageManager.PERMISSION_GRANTED
        if (!cameraGranted) {
            ActivityCompat.requestPermissions(this, arrayOf(Manifest.permission.CAMERA), 1)
        }

        // Open one camera helper per detected camera: 0, then 1, then 2.
        // (Counts outside 1..3 intentionally open nothing, as before.)
        val cameraCount = Camera.getNumberOfCameras()
        Log.v(TAG, "cameraNum=$cameraCount")
        if (cameraCount in 1..3) {
            initRedCamera() //0
            if (cameraCount >= 2) initColorCamera() //1
            if (cameraCount >= 3) initThirdCanera() //2
        }
    }

    // Re-enter sticky immersive (fullscreen, hidden navigation) whenever the
    // window regains focus. API 19+ only, matching the flags used.
    override fun onWindowFocusChanged(hasFocus: Boolean) { //new add
        super.onWindowFocusChanged(hasFocus)
        if (!hasFocus || Build.VERSION.SDK_INT < 19) return
        window.decorView.systemUiVisibility =
            View.SYSTEM_UI_FLAG_FULLSCREEN or
                    View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY or
                    View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
    }

    /**
     * Binds the UI widgets and creates/initializes the [RtcEngineEx] instance.
     *
     * Fix: the original rethrew engine-creation failures as a bare
     * RuntimeException, dropping the cause and making init errors undiagnosable;
     * the cause is now preserved.
     */
    private fun initView() {
        etChannelName = findViewById(R.id.et_channel_name)
        btnAdd = findViewById(R.id.btn_add)
        btnAdd!!.setOnClickListener(this)
        btnTrack = findViewById(R.id.btn_track)
        btnTrack!!.setOnClickListener(this)
//        imgSwitch = findViewById(R.id.img_switch)
//        imgSwitch!!.setOnClickListener(this)
        flLocalVideo = findViewById(R.id.fl_local_video)
        flVideoContainer = arrayOf<VideoReportLayout>(
            findViewById(R.id.fl_video_container_01),
            findViewById(R.id.fl_video_container_02),
            findViewById(R.id.fl_video_container_03),
            findViewById(R.id.fl_remote),
            findViewById(R.id.fl_remote1),
            findViewById(R.id.fl_remote2)
        )
        try {
            // Create and configure the RtcEngineConfig object.
            val config = RtcEngineConfig()
            config.mContext = baseContext
            config.mAppId = getString(R.string.agora_app_id)
//            config.mAppId = "f6bf90a2c7bd47c18591a86a4dc97e6e"
            config.mEventHandler = mRtcEventHandler
            // Audio scenario tuned for chat-room style audio.
            config.mAudioScenario =
                Constants.AudioScenario.getValue(Constants.AudioScenario.CHATROOM)
            // Create and initialize the RtcEngine.
            mRtcEngine = RtcEngine.create(config) as RtcEngineEx
            mRtcEngine!!.setParameters("{\"che.video.android_camera_select\": 0}")
            mRtcEngine!!.setParameters("{\"che.hardware_decoding\":1}")



            // Call the following before joinChannel to enable H.265 software encoding:
//            mRtcEngine.setParameters("{\"engine.video.codec_type\":3}");
//            mRtcEngine.setParameters("{\"engine.video.enable_hw_encoder\":false}");
//            mRtcEngine.setParameters("{\"rtc.video.minscore_for_swh265enc\":30}");
//            mRtcEngine.setParameters("{\"che.video.android_bitrate_adjustment_type\":0}");
            // H.265 encoding
//            mRtcEngine.setParameters("\"che.video.videoCodecIndex\": 2");

//            CameraCapturerConfiguration.CaptureFormat captureFormat = new CameraCapturerConfiguration.CaptureFormat();
//            captureFormat.fps = 15;
//            mRtcEngine!!.setCameraCapturerConfiguration(
//                CameraCapturerConfiguration(
//                    CameraCapturerConfiguration.CAMERA_DIRECTION.CAMERA_FRONT
//                )
//            )
        } catch (e: Exception) {
            // Preserve the original failure as the cause instead of swallowing it.
            throw RuntimeException("Check the error.", e)
        }
    }

//    private val mVideoFrameObserver:IVideoFrameObserver = object :IVideoFrameObserver{
//        override fun onCaptureVideoFrame(sourceType: Int, videoFrame: VideoFrame?): Boolean {
//            // TODO 获取本地设备采集到的视频数据。
//            Log.d(TAG,"onCaptureVideoFrame-sourceType:$sourceType,width=${videoFrame!!.buffer.width},sourceType1=${videoFrame!!.sourceType}")
//            if (videoFrame!!.sourceType == VideoFrame.SourceType.kFrontCamera){
//                if (mSurfaceView != null && videoFrame !== lastI420Frame) {
//                    Log.d(TAG, "onCaptureVideoFrame_buffer: ${videoFrame!!.buffer},textureBufferHelper=${textureBufferHelper}")
//                    lastI420Frame = videoFrame
//                    textureBufferHelper!!.invoke<Void> {
//                        if (lastI420Frame!!.buffer is I420Buffer) {
//                            val i420Buffer = lastI420Frame!!.buffer as I420Buffer
//                            yuvUploader.uploadFromBuffer(i420Buffer)
//                        }
//                        null
//                    }
//                    mSurfaceView!!.requestRender()
//                }
//            }
//            return true
//        }
//
//        override fun onPreEncodeVideoFrame(sourceType: Int, videoFrame: VideoFrame?): Boolean {
//            Log.d(TAG,"onPreEncodeVideoFrame-sourceType:$sourceType,sourceType1=${videoFrame!!.sourceType}")
//            return false
//        }
//
//        override fun onMediaPlayerVideoFrame(videoFrame: VideoFrame?, mediaPlayerId: Int): Boolean {
//            Log.d(TAG,"onPreEncodeVideoFrame-mediaPlayerId:$mediaPlayerId,sourceType1=${videoFrame!!.sourceType}")
//            return false
//        }
//
//        override fun onRenderVideoFrame(
//            channelId: String?,
//            uid: Int,
//            videoFrame: VideoFrame?
//        ): Boolean {
////            Log.d(TAG, "onRenderVideoFrame: " + i + "   connection: " + rtcConnection.id + "  buffer: " + videoFrame.getBuffer());
////            if (mSurfaceView != null && videoFrame !== lastI420Frame) {
////                Log.d(TAG, "onRenderVideoFrame: " + uid + "   connection: " + channelId + "  buffer: " + videoFrame!!.buffer +"textureBufferHelper=${textureBufferHelper}")
////                lastI420Frame = videoFrame
////                textureBufferHelper!!.invoke<Void> {
////                    if (lastI420Frame!!.buffer is I420Buffer) {
////                        val i420Buffer = lastI420Frame!!.buffer as I420Buffer
////                        yuvUploader.uploadFromBuffer(i420Buffer)
////                    }
////                    null
////                }
////                mSurfaceView!!.requestRender()
////            }
//            return false
//        }
//
//        override fun getVideoFrameProcessMode(): Int {
//            return IVideoFrameObserver.PROCESS_MODE_READ_WRITE
//        }
//
//        override fun getVideoFormatPreference(): Int {
//           return VIDEO_PIXEL_I420
//        }
//
//        override fun getRotationApplied(): Boolean {
//            return false
//        }
//
//        override fun getMirrorApplied(): Boolean {
//            return false
//        }
//
//        override fun getObservedFramePosition(): Int {
//            return POSITION_POST_CAPTURER
//        }
//
//    }
//
//    private var glRenderer: GLTextureView.Renderer = object : GLTextureView.Renderer {
//        override fun onSurfaceCreated(gl: GL10?, config: EGLConfig?) {
//            Log.d(TAG, "onSurfaceCreated")
//            textureBufferHelper = TextureBufferHelper.create("bufferHelper", EglBase10.Context(mSurfaceView!!.eglContext))
//        }
//
//        override fun onSurfaceChanged(gl: GL10?, width: Int, height: Int) {
//            Log.d(TAG, "onSurfaceChanged  w: $width  h: $height")
//            viewportWidth = width
//            viewportHeight = height
//        }
//
//        override fun onDrawFrame(gl: GL10?) {
//            GLES20.glClearColor(0f /* red */, 0f /* green */, 0f /* blue */, 0f /* alpha */)
//            GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT)
//            if (lastI420Frame == null) return
//            Log.d(TAG, "onDrawFrame: " + lastI420Frame!!.rotation)
//            renderMatrix.reset()
//            renderMatrix.preTranslate(0.5f, 0.5f)
//            renderMatrix.preScale(1f, -1f) // I420-frames are upside down
//            renderMatrix.preRotate(lastI420Frame!!.rotation.toFloat())
//            renderMatrix.preTranslate(-0.5f, -0.5f)
//            try {
//                drawer.drawYuv(
//                    yuvUploader.yuvTextures,
//                    RendererCommon.convertMatrixFromAndroidGraphicsMatrix(renderMatrix),
//                    lastI420Frame!!.rotatedWidth,
//                    lastI420Frame!!.rotatedHeight,
//                    0,
//                    0,
//                    viewportWidth,
//                    viewportHeight
//                )
//            } catch (exception: NullPointerException) {
//                Log.e(TAG, "skip empty buffer!")
//            }
//        }
//    }

    /**
     * RTC engine callbacks: track join/leave state and bind remote video
     * streams to the free containers in [flVideoContainer].
     */
    private val mRtcEventHandler: IRtcEngineEventHandler = object : IRtcEngineEventHandler() {
        override fun onError(err: Int) {
            Log.w(
                TAG,
                String.format("onError code %d message %s", err, RtcEngine.getErrorDescription(err))
            )
        }

        override fun onLeaveChannel(stats: RtcStats) {
            super.onLeaveChannel(stats)
            Log.i(TAG, String.format("local user %d leaveChannel!", myUid))
//            lastI420Frame = null
//            if (mSurfaceView != null) {
//                mSurfaceView!!.requestRender()
//            }
        }

        override fun onJoinChannelSuccess(channel: String, uid: Int, elapsed: Int) {
            Log.i(TAG, String.format("onJoinChannelSuccess channel %s uid %d %d", channel, uid,  elapsed))
            // Remember our SDK-assigned uid and mark the channel as joined.
            myUid = uid
            joined = true

//            runOnUiThread {
//                /**Display remote video stream */
//                mSurfaceView = GLTextureView(baseContext)
//                mSurfaceView!!.preserveEGLContextOnPause = true
//                mSurfaceView!!.setEGLContextClientVersion(2)
//                mSurfaceView!!.setRenderer(glRenderer)
//                mSurfaceView!!.renderMode = GLSurfaceView.RENDERMODE_WHEN_DIRTY
//                if (flLocalVideo.childCount > 0) {
//                    flLocalVideo.removeAllViews()
//                }
//                // Add to the remote container
//                flLocalVideo.addView(
//                    mSurfaceView,
//                    FrameLayout.LayoutParams(
//                        ViewGroup.LayoutParams.MATCH_PARENT,
//                        ViewGroup.LayoutParams.MATCH_PARENT
//                    )
//                )
//            }
        }

        override fun onUserJoined(uid: Int, elapsed: Int) {
            super.onUserJoined(uid, elapsed)
            Log.i(TAG, "onUserJoined->$uid")
            /**Check if the context is correct */
            val context: Context = baseContext ?: return
            runOnUiThread {
                /**Display remote video stream */
                val videoView = createVideoView(uid)
                // NOTE(review): idleVideoContainer is re-queried here, after
                // createVideoView has already claimed a container for this uid —
                // verify this maps uid to the container actually used.
                uidMap[uid] = idleVideoContainer
                // Setup remote video to render
                mRtcEngine!!.setupRemoteVideo(
                    VideoCanvas(
                        videoView,
                        VideoCanvas.RENDER_MODE_FIT,
                        uid
                    )
                )
            }
        }

        override fun onUserMuteVideo(uid: Int, muted: Boolean) {
            super.onUserMuteVideo(uid, muted)
        }

        override fun onUserMuteAudio(uid: Int, muted: Boolean) {
            super.onUserMuteAudio(uid, muted)
        }

        override fun onUserOffline(uid: Int, reason: Int) {
            Log.i(TAG, String.format("user %d offline! reason:%d", uid, reason))
            runOnUiThread(Runnable {
                /**Clear render view
                 * Note: The video will stay at its last frame, to completely remove it you will need to
                 * remove the SurfaceView from its parent */
                resetVideoLayout(uid)
            })
        }

        override fun onRemoteVideoStats(stats: RemoteVideoStats) {
            super.onRemoteVideoStats(stats)
            // Forward video stats to the container currently showing this uid.
            val videoLayoutByUid: VideoReportLayout? = getVideoLayoutByUid(stats.uid)
            if (videoLayoutByUid != null) {
                videoLayoutByUid.setRemoteVideoStats(stats)
            }
        }

        override fun onRemoteAudioStats(stats: RemoteAudioStats) {
            super.onRemoteAudioStats(stats)
            // Forward audio stats to the container currently showing this uid.
            val videoLayoutByUid: VideoReportLayout? = getVideoLayoutByUid(stats.uid)
            if (videoLayoutByUid != null) {
                videoLayoutByUid.setRemoteAudioStats(stats)
            }
        }
    }

    /**
     * Creates a SurfaceView for the remote user [uid] inside the container
     * already bound to that uid, or the first idle one. Returns null when no
     * container is available.
     */
    private fun createVideoView(uid: Int): View? {
        Log.d(TAG, "uid=$uid")
        val container = getVideoLayoutByUid(uid) ?: idleVideoContainer ?: return null
        container.removeAllViews()
        val videoView = SurfaceView(baseContext)
        container.addView(videoView)
        container.setReportUid(uid)
        return videoView
    }

    /**
     * First container not bound to any remote user (reportUid == -1), or null
     * when all containers are occupied.
     *
     * Fix: the original used `===` on boxed Ints (deprecated identity
     * comparison); structural `==` is the correct check. Also drops the
     * redundant `private` on the getter of an already-private property.
     */
    private val idleVideoContainer: VideoReportLayout?
        get() = flVideoContainer.firstOrNull { it.getReportUid() == -1 }

    /** Detaches the render view for [uid] and frees its container, if any. */
    private fun resetVideoLayout(uid: Int) {
        getVideoLayoutByUid(uid)?.apply {
            removeAllViews()
            setReportUid(-1)
        }
    }

    /**
     * Returns the container currently bound to [uid], or null if none.
     *
     * Fix: the original compared with `===`, which boxes both Ints and checks
     * reference identity — it only happened to work for uids inside the Integer
     * cache (-128..127) and silently failed for larger uids. Structural `==`
     * compares the values.
     */
    private fun getVideoLayoutByUid(uid: Int): VideoReportLayout? {
        return flVideoContainer.firstOrNull { it.getReportUid() == uid }
    }

    // Configure the engine and join [channelName] as an audience member.
    private fun joinChannel(channelName: String) {
        val engine = mRtcEngine!!
        /**Set up to play remote sound with receiver */
        engine.setDefaultAudioRoutetoSpeakerphone(true)
        /**In the demo, the default is to enter as the anchor. */
        engine.setClientRole(Constants.CLIENT_ROLE_BROADCASTER)
        // Enables the video module.
        engine.enableVideo()
        // Default subscribed video stream type.
//        mRtcEngine!!.setRemoteDefaultVideoStreamType(Constants.VIDEO_STREAM_LOW)
        // Build the channel media options.
        val options = ChannelMediaOptions().apply {
            clientRoleType = Constants.CLIENT_ROLE_AUDIENCE
            autoSubscribeAudio = true
            autoSubscribeVideo = true
            publishCameraTrack = false
        }
        // Join with the temporary token; uid 0 lets the SDK assign a unique one.
        engine.joinChannel(token, channelName, 0, options)
    }

    // First call: set up local rendering, create a custom video track and join
    // the channel, then push the current NV21 frame. Subsequent calls: just push
    // the latest NV21 frame through the existing custom track.
    private fun joinChannel() {
        Log.d(TAG, "isFirst=" + isFirst + ",joined=" + joined + ",size=" + videoTrackIds.size)
        if (isFirst) {
            mRtcEngine?.enableVideo()
            /**Display remote video stream */
//            mSurfaceView = GLTextureView(baseContext)
//            mSurfaceView!!.preserveEGLContextOnPause = true
//            mSurfaceView!!.setEGLContextClientVersion(2)
//            mSurfaceView!!.setRenderer(glRenderer)
//            mSurfaceView!!.renderMode = GLSurfaceView.RENDERMODE_WHEN_DIRTY

            // Create render view by RtcEngine
            var textureView: TextureView? = TextureView(baseContext)
            val local = VideoCanvas(textureView, Constants.RENDER_MODE_FIT, 0)
            local.mirrorMode = Constants.VIDEO_MIRROR_MODE_DISABLED
            local.sourceType = Constants.VIDEO_SOURCE_CUSTOM
            mRtcEngine?.setupLocalVideo(local)
            if (flLocalVideo.childCount > 0) {
                flLocalVideo.removeAllViews()
            }
            // Add to the local container
            flLocalVideo.addView(
                textureView,
                FrameLayout.LayoutParams(
                    ViewGroup.LayoutParams.MATCH_PARENT,
                    ViewGroup.LayoutParams.MATCH_PARENT
                )
            )
            mRtcEngine?.setDefaultAudioRoutetoSpeakerphone(true)
            mRtcEngine?.startPreview(Constants.VideoSourceType.VIDEO_SOURCE_CUSTOM)
//            mRtcEngine!!.registerVideoFrameObserver(mVideoFrameObserver)
            // Enables the video module.


            // Requires the joined flag and fewer than 4 custom tracks.
            if (!joined || videoTrackIds.size >= 4) {
                return
            }
            videoTrack = mRtcEngine!!.createCustomVideoTrack()
            Log.d(TAG, "videoTrack=11：$videoTrack")
            if (videoTrack < 0) {
                Toast.makeText(this, "createCustomVideoTrack failed!", Toast.LENGTH_LONG).show()
                return
            }
            val channelId = etChannelName!!.text.toString()
            // 1080p @ 15fps, software-encoder preference.
            val videoEncoderConfiguration = VideoEncoderConfiguration(
                VideoEncoderConfiguration.VD_1920x1080,
                FRAME_RATE.FRAME_RATE_FPS_15,
                VideoEncoderConfiguration.STANDARD_BITRATE,
                ORIENTATION_MODE.ORIENTATION_MODE_ADAPTIVE
            )
            val advanceOptions = AdvanceOptions()
            advanceOptions.encodingPreference =
                VideoEncoderConfiguration.ENCODING_PREFERENCE.PREFER_SOFTWARE
            videoEncoderConfiguration.advanceOptions = advanceOptions
            mRtcEngine!!.setVideoEncoderConfiguration(videoEncoderConfiguration)
            // Enable dual-stream (simulcast) mode on the sender side.
            mRtcEngine!!.setDualStreamMode(
                Constants.SimulcastStreamMode.ENABLE_SIMULCAST_STREAM,
                SimulcastStreamConfig(VideoDimensions(480, 270), 256, 9),
            )
            // Set the subscribed video stream type.
            mRtcEngine!!.setRemoteVideoStreamType(0, Constants.VIDEO_STREAM_LOW)
            val option = ChannelMediaOptions()
            option.clientRoleType = Constants.CLIENT_ROLE_BROADCASTER
            option.autoSubscribeAudio = true
            option.autoSubscribeVideo = true
            option.publishCustomVideoTrack = true
            option.publishCameraTrack = false
            /*
            specify custom video track id to publish in this channel.
             */
            option.customVideoTrackId = videoTrack

            // Join with a temporary token; uid 0 lets the SDK pick a unique one.
            val res = mRtcEngine!!.joinChannel(token, channelId, 0, option)
            Log.e(TAG, "res==$res")
            if (res != 0) {
                /*
                 * destroy a created custom video track id
                 *
                 * @param video_track_id The video track id which was created by createCustomVideoTrack
                 * @return
                 * - 0: Success.
                 * - < 0: Failure.
                 */
                mRtcEngine!!.destroyCustomVideoTrack(videoTrack)
                mRtcEngine!!.leaveChannel()
                Log.e(TAG, RtcEngine.getErrorDescription(Math.abs(res)))
            } else {
                // NOTE(review): mData is lateinit and filled by the camera preview
                // callback — this path assumes at least one frame arrived already.
                Log.v(TAG, "数据回调0011:" + mData.size + ",width=" + mWidth + ",height=" + mHeight)
                isFirst = false
                Log.v(TAG, "数据回调0011:joined=$joined,mRtcEngine=$mRtcEngine")
                if (joined && mRtcEngine != null) {
                    //----------------NV21--------------
                    // Wrap the raw YUV data in an NV21-format frame buffer.
                    val frameBuffer: VideoFrame.Buffer = NV21Buffer(mData, mWidth, mHeight, null)

                    // Get the SDK's current monotonic time.
                    val currentMonotonicTimeInMs = mRtcEngine!!.currentMonotonicTimeInMs
                    // Build a VideoFrame from the NV21 buffer and the monotonic timestamp (nanoseconds).
                    val videoFrame =
                        VideoFrame(
                            frameBuffer, 0, currentMonotonicTimeInMs * 1000000,
                            null as VideoFrame.ColorSpace?,
                            null as ByteArray?,
                            1.0f,
                            0
                        )
                    // Push the frame into the SDK through the custom video track.
                    val ret = mRtcEngine!!.pushExternalVideoFrameEx(videoFrame, videoTrack)
                    Log.v(TAG, "数据回调0011:ret=$ret,videoTrack=$videoTrack")
                    if (ret < 0) {
                        Log.w(TAG, "pushExternalVideoFrameEx error code=$ret")
                    }
                }

                /*
                 * cache video track ids , video file readers and rtc connection to release while fragment destroying.
                 */
                videoTrackIds.add(videoTrack)
//                connections.add(connection!!)
            }
        } else {
//            Log.v(TAG, "数据回调001122:" + mData.size + ",width=" + mWidth + ",height=" + mHeight + ",videoTrack=" + videoTrack)
            if (joined && mRtcEngine != null) {
                //----------------NV21--------------
                // Wrap the raw YUV data in an NV21-format frame buffer.
                val frameBuffer: VideoFrame.Buffer = NV21Buffer(mData, mWidth, mHeight, null)

                // Get the SDK's current monotonic time.
                val currentMonotonicTimeInMs = mRtcEngine!!.currentMonotonicTimeInMs
                // Build a VideoFrame from the NV21 buffer and the monotonic timestamp (nanoseconds).
                val videoFrame =
                    VideoFrame(
                        frameBuffer, 0, currentMonotonicTimeInMs * 1000000,
                        null as VideoFrame.ColorSpace?,
                        null as ByteArray?,
                        1.0f,
                        0
                    )
                // Push the frame into the SDK through the custom video track.
                val ret = mRtcEngine!!.pushExternalVideoFrameEx(videoFrame, videoTrack)
                Log.e(TAG, "res==1:$ret")
                if (ret < 0) {
                    Log.w(
                        TAG,
                        "pushExternalVideoFrameEx error code=$ret"
                    )
                }
            }
        }

    }

    /**
     * Click dispatcher: toggles join/leave on btn_add (button text doubles as
     * the state flag), launches the test activity on btn_track.
     */
    override fun onClick(v: View) {
        if (v.id == R.id.btn_add) {
            // Join / leave toggle — the button label carries the current state.
            if (btnAdd!!.text == "加入会议") {
                if (TextUtils.isEmpty(etChannelName!!.text)) {
                    Toast.makeText(this@MainActivity, "频道号不可为空", Toast.LENGTH_LONG).show()
                } else {
                    btnAdd!!.text = "退出会议"
                    //                    hideSoftKeyboard(v);
                    // If already authorized, mark as joined (actual join happens
                    // when frames start flowing; see joinChannel()).
                    if (checkSelfPermission(REQUESTED_PERMISSIONS[0], PERMISSION_REQ_ID) &&
                        checkSelfPermission(REQUESTED_PERMISSIONS[1], PERMISSION_REQ_ID)
                    ) {
//                        joinChannel(etChannelName!!.text.toString())
                        joined = true
                    }
                }
            } else {
                // Leave: stop preview, destroy the per-camera custom tracks and
                // leave the per-camera connections, then reset all UI state.
                mRtcEngine!!.leaveChannel()
                btnAdd!!.text = "加入会议"
                resetAllVideoLayout()
                flLocalVideo.removeAllViews()
                val number = Camera.getNumberOfCameras()
                Log.v(TAG, "cameraNum=$number")
                if (number == 1) {
                    mRtcEngine!!.destroyCustomVideoTrack(videoTrack)
//                    mRtcEngine!!.leaveChannelEx(connection)
                    isFirst = true
                } else if (number == 2) {
                    mRtcEngine!!.destroyCustomVideoTrack(videoTrack)
//                    mRtcEngine!!.leaveChannelEx(connection)
                    isFirst = true
                    mRtcEngine!!.destroyCustomVideoTrack(videoTrack1)
                    mRtcEngine!!.leaveChannelEx(connection1)
                    isFirst1 = true
                } else if (number == 3) {
                    mRtcEngine!!.destroyCustomVideoTrack(videoTrack)
//                    mRtcEngine!!.leaveChannelEx(connection)
                    isFirst = true
                    mRtcEngine!!.destroyCustomVideoTrack(videoTrack1)
                    mRtcEngine!!.leaveChannelEx(connection1)
                    isFirst1 = true
                    mRtcEngine!!.destroyCustomVideoTrack(videoTrack2)
                    mRtcEngine!!.leaveChannelEx(connection2)
                    isFirst2 = true
                }
                joined = false
                videoTrackIds.clear()
                connections.clear()
            }
        } else if (v.id == R.id.btn_track) {
            // Codec-capability probing kept for reference:
//            mRtcEngine.disableVideo();
//            CodecCapInfo[] codecCapInfos = mRtcEngine.queryCodecCapability();
//            if (codecCapInfos.length > 0){
//                int codecCapMask = codecCapInfos[0].codecCapMask;
//                int codecType = codecCapInfos[0].codecType;
//                int hwDecodingLevel = codecCapInfos[0].codecLevels.hwDecodingLevel;
//                int swDecodingLevel = codecCapInfos[0].codecLevels.swDecodingLevel;
//
//                Log.v(TAG, "CodecCapInfo：size=" + codecCapInfos.length+",codecType="+codecType+
//                        ",codecCapMask="+codecCapMask+",hwDecodingLevel="+hwDecodingLevel+",swDecodingLevel="+swDecodingLevel);
//
//                int codecCapMask1 = codecCapInfos[1].codecCapMask;
//                int codecType1 = codecCapInfos[1].codecType;
//                int hwDecodingLevel1 = codecCapInfos[1].codecLevels.hwDecodingLevel;
//                int swDecodingLevel1 = codecCapInfos[1].codecLevels.swDecodingLevel;
//
//                Log.v(TAG, "CodecCapInfo="+",codecType1="+codecType1+
//                        ",codecCapMask1="+codecCapMask1+",hwDecodingLevel1="+hwDecodingLevel1+",swDecodingLevel1="+swDecodingLevel1);
//
//                int codecCapMask2 = codecCapInfos[2].codecCapMask;
//                int codecType2 = codecCapInfos[2].codecType;
//                int hwDecodingLevel2 = codecCapInfos[2].codecLevels.hwDecodingLevel;
//                int swDecodingLevel2 = codecCapInfos[2].codecLevels.swDecodingLevel;
//
//                Log.v(TAG, "CodecCapInfo2="+",codecType2="+codecType2+
//                        ",codecCapMask2="+codecCapMask2+",hwDecodingLevel2="+hwDecodingLevel2+",swDecodingLevel2="+swDecodingLevel2);
//            }
            startActivity(Intent(this@MainActivity, ChuangbaiTestActivity::class.java))
        }
//        else if (v.id == R.id.img_switch) {
//            for (i in uidMap.keys) {
//                Log.v(TAG, "i=" + i + ",value=" + uidMap[i])
//                if (uidMap[i]?.equals(flVideoContainer[1]) == true) {
//                    mRtcEngine!!.setupRemoteVideo(
//                        VideoCanvas(
//                            flVideoContainer[2],
//                            VideoCanvas.RENDER_MODE_FIT,
//                            i
//                        )
//                    )
//                } else if (uidMap[i]?.equals(flVideoContainer[2]) == true) {
//                    mRtcEngine!!.setupRemoteVideo(
//                        VideoCanvas(
//                            flVideoContainer[1],
//                            VideoCanvas.RENDER_MODE_FIT,
//                            i
//                        )
//                    )
//                }
//            }
//        }
        else if (v.id == R.id.colorsurface) {
            //initColorCamera();
        }
    }

    /**
     * Detaches all remote video views and marks every container as unused
     * (reportUid == -1 is the "empty" sentinel used elsewhere in this class).
     */
    private fun resetAllVideoLayout() {
        for (frameLayout in flVideoContainer) {
            // BUGFIX: use structural inequality (!=). The original `!==` compared
            // boxed Int identity, which only appears to work for small values
            // because of the JVM integer cache.
            if (frameLayout.reportUid != -1) {
                frameLayout.removeAllViews()
                frameLayout.reportUid = -1
            }
        }
    }

    /**
     * Hides the soft keyboard.
     *
     * @param view fallback view whose window token is used when the activity has
     *             no currently focused view. The original implementation ignored
     *             this parameter and crashed with an NPE (`currentFocus!!`) when
     *             nothing had focus.
     */
    fun hideSoftKeyboard(view: View?) {
        // Step 1: resolve a view carrying a window token — prefer the focused
        // view, fall back to the caller-supplied one; bail out if neither exists.
        val target = this.currentFocus ?: view ?: return
        // Step 2: obtain the InputMethodManager system service.
        val imm = getSystemService(Context.INPUT_METHOD_SERVICE) as InputMethodManager
        // Step 3: close the soft keyboard.
        imm.hideSoftInputFromWindow(target.windowToken, 0)
    }

    // Opens the infrared camera (camera id 0) at 1920x1080 and wires its preview
    // frames into the custom-video-track push pipeline.
    private fun initRedCamera() {
        cameraHelper = CameraHelper(this, 0, 1920, 1080)
        cameraHelper.setOnPreviewListener(object : CameraHelper.OnPreviewListener {
            override fun onPreviewFrame(data: ByteArray, camera: Camera?,width: Int, height: Int) {
                // Preview callback: cache the latest NV21 frame and its dimensions
                // in the fields read later by the push pipeline.
                if (DEBUG) Log.v(
                    TAG,
                    "数据回调00:" + data.size + ",width=" + width + ",height=" + height
                )
                //byte[] yuv, int width, int height
                mData = data
                mWidth = width
                mHeight = height
//                Log.v(TAG, "数据回调00join:$joined")
                if (joined) {
//                    createPushingVideoTrack()
                    // NOTE(review): unlike initColorCamera/initThirdCanera, this
                    // path calls joinChannel() rather than createPushingVideoTrack()
                    // — confirm this asymmetry is intentional.
                    joinChannel()
                }
            }
        })
        surfaceHolderRed!!.addCallback(object : SurfaceHolder.Callback {
            override fun surfaceCreated(holder: SurfaceHolder) {
                // Surface is ready: start the camera preview on it.
                cameraHelper!!.startPreview(holder)
            }

            override fun surfaceChanged(
                holder: SurfaceHolder,
                format: Int,
                width: Int,
                height: Int
            ) {
            }

            override fun surfaceDestroyed(holder: SurfaceHolder) {
                // Surface is gone: stop the preview.
                cameraHelper!!.stopPreview()
            }
        })

        // Legacy android.hardware.Camera implementation kept for reference.
//        redCamera = Camera.open(0)
//        //        redCamera.setDisplayOrientation(270);
//        val parameters = redCamera?.getParameters()
//        val w = parameters?.previewSize?.width
//        val h = parameters?.previewSize?.height
//        redCamera?.setPreviewCallback(PreviewCallback { data, camera ->
//            //                PreviewImage.setColorPic(data, w, h);
//            //                if (DEBUG) Log.v(TAG, "数据回调11:" + data.length+",width="+w+",height="+h);
//            //byte[] yuv, int width, int height
//            mData = data
//            mWidth = w!!
//            mHeight = h!!
//            if (joined) {
//                createPushingVideoTrack()
//            }
//        })
//        redCamera?.startPreview() //显示相机
//        println("打开摄像机")
//        surfaceHolderColor!!.addCallback(object : SurfaceHolder.Callback {
//            override fun surfaceCreated(holder: SurfaceHolder) {
//                try {
//                    redCamera?.setPreviewDisplay(holder)
//                } catch (e: IOException) {
//                    e.printStackTrace()
//                }
//            }
//
//            override fun surfaceChanged(
//                holder: SurfaceHolder,
//                format: Int,
//                width: Int,
//                height: Int
//            ) {
//            }
//
//            override fun surfaceDestroyed(holder: SurfaceHolder) {
//        })
    }

    // Opens the color camera (camera id 1) at 1920x1080 and wires its preview
    // frames into the custom-video-track push pipeline (createPushingVideoTrack1).
    fun initColorCamera() {
        cameraHelper1 = CameraHelper(this, 1, 1920, 1080)
        cameraHelper1.setOnPreviewListener(object : CameraHelper.OnPreviewListener {
            override fun onPreviewFrame(data: ByteArray, camera: Camera?,width: Int, height: Int) {
                // Preview callback: cache the latest NV21 frame and its dimensions.
                if (DEBUG) Log.v(
                    TAG,
                    "数据回调11:" + data.size + ",width=" + width + ",height=" + height
                )
                //byte[] yuv, int width, int height
                mData1 = data
                mWidth1 = width
                mHeight1 = height
//                Log.v(TAG, "数据回调00join:$joined")
                if (joined) {
                    createPushingVideoTrack1()
                }
            }
        })
        surfaceHolderColor!!.addCallback(object : SurfaceHolder.Callback {
            override fun surfaceCreated(holder: SurfaceHolder) {
                // Surface is ready: start the camera preview on it.
                cameraHelper1!!.startPreview(holder)
            }

            override fun surfaceChanged(
                holder: SurfaceHolder,
                format: Int,
                width: Int,
                height: Int
            ) {
            }

            override fun surfaceDestroyed(holder: SurfaceHolder) {
                // Surface is gone: stop the preview.
                cameraHelper1!!.stopPreview()
            }
        })

        // Legacy android.hardware.Camera implementation kept for reference.
//        colorCamera = Camera.open(1)
//        //        colorCamera.setDisplayOrientation(270);
//        val parameters = colorCamera?.getParameters()
//        val w = parameters?.previewSize?.width
//        val h = parameters?.previewSize?.height
//        colorCamera?.setPreviewCallback(PreviewCallback { data, camera -> //                PreviewImage.setColorPic(data, w, h);
//
//            //                if (DEBUG) Log.v(TAG, "数据回调11:" + data.length+",width="+w+",height="+h);
//            //byte[] yuv, int width, int height
//            mData1 = data
//            mWidth1 = w!!
//            mHeight1 = h!!
//            if (joined) {
//                createPushingVideoTrack1()
//            }
//        })
//        colorCamera?.startPreview() //显示相机
//        println("打开摄像机1")
//        surfaceHolderColor!!.addCallback(object : SurfaceHolder.Callback {
//            override fun surfaceCreated(holder: SurfaceHolder) {
//                try {
//                    colorCamera?.setPreviewDisplay(holder)
//                } catch (e: IOException) {
//                    e.printStackTrace()
//                }
//            }
//
//            override fun surfaceChanged(
//                holder: SurfaceHolder,
//                format: Int,
//                width: Int,
//                height: Int
//            ) {
//            }
//
//            override fun surfaceDestroyed(holder: SurfaceHolder) {}
//        })
    }

    // Opens the third camera (camera id 2) at 1920x1080 and wires its preview
    // frames into the custom-video-track push pipeline (createPushingVideoTrack2).
    // NOTE(review): "Canera" is a typo for "Camera"; name kept to avoid breaking callers.
    fun initThirdCanera() {
        cameraHelper2 = CameraHelper(this, 2, 1920, 1080)
        cameraHelper2.setOnPreviewListener(object : CameraHelper.OnPreviewListener {
            override fun onPreviewFrame(data: ByteArray, camera: Camera?,width: Int, height: Int) {
                // Preview callback: cache the latest NV21 frame and its dimensions.
                if (DEBUG) Log.v(
                    TAG,
                    "数据回调22:" + data.size + ",width=" + width + ",height=" + height
                )
                //byte[] yuv, int width, int height
                mData2 = data
                mWidth2 = width
                mHeight2 = height
//                Log.v(TAG, "数据回调00join:$joined")
                if (joined) {
                    createPushingVideoTrack2()
                }
            }
        })
        surfaceHolderThird!!.addCallback(object : SurfaceHolder.Callback {
            override fun surfaceCreated(holder: SurfaceHolder) {
                // Surface is ready: start the camera preview on it.
                cameraHelper2!!.startPreview(holder)
            }

            override fun surfaceChanged(
                holder: SurfaceHolder,
                format: Int,
                width: Int,
                height: Int
            ) {
            }

            override fun surfaceDestroyed(holder: SurfaceHolder) {
                // Surface is gone: stop the preview.
                cameraHelper2!!.stopPreview()
            }
        })


        // Legacy android.hardware.Camera implementation kept for reference.
//        thirdCamera = Camera.open(2)
//        //        thirdCamera.setDisplayOrientation(270);
//        val parameters = thirdCamera?.getParameters()
//        val w = parameters?.previewSize?.width
//        val h = parameters?.previewSize?.height
//        thirdCamera?.setPreviewCallback(PreviewCallback { data, camera -> //                PreviewImage.setColorPic(data, w, h);
//
//            //                if (DEBUG) Log.v(TAG, "数据回调22:" + data.length+",width="+w+",height="+h);
//            //byte[] yuv, int width, int height
//            mData2 = data
//            mWidth2 = w!!
//            mHeight2 = h!!
//            if (joined) {
//                createPushingVideoTrack2()
//            }
//        })
//        thirdCamera?.startPreview() //显示相机
//        println("打开摄像机2")
//        surfaceHolderThird!!.addCallback(object : SurfaceHolder.Callback {
//            override fun surfaceCreated(holder: SurfaceHolder) {
//                try {
//                    thirdCamera?.setPreviewDisplay(holder)
//                } catch (e: IOException) {
//                    e.printStackTrace()
//                }
//            }
//
//            override fun surfaceChanged(
//                holder: SurfaceHolder,
//                format: Int,
//                width: Int,
//                height: Int
//            ) {
//            }
//
//            override fun surfaceDestroyed(holder: SurfaceHolder) {}
//        })
    }

    /**
     * IR-camera (camera id 0) pipeline: on first use creates a custom video
     * track, joins the channel over a dedicated [RtcConnection], and pushes the
     * latest cached NV21 frame; on subsequent calls only pushes the frame.
     * The frame-push logic, previously duplicated in both branches, lives in
     * [pushRedCameraFrame].
     */
    private fun createPushingVideoTrack() {
        Log.d(TAG, "isFirst=" + isFirst + ",joined=" + joined + ",size=" + videoTrackIds.size)
        if (isFirst) {
            // Guard: must already be joined, and at most 4 tracks are allowed.
            if (!joined || videoTrackIds.size >= 4) {
                return
            }

            // createCustomVideoTrack(): > 0 is a usable track id, < 0 is failure.
            videoTrack = mRtcEngine!!.createCustomVideoTrack()
            Log.d(TAG, "videoTrack=$videoTrack")
            if (videoTrack < 0) {
                Toast.makeText(this, "createCustomVideoTrack failed!", Toast.LENGTH_LONG).show()
                return
            }
            val channelId = etChannelName!!.text.toString()
            // Random uid in [20000, 21000) for this extra connection.
            val uid = Random().nextInt(1000) + 20000
            connection = RtcConnection(channelId, uid)
            // 1080p @ 15 fps, software encoder preferred for this stream.
            val videoEncoderConfiguration = VideoEncoderConfiguration(
                VideoEncoderConfiguration.VD_1920x1080,
                FRAME_RATE.FRAME_RATE_FPS_15,
                VideoEncoderConfiguration.STANDARD_BITRATE,
                ORIENTATION_MODE.ORIENTATION_MODE_ADAPTIVE
            )
            val advanceOptions = AdvanceOptions()
            advanceOptions.encodingPreference =
                VideoEncoderConfiguration.ENCODING_PREFERENCE.PREFER_SOFTWARE
            videoEncoderConfiguration.advanceOptions = advanceOptions
            mRtcEngine!!.setVideoEncoderConfigurationEx(videoEncoderConfiguration, connection)

            val option = ChannelMediaOptions()
            option.clientRoleType = Constants.CLIENT_ROLE_BROADCASTER
            option.autoSubscribeAudio = true
            option.autoSubscribeVideo = true
            option.publishCustomVideoTrack = true
            // Publish the custom track created above in this channel.
            option.customVideoTrackId = videoTrack
            val res = mRtcEngine!!.joinChannelEx(
                token,
                connection,
                option,
                object : IRtcEngineEventHandler() {})
            Log.e(TAG, "res==$res")
            if (res != 0) {
                // Join failed: release the track id and tear down the connection.
                mRtcEngine!!.destroyCustomVideoTrack(videoTrack)
                mRtcEngine!!.leaveChannelEx(connection)
                Log.e(TAG, RtcEngine.getErrorDescription(Math.abs(res)))
            } else {
                isFirst = false
                pushRedCameraFrame()
                // Cache the track id and connection so they can be released on destroy.
                videoTrackIds.add(videoTrack)
                connections.add(connection!!)
            }
        } else {
            pushRedCameraFrame()
        }
    }

    /**
     * Pushes the most recent IR-camera NV21 frame (mData/mWidth/mHeight) to the
     * custom track [videoTrack]. No-op unless joined and the engine exists.
     */
    private fun pushRedCameraFrame() {
        if (joined && mRtcEngine != null) {
            if (DEBUG) Log.v(
                TAG,
                "数据回调0011:" + mData.size + ",width=" + mWidth + ",height=" + mHeight
            )
            // Wrap the raw NV21 bytes without conversion.
            val frameBuffer: VideoFrame.Buffer = NV21Buffer(mData, mWidth, mHeight, null)
            // Timestamp with the SDK's monotonic clock, converted ms -> ns.
            val currentMonotonicTimeInMs = mRtcEngine!!.currentMonotonicTimeInMs
            val videoFrame = VideoFrame(frameBuffer, 0, currentMonotonicTimeInMs * 1000000)
            // Push the frame through the custom video track.
            val ret = mRtcEngine!!.pushExternalVideoFrameEx(videoFrame, videoTrack)
            if (DEBUG) Log.v(
                TAG,
                "数据回调0011:ret=$ret,videoTrack=$videoTrack"
            )
            if (ret < 0) {
                Log.w(TAG, "pushExternalVideoFrameEx error code=$ret")
            }
        }
    }

    /**
     * Color-camera (camera id 1) pipeline: on first use creates a custom video
     * track, joins the channel over a dedicated [RtcConnection] with a
     * 4K/30fps hardware-preferred encoder config, and pushes the latest cached
     * NV21 frame; on subsequent calls only pushes the frame.
     */
    private fun createPushingVideoTrack1() {
        if (isFirst1) {
            // Guard: must already be joined, and at most 4 tracks are allowed.
            if (!joined || videoTrackIds.size >= 4) {
                return
            }

            // createCustomVideoTrack(): > 0 is a usable track id, < 0 is failure.
            videoTrack1 = mRtcEngine!!.createCustomVideoTrack()
            if (videoTrack1 < 0) {
                Toast.makeText(this, "createCustomVideoTrack failed!", Toast.LENGTH_LONG).show()
                return
            }
            val channelId = etChannelName!!.text.toString()
            // Random uid in [20000, 21000) for this extra connection.
            val uid = Random().nextInt(1000) + 20000
            connection1 = RtcConnection(channelId, uid)
            // 4K @ 30 fps, hardware encoder preferred for this stream.
            val videoEncoderConfiguration = VideoEncoderConfiguration(
                VideoEncoderConfiguration.VD_3840x2160,
                FRAME_RATE.FRAME_RATE_FPS_30,
                VideoEncoderConfiguration.STANDARD_BITRATE,
                ORIENTATION_MODE.ORIENTATION_MODE_ADAPTIVE
            )
            val advanceOptions = AdvanceOptions()
            advanceOptions.encodingPreference =
                VideoEncoderConfiguration.ENCODING_PREFERENCE.PREFER_HARDWARE
            videoEncoderConfiguration.advanceOptions = advanceOptions
            mRtcEngine!!.setVideoEncoderConfigurationEx(videoEncoderConfiguration, connection1)

            val option = ChannelMediaOptions()
            option.clientRoleType = Constants.CLIENT_ROLE_BROADCASTER
            option.autoSubscribeAudio = true
            option.autoSubscribeVideo = true
            option.publishCustomVideoTrack = true
            // Publish the custom track created above in this channel.
            option.customVideoTrackId = videoTrack1
            val res = mRtcEngine!!.joinChannelEx(
                token,
                connection1,
                option,
                object : IRtcEngineEventHandler() {
                    override fun onLocalVideoStats(
                        source: Constants.VideoSourceType?,
                        stats: LocalVideoStats?
                    ) {
                        // BUGFIX: log the reported stats parameter. The original
                        // constructed new LocalVideoStats() instances, so it
                        // always logged default values instead of real stats.
                        Log.e(
                            TAG,
                            "LocalVideoStats==sentFrameRate:${stats?.sentFrameRate}," +
                                "sentBitrate: ${stats?.sentBitrate}," +
                                "captureFrameWidth: ${stats?.captureFrameWidth}"
                        )
                        super.onLocalVideoStats(source, stats)
                    }
                })
            Log.e(TAG, "res==$res")
            if (res != 0) {
                // Join failed: release the track id and tear down the connection.
                mRtcEngine!!.destroyCustomVideoTrack(videoTrack1)
                mRtcEngine!!.leaveChannelEx(connection1)
                Log.e(TAG, RtcEngine.getErrorDescription(Math.abs(res)))
            } else {
                isFirst1 = false
                pushColorCameraFrame()
                // Cache the track id and connection so they can be released on destroy.
                videoTrackIds.add(videoTrack1)
                connections.add(connection1!!)
            }
        } else {
            pushColorCameraFrame()
        }
    }

    /**
     * Pushes the most recent color-camera NV21 frame (mData1/mWidth1/mHeight1)
     * to custom track [videoTrack1]. No-op unless joined and the engine exists.
     */
    private fun pushColorCameraFrame() {
        if (joined && mRtcEngine != null) {
            if (DEBUG) Log.v(
                TAG,
                "数据回调112233:" + mData1.size + ",width=" + mWidth1 + ",height=" + mHeight1 + ",videoTrack1=" + videoTrack1
            )
            // Wrap the raw NV21 bytes without conversion.
            val frameBuffer: VideoFrame.Buffer = NV21Buffer(mData1, mWidth1, mHeight1, null)
            // Timestamp with the SDK's monotonic clock, converted ms -> ns.
            val currentMonotonicTimeInMs = mRtcEngine!!.currentMonotonicTimeInMs
            // 7-arg VideoFrame overload with null color space / alpha buffer and
            // trailing arg 1 — presumably a stream/buffer type selector; confirm
            // against the Agora SDK before changing.
            val videoFrame =
                VideoFrame(
                    frameBuffer, 0, currentMonotonicTimeInMs * 1000000,
                    null as VideoFrame.ColorSpace?,
                    null as ByteArray?,
                    1.0f,
                    1
                )
            // Push the frame through the custom video track.
            val ret = mRtcEngine!!.pushExternalVideoFrameEx(videoFrame, videoTrack1)
            if (ret < 0) {
                Log.w(TAG, "pushExternalVideoFrameEx error code=$ret")
            }
        }
    }

    // Third-camera (camera id 2) pipeline: on first use creates a custom video
    // track, joins the channel on a dedicated RtcConnection (1080p/15fps,
    // software encoder preferred), then pushes the latest cached NV21 frame
    // (mData2/mWidth2/mHeight2); on subsequent calls only pushes the frame.
    private fun createPushingVideoTrack2() {
        if (isFirst2) {
            Log.w(TAG, "createPushingVideoTrack2=" + videoTrackIds.size)
            // Guard: must already be joined, and at most 4 tracks are allowed.
            if (!joined || videoTrackIds.size >= 4) {
                return
            }

            /*
             * Get a custom video track id created internally, which can be used to publish or preview.
             *
             * @return
             * - > 0: the usable video track id.
             * - < 0: Failure.
             */videoTrack2 = mRtcEngine!!.createCustomVideoTrack()
            Log.w(TAG, "videoTrack2=$videoTrack2")
            if (videoTrack2 < 0) {
                Toast.makeText(this, "createCustomVideoTrack failed!", Toast.LENGTH_LONG).show()
                return
            }
            val channelId = etChannelName!!.text.toString()
            // Random uid in [20000, 21000) for this extra connection.
            val uid = Random().nextInt(1000) + 20000
            connection2 = RtcConnection(channelId, uid)
            val videoEncoderConfiguration = VideoEncoderConfiguration(
                VideoEncoderConfiguration.VD_1920x1080,
                FRAME_RATE.FRAME_RATE_FPS_15,
                VideoEncoderConfiguration.STANDARD_BITRATE,
                ORIENTATION_MODE.ORIENTATION_MODE_ADAPTIVE
            )
            val advanceOptions = AdvanceOptions()
            advanceOptions.encodingPreference =
                VideoEncoderConfiguration.ENCODING_PREFERENCE.PREFER_SOFTWARE
            videoEncoderConfiguration.advanceOptions = advanceOptions
            mRtcEngine!!.setVideoEncoderConfigurationEx(videoEncoderConfiguration, connection2)

            // (disabled) sender-side dual-stream mode configuration
//            mRtcEngine.setDualStreamModeEx(Constants.SimulcastStreamMode.ENABLE_SIMULCAST_STREAM,
//                    new SimulcastStreamConfig(new VideoEncoderConfiguration.VideoDimensions(480,270),256,9),
//                    connection2);
//            //设置订阅的视频流类型
//            mRtcEngine.setRemoteVideoStreamTypeEx(uid,VIDEO_STREAM_LOW,connection2);
            val option = ChannelMediaOptions()
            option.clientRoleType = Constants.CLIENT_ROLE_BROADCASTER
            option.autoSubscribeAudio = true
            option.autoSubscribeVideo = true
            option.publishCustomVideoTrack = true
//            option.publishCameraTrack = false
            /*
            specify custom video track id to publish in this channel.
             */option.customVideoTrackId = videoTrack2
            val res = mRtcEngine!!.joinChannelEx(
                token,
                connection2,
                option,
                object : IRtcEngineEventHandler() {})
            Log.e(TAG, "res2==$res")
            if (res != 0) {
                // Join failed: release the track id and tear down the connection.
                mRtcEngine!!.destroyCustomVideoTrack(videoTrack2)
                mRtcEngine!!.leaveChannelEx(connection2)
                Log.e(TAG, RtcEngine.getErrorDescription(Math.abs(res)))
            } else {
                if (DEBUG) Log.v(
                    TAG,
                    "数据回调2233:" + mData2.size + ",width=" + mWidth2 + ",height=" + mHeight2
                )
                isFirst2 = false
                if (joined && mRtcEngine != null) {
                    // Wrap the raw NV21 bytes in a frame buffer (no conversion).
                    val frameBuffer: VideoFrame.Buffer = NV21Buffer(mData2, mWidth2, mHeight2, null)

                    // SDK's current monotonic time in milliseconds.
                    val currentMonotonicTimeInMs = mRtcEngine!!.currentMonotonicTimeInMs
                    // Build the VideoFrame with the timestamp converted ms -> ns.
//                    val videoFrame = VideoFrame(frameBuffer, 0, currentMonotonicTimeInMs * 1000000)
                    // 7-arg overload with null color space / alpha buffer; trailing
                    // arg 2 is presumably a stream/buffer type selector — confirm
                    // against the Agora SDK.
                    val videoFrame = VideoFrame(
                        frameBuffer, 0, currentMonotonicTimeInMs * 1000000,
                        null as VideoFrame.ColorSpace?,
                        null as ByteArray?,
                        1.0f,
                        2
                    )

                    // Push the frame through the custom video track.
                    val ret = mRtcEngine!!.pushExternalVideoFrameEx(videoFrame, videoTrack2)
                    if (ret < 0) {
                        Log.w(
                            TAG,
                            "pushExternalVideoFrameEx error code=$ret"
                        )
                    }
                }

                /*
                 * Cache the track id and connection so they can be released on destroy.
                 */videoTrackIds.add(videoTrack2)
                connections.add(connection2!!)
            }
        } else {
            if (DEBUG) Log.v(
                TAG,
                "数据回调223344:" + mData2.size + ",width=" + mWidth2 + ",height=" + mHeight2 + ",videoTrack2=" + videoTrack2
            )
            if (joined && mRtcEngine != null) {
                // Wrap the raw NV21 bytes in a frame buffer (no conversion).
                val frameBuffer: VideoFrame.Buffer = NV21Buffer(mData2, mWidth2, mHeight2, null)

                // SDK's current monotonic time in milliseconds.
                val currentMonotonicTimeInMs = mRtcEngine!!.currentMonotonicTimeInMs
                // Build the VideoFrame with the timestamp converted ms -> ns.
//                val videoFrame = VideoFrame(frameBuffer, 0, currentMonotonicTimeInMs * 1000000)
                val videoFrame = VideoFrame(
                    frameBuffer, 0, currentMonotonicTimeInMs * 1000000,
                    null as VideoFrame.ColorSpace?,
                    null as ByteArray?,
                    1.0f,
                    2
                )

                // Push the frame through the custom video track.
                val ret = mRtcEngine!!.pushExternalVideoFrameEx(videoFrame, videoTrack2)
                Log.e(TAG, "res==3:$ret")
                if (ret < 0) {
                    Log.w(
                        TAG,
                        "pushExternalVideoFrameEx error code=$ret"
                    )
                }
            }
        }
    }

    override fun onResume() {
        // NOTE(review): redundant override — only forwards to the superclass;
        // kept as a hook point for resuming previews if ever needed.
        super.onResume()
    }

    override fun onRestart() {
        super.onRestart()
        // Legacy android.hardware.Camera restart logic, superseded by the
        // CameraHelper-based preview lifecycle.
//        if (redCamera != null) {
//            redCamera!!.startPreview()
//        }
//        if (colorCamera != null) {
//            colorCamera!!.startPreview()
//        }
//        if (thirdCamera != null) {
//            thirdCamera!!.startPreview()
//        }

    }

    override fun onDestroy() {
        super.onDestroy()
        // Stop any running camera previews (the helpers are created lazily in
        // the init*Camera functions, hence the null checks).
        if (cameraHelper != null){
            cameraHelper.stopPreview()
        }
        if (cameraHelper1 != null){
            cameraHelper1.stopPreview()
        }
        if (cameraHelper2 != null){
            cameraHelper2.stopPreview()
        }
        // BUGFIX: release the RTC resources that createPushingVideoTrack*()
        // cached explicitly "to release while destroying" — the original
        // onDestroy never did, leaking custom video tracks and leaving every
        // extra joined connection open.
        mRtcEngine?.let { engine ->
            for (trackId in videoTrackIds) {
                engine.destroyCustomVideoTrack(trackId)
            }
            for (conn in connections) {
                engine.leaveChannelEx(conn)
            }
        }
        videoTrackIds.clear()
        connections.clear()
        // NOTE(review): if this activity owns the engine, RtcEngine.destroy()
        // should also be called here — confirm ownership before adding it.
    }

    companion object {
        // Log tag for this activity.
        private const val TAG = "MainActivity"

        // Agora runtime-permission request code and the permissions required
        // for audio/video capture and storage access.
        private const val PERMISSION_REQ_ID = 22
        private val REQUESTED_PERMISSIONS = arrayOf(
            Manifest.permission.RECORD_AUDIO,
            Manifest.permission.CAMERA,
            Manifest.permission.READ_EXTERNAL_STORAGE,
            Manifest.permission.WRITE_EXTERNAL_STORAGE
        )
    }
}
