package com.tencent.trtc.customcamera;

import android.graphics.Bitmap;
import android.graphics.ImageFormat;
import android.graphics.YuvImage;
import android.os.Build;
import android.os.Bundle;
import android.os.IBinder;
import android.renderscript.Allocation;
import android.renderscript.Element;
import android.renderscript.RenderScript;
import android.renderscript.ScriptIntrinsicYuvToRGB;
import android.renderscript.Type;
import android.text.TextUtils;
import android.util.Log;
import android.view.TextureView;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;

import androidx.annotation.RequiresApi;

import com.example.basic.TRTCBaseActivity;
import com.llvision.glass3.core.camera.client.IFrameCallback;
import com.llvision.glass3.core.lcd.client.IGlassDisplay;
import com.llvision.glass3.core.lcd.client.ILCDClient;
import com.llvision.glass3.platform.ConnectionStatusListener;
import com.llvision.glass3.platform.GlassException;
import com.llvision.glass3.platform.IGlass3Device;
import com.llvision.glass3.platform.LLVisionGlass3SDK;
import com.llvision.glxss.common.exception.BaseException;
import com.llvision.glxss.common.service.GlassServiceManager;
import com.llvision.glxss.common.service.audio.GlassAudioService;
import com.llvision.glxss.common.service.camera.GlassCameraService;
import com.llvision.glxss.common.utils.LogUtil;
import com.llvision.glxss.common.utils.ToastUtils;
import com.tencent.liteav.TXLiteAVCode;
import com.tencent.rtmp.ui.TXCloudVideoView;
import com.tencent.trtc.TRTCCloud;
import com.tencent.trtc.TRTCCloudDef;
import com.tencent.trtc.TRTCCloudListener;
import com.tencent.trtc.customcamera.helper.CustomCameraCapture;
import com.tencent.trtc.customcamera.helper.CustomFrameRender;
import com.tencent.trtc.debug.GenerateTestUserSig;

import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Random;

/**
 * TRTC 自定义相机采集&渲染的示例
 * 本文件展示了如何使用TRTC SDK 实现相机的自定义采集&渲染功能，主要流程如下：
 * -
 * 调用
 * {@link com.tencent.trtc.customcamera.helper.CustomCameraCapture#startInternal(
 *com.tencent.trtc.customcamera.helper.CustomCameraCapture.VideoFrameReadListener)}，
 * 启动Camera采集，并传入一个VideoFrameReadListener；
 * - 将{@link com.tencent.trtc.customcamera.helper.CustomCameraCapture.VideoFrameReadListener}
 * 返回的视频帧通过TRTC的自定义视频采集接口
 * {@link com.tencent.trtc.TRTCCloud#sendCustomVideoData(int, com.tencent.trtc.TRTCCloudDef.TRTCVideoFrame)}; 发送给TRTC
 * SDK；
 * -
 * 通过
 * {@link com.tencent.trtc.TRTCCloud#setLocalVideoRenderListener(
 *int, int, com.tencent.trtc.TRTCCloudListener.TRTCVideoRenderListener)}
 * 获取处理后的本地视频帧并渲染到屏幕上；
 * -
 * 如果有远端主播，可以通过
 * {@link com.tencent.trtc.TRTCCloud#setRemoteVideoRenderListener(
 *String, int, int, com.tencent.trtc.TRTCCloudListener.TRTCVideoRenderListener)}
 * 获取远端主播的视频帧并渲染到屏幕上；
 * - 更多细节，详见API说明文档{https://liteav.sdk.qcloud.com/doc/api/zh-cn/group__TRTCCloud__android.html}
 * <p>
 * Custom Video Capturing & Rendering
 * This document shows how to enable custom video capturing and rendering in the TRTC SDK.
 * - Call
 * {@link com.tencent.trtc.customcamera.helper.CustomCameraCapture#startInternal(
 *com.tencent.trtc.customcamera.helper.CustomCameraCapture.VideoFrameReadListener)}
 * to start video capturing by the camera, with `VideoFrameReadListener` passed in.
 * - Call the custom video capturing API
 * {@link com.tencent.trtc.TRTCCloud#sendCustomVideoData(int, com.tencent.trtc.TRTCCloudDef.TRTCVideoFrame)}; to send
 * the video frames returned by
 * `{@link com.tencent.trtc.customcamera.helper.CustomCameraCapture.VideoFrameReadListener}` to the SDK.
 * - Get the processed local video data using
 * {@link com.tencent.trtc.TRTCCloud#setLocalVideoRenderListener(
 *int, int, com.tencent.trtc.TRTCCloudListener.TRTCVideoRenderListener)}
 * and render it to the screen.
 * - If there is a remote anchor, call
 * {@link com.tencent.trtc.TRTCCloud#setRemoteVideoRenderListener(
 *String, int, int, com.tencent.trtc.TRTCCloudListener.TRTCVideoRenderListener)}
 * to get the anchor’s video frames and render them to the screen.
 * - For more information, please see the API document
 * {https://liteav.sdk.qcloud.com/doc/api/zh-cn/group__TRTCCloud__android.html}.
 */
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR1)
public class CustomCameraActivity extends TRTCBaseActivity implements View.OnClickListener {

    private static final String TAG = "CustomCameraActivity";

    // --- UI widgets (bound in initView) ---
    private ImageView mImageBack;                    // back-navigation button
    private TextView mTextTitle;                     // title showing the current room id
    private Button mButtonStartPush;                 // toggles start/stop pushing
    private EditText mEditRoomId;
    private EditText mEditUserId;
    private TXCloudVideoView mTXCloudPreviewView;    // container for the local preview
    private List<TXCloudVideoView> mRemoteVideoList; // six fixed slots for remote streams

    // --- TRTC custom capture / render helpers ---
    private TRTCCloud mTRTCCloud;
    private CustomCameraCapture mCustomCameraCapture;
    private CustomFrameRender mCustomFrameRender;    // renders the local custom-captured stream

    private List<String> mRemoteUserIdList;          // userIds whose video is currently available
    private boolean mStartPushFlag = false;          // true while in the room and pushing
    private HashMap<String, CustomFrameRender> mCustomRemoteRenderMap; // userId -> its renderer


    /**
     * Standard activity setup: inflates the layout, hides the action bar,
     * binds views once permissions are granted, and starts glass integration.
     */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.customcamera_activity_custom_camera);
        // getSupportActionBar() returns null when the theme provides no action
        // bar; guard to avoid a NullPointerException on such themes.
        if (getSupportActionBar() != null) {
            getSupportActionBar().hide();
        }

        if (checkPermission()) {
            initView();
        }

        mRemoteUserIdList = new ArrayList<>();
        initGlass();
    }

    /**
     * Registers for glass connection events and queries the current display state.
     * Order matters: the listener is registered before probing the device list so
     * a device that connects in between is still reported via the callback.
     */
    private void initGlass() {
        LLVisionGlass3SDK.getInstance().registerConnectionListener(mConnectionStatusListener);
        initDisplayInfo();
    }

    /**
     * Looks up the LCD client and, if a glass device is connected, caches its
     * {@link IGlassDisplay} in {@code mGlassDisplay}. Failures are logged and
     * leave {@code mGlassDisplay} unchanged (possibly null).
     */
    private void initDisplayInfo() {
        try {
            LogUtil.e(TAG, "init display info");
            List<IGlass3Device> glass3Devices = LLVisionGlass3SDK.getInstance().getGlass3DeviceList();
            mIlcdClient = (ILCDClient) LLVisionGlass3SDK.getInstance()
                    .getGlass3Client(IGlass3Device.Glass3DeviceClient.LCD);
            if (!glass3Devices.isEmpty()) {
                // Only the first attached device is supported; others are ignored.
                mGlassDisplay = mIlcdClient.getGlassDisplay(glass3Devices.get(0));
            }
        } catch (BaseException e) {
            // The old code used printStackTrace() and a redundant inner try/catch;
            // a single logged catch keeps the failure visible in the app log.
            LogUtil.e(TAG, "init display info failed: " + e);
        }
    }

    /** Glass device exposed for other components; assigned externally. */
    public static IGlass3Device mGlassDevice;
    private GlassCameraService glassCameraService;
    // Renamed from "GlassAudioService": a field sharing its own class's name
    // obscures the type and violates Java naming conventions.
    private GlassAudioService mGlassAudioService;

    /**
     * Binds the glass audio service, starts recording and forwards every PCM
     * buffer to TRTC as a custom audio frame.
     */
    private void openAudio() {
        IBinder binder = GlassServiceManager.getBinder(GlassServiceManager.GLASS_AUDIO_SERVICE);
        if (binder == null) {
            return;
        }
        mGlassAudioService = ((GlassAudioService.Binder) binder).getService();
        mGlassAudioService.startAudioRecord();
        mGlassAudioService.addAudioConsumer(new GlassAudioService.GlassAudioConsumer() {
            @Override
            public void onStartAudioRecord(int i, boolean b, int i1) {
                Log.e("111", "addAudioConsumer  onStartAudioRecord回调");
            }

            @Override
            public void onAudioData(byte[] bytes, int i, int i1) {
                Log.e("111", "addAudioConsumer  onAudioData 回调" + i + "  " + i1);
                // Audio can arrive before enterRoom() created the cloud instance
                // (or after exitRoom() nulled it); drop the frame instead of crashing.
                if (mTRTCCloud == null) {
                    return;
                }
                TRTCCloudDef.TRTCAudioFrame trtcAudioFrame = new TRTCCloudDef.TRTCAudioFrame();
                trtcAudioFrame.data = bytes;
                // NOTE(review): 44100 Hz mono is assumed here — confirm against the
                // glass audio service's actual capture format.
                trtcAudioFrame.sampleRate = 44100;
                trtcAudioFrame.channel = 1;
                trtcAudioFrame.timestamp = mTRTCCloud.generateCustomPTS();
                mTRTCCloud.sendCustomAudioData(trtcAudioFrame);
            }

            @Override
            public void onStopAudioRecord() {
                Log.e("111", "addAudioConsumer  onStopAudioRecord 回调");
            }
        });
    }

    /**
     * Binds the glass camera service and forwards every NV21 preview frame to
     * TRTC as a custom video frame (byte-array buffer path).
     */
    private void openCamera() {
        IBinder binder = GlassServiceManager.getBinder(GlassServiceManager.GLASS_CAMERA_SERVICE);
        if (binder != null) {
            glassCameraService = ((GlassCameraService.Binder) binder).getService();
        }
        if (glassCameraService == null) {
            return;
        }
        glassCameraService.addFrameConsumer(new IFrameCallback() {
            @Override
            public void onFrameAvailable(byte[] bytes) {
                Log.e("111", "onFrameAvailable  回调");
                // Frames can arrive before enterRoom() created the cloud instance
                // (openCamera() may run first); drop them instead of crashing.
                if (mTRTCCloud == null) {
                    return;
                }
                TRTCCloudDef.TRTCVideoFrame videoFrame = new TRTCCloudDef.TRTCVideoFrame();
                // NOTE(review): assumes the glass camera always delivers 1280x720
                // NV21 — confirm against the camera service configuration.
                videoFrame.width = 1280;
                videoFrame.height = 720;
                videoFrame.timestamp = mTRTCCloud.generateCustomPTS();
                videoFrame.pixelFormat = TRTCCloudDef.TRTC_VIDEO_PIXEL_FORMAT_NV21;
                videoFrame.bufferType = TRTCCloudDef.TRTC_VIDEO_BUFFER_TYPE_BYTE_ARRAY;
                // The previous code wrapped "bytes" in a YuvImage only to call
                // getYuvData(), which returns the same array; pass it directly.
                videoFrame.data = bytes;
                mTRTCCloud.sendCustomVideoData(TRTCCloudDef.TRTC_VIDEO_STREAM_TYPE_BIG, videoFrame);
            }
        });
    }

    /** Tracks glass service/device lifecycle; refreshes the display handle on (re)connect. */
    private final ConnectionStatusListener mConnectionStatusListener = new ConnectionStatusListener() {

        @Override
        public void onServiceConnected(List<IGlass3Device> glass3Devices) {
            LogUtil.i(TAG, "onServiceConnected glass3Devices = " + glass3Devices.size());
        }

        @Override
        public void onServiceDisconnected() {
            LogUtil.i(TAG, "onServiceDisconnected");
        }

        @Override
        public void onDeviceConnect(IGlass3Device device) {
            LogUtil.i(TAG, "onDeviceConnect device = " + device);
            // Re-query the LCD client so mGlassDisplay points at the new device.
            initDisplayInfo();
        }

        @Override
        public void onDeviceDisconnect(IGlass3Device device) {
            LogUtil.i(TAG, "onDeviceDisconnect device = " + device);
            // Tear down the extension screen: a stale handle targets a gone device.
            if (mGlassDisplay != null) {
                mGlassDisplay.stopCaptureScreen();
                mGlassDisplay.release();
                mGlassDisplay = null;
            }
//        resetView();
        }

        @Override
        public void onError(int code, String msg) {
            LogUtil.i(TAG, "onError code = " + code + " msg = " + msg);
//        resetView();
        }

    };


    /** Binds views, wires click handlers and seeds a random local user id. */
    private void initView() {
        mImageBack = findViewById(R.id.iv_back);
        mTextTitle = findViewById(R.id.tv_room_number);
        mButtonStartPush = findViewById(R.id.btn_start_push);
        mEditRoomId = findViewById(R.id.et_room_id);
        mEditUserId = findViewById(R.id.et_user_id);
        mTXCloudPreviewView = findViewById(R.id.txcvv_main_local);
        mRemoteVideoList = new ArrayList<>();

        // Six fixed remote-view slots; keep in sync with refreshRemoteVideo().
        int[] remoteViewIds = {
                R.id.txcvv_video_remote1, R.id.txcvv_video_remote2, R.id.txcvv_video_remote3,
                R.id.txcvv_video_remote4, R.id.txcvv_video_remote5, R.id.txcvv_video_remote6
        };
        for (int viewId : remoteViewIds) {
            mRemoteVideoList.add((TXCloudVideoView) findViewById(viewId));
        }

        mImageBack.setOnClickListener(this);
        mButtonStartPush.setOnClickListener(this);

        // Random user id in [1000000, 1099999]; String.valueOf replaces the
        // `int + ""` conversion anti-idiom.
        mEditUserId.setText(String.valueOf(new Random().nextInt(100000) + 1000000));
        mTextTitle.setText(getString(R.string.customcamera_roomid) + ":" + mEditRoomId.getText().toString());
    }

    // Counts rendered remote frames so that only the very first frame's
    // metadata is logged by the listener below.
    int a = 0;

    /**
     * Creates the TRTC cloud instance, registers render listeners and enters the
     * room as an anchor with custom audio and video capture enabled.
     *
     * @param roomId numeric room id (Integer.parseInt — non-numeric input throws)
     * @param userId local user id, used for user-sig generation and local render
     */
    private void enterRoom(String roomId, String userId) {
        mCustomCameraCapture = new CustomCameraCapture();
        mCustomFrameRender = new CustomFrameRender(userId, TRTCCloudDef.TRTC_VIDEO_STREAM_TYPE_BIG);
        mCustomRemoteRenderMap = new HashMap<>();

        mTRTCCloud = TRTCCloud.sharedInstance(getApplicationContext());
        mTRTCCloud.setListener(new TRTCCloudImplListener(CustomCameraActivity.this));
        // NOTE(review): the remote userId "111" is hard-coded — presumably a fixed
        // debugging peer; confirm it should not be the actual remote user's id.
        mTRTCCloud.setRemoteVideoRenderListener("111", TRTCCloudDef.TRTC_VIDEO_PIXEL_FORMAT_NV21,
                TRTCCloudDef.TRTC_VIDEO_BUFFER_TYPE_BYTE_ARRAY, new TRTCCloudListener.TRTCVideoRenderListener() {
                    @Override
                    public void onRenderVideoFrame(String userId, int streamType, TRTCCloudDef.TRTCVideoFrame frame) {
                        // See the custom-render helper in TRTC-API-Example:
                        // com.tencent.trtc.mediashare.helper.CustomFrameRender
                        if (a == 0) {
                            // Log full frame metadata once, for the first frame only.
                            a++;
                            Log.i(TAG, "userId == " + userId);
                            Log.i(TAG, "streamType == " + streamType);
                            Log.i(TAG, "frame == " + frame.toString());
                            Log.i(TAG, "frame.bufferType == " + frame.bufferType);
                            Log.i(TAG, "frame.pixelFormat == " + frame.pixelFormat);
                            Log.i(TAG, "frame.texture == " + frame.texture);
                            Log.i(TAG, "frame.width == " + frame.width);
                            Log.i(TAG, "frame.height == " + frame.height);
                            Log.i(TAG, "frame.timestamp == " + frame.timestamp);
                            Log.i(TAG, "frame.rotation == " + frame.rotation);
                        }
                        // Convert each NV21 frame to a Bitmap on the UI thread and
                        // show it on the glass extension screen's ImageView.
                        runOnUiThread(new Runnable() {
                            @Override
                            public void run() {
//                                Bitmap bitmap = BitmapFactory.decodeByteArray(frame.data, 0, frame.data.length);
//                                Bitmap bitmap = yuvByte2Bitmap(frame.data, 1280, 720);
                                Bitmap bitmap = nv21ToBitmap(frame.data, frame.width, frame.height);
                                glassImageView.setImageBitmap(bitmap);
                            }
                        });
                    }
                });

        TRTCCloudDef.TRTCParams mTRTCParams = new TRTCCloudDef.TRTCParams();
        mTRTCParams.sdkAppId = GenerateTestUserSig.SDKAPPID;
        mTRTCParams.userId = userId;
        mTRTCParams.roomId = Integer.parseInt(roomId);
        mTRTCParams.userSig = GenerateTestUserSig.genTestUserSig(mTRTCParams.userId);
        mTRTCParams.role = TRTCCloudDef.TRTCRoleAnchor;
        mTRTCCloud.enterRoom(mTRTCParams, TRTCCloudDef.TRTC_APP_SCENE_LIVE);
        // Use custom (external) capture for both audio and video.
        mTRTCCloud.enableCustomAudioCapture(true);
        mTRTCCloud.enableCustomVideoCapture(TRTCCloudDef.TRTC_VIDEO_STREAM_TYPE_BIG, true);
        mTRTCCloud.setLocalVideoRenderListener(TRTCCloudDef.TRTC_VIDEO_PIXEL_FORMAT_Texture_2D,
                TRTCCloudDef.TRTC_VIDEO_BUFFER_TYPE_TEXTURE, mCustomFrameRender);
        final TextureView textureView = new TextureView(this);
        mTXCloudPreviewView.addVideoView(textureView);
        mCustomFrameRender.start(textureView);
    }

    // RenderScript context and YUV->RGB intrinsic, created lazily on first use.
    private RenderScript rs;

    private ScriptIntrinsicYuvToRGB yuvToRgbIntrinsic;

    /**
     * Converts an NV21 byte buffer into an ARGB_8888 {@link Bitmap} using the
     * {@link ScriptIntrinsicYuvToRGB} RenderScript intrinsic.
     *
     * @param nv21   raw NV21 data; expected length is width * height * 3 / 2
     * @param width  frame width in pixels
     * @param height frame height in pixels
     * @return a newly allocated Bitmap of the given size
     */
    public Bitmap nv21ToBitmap(byte[] nv21, int width, int height) {
        // Bug fix: rs and yuvToRgbIntrinsic were never initialized anywhere in
        // this activity, so the first call crashed with an NPE. Create them
        // lazily here (always invoked on the UI thread via runOnUiThread).
        if (rs == null) {
            rs = RenderScript.create(this);
            yuvToRgbIntrinsic = ScriptIntrinsicYuvToRGB.create(rs, Element.U8_4(rs));
        }

        Type.Builder yuvType = new Type.Builder(rs, Element.U8(rs)).setX(nv21.length);
        Allocation in = Allocation.createTyped(rs, yuvType.create(), Allocation.USAGE_SCRIPT);

        Type.Builder rgbaType = new Type.Builder(rs, Element.RGBA_8888(rs)).setX(width).setY(height);
        Allocation out = Allocation.createTyped(rs, rgbaType.create(), Allocation.USAGE_SCRIPT);

        in.copyFrom(nv21);
        yuvToRgbIntrinsic.setInput(in);
        yuvToRgbIntrinsic.forEach(out);

        Bitmap bmpout = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
        out.copyTo(bmpout);
        return bmpout;
    }

    /** Clears the tracked remote user ids and hides every remote video slot. */
    private void hideRemoteView() {
        mRemoteUserIdList.clear();
        for (int i = 0; i < mRemoteVideoList.size(); i++) {
            mRemoteVideoList.get(i).setVisibility(View.GONE);
        }
    }

    /** Stops all custom capture/render helpers, leaves the room and releases TRTC. */
    private void exitRoom() {
        // Shut down every remote renderer before forgetting the map entries.
        if (mCustomRemoteRenderMap != null) {
            for (CustomFrameRender remoteRender : mCustomRemoteRenderMap.values()) {
                if (remoteRender == null) {
                    continue;
                }
                remoteRender.stop();
            }
            mCustomRemoteRenderMap.clear();
        }
        if (mCustomCameraCapture != null) {
            mCustomCameraCapture.stop();
        }
        if (mCustomFrameRender != null) {
            mCustomFrameRender.stop();
        }
        hideRemoteView();
        // Leave the room and drop the listener before destroying the instance.
        if (mTRTCCloud != null) {
            mTRTCCloud.stopAllRemoteView();
            mTRTCCloud.exitRoom();
            mTRTCCloud.setListener(null);
        }
        mTRTCCloud = null;
        TRTCCloud.destroySharedInstance();
    }

    private View mGlassOverlayView;      // layout rendered onto the glass extension screen
    private ImageView glassImageView;    // ImageView inside the overlay showing remote frames
    private IGlassDisplay mGlassDisplay; // null until a glass device is connected
    private ILCDClient mIlcdClient;      // LCD client obtained from the glass SDK

    /**
     * Handles back navigation and the start/stop-push toggle. Starting a push
     * requires a connected glass device and a usable extension display.
     */
    @Override
    public void onClick(View view) {
        if (view.getId() == R.id.iv_back) {
            finish();
        } else if (view.getId() == R.id.btn_start_push) {
            if (mStartPushFlag) {
                // Already pushing: just stop. (The old code also re-opened the
                // camera, microphone and extension screen on every click, even
                // when the user was stopping the push.)
                mButtonStartPush.setText(R.string.customcamera_start_push);
                exitRoom();
                mStartPushFlag = false;
                return;
            }

            // Step 1: make sure a glass device is attached to the phone.
            // (Runtime permissions were already checked on the previous screen.)
            try {
                if (LLVisionGlass3SDK.getInstance().getGlass3DeviceList().isEmpty()) {
                    ToastUtils.showShort(CustomCameraActivity.this, "设备未连接 返回");
                    return;
                }
            } catch (GlassException e) {
                // Bug fix: the old code logged and fell through, then toasted
                // "connected" and dereferenced a possibly-null display. Bail out.
                LogUtil.e(TAG, "getGlass3DeviceList failed: " + e);
                return;
            }
            ToastUtils.showShort(CustomCameraActivity.this, "第一步骤 设备已经连接上了");

            // Bug fix: mGlassDisplay stays null when initDisplayInfo() failed;
            // creating the capture screen would throw an NPE.
            if (mGlassDisplay == null) {
                ToastUtils.showShort(CustomCameraActivity.this, "扩展屏不可用");
                return;
            }

            // Step 2: build the overlay shown on the glass extension screen.
            if (mGlassOverlayView == null) {
                mGlassOverlayView = getLayoutInflater().inflate(R.layout
                        .layout_glass_screen, null);
                glassImageView = mGlassOverlayView.findViewById(R.id.iv);
            }
            mGlassDisplay.createCaptureScreen(CustomCameraActivity.this, mGlassOverlayView);
            ToastUtils.showLong(CustomCameraActivity.this, "开启扩展屏");
            mGlassDisplay.setSwitch(true);

            String roomId = mEditRoomId.getText().toString();
            String userId = mEditUserId.getText().toString();
            if (TextUtils.isEmpty(roomId) || TextUtils.isEmpty(userId)) {
                Toast.makeText(CustomCameraActivity.this,
                        getString(R.string.customcamera_please_input_roomid_userid), Toast.LENGTH_SHORT).show();
                return;
            }
            mButtonStartPush.setText(R.string.customcamera_stop_push);
            // Enter the room before wiring the capture callbacks so mTRTCCloud is
            // non-null when the first camera/audio frame arrives.
            enterRoom(roomId, userId);
            openCamera();
            openAudio();
            mStartPushFlag = true;
        }
    }

    /**
     * Attaches a custom renderer for the given remote user and starts pulling
     * their big-stream video into the supplied container view.
     */
    private void startRemoteCustomRender(String userId, TXCloudVideoView renderView) {
        TextureView textureView = new TextureView(renderView.getContext());
        renderView.addVideoView(textureView);
        CustomFrameRender customRender = new CustomFrameRender(userId, TRTCCloudDef.TRTC_VIDEO_STREAM_TYPE_BIG);
        mTRTCCloud.setRemoteVideoRenderListener(userId, TRTCCloudDef.TRTC_VIDEO_PIXEL_FORMAT_I420,
                TRTCCloudDef.TRTC_VIDEO_BUFFER_TYPE_BYTE_ARRAY, customRender);
        customRender.start(textureView);
        mCustomRemoteRenderMap.put(userId, customRender);
        mTRTCCloud.startRemoteView(userId, TRTCCloudDef.TRTC_VIDEO_STREAM_TYPE_BIG, null);
    }


    /** Stops and discards the custom renderer of the given remote user. */
    private void stopRemoteCustomRender(String userId) {
        CustomFrameRender finishedRender = mCustomRemoteRenderMap.remove(userId);
        if (finishedRender != null) {
            finishedRender.stop();
        }
        mTRTCCloud.stopRemoteView(userId, TRTCCloudDef.TRTC_VIDEO_STREAM_TYPE_BIG);
    }


    /** Receives TRTC room callbacks; holds the activity weakly to avoid leaks. */
    protected class TRTCCloudImplListener extends TRTCCloudListener {

        private WeakReference<CustomCameraActivity> mContext;

        public TRTCCloudImplListener(CustomCameraActivity activity) {
            super();
            mContext = new WeakReference<>(activity);
        }

        @Override
        public void onUserVideoAvailable(String userId, boolean available) {
            int index = mRemoteUserIdList.indexOf(userId);
            if (available) {
                if (index != -1) {
                    return; // already tracked
                }
                mRemoteUserIdList.add(userId);
            } else {
                if (index == -1) {
                    return; // not tracked
                }
                stopRemoteCustomRender(userId);
                mRemoteUserIdList.remove(index);
            }
            refreshRemoteVideo();
        }

        /**
         * Re-binds the (at most six) remote video slots to the current user list.
         * NOTE(review): this calls startRemoteCustomRender again for users that
         * are already rendering; confirm the renderer tolerates being replaced.
         */
        private void refreshRemoteVideo() {
            // Bug fix: the old loop condition "i < size || i < 6" walked past the
            // six available view slots when more than six users were present,
            // throwing IndexOutOfBoundsException. Iterate the slots instead; any
            // slot without a corresponding user is hidden.
            for (int i = 0; i < mRemoteVideoList.size(); i++) {
                if (i < mRemoteUserIdList.size() && !TextUtils.isEmpty(mRemoteUserIdList.get(i))) {
                    mRemoteVideoList.get(i).setVisibility(View.VISIBLE);
                    startRemoteCustomRender(mRemoteUserIdList.get(i), mRemoteVideoList.get(i));
                } else {
                    mRemoteVideoList.get(i).setVisibility(View.GONE);
                }
            }
        }

        @Override
        public void onError(int errCode, String errMsg, Bundle extraInfo) {
            CustomCameraActivity activity = mContext.get();
            if (activity != null) {
                Toast.makeText(activity, "onError: " + errMsg + "[" + errCode + "]", Toast.LENGTH_SHORT).show();
                // Room entry failure is unrecoverable here; tear everything down.
                if (errCode == TXLiteAVCode.ERR_ROOM_ENTER_FAIL) {
                    activity.exitRoom();
                }
            }
        }
    }

    /** Called by the base activity once runtime permissions are granted; binds the UI. */
    @Override
    protected void onPermissionGranted() {
        initView();
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        exitRoom();
        if (mGlassDisplay != null) {
            mGlassDisplay.stopCaptureScreen();
        }
        LLVisionGlass3SDK.getInstance().unRegisterConnectionListener(mConnectionStatusListener);
    }
}