package com.kanjj.mywebrtc.fragment;

import android.os.Bundle;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;

import androidx.annotation.Nullable;
import androidx.fragment.app.Fragment;

import com.kanjj.mywebrtc.ChatActivity;
import com.kanjj.mywebrtc.R;
import com.kanjj.mywebrtc.network.SocketClient;

import org.webrtc.AudioSource;
import org.webrtc.AudioTrack;
import org.webrtc.Camera1Enumerator;
import org.webrtc.Camera2Enumerator;
import org.webrtc.CameraEnumerator;
import org.webrtc.Logging;
import org.webrtc.MediaConstraints;
import org.webrtc.PeerConnectionFactory;
import org.webrtc.RendererCommon;
import org.webrtc.SurfaceTextureHelper;
import org.webrtc.SurfaceViewRenderer;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoSource;
import org.webrtc.VideoTrack;

import java.util.ArrayList;

import butterknife.BindView;
import butterknife.ButterKnife;

/**
 * Fragment that renders the local camera preview plus two remote video streams,
 * and wires the local audio/video tracks into the shared {@link SocketClient}.
 */
public class VideoFragment extends Fragment implements View.OnClickListener {

    private static final String TAG = "VideoFragment";

    @BindView(R.id.chat_video_local)
    SurfaceViewRenderer localVideo;

    @BindView(R.id.chat_video_remote_1)
    SurfaceViewRenderer remoteVideo1;

    @BindView(R.id.chat_video_remote_2)
    SurfaceViewRenderer remoteVideo2;

    @BindView(R.id.chat_video_close_open_video_btn)
    Button closeOpenBtn;

    private ChatActivity chatActivity;
    private SocketClient socketClient;

    // Capture parameters for the local camera stream.
    private static final int VIDEO_RESOLUTION_WIDTH = 640;
    private static final int VIDEO_RESOLUTION_HEIGHT = 360;
    private static final int VIDEO_FPS = 30;

    public static final String VIDEO_TRACK_ID = "1";//"ARDAMSv0";
    public static final String AUDIO_TRACK_ID = "2";//"ARDAMSa0";

    private VideoTrack localVideoTrack;
    private AudioTrack localAudioTrack;
    private VideoCapturer localVideoCapturer;

    private PeerConnectionFactory peerConnectionFactory;
    private SurfaceTextureHelper localSurfaceTextureHelper;


    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.fragment_video, container, false);
        ButterKnife.bind(this, view);
        closeOpenBtn.setOnClickListener(this);
        return view;
    }


    @Override
    public void onActivityCreated(@Nullable Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        chatActivity = (ChatActivity) getActivity();
        socketClient = SocketClient.getInstance();
        initView();
        initVideo();
    }

    /**
     * Configures the three renderer surfaces and hands the remote renderers
     * to the {@link SocketClient} so incoming streams can be attached to them.
     */
    public void initView() {
        // Hand the remote renderer surfaces over to the socket client.
        ArrayList<SurfaceViewRenderer> list = new ArrayList<>();
        list.add(remoteVideo1);
        list.add(remoteVideo2);
        socketClient.surfaceViews = list;

        // Local preview: mirrored (front camera), software scaling.
        localVideo.init(socketClient.rootEglBase.getEglBaseContext(), null);
        localVideo.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);
        localVideo.setMirror(true);
        localVideo.setEnableHardwareScaler(false /* enabled */);

        // Remote surfaces: hardware scaling, drawn as media overlays so they
        // stack above the local preview surface.
        remoteVideo1.init(socketClient.rootEglBase.getEglBaseContext(), null);
        remoteVideo1.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);
        remoteVideo1.setMirror(true);
        remoteVideo1.setEnableHardwareScaler(true /* enabled */);
        remoteVideo1.setZOrderMediaOverlay(true);

        remoteVideo2.init(socketClient.rootEglBase.getEglBaseContext(), null);
        remoteVideo2.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);
        remoteVideo2.setMirror(true);
        remoteVideo2.setEnableHardwareScaler(true /* enabled */);
        remoteVideo2.setZOrderMediaOverlay(true);
    }

    /**
     * Creates the camera capturer plus the local video and audio tracks, and
     * registers both tracks with the {@link SocketClient}.
     *
     * @throws IllegalStateException if no usable camera could be opened
     *                               (previously this crashed with a raw NPE)
     */
    public void initVideo() {
        peerConnectionFactory = socketClient.peerConnectionFactory;

        localVideoCapturer = createVideoCapturer();
        if (localVideoCapturer == null) {
            // Fail fast with a clear message instead of an opaque NPE below.
            throw new IllegalStateException("No usable camera found on this device");
        }

        localSurfaceTextureHelper = SurfaceTextureHelper.create("CaptureThread", socketClient.rootEglBase.getEglBaseContext());
        VideoSource videoSource = peerConnectionFactory.createVideoSource(false);
        localVideoCapturer.initialize(localSurfaceTextureHelper, getActivity(), videoSource.getCapturerObserver());

        localVideoTrack = peerConnectionFactory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
        localVideoTrack.setEnabled(true);
        localVideoTrack.addSink(localVideo);

        AudioSource audioSource = peerConnectionFactory.createAudioSource(new MediaConstraints());
        localAudioTrack = peerConnectionFactory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
        localAudioTrack.setEnabled(true);
        socketClient.setStream(localVideoTrack, localAudioTrack);
    }

    @Override
    public void onStart() {
        super.onStart();
        // Open the signaling connection.
        socketClient.createSocket();
    }

    @Override
    public void onResume() {
        super.onResume();
        // Guard: initVideo() may not have run (or may have failed) yet.
        if (localVideoCapturer != null) {
            localVideoCapturer.startCapture(VIDEO_RESOLUTION_WIDTH, VIDEO_RESOLUTION_HEIGHT, VIDEO_FPS);
        }
    }

    @Override
    public void onPause() {
        super.onPause();
        if (localVideoCapturer == null) {
            return;
        }
        try {
            localVideoCapturer.stopCapture();
        } catch (InterruptedException e) {
            // Restore the interrupt flag instead of swallowing it.
            Thread.currentThread().interrupt();
            Log.w(TAG, "Interrupted while stopping capture", e);
        }
    }

    /**
     * Picks the Camera2 API when the device supports it, otherwise falls back
     * to the legacy Camera1 API (with texture capture enabled).
     *
     * @return a capturer, or {@code null} if no camera could be opened
     */
    private VideoCapturer createVideoCapturer() {
        if (Camera2Enumerator.isSupported(getActivity())) {
            return createCameraCapturer(new Camera2Enumerator(getActivity()));
        } else {
            return createCameraCapturer(new Camera1Enumerator(true));
        }
    }

    /**
     * Tries to open a front-facing camera first, then any other camera.
     *
     * @param enumerator camera API enumerator (Camera1 or Camera2)
     * @return a capturer, or {@code null} if every device failed to open
     */
    private VideoCapturer createCameraCapturer(CameraEnumerator enumerator) {
        final String[] deviceNames = enumerator.getDeviceNames();

        // First, try to find a front facing camera.
        Log.d(TAG, "Looking for front facing cameras.");
        for (String deviceName : deviceNames) {
            if (enumerator.isFrontFacing(deviceName)) {
                Log.d(TAG, "Creating front facing camera capturer.");
                VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);
                if (videoCapturer != null) {
                    return videoCapturer;
                }
            }
        }

        // Front facing camera not found, try any other camera.
        Log.d(TAG, "Looking for other cameras.");
        for (String deviceName : deviceNames) {
            if (!enumerator.isFrontFacing(deviceName)) {
                Log.d(TAG, "Creating other camera capturer.");
                VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);
                if (videoCapturer != null) {
                    return videoCapturer;
                }
            }
        }
        return null;
    }

    /**
     * Releases every WebRTC resource this fragment owns. Safe to call even if
     * initialization only partially completed: every field is null-checked
     * (the original only guarded {@code localVideo} and {@code localVideoTrack}).
     */
    public void destroyView() {
        Log.d(TAG, "destroyView");
        if (localVideo != null) {
            localVideo.release();
        }
        if (remoteVideo1 != null) {
            remoteVideo1.release();
        }
        if (remoteVideo2 != null) {
            remoteVideo2.release();
        }
        if (localVideoTrack != null) {
            localVideoTrack.dispose();
        }
        if (localAudioTrack != null) {
            localAudioTrack.dispose();
        }
        if (localVideoCapturer != null) {
            localVideoCapturer.dispose();
        }
        if (localSurfaceTextureHelper != null) {
            localSurfaceTextureHelper.dispose();
        }
        PeerConnectionFactory.stopInternalTracingCapture();
        PeerConnectionFactory.shutdownInternalTracer();
    }

    @Override
    public void onClick(View view) {
        switch (view.getId()) {
            case R.id.chat_video_close_open_video_btn:
                // Toggle the local video track on/off.
                if (localVideoTrack != null) {
                    localVideoTrack.setEnabled(!localVideoTrack.enabled());
                }
                break;
            default:
                break;
        }
    }
}