package com.asiainfo.arsdk.android.demo;

import android.Manifest;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.content.Context;
import android.content.pm.PackageManager;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.util.Log;
import android.webkit.ConsoleMessage;
import android.webkit.JsResult;
import android.webkit.PermissionRequest;
import android.webkit.WebChromeClient;
import android.webkit.WebView;
import android.webkit.WebViewClient;

import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;

import com.asiainfo.arsdk.android.demo.databinding.LayoutDemoHomeBinding;
import com.rayneo.arsdk.android.ui.activity.BaseMirrorActivity;

import org.webrtc.AudioSource;
import org.webrtc.AudioTrack;
import org.webrtc.Camera1Enumerator;
import org.webrtc.Camera2Enumerator;
import org.webrtc.CameraEnumerator;
import org.webrtc.CameraVideoCapturer;
import org.webrtc.DataChannel;
import org.webrtc.DefaultVideoDecoderFactory;
import org.webrtc.DefaultVideoEncoderFactory;
import org.webrtc.EglBase;
import org.webrtc.IceCandidate;
import org.webrtc.MediaConstraints;
import org.webrtc.MediaStream;
import org.webrtc.PeerConnection;
import org.webrtc.PeerConnectionFactory;
import org.webrtc.RendererCommon;
import org.webrtc.RtpReceiver;
import org.webrtc.SurfaceTextureHelper;
import org.webrtc.SurfaceViewRenderer;
import org.webrtc.VideoSource;
import org.webrtc.VideoTrack;
import org.webrtc.audio.JavaAudioDeviceModule;

import java.util.List;

/**
 * Demo home screen that renders a WebRTC-enabled page inside a {@link WebView}.
 *
 * <p>The native WebRTC capture path ({@link #startLocalVideo} / {@link #startLocalAudioCapture})
 * is retained but not wired up in {@link #onCreate}; the live demo currently runs entirely
 * through the WebView, which is granted camera/microphone access via
 * {@link WebChromeClient#onPermissionRequest}.
 *
 * <p>Implements {@link PeerConnection.Observer} with no-op callbacks so a
 * {@link PeerConnection} can be created against this Activity when the native path is enabled.
 */
public class DemoHomeActivity extends BaseMirrorActivity<LayoutDemoHomeBinding> implements PeerConnection.Observer {
    private static final String TAG = "DemoHomeActivity";

    /** Request code used by {@link #checkCameraPermissions} for the CAMERA-only request. */
    private static final int CAMERA_PERMISSION_REQUEST_CODE = 100;
    /** Request code for the combined CAMERA + RECORD_AUDIO request made in onCreate. */
    private static final int AV_PERMISSION_REQUEST_CODE = 1111;

    // Shared EGL context for the capturer, codec factories and the local renderer.
    // NOTE(review): never assigned in the current flow (native init is disabled);
    // must be set before setLocalView()/startLocalVideo() are used.
    private EglBase.Context eglBaseContext;
    private PeerConnectionFactory peerConnectionFactory;
    // Stream ids passed to PeerConnection.addTrack; never populated in the current flow.
    private List<String> streamList;
    private PeerConnection peerConnection;
    private DataChannel channel;

    private CameraVideoCapturer mVideoCapturer;
    private VideoTrack localVideoTrack;

    private SurfaceViewRenderer mLocalView;
    private WebView mWebView;

    /**
     * Requests the CAMERA runtime permission if it has not been granted yet.
     *
     * @param context must be an {@link Activity}; it is cast for the permission request.
     */
    public void checkCameraPermissions(Context context) {
        if (ContextCompat.checkSelfPermission(context, Manifest.permission.CAMERA)
                != PackageManager.PERMISSION_GRANTED) {
            Log.d("checkCameraPermissions", "No Camera Permissions");
            ActivityCompat.requestPermissions((Activity) context,
                    new String[]{Manifest.permission.CAMERA},
                    CAMERA_PERMISSION_REQUEST_CODE);
        }
    }

    @Override
    public void onCreate(Bundle bundle) {
        super.onCreate(bundle);
        this.checkCameraPermissions(this);
        // Native WebRTC bootstrap (PeerConnectionFactory init, EglBase creation,
        // PeerConnection + DataChannel setup, local preview) is intentionally disabled;
        // the demo is driven by the in-page WebRTC stack inside the WebView below.
        //
        // DataChannel.Init options (for when the native path is re-enabled):
        //   ordered            - whether in-order delivery is guaranteed
        //   maxRetransmitTimeMs- maximum time allowed for retransmissions
        //   maxRetransmits     - maximum number of retransmissions
        this.requestPermissions(
                new String[]{Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO},
                AV_PERMISSION_REQUEST_CODE);
        initWebView();
    }

    /**
     * Configures the WebView (JS, DOM storage, zoom, file-URL access) and loads the
     * bundled demo page from the app's assets.
     */
    @SuppressLint("SetJavaScriptEnabled")
    private void initWebView() {
        mWebView = findViewById(R.id.web_view);
        mWebView.setWebViewClient(new WebViewClient());
        mWebView.getSettings().setBuiltInZoomControls(true);
        mWebView.getSettings().setDomStorageEnabled(true);
        mWebView.getSettings().setJavaScriptEnabled(true);
        // Required so the file:// page may issue cross-origin requests (e.g. signaling).
        mWebView.getSettings().setAllowUniversalAccessFromFileURLs(true);

        mWebView.setWebChromeClient(new WebChromeClient() {
            @Override
            public boolean onConsoleMessage(ConsoleMessage consoleMessage) {
                // Surface in-page JS logs in logcat for debugging.
                Log.w(TAG, "onConsoleMessage " + consoleMessage.messageLevel() + " " + consoleMessage.message());
                return super.onConsoleMessage(consoleMessage);
            }

            @Override
            public void onPermissionRequest(PermissionRequest request) {
                // SECURITY: grants every requested resource (camera/mic) to ANY origin.
                // Acceptable for a local-asset demo page only; a production app must
                // check request.getOrigin() against an allow-list before granting.
                request.grant(request.getResources());
            }

            @Override
            public boolean onJsAlert(WebView view, String url, String message, JsResult result) {
                // Auto-confirm JS alerts so the headless demo UI is never blocked.
                result.confirm();
                return true;
            }
        });

        // FIX: asset URLs need the file:///android_asset/ form; the previous
        // "file:android_asset/..." is malformed and the page failed to load.
        mWebView.loadUrl("file:///android_asset/index.html");
    }

    /**
     * Scans the enumerator's devices for a camera facing the requested direction.
     *
     * @param enumerator camera enumerator (Camera1 or Camera2)
     * @param isUseFront true to select a front-facing camera, false for back-facing
     * @return a capturer for the first matching camera, or {@code null} if none is found
     */
    private CameraVideoCapturer createCameraCapturer(CameraEnumerator enumerator, boolean isUseFront) {
        for (String deviceName : enumerator.getDeviceNames()) {
            boolean facingMatches = isUseFront
                    ? enumerator.isFrontFacing(deviceName)
                    : enumerator.isBackFacing(deviceName);
            if (facingMatches) {
                CameraVideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);
                if (videoCapturer != null) {
                    return videoCapturer;
                }
            }
        }
        return null;
    }

    /**
     * Creates a capturer using the Camera2 API when the device supports it,
     * falling back to Camera1 (with texture capture) otherwise.
     *
     * @param isUseFront true for front camera, false for back camera
     * @return the capturer, or {@code null} if no matching camera exists
     */
    private CameraVideoCapturer createCameraCapturer(boolean isUseFront) {
        if (Camera2Enumerator.isSupported(this)) {
            return createCameraCapturer(new Camera2Enumerator(this), isUseFront);
        } else {
            return createCameraCapturer(new Camera1Enumerator(true), isUseFront);
        }
    }

    /**
     * Binds and configures the local preview renderer.
     *
     * <p>Requires {@link #eglBaseContext} to be initialized first.
     *
     * @param localSurfaceView the renderer from the layout to show the local camera feed
     */
    public void setLocalView(SurfaceViewRenderer localSurfaceView) {
        mLocalView = localSurfaceView;
        mLocalView.init(eglBaseContext, null);
        mLocalView.setMirror(true);
        mLocalView.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);
        mLocalView.setKeepScreenOn(true);
        mLocalView.setZOrderMediaOverlay(true);
        mLocalView.setEnableHardwareScaler(false);
    }

    /**
     * Starts local camera capture and renders it into {@link #mLocalView}.
     *
     * <p>Requires {@link #peerConnectionFactory}, {@link #eglBaseContext} and
     * {@link #mLocalView} to be initialized. If no camera matching the requested
     * facing exists, logs an error and returns without capturing (previously NPE'd).
     *
     * @param isOpenAudio true to also start local audio capture
     * @param isUseFront  true to use the front camera, false for the back camera
     */
    public void startLocalVideo(boolean isOpenAudio, boolean isUseFront) {
        VideoSource videoSource = peerConnectionFactory.createVideoSource(true);
        SurfaceTextureHelper surfaceTextureHelper =
                SurfaceTextureHelper.create("CaptureThread", eglBaseContext);
        mVideoCapturer = createCameraCapturer(isUseFront);
        if (mVideoCapturer == null) {
            // FIX: createCameraCapturer may return null; bail out instead of crashing.
            Log.e(TAG, "startLocalVideo: no camera found (front=" + isUseFront + ")");
            return;
        }
        mVideoCapturer.initialize(surfaceTextureHelper, this, videoSource.getCapturerObserver());
        // width, height, fps — TODO: make configurable / pick from supported formats.
        mVideoCapturer.startCapture(800, 600, 30);
        localVideoTrack = peerConnectionFactory.createVideoTrack("video1", videoSource);
        localVideoTrack.addSink(mLocalView);
        if (isOpenAudio) {
            startLocalAudioCapture();
        }
    }

    /**
     * Creates a local audio track with echo cancellation, auto gain, high-pass filter
     * and noise suppression enabled, and attaches it to the peer connection.
     */
    private void startLocalAudioCapture() {
        MediaConstraints audioConstraints = new MediaConstraints();
        // Echo cancellation
        audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair("googEchoCancellation", "true"));
        // Automatic gain control
        audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair("googAutoGainControl", "true"));
        // High-pass filter
        audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair("googHighpassFilter", "true"));
        // Noise suppression
        audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair("googNoiseSuppression", "true"));
        AudioSource audioSource = peerConnectionFactory.createAudioSource(audioConstraints);
        AudioTrack audioTrack = peerConnectionFactory.createAudioTrack("audio1", audioSource);
        MediaStream localMediaStream = peerConnectionFactory.createLocalMediaStream("stream1");
        localMediaStream.addTrack(audioTrack);
        // FIX: peerConnection is null until the native WebRTC path is initialized;
        // guard to avoid a guaranteed NPE in the current flow.
        if (peerConnection != null) {
            // NOTE(review): addTrack (Unified Plan) and addStream (Plan B) are both
            // called here, mirroring the original code — normally only one is needed.
            peerConnection.addTrack(audioTrack, streamList);
            peerConnection.addStream(localMediaStream);
        } else {
            Log.w(TAG, "startLocalAudioCapture: peerConnection not initialized; audio track not attached");
        }
    }

    /**
     * Builds the audio device module that records from the microphone; the
     * samples-ready callback is a hook for optional local/remote recording.
     */
    protected JavaAudioDeviceModule createJavaAudioDeviceModule() {
        return JavaAudioDeviceModule.builder(this)
                .setAudioSource(MediaRecorder.AudioSource.MIC)
                .setSamplesReadyCallback(audioSamples -> {
                    // Hook for recording raw capture samples; intentionally a no-op here.
                })
                .createAudioDeviceModule();
    }

    /**
     * Builds the {@link PeerConnectionFactory} with hardware-accelerated codec
     * factories backed by {@link #eglBaseContext}.
     *
     * @param options factory options (e.g. encryption/network-monitor toggles)
     */
    protected void initPeerConnectionFactory(PeerConnectionFactory.Options options) {
        peerConnectionFactory = PeerConnectionFactory.builder()
                .setOptions(options)
                .setAudioDeviceModule(createJavaAudioDeviceModule())
                .setVideoDecoderFactory(new DefaultVideoDecoderFactory(eglBaseContext))
                .setVideoEncoderFactory(new DefaultVideoEncoderFactory(eglBaseContext,
                        true, true))
                .createPeerConnectionFactory();
    }

    // ---- PeerConnection.Observer: all callbacks intentionally no-ops for this demo. ----

    @Override
    public void onSignalingChange(PeerConnection.SignalingState signalingState) {
    }

    @Override
    public void onIceConnectionChange(PeerConnection.IceConnectionState iceConnectionState) {
    }

    @Override
    public void onIceConnectionReceivingChange(boolean b) {
    }

    @Override
    public void onIceGatheringChange(PeerConnection.IceGatheringState iceGatheringState) {
    }

    @Override
    public void onIceCandidate(IceCandidate iceCandidate) {
    }

    @Override
    public void onIceCandidatesRemoved(IceCandidate[] iceCandidates) {
    }

    @Override
    public void onAddStream(MediaStream mediaStream) {
    }

    @Override
    public void onRemoveStream(MediaStream mediaStream) {
    }

    @Override
    public void onDataChannel(DataChannel dataChannel) {
    }

    @Override
    public void onRenegotiationNeeded() {
    }

    @Override
    public void onAddTrack(RtpReceiver rtpReceiver, MediaStream[] mediaStreams) {
    }
}
