package com.hhu.molibs;

import android.Manifest;
import android.app.Activity;
import android.content.Context;
import android.provider.MediaStore;
import android.util.Log;

import org.webrtc.AudioSource;
import org.webrtc.AudioTrack;
import org.webrtc.Camera1Enumerator;
import org.webrtc.Camera2Enumerator;
import org.webrtc.CameraEnumerator;
import org.webrtc.DefaultVideoDecoderFactory;
import org.webrtc.DefaultVideoEncoderFactory;
import org.webrtc.EglBase;
import org.webrtc.MediaConstraints;
import org.webrtc.PeerConnectionFactory;
import org.webrtc.RendererCommon;
import org.webrtc.SurfaceTextureHelper;
import org.webrtc.SurfaceViewRenderer;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoDecoderFactory;
import org.webrtc.VideoEncoderFactory;
import org.webrtc.VideoFrame;
import org.webrtc.VideoSink;
import org.webrtc.VideoSource;
import org.webrtc.VideoTrack;

import pub.devrel.easypermissions.EasyPermissions;

/*
 * WebRTC client helper: builds the PeerConnectionFactory and the local
 * camera/microphone capture pipeline (video + audio tracks).
 * NOTE(review): an MQTT client is mentioned here but is not implemented
 * in this class — confirm whether signaling lives elsewhere.
 * */
public class TRtc {
    public static final String TAG = "TRtc";
    // NOTE(review): mCtx and mConfig are never assigned inside this class; the
    // caller must set them before init(). mCtx must be an Activity instance,
    // because requestPermission() casts it to Activity.
    Context mCtx;
    TRtcConfig mConfig ;
    PeerConnectionFactory mPeerConnectionFactory;
    EglBase mRootEglBase;
    SurfaceTextureHelper mSurfaceTextureHelper;
    VideoCapturer mVideoCapturer;
    VideoSource mVideoSource;
    VideoTrack mVideoTrack;
    TRtcProxyVideoSink videoSink;
    AudioSource mAudioSource;
    AudioTrack mAudioTrack;

    /**
     * One-shot setup: requests runtime permissions, then creates the
     * PeerConnectionFactory and the local video and audio tracks.
     * Any failure is logged and swallowed (best-effort init).
     */
    public void init(){
        try{
            requestPermission();
            initFactory();
            initVideo();
            initAudio();
        }
        catch (Exception er){
            Log.e(TAG, "init: ",er );
        }
    }

    /**
     * Releases every native resource created by init().
     * Fix: the original only disposed the capturer and the texture helper,
     * leaking the tracks, sources, factory and EGL context (all of which own
     * native memory in the WebRTC Android bindings). Fields are nulled so a
     * double deInit() is harmless.
     */
    public void deInit(){
        try{
            if (mVideoTrack != null && videoSink != null) {
                mVideoTrack.removeSink(videoSink);
            }
            if (mVideoCapturer != null) {
                mVideoCapturer.dispose();
                mVideoCapturer = null;
            }
            if (mSurfaceTextureHelper != null) {
                mSurfaceTextureHelper.dispose();
                mSurfaceTextureHelper = null;
            }
            // Dispose tracks before their sources, sources before the factory.
            if (mVideoTrack != null) {
                mVideoTrack.dispose();
                mVideoTrack = null;
            }
            if (mAudioTrack != null) {
                mAudioTrack.dispose();
                mAudioTrack = null;
            }
            if (mVideoSource != null) {
                mVideoSource.dispose();
                mVideoSource = null;
            }
            if (mAudioSource != null) {
                mAudioSource.dispose();
                mAudioSource = null;
            }
            if (mPeerConnectionFactory != null) {
                mPeerConnectionFactory.dispose();
                mPeerConnectionFactory = null;
            }
            PeerConnectionFactory.stopInternalTracingCapture();
            PeerConnectionFactory.shutdownInternalTracer();
            if (mRootEglBase != null) {
                mRootEglBase.release();
                mRootEglBase = null;
            }
        }
        catch (Exception er){
            // Fixed copy-pasted log tag ("init:" -> "deInit:").
            Log.e(TAG, "deInit: ", er);
        }
    }

    /**
     * Starts camera capture at the resolution/FPS configured in mConfig.
     * No-op if initVideo() has not created a capturer yet.
     */
    public void startCapture(){
        try{
            if( mVideoCapturer!=null){
                mVideoCapturer.startCapture(mConfig.videoWidth, mConfig.videoHeight, mConfig.videoFPS);
            }
        }
        catch (Exception er){
            Log.e(TAG, "startCapture: ", er);
        }
    }

    /** Stops camera capture; no-op if there is no capturer. */
    public void stopCapture(){
        try{
            if( mVideoCapturer!=null){
                mVideoCapturer.stopCapture();
            }
        }
        catch (InterruptedException er){
            // VideoCapturer.stopCapture() may be interrupted; restore the
            // interrupt flag instead of silently swallowing it.
            Thread.currentThread().interrupt();
            Log.e(TAG, "stopCapture: ", er);
        }
        catch (Exception er){
            Log.e(TAG, "stopCapture: ", er);
        }
    }


    /**
     * Requests CAMERA and RECORD_AUDIO at runtime via EasyPermissions.
     * mCtx must be an Activity. NOTE: the hosting Activity must forward
     * onRequestPermissionsResult(...) to
     * EasyPermissions.onRequestPermissionsResult(...) for the callback
     * to be delivered.
     */
    void requestPermission(){
        try{
            String[] perms = {
                    Manifest.permission.CAMERA,
                    Manifest.permission.RECORD_AUDIO
            };
            if (!EasyPermissions.hasPermissions(mCtx, perms)) {
                EasyPermissions.requestPermissions((Activity) mCtx, "Need permissions for camera & microphone", 0, perms);
            }
        }
        catch (Exception er){
            // Fixed copy-pasted log tag.
            Log.e(TAG, "requestPermission: ", er);
        }
    }

    /**
     * Creates the shared EGL context, the capture thread's
     * SurfaceTextureHelper, and the PeerConnectionFactory with hardware
     * VP8-capable encoder/decoder factories.
     */
    void initFactory(){
        try{
            mRootEglBase = EglBase.create( );
            mSurfaceTextureHelper = SurfaceTextureHelper.create("CaptureThread", mRootEglBase.getEglBaseContext());
            final VideoEncoderFactory encoderFactory =
                    new DefaultVideoEncoderFactory(mRootEglBase.getEglBaseContext(),
                            true /* enableIntelVp8Encoder */, false /* enableH264HighProfile */);
            final VideoDecoderFactory decoderFactory =
                    new DefaultVideoDecoderFactory(mRootEglBase.getEglBaseContext());
            PeerConnectionFactory.initialize(PeerConnectionFactory.InitializationOptions.builder(mCtx)
                    .setEnableInternalTracer(true)
                    .createInitializationOptions());
            PeerConnectionFactory.Builder builder = PeerConnectionFactory.builder()
                    .setVideoEncoderFactory(encoderFactory)
                    .setVideoDecoderFactory(decoderFactory);
            builder.setOptions(null);
            mPeerConnectionFactory = builder.createPeerConnectionFactory();
        }
        catch (Exception er){
            // Fixed copy-pasted log tag.
            Log.e(TAG, "initFactory: ", er);
        }
    }

    /**
     * Picks the camera matching mConfig.mCameraId (0 = front, 1 = back),
     * creates the capturer, video source, and local video track, and routes
     * frames through the proxy sink.
     * Fix: if no camera with the requested facing exists, fall back to the
     * first available device instead of calling createCapturer("").
     */
    void initVideo(){
        try{
            boolean useCamera2 = Camera2Enumerator.isSupported(mCtx);
            CameraEnumerator enumerator =
                    useCamera2 ? (new Camera2Enumerator(mCtx)) : (new Camera1Enumerator(true));
            final String[] deviceNames = enumerator.getDeviceNames();

            // Single pass replaces the two duplicated loops of the original.
            String currentCamera = "";
            for (String devName : deviceNames) {
                boolean matchesFacing =
                        (mConfig.mCameraId == 0 && enumerator.isFrontFacing(devName))
                                || (mConfig.mCameraId == 1 && enumerator.isBackFacing(devName));
                if (matchesFacing) {
                    currentCamera = devName;
                    break;
                }
            }
            if (currentCamera.isEmpty() && deviceNames.length > 0) {
                // Requested facing not available; use whatever camera exists.
                currentCamera = deviceNames[0];
            }
            mVideoCapturer = enumerator.createCapturer(currentCamera , null);
            mVideoSource = mPeerConnectionFactory.createVideoSource(false /* isScreencast */);
            mVideoCapturer.initialize(mSurfaceTextureHelper, mCtx.getApplicationContext(), mVideoSource.getCapturerObserver());
            mVideoTrack = mPeerConnectionFactory.createVideoTrack( mConfig.videoTrackId, mVideoSource);
            mVideoTrack.setEnabled(true);
            videoSink = new TRtcProxyVideoSink();
            mVideoTrack.addSink(videoSink);
        }
        catch (Exception er){
            // Fixed copy-pasted log tag.
            Log.e(TAG, "initVideo: ", er);
        }
    }

    /** Creates the microphone audio source and local audio track. */
    void initAudio(){
        try{
            mAudioSource = mPeerConnectionFactory.createAudioSource(new MediaConstraints());
            mAudioTrack = mPeerConnectionFactory.createAudioTrack(mConfig.audioTrackId, mAudioSource);
            mAudioTrack.setEnabled(true);
        }
        catch (Exception er){
            // Fixed copy-pasted log tag.
            Log.e(TAG, "initAudio: ", er);
        }
    }

    /**
     * Initializes a SurfaceViewRenderer against the shared EGL context and,
     * if a proxy sink is supplied, points the sink at the view so frames
     * start rendering. Must be called after initFactory().
     *
     * @param view       renderer to initialize; ignored if null
     * @param mirrorFlag true to mirror (typical for the front camera preview)
     * @param skin       proxy sink to retarget at this view; may be null
     */
    void initSurfaceView(SurfaceViewRenderer view , boolean mirrorFlag , TRtcProxyVideoSink skin){
        try{
            if( view!=null){
                view.init(mRootEglBase.getEglBaseContext(), null);
                view.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);
                view.setEnableHardwareScaler(true /* enabled */);
                view.setZOrderMediaOverlay(true);
                view.setMirror(mirrorFlag);
                if( skin!=null){
                    skin.setTarget(view);
                }
            }
        }
        catch (Exception er){
            // Fixed copy-pasted log tag.
            Log.e(TAG, "initSurfaceView: ", er);
        }
    }


    /** Capture configuration with sensible 720p defaults. */
    public static class  TRtcConfig{
        public int mCameraId = 0 ; // 0 = front camera, 1 = back camera
        public String videoTrackId ="videoTrackId";
        public String audioTrackId ="audioTrackId";
        public int videoWidth = 1280;
        public int videoHeight = 720;
        public int videoFPS = 25;
    }

    /**
     * VideoSink that forwards frames to a swappable target, dropping frames
     * while no target is attached. Synchronized so setTarget() and onFrame()
     * (called from the capture thread) do not race.
     */
    public static class TRtcProxyVideoSink implements VideoSink {
        private VideoSink mTarget;
        @Override
        public synchronized void onFrame(VideoFrame frame) {
            if (mTarget == null) {
                Log.d(TAG, "Dropping frame in proxy because target is null.");
                return;
            }
            mTarget.onFrame(frame);
        }
        synchronized void setTarget(VideoSink target) {
            this.mTarget = target;
        }
    }
}
