package org.appspot.apprtc;

import android.content.Intent;
import android.content.Context;
import android.os.Environment;
import android.os.ParcelFileDescriptor;
import android.util.Log;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.webrtc.AudioSource;
import org.webrtc.AudioTrack;
import org.webrtc.Camera1Enumerator;
import org.webrtc.Camera2Enumerator;
import org.webrtc.CameraEnumerator;
import org.webrtc.CameraVideoCapturer;
import org.webrtc.DataChannel;
import org.webrtc.EglBase;
import org.webrtc.FileVideoCapturer;
import org.webrtc.IceCandidate;
import org.webrtc.Logging;
import org.webrtc.MediaConstraints;
import org.webrtc.MediaStream;
import org.webrtc.PeerConnectionFactory;
import org.webrtc.StatsObserver;
import org.webrtc.StatsReport;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoRenderer;
import org.webrtc.VideoSource;
import org.webrtc.VideoTrack;
import org.webrtc.voiceengine.WebRtcAudioManager;
import org.webrtc.voiceengine.WebRtcAudioRecord;
import org.webrtc.voiceengine.WebRtcAudioTrack;
import org.webrtc.voiceengine.WebRtcAudioRecord.AudioRecordStartErrorCode;
import org.webrtc.voiceengine.WebRtcAudioRecord.WebRtcAudioRecordErrorCallback;
import org.webrtc.voiceengine.WebRtcAudioTrack.WebRtcAudioTrackErrorCallback;
import org.webrtc.voiceengine.WebRtcAudioUtils;

/**
 * Creates and owns the local media stream for a call: the peer connection
 * factory, the camera capturer, and the local audio/video tracks.
 *
 * <p>Media parameters are read from the call intent extras. Mutating
 * operations (start/stop capture, enable/disable tracks, switch camera,
 * close) are routed to a private single-threaded executor, and should be
 * invoked through the public methods of this class.
 */
public class LocalMediaStream {
  public static final String VIDEO_TRACK_ID = "ARDAMSv0";
  public static final String AUDIO_TRACK_ID = "ARDAMSa0";
  public static final String VIDEO_TRACK_TYPE = "video";
  private static final String TAG = "PCRTCClient";
  private static final String VIDEO_CODEC_VP8 = "VP8";
  private static final String VIDEO_CODEC_VP9 = "VP9";
  private static final String VIDEO_CODEC_H264 = "H264";
  private static final String VIDEO_CODEC_H264_BASELINE = "H264 Baseline";
  private static final String VIDEO_CODEC_H264_HIGH = "H264 High";
  public static final String AUDIO_CODEC_OPUS = "opus";
  public static final String AUDIO_CODEC_ISAC = "ISAC";
  public static final String VIDEO_CODEC_PARAM_START_BITRATE = "x-google-start-bitrate";
  private static final String VIDEO_FLEXFEC_FIELDTRIAL =
      "WebRTC-FlexFEC-03-Advertised/Enabled/WebRTC-FlexFEC-03/Enabled/";
  private static final String VIDEO_VP8_INTEL_HW_ENCODER_FIELDTRIAL = "WebRTC-IntelVP8/Enabled/";
  private static final String VIDEO_H264_HIGH_PROFILE_FIELDTRIAL =
      "WebRTC-H264HighProfile/Enabled/";
  private static final String DISABLE_WEBRTC_AGC_FIELDTRIAL =
      "WebRTC-Audio-MinimizeResamplingOnMobile/Enabled/";
  public static final String AUDIO_CODEC_PARAM_BITRATE = "maxaveragebitrate";
  private static final String AUDIO_ECHO_CANCELLATION_CONSTRAINT = "googEchoCancellation";
  private static final String AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT = "googAutoGainControl";
  private static final String AUDIO_HIGH_PASS_FILTER_CONSTRAINT = "googHighpassFilter";
  private static final String AUDIO_NOISE_SUPPRESSION_CONSTRAINT = "googNoiseSuppression";
  private static final String AUDIO_LEVEL_CONTROL_CONSTRAINT = "levelControl";
  private static final String DTLS_SRTP_KEY_AGREEMENT_CONSTRAINT = "DtlsSrtpKeyAgreement";
  // Default capture resolution when the intent does not specify one.
  // NOTE(review): still named "HD" but deliberately lowered from 1280x720 to 720x360.
  private static final int HD_VIDEO_WIDTH = 720; // 1280;
  private static final int HD_VIDEO_HEIGHT = 360; // 720;
  public static final int BPS_IN_KBPS = 1000;

  // Single-threaded executor: all mutations of capturer/track state after
  // construction are serialized on this thread.
  private final ScheduledExecutorService executor;

  private AudioSource audioSource;
  private VideoSource videoSource;
  public boolean videoCallEnabled;
  public boolean preferIsac;
  public String preferredVideoCodec;
  private boolean videoCapturerStopped;
  // Set once a media error has been reported; gates camera switch / format changes.
  private boolean isError;
  private Timer statsTimer;

  private ParcelFileDescriptor aecDumpFileDescriptor;

  public MediaStream local_stream;
  private VideoCapturer videoCapturer;

  // enableVideoTrack is true if video should be rendered and sent.
  private boolean enableVideoTrack;
  private VideoTrack localVideoTrack;

  // enableAudioTrack is true if audio should be sent.
  private boolean enableAudioTrack;
  private AudioTrack localAudioTrack;

  private Context context;
  private Intent intent;

  private boolean loopback;

  private MediaConstraints audioConstraints;
  public MediaConstraints pcConstraints;
  public MediaConstraints sdpMediaConstraints;

  private int videoWidth;
  private int videoHeight;
  private int videoFps;

  private boolean tracing;
  private boolean videoFlexfecEnabled;

  public static PeerConnectionFactory factory;

  public int videoMaxBitrate;
  private String videoCodec;
  private boolean videoCodecHwAcceleration;

  public int audioStartBitrate;
  private String audioCodec;
  private boolean noAudioProcessing;
  private boolean aecDump;
  private boolean useOpenSLES;
  private boolean disableBuiltInAEC;
  private boolean disableBuiltInAGC;
  private boolean disableBuiltInNS;
  private boolean enableLevelControl;
  private boolean disableWebRtcAGCAndHPF;

  private VideoRenderer.Callbacks localRender;

  /**
   * Creates the peer connection factory, the local audio/video tracks and the
   * local media stream. Media parameters (codecs, bitrates, resolution, fps,
   * audio-processing switches, ...) are read from the CallActivity.EXTRA_*
   * extras of {@code intent}.
   *
   * <p>If no camera can be opened, the call falls back to audio-only instead
   * of crashing (the original code dereferenced a null capturer here).
   *
   * @param intent           call intent carrying the CallActivity.EXTRA_* parameters
   * @param context          context used for factory and camera creation
   * @param renderEGLContext shared EGL context for HW video codecs
   * @param local_render     renderer that receives the local video frames
   */
  public LocalMediaStream(Intent intent, Context context,
      final EglBase.Context renderEGLContext, VideoRenderer.Callbacks local_render) {
    executor = Executors.newSingleThreadScheduledExecutor();
    this.intent = intent;
    this.context = context;

    // Read all call parameters from the intent extras.
    videoCallEnabled = intent.getBooleanExtra(CallActivity.EXTRA_VIDEO_CALL, true);
    loopback = intent.getBooleanExtra(CallActivity.EXTRA_LOOPBACK, false);
    videoWidth = intent.getIntExtra(CallActivity.EXTRA_VIDEO_WIDTH, 0);
    videoHeight = intent.getIntExtra(CallActivity.EXTRA_VIDEO_HEIGHT, 0);
    videoFps = intent.getIntExtra(CallActivity.EXTRA_VIDEO_FPS, 0);
    tracing = intent.getBooleanExtra(CallActivity.EXTRA_TRACING, false);
    noAudioProcessing = intent.getBooleanExtra(CallActivity.EXTRA_NOAUDIOPROCESSING_ENABLED, false);
    enableLevelControl = intent.getBooleanExtra(CallActivity.EXTRA_ENABLE_LEVEL_CONTROL, false);
    videoFlexfecEnabled = intent.getBooleanExtra(CallActivity.EXTRA_FLEXFEC_ENABLED, false);
    videoMaxBitrate = intent.getIntExtra(CallActivity.EXTRA_VIDEO_BITRATE, 0);
    videoCodec = intent.getStringExtra(CallActivity.EXTRA_VIDEOCODEC);
    videoCodecHwAcceleration = intent.getBooleanExtra(CallActivity.EXTRA_HWCODEC_ENABLED, true);
    audioStartBitrate = intent.getIntExtra(CallActivity.EXTRA_AUDIO_BITRATE, 0);
    audioCodec = intent.getStringExtra(CallActivity.EXTRA_AUDIOCODEC);
    aecDump = intent.getBooleanExtra(CallActivity.EXTRA_AECDUMP_ENABLED, false);
    useOpenSLES = intent.getBooleanExtra(CallActivity.EXTRA_OPENSLES_ENABLED, false);
    disableBuiltInAEC = intent.getBooleanExtra(CallActivity.EXTRA_DISABLE_BUILT_IN_AEC, false);
    disableBuiltInAGC = intent.getBooleanExtra(CallActivity.EXTRA_DISABLE_BUILT_IN_AGC, false);
    disableBuiltInNS = intent.getBooleanExtra(CallActivity.EXTRA_DISABLE_BUILT_IN_NS, false);
    disableWebRtcAGCAndHPF =
        intent.getBooleanExtra(CallActivity.EXTRA_DISABLE_WEBRTC_AGC_AND_HPF, false);

    factory = null;
    preferIsac = false;
    videoCapturerStopped = false;
    isError = false;
    local_stream = null;
    videoCapturer = null;
    enableVideoTrack = true;
    localVideoTrack = null;
    enableAudioTrack = true;
    localAudioTrack = null;
    statsTimer = new Timer();
    localRender = local_render;

    createFactory(context);

    if (videoCallEnabled) {
      Log.d(TAG, "EGLContext: " + renderEGLContext);
      factory.setVideoHwAccelerationOptions(renderEGLContext, renderEGLContext);
    }

    videoCapturer = createVideoCapturer();

    // May flip videoCallEnabled to false when no camera could be opened.
    createMediaConstraints();

    // Audio track.
    audioSource = factory.createAudioSource(audioConstraints);
    localAudioTrack = factory.createAudioTrack(AUDIO_TRACK_ID, audioSource);
    localAudioTrack.setEnabled(enableAudioTrack);

    // Video track. Guarded: without this check a device with no usable camera
    // hit a NullPointerException on videoCapturer below.
    if (videoCallEnabled && videoCapturer != null) {
      videoSource = factory.createVideoSource(videoCapturer);
      videoCapturer.startCapture(videoWidth, videoHeight, videoFps);
      localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
      localVideoTrack.setEnabled(enableVideoTrack);
      localVideoTrack.addRenderer(new VideoRenderer(localRender));
    }

    // Local stream carrying the created tracks.
    local_stream = factory.createLocalMediaStream("ARDAMS");
    if (videoCallEnabled && localVideoTrack != null) {
      local_stream.addTrack(localVideoTrack);
    }
    local_stream.addTrack(localAudioTrack);
  }

  /**
   * Configures global WebRTC state (tracing, field trials, audio back-end
   * switches, audio error callbacks) and creates the PeerConnectionFactory.
   */
  private void createFactory(Context context) {
    PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();

    if (loopback) {
      options.networkIgnoreMask = 0;
    }

    PeerConnectionFactory.initializeInternalTracer();
    if (tracing) {
      PeerConnectionFactory.startInternalTracingCapture(
          Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator
          + "webrtc-trace.txt");
    }
    Log.d(TAG, "Create peer connection factory. Use video: " + videoCallEnabled);
    isError = false;

    // Initialize field trials.
    String fieldTrials = "";
    if (videoFlexfecEnabled) {
      fieldTrials += VIDEO_FLEXFEC_FIELDTRIAL;
      Log.d(TAG, "Enable FlexFEC field trial.");
    }
    fieldTrials += VIDEO_VP8_INTEL_HW_ENCODER_FIELDTRIAL;
    if (disableWebRtcAGCAndHPF) {
      fieldTrials += DISABLE_WEBRTC_AGC_FIELDTRIAL;
      Log.d(TAG, "Disable WebRTC AGC field trial.");
    }

    // Check preferred video codec; default to VP8 for unknown values.
    preferredVideoCodec = VIDEO_CODEC_VP8;
    if (videoCallEnabled && videoCodec != null) {
      switch (videoCodec) {
        case VIDEO_CODEC_VP8:
          preferredVideoCodec = VIDEO_CODEC_VP8;
          break;
        case VIDEO_CODEC_VP9:
          preferredVideoCodec = VIDEO_CODEC_VP9;
          break;
        case VIDEO_CODEC_H264_BASELINE:
          preferredVideoCodec = VIDEO_CODEC_H264;
          break;
        case VIDEO_CODEC_H264_HIGH:
          // TODO(magjed): Strip High from SDP when selecting Baseline instead of using field trial.
          fieldTrials += VIDEO_H264_HIGH_PROFILE_FIELDTRIAL;
          preferredVideoCodec = VIDEO_CODEC_H264;
          break;
        default:
          preferredVideoCodec = VIDEO_CODEC_VP8;
      }
    }
    Log.d(TAG, "Preferred video codec: " + preferredVideoCodec);
    PeerConnectionFactory.initializeFieldTrials(fieldTrials);
    Log.d(TAG, "Field trials: " + fieldTrials);

    // Check if ISAC is used by default.
    preferIsac = audioCodec != null && audioCodec.equals(AUDIO_CODEC_ISAC);

    // Enable/disable OpenSL ES playback.
    if (!useOpenSLES) {
      Log.d(TAG, "Disable OpenSL ES audio even if device supports it");
      WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(true /* enable */);
    } else {
      Log.d(TAG, "Allow OpenSL ES audio if device supports it");
      WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(false);
    }

    // For each audio unit: "true" forces the WebRTC software implementation,
    // "false" allows the hardware (built-in) one when available.
    if (disableBuiltInAEC) {
      Log.d(TAG, "Disable built-in AEC even if device supports it");
      WebRtcAudioUtils.setWebRtcBasedAcousticEchoCanceler(true);
    } else {
      Log.d(TAG, "Enable built-in AEC if device supports it");
      WebRtcAudioUtils.setWebRtcBasedAcousticEchoCanceler(false);
    }

    if (disableBuiltInAGC) {
      Log.d(TAG, "Disable built-in AGC even if device supports it");
      WebRtcAudioUtils.setWebRtcBasedAutomaticGainControl(true);
    } else {
      Log.d(TAG, "Enable built-in AGC if device supports it");
      WebRtcAudioUtils.setWebRtcBasedAutomaticGainControl(false);
    }

    if (disableBuiltInNS) {
      Log.d(TAG, "Disable built-in NS even if device supports it");
      WebRtcAudioUtils.setWebRtcBasedNoiseSuppressor(true);
    } else {
      Log.d(TAG, "Enable built-in NS if device supports it");
      WebRtcAudioUtils.setWebRtcBasedNoiseSuppressor(false);
    }

    // Set audio record error callbacks.
    WebRtcAudioRecord.setErrorCallback(new WebRtcAudioRecordErrorCallback() {
      @Override
      public void onWebRtcAudioRecordInitError(String errorMessage) {
        Log.e(TAG, "onWebRtcAudioRecordInitError: " + errorMessage);
        reportError(errorMessage);
      }

      @Override
      public void onWebRtcAudioRecordStartError(
          AudioRecordStartErrorCode errorCode, String errorMessage) {
        Log.e(TAG, "onWebRtcAudioRecordStartError: " + errorCode + ". " + errorMessage);
        reportError(errorMessage);
      }

      @Override
      public void onWebRtcAudioRecordError(String errorMessage) {
        Log.e(TAG, "onWebRtcAudioRecordError: " + errorMessage);
        reportError(errorMessage);
      }
    });

    WebRtcAudioTrack.setErrorCallback(new WebRtcAudioTrackErrorCallback() {
      @Override
      public void onWebRtcAudioTrackInitError(String errorMessage) {
        reportError(errorMessage);
      }

      @Override
      public void onWebRtcAudioTrackStartError(String errorMessage) {
        reportError(errorMessage);
      }

      @Override
      public void onWebRtcAudioTrackError(String errorMessage) {
        reportError(errorMessage);
      }
    });

    // Create peer connection factory.
    PeerConnectionFactory.initializeAndroidGlobals(context, videoCodecHwAcceleration);
    Log.d(TAG, "Factory networkIgnoreMask option: " + options.networkIgnoreMask);
    factory = new PeerConnectionFactory(options);
    Log.d(TAG, "Peer connection factory created.");
  }

  /**
   * Builds {@link #pcConstraints}, {@link #audioConstraints} and
   * {@link #sdpMediaConstraints}. Also disables the video call
   * ({@code videoCallEnabled = false}) when no camera capturer exists and
   * fills in default resolution/fps for video calls.
   */
  private void createMediaConstraints() {
    // Create peer connection constraints.
    pcConstraints = new MediaConstraints();
    // Enable DTLS for normal calls and disable for loopback calls.
    if (loopback) {
      pcConstraints.optional.add(
          new MediaConstraints.KeyValuePair(DTLS_SRTP_KEY_AGREEMENT_CONSTRAINT, "false"));
    } else {
      pcConstraints.optional.add(
          new MediaConstraints.KeyValuePair(DTLS_SRTP_KEY_AGREEMENT_CONSTRAINT, "true"));
    }

    // Check if there is a camera on device and disable video call if not.
    if (videoCapturer == null) {
      Log.w(TAG, "No camera on device. Switch to audio only call.");
      videoCallEnabled = false;
    }
    // Create video constraints if video call is enabled.
    if (videoCallEnabled) {
      // If video resolution is not specified, default to HD.
      if (videoWidth == 0 || videoHeight == 0) {
        videoWidth = HD_VIDEO_WIDTH;
        videoHeight = HD_VIDEO_HEIGHT;
      }

      // If fps is not specified, default to 30.
      if (videoFps == 0) {
        videoFps = 30;
      }
      Logging.d(TAG, "Capturing format: " + videoWidth + "x" + videoHeight + "@" + videoFps);
    }

    // Create audio constraints.
    audioConstraints = new MediaConstraints();
    // Added for audio performance measurements.
    if (noAudioProcessing) {
      Log.d(TAG, "Disabling audio processing");
      audioConstraints.mandatory.add(
          new MediaConstraints.KeyValuePair(AUDIO_ECHO_CANCELLATION_CONSTRAINT, "false"));
      audioConstraints.mandatory.add(
          new MediaConstraints.KeyValuePair(AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT, "false"));
      audioConstraints.mandatory.add(
          new MediaConstraints.KeyValuePair(AUDIO_HIGH_PASS_FILTER_CONSTRAINT, "false"));
      audioConstraints.mandatory.add(
          new MediaConstraints.KeyValuePair(AUDIO_NOISE_SUPPRESSION_CONSTRAINT, "false"));
    }
    if (enableLevelControl) {
      Log.d(TAG, "Enabling level control.");
      audioConstraints.mandatory.add(
          new MediaConstraints.KeyValuePair(AUDIO_LEVEL_CONTROL_CONSTRAINT, "true"));
    }

    // Create SDP constraints.
    sdpMediaConstraints = new MediaConstraints();
    sdpMediaConstraints.mandatory.add(
        new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
    if (videoCallEnabled || loopback) {
      sdpMediaConstraints.mandatory.add(
          new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));
    } else {
      sdpMediaConstraints.mandatory.add(
          new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "false"));
    }
  }

  /**
   * Creates a camera capturer, preferring the camera2 API when supported and
   * requested by the intent.
   *
   * @return the capturer, or null when no camera could be opened (an error is
   *     reported and the caller is expected to fall back to audio-only)
   */
  private VideoCapturer createVideoCapturer() {
    VideoCapturer videoCapturer = null;
    if (useCamera2()) {
      if (!captureToTexture()) {
        // Camera2 only supports capturing to texture surfaces.
        reportError(context.getString(R.string.camera2_texture_only_error));
        return null;
      }
      Logging.d(TAG, "Creating capturer using camera2 API.");
      videoCapturer = createCameraCapturer(new Camera2Enumerator(context));
    } else {
      Logging.d(TAG, "Creating capturer using camera1 API.");
      videoCapturer = createCameraCapturer(new Camera1Enumerator(captureToTexture()));
    }

    if (videoCapturer == null) {
      reportError("Failed to open camera");
      return null;
    }
    return videoCapturer;
  }

  /**
   * Tries to open a front-facing camera first, then any other camera.
   *
   * @return a capturer for the first camera that opens, or null if none does
   */
  private VideoCapturer createCameraCapturer(CameraEnumerator enumerator) {
    final String[] deviceNames = enumerator.getDeviceNames();

    // First, try to find front facing camera.
    Logging.d(TAG, "Looking for front facing cameras.");
    for (String deviceName : deviceNames) {
      if (enumerator.isFrontFacing(deviceName)) {
        Logging.d(TAG, "Creating front facing camera capturer.");
        VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);

        if (videoCapturer != null) {
          return videoCapturer;
        }
      }
    }

    // Front facing camera not found, try something else.
    Logging.d(TAG, "Looking for other cameras.");
    for (String deviceName : deviceNames) {
      if (!enumerator.isFrontFacing(deviceName)) {
        Logging.d(TAG, "Creating other camera capturer.");
        VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);

        if (videoCapturer != null) {
          return videoCapturer;
        }
      }
    }

    return null;
  }

  /** Returns true when the device supports camera2 and the intent allows it. */
  private boolean useCamera2() {
    return Camera2Enumerator.isSupported(context)
        && intent.getBooleanExtra(CallActivity.EXTRA_CAMERA2, true);
  }

  /** Returns true when the intent requests capturing to a texture surface. */
  private boolean captureToTexture() {
    return intent.getBooleanExtra(CallActivity.EXTRA_CAPTURETOTEXTURE_ENABLED, false);
  }

  /**
   * Records and logs a media error. The original stub silently dropped errors
   * and never set {@link #isError}, so camera-switch/format-change guards
   * could not trigger; now the error is logged and the flag is latched.
   */
  private void reportError(final String description) {
    Log.e(TAG, "Error: " + description);
    isError = true;
  }

  ////////////////////////////////////////////////
  ////////////////////////////////////////////////
  ////////////start and stop capture local video//

  /** Stops the camera capture on the executor thread (no-op if already stopped). */
  public void stopVideoSource() {
    executor.execute(new Runnable() {
      @Override
      public void run() {
        if (videoCapturer != null && !videoCapturerStopped) {
          Log.d(TAG, "Stop video source.");
          try {
            videoCapturer.stopCapture();
          } catch (InterruptedException e) {
            // Restore the interrupt status instead of swallowing it.
            Thread.currentThread().interrupt();
          }
          videoCapturerStopped = true;
        }
      }
    });
  }

  /** Restarts the camera capture on the executor thread (no-op if running). */
  public void startVideoSource() {
    executor.execute(new Runnable() {
      @Override
      public void run() {
        if (videoCapturer != null && videoCapturerStopped) {
          Log.d(TAG, "Restart video source.");
          videoCapturer.startCapture(videoWidth, videoHeight, videoFps);
          videoCapturerStopped = false;
        }
      }
    });
  }

  ////////////////////////////////////////////////
  ////////////////////////////////////////////////
  ////////////enable track local audio  //////////

  /** Enables/disables the local audio track (i.e. whether audio is sent). */
  public void setAudioTrackEnabled(final boolean enable) {
    executor.execute(new Runnable() {
      @Override
      public void run() {
        enableAudioTrack = enable;
        if (localAudioTrack != null) {
          localAudioTrack.setEnabled(enableAudioTrack);
        }
      }
    });
  }

  ////////////////////////////////////////////////
  ////////////////////////////////////////////////
  ////////////enable track local video  //////////

  /** Enables/disables the local video track (i.e. whether video is rendered/sent). */
  public void setVideoTrackEnabled(final boolean enable) {
    executor.execute(new Runnable() {
      @Override
      public void run() {
        enableVideoTrack = enable;
        if (localVideoTrack != null) {
          localVideoTrack.setEnabled(enableVideoTrack);
        }
      }
    });
  }

  ////////////////////////////////////////////////
  ////////////////////////////////////////////////
  ////////////switch camera///////////////////////

  // Runs on the executor thread. The instanceof check already guarantees the
  // capturer is non-null, so the former extra null check was removed.
  private void switchCameraInternal() {
    if (!(videoCapturer instanceof CameraVideoCapturer)) {
      Log.d(TAG, "Will not switch camera, video capturer is not a camera");
      return;
    }
    if (!videoCallEnabled || isError) {
      Log.e(TAG, "Failed to switch camera. Video: " + videoCallEnabled + ". Error : " + isError);
      return; // No video is sent or an error happened.
    }
    Log.d(TAG, "Switch camera");
    ((CameraVideoCapturer) videoCapturer).switchCamera(null);
  }

  /** Switches between front and back camera, if the capturer is a camera. */
  public void switchCamera() {
    executor.execute(new Runnable() {
      @Override
      public void run() {
        switchCameraInternal();
      }
    });
  }

  ////////////////////////////////////////////////
  ////////////////////////////////////////////////
  ////////////change capture format///////////////

  /** Requests a new output format for the video source. */
  public void changeCaptureFormat(final int width, final int height, final int framerate) {
    executor.execute(new Runnable() {
      @Override
      public void run() {
        changeCaptureFormatInternal(width, height, framerate);
      }
    });
  }

  // Runs on the executor thread.
  private void changeCaptureFormatInternal(int width, int height, int framerate) {
    if (!videoCallEnabled || isError || videoCapturer == null) {
      Log.e(TAG,
          "Failed to change capture format. Video: " + videoCallEnabled + ". Error : " + isError);
      return;
    }
    Log.d(TAG, "changeCaptureFormat: " + width + "x" + height + "@" + framerate);
    videoSource.adaptOutputFormat(width, height, framerate);
  }

  /**
   * Releases all media resources on the executor thread. Terminal: the object
   * must not be used after this call.
   */
  public void close() {
    executor.execute(new Runnable() {
      @Override
      public void run() {
        closeInternal();
      }
    });
  }

  // Runs on the executor thread. Disposal order matters: sources and capturer
  // must go before the factory that created them.
  private void closeInternal() {
    Log.d(TAG, "Closing peer connection.");
    statsTimer.cancel();

    Log.d(TAG, "Closing audio source.");
    if (audioSource != null) {
      audioSource.dispose();
      audioSource = null;
    }
    Log.d(TAG, "Stopping capture.");
    if (videoCapturer != null) {
      try {
        videoCapturer.stopCapture();
      } catch (InterruptedException e) {
        throw new RuntimeException(e);
      }
      videoCapturerStopped = true;
      videoCapturer.dispose();
      videoCapturer = null;
    }
    Log.d(TAG, "Closing video source.");
    if (videoSource != null) {
      videoSource.dispose();
      videoSource = null;
    }
    localRender = null;

    Log.d(TAG, "Closing peer connection factory.");
    if (factory != null) {
      factory.dispose();
      factory = null;
    }

    PeerConnectionFactory.stopInternalTracingCapture();
    PeerConnectionFactory.shutdownInternalTracer();

    // Stop the worker thread; close() is terminal, so no further tasks are
    // expected. Shutting down from within the last task is safe.
    executor.shutdown();
  }
}
























