package com.example.PLDroidMediaStreaming;


import android.content.Context;
import android.content.Intent;
import android.hardware.Camera;
import android.util.Log;
import android.view.View;
import android.view.ViewGroup;

import androidx.annotation.NonNull;

import com.qiniu.pili.droid.streaming.AVCodecType;
import com.qiniu.pili.droid.streaming.AudioSourceCallback;
import com.qiniu.pili.droid.streaming.CameraStreamingSetting;
import com.qiniu.pili.droid.streaming.MediaStreamingManager;
import com.qiniu.pili.droid.streaming.MicrophoneStreamingSetting;
import com.qiniu.pili.droid.streaming.StreamStatusCallback;
import com.qiniu.pili.droid.streaming.StreamingProfile;
import com.qiniu.pili.droid.streaming.StreamingSessionListener;
import com.qiniu.pili.droid.streaming.StreamingState;
import com.qiniu.pili.droid.streaming.StreamingStateChangedListener;
import com.qiniu.pili.droid.streaming.WatermarkSetting;

import java.nio.ByteBuffer;
import java.util.List;
import java.util.Map;

import io.flutter.plugin.platform.PlatformView;

/**
 * Flutter {@link PlatformView} that hosts a Qiniu PLDroid camera-preview surface and
 * drives the RTMP push-streaming lifecycle.
 *
 * <p>Responsibilities:
 * <ul>
 *   <li>Builds the streaming settings from the creation params via {@code DataParser}.</li>
 *   <li>Prepares a {@link MediaStreamingManager} with a software A/V codec.</li>
 *   <li>Starts streaming automatically once the engine reports {@code READY}, and retries
 *       on {@code onRestartStreamingHandled}.</li>
 *   <li>Releases the native streaming engine in {@link #dispose()}.</li>
 * </ul>
 *
 * <p>Thread-safety: all SDK callbacks are assumed to arrive on the SDK's own threads;
 * {@code startStreaming()} is a blocking call and is therefore always invoked off the
 * caller's thread — NOTE(review): confirm against the PLDroid SDK threading contract.
 */
public class PLDroidMediaStreamingRenderView implements PlatformView,
    StreamingStateChangedListener, StreamStatusCallback, AudioSourceCallback,
    StreamingSessionListener {

  private static final String TAG = "PLDroidMediaStreamingRenderView";

  private CameraPreviewFrameView mCameraPreviewSurfaceView;
  private MediaStreamingManager mMediaStreamingManager;
  private StreamingProfile mProfile;
  private MicrophoneStreamingSetting mMicrophoneStreamingSetting;
  private WatermarkSetting mWatermarkSetting;
  private CameraStreamingSetting mCameraStreamingSetting;

  private final long uid;
  private final Context context;
  private String publishURL;

  /**
   * @param context Android context used both for the preview surface and the engine.
   * @param uid     platform-view identifier assigned by the Flutter engine.
   * @param params  creation parameters from Dart; must contain "publishURL" plus the
   *                fields consumed by {@code DataParser}.
   */
  PLDroidMediaStreamingRenderView(Context context, int uid, Map<String, Object> params) {
    this.publishURL = (String) params.get("publishURL");
    this.mCameraPreviewSurfaceView = new CameraPreviewFrameView(context.getApplicationContext());
    this.uid = uid;
    this.context = context;

    // DataParser translates the raw param map into the SDK setting objects; microphone
    // and watermark settings may legitimately be absent (null).
    DataParser dataParser = new DataParser(params, context);

    mCameraStreamingSetting = dataParser.cameraStreamingSetting;
    mProfile = dataParser.streamingProfile;
    mMicrophoneStreamingSetting = dataParser.microphoneStreamingSetting;
    mWatermarkSetting = dataParser.watermarkSetting;

    this.init();
  }

  @Override
  public void onFlutterViewAttached(@NonNull View flutterView) {
    // Resume preview/streaming when the view re-enters the Flutter hierarchy.
    if (mMediaStreamingManager != null) {
      mMediaStreamingManager.resume();
    }
  }

  @Override
  public void onFlutterViewDetached() {
    // Pause (release camera/mic) while the view is detached from the hierarchy.
    if (mMediaStreamingManager != null) {
      mMediaStreamingManager.pause();
    }
  }

  @Override
  public View getView() {
    return mCameraPreviewSurfaceView;
  }

  @Override
  public void dispose() {
    // Fix: this was previously a no-op, leaking the camera, microphone and the native
    // streaming engine. dispose() is the final lifecycle call for a PlatformView, so
    // stop and destroy the engine here. destroy() releases native resources —
    // NOTE(review): confirm destroy() is the correct terminal call for this SDK version.
    if (mMediaStreamingManager != null) {
      mMediaStreamingManager.pause();
      mMediaStreamingManager.destroy();
      mMediaStreamingManager = null;
    }
  }

  /** Creates and prepares the streaming engine, then registers this view as listener. */
  private void init() {
    // Software video + software audio codec for maximum device compatibility.
    mMediaStreamingManager = new MediaStreamingManager(context, mCameraPreviewSurfaceView,
        AVCodecType.SW_VIDEO_WITH_SW_AUDIO_CODEC);  // soft codec
    mMediaStreamingManager.setNativeLoggingEnabled(true);
    // The SDK exposes distinct prepare() overloads; pick the one matching which optional
    // settings are present (watermark requires the 4-arg overload).
    if (mMicrophoneStreamingSetting != null && mWatermarkSetting != null) {
      mMediaStreamingManager.prepare(mCameraStreamingSetting, mMicrophoneStreamingSetting,
          mWatermarkSetting, mProfile);
    } else if (mMicrophoneStreamingSetting != null) {
      mMediaStreamingManager.prepare(mCameraStreamingSetting, mMicrophoneStreamingSetting,
          mProfile);
    } else {
      mMediaStreamingManager.prepare(mCameraStreamingSetting, mProfile);
    }

    mMediaStreamingManager.setStreamingStateListener(this);
    mMediaStreamingManager.setStreamingSessionListener(this);
    mMediaStreamingManager.setStreamStatusCallback(this);
    mMediaStreamingManager.setAudioSourceCallback(this);
  }

  /**
   * Kicks off streaming on a background thread. startStreaming() performs network I/O
   * and must not run on the caller's thread. Shared by the READY state handler and the
   * restart callback (previously duplicated inline).
   */
  private void startStreamingAsync() {
    new Thread(new Runnable() {
      @Override
      public void run() {
        if (mMediaStreamingManager != null) {
          mMediaStreamingManager.startStreaming();
        }
      }
    }).start();
  }

  @Override
  public void onStateChanged(StreamingState streamingState, Object extra) {
    Log.e(TAG, "streamingState = " + streamingState + "extra = " + extra);
    switch (streamingState) {
      case PREPARING:
        Log.e(TAG, "PREPARING");
        break;
      case READY:
        Log.e(TAG, "READY");
        // start streaming when READY
        startStreamingAsync();
        break;
      case CONNECTING:
        Log.e(TAG, "连接中");
        break;
      case STREAMING:
        Log.e(TAG, "推流中");
        // The av packet had been sent.
        break;
      case SHUTDOWN:
        Log.e(TAG, "直播中断");
        // The streaming had been finished.
        break;
      case IOERROR:
        // Network connect error.
        Log.e(TAG, "网络连接失败");
        break;
      case OPEN_CAMERA_FAIL:
        Log.e(TAG, "摄像头打开失败");
        // Failed to open camera.
        break;
      case DISCONNECTED:
        Log.e(TAG, "已经断开连接");
        // The socket is broken while streaming
        break;
      case TORCH_INFO:
        Log.e(TAG, "开启闪光灯");
        break;
    }
  }

  @Override
  public void notifyStreamStatusChanged(StreamingProfile.StreamStatus status) {
    Log.e(TAG, "StreamStatus = " + status);
  }

  @Override
  public void onAudioSourceAvailable(ByteBuffer srcBuffer, int size, long tsInNanoTime,
      boolean isEof) {
    // Raw microphone PCM callback — intentionally unused.
  }

  @Override
  public boolean onRecordAudioFailedHandled(int code) {
    Log.i(TAG, "onRecordAudioFailedHandled");
    // false: let the SDK apply its default handling.
    return false;
  }

  @Override
  public boolean onRestartStreamingHandled(int code) {
    Log.i(TAG, "onRestartStreamingHandled");
    // Attempt to re-establish the push stream after a recoverable failure.
    startStreamingAsync();
    return false;
  }

  @Override
  public Camera.Size onPreviewSizeSelected(List<Camera.Size> list) {
    // null: let the SDK choose the preview size.
    return null;
  }

  @Override
  public int onPreviewFpsSelected(List<int[]> list) {
    // -1: let the SDK choose the preview FPS range.
    return -1;
  }
}
