package com.qiniu.pili.droid.rtcstreaming.demo.test;

import android.app.ProgressDialog;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.PixelFormat;
import android.hardware.Camera;
import android.hardware.display.DisplayManager;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.Image;
import android.media.ImageReader;
import android.media.MediaRecorder;
import android.media.projection.MediaProjection;
import android.media.projection.MediaProjectionManager;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Handler;
import android.os.SystemClock;
import android.support.v7.app.AppCompatActivity;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;

import com.qiniu.pili.droid.rtcstreaming.RTCConferenceOptions;
import com.qiniu.pili.droid.rtcstreaming.RTCStartConferenceCallback;
import com.qiniu.pili.droid.rtcstreaming.RTCStreamingManager;
import com.qiniu.pili.droid.rtcstreaming.demo.R;
import com.qiniu.pili.droid.rtcstreaming.demo.core.ExtAudioCapture;
import com.qiniu.pili.droid.rtcstreaming.demo.core.StreamUtils;
import com.qiniu.pili.droid.rtcstreaming.demo.utlis.ImpDataCollecter;
import com.qiniu.pili.droid.streaming.AVCodecType;
import com.qiniu.pili.droid.streaming.ScreenStreamingManager;
import com.qiniu.pili.droid.streaming.StreamStatusCallback;
import com.qiniu.pili.droid.streaming.StreamingProfile;
import com.qiniu.pili.droid.streaming.StreamingSessionListener;
import com.qiniu.pili.droid.streaming.StreamingState;
import com.qiniu.pili.droid.streaming.StreamingStateChangedListener;
import com.qiniu.pili.droid.streaming.av.common.PLFourCC;

import org.json.JSONException;
import org.json.JSONObject;

import java.net.URISyntaxException;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;

/**
 * Demo activity: captures the device screen via MediaProjection + ImageReader,
 * converts each RGBA frame to NV21 and publishes it through the Qiniu RTC
 * streaming SDK, optionally joining an audio conference at the same time.
 */
public class ImageReaderActivity extends AppCompatActivity implements View.OnClickListener, StreamingSessionListener, StreamStatusCallback, StreamingStateChangedListener {
    // Request code for the MediaProjection screen-capture permission intent.
    private static final int SCREEN_SHOT = 1010;
    private static final String TAG = "Leo";
    private EditText mEt_rtmp_address;
    private Button mPush;
    private TextView mStream_status;


    // NOTE(review): duplicate of TAG; appears unused in this file.
    private String TAGE = "Leo";
    // NOTE(review): hard-coded publish URL and room credentials with embedded,
    // expiring tokens — acceptable for a demo only; fetch from a server in
    // production and never commit secrets to source.
    private String pushUrl = "rtmp://pili-publish.internal-i-focusing.com/com-i-focusing-live-dev/GID_MARSAR_CLOUD_153188254426124288?e=1516775806&token=0xkNXdzEAVclERzD_DBlZknsmJRdJhVeCuDuKPdI:ae49DhUDK_rqR8561d7izAz7AYU=";
    private String roomId = "GID_MARSAR_CLOUD_153188254426124288";
    private String roomToken = "0xkNXdzEAVclERzD_DBlZknsmJRdJhVeCuDuKPdI:ic1IjRp0jM-D9Y8zvoty5S5Qt3E=:eyJyb29tX25hbWUiOiJHSURfTUFSU0FSX0NMT1VEXzE1MzE4ODI1NDQyNjEyNDI4OCIsInVzZXJfaWQiOiJHSURfTUFSU0FSX0NMT1VEXzE1MzE4ODI1NDQyNjEyNDI4OCIsInBlcm0iOiJhZG1pbiIsImV4cGlyZV9hdCI6MTUxNjc3NTgwNn0=";
    private String userId = "GID_MARSAR_CLOUD@@@153188254426124288";
    private MediaProjectionManager mManager;
    private MediaProjection mMediaProjection;
    // NOTE(review): type declared elsewhere in the project; appears unused here.
    private PWMediaCallBack mMediaCallback;

    // Capture / encode dimensions; the virtual display, ImageReader and the
    // streaming profile are all created with these values.
    private int width = 1280;
    private int height = 720;
    // Density passed to createVirtualDisplay (see onCreate).
    private int dpi = 1;

    // Saved MediaProjection permission result, consumed by setUpMediaProjection().
    int mResultCode;
    Intent mData;
    private ImageReader mImageReader;
    private ImageView mImageView;
    // NOTE(review): never assigned in this file — presumably dead.
    private Handler backgroundHandler;
    private RTCStreamingManager mStreamingManager;
    private StreamingProfile mStreamingProfile;
    // NOTE(review): never assigned; only referenced from commented-out code.
    private MediaCallBack mMediaCallBack;
    private ExtAudioCapture mExtAudioCapture;
    private Button mAddConference;
    private FrameLayout mFrameLayout;
    // Camera preview used only to verify the screen capture includes SurfaceView content.
    private Camera mCamera;
    private CameraPreview mPreview;
    private ProgressDialog mDialog;
    private AudioCapturer mAudioCapturer;
    private Button mQuitConference;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_image_reader);

        // Must run before any RTCStreamingManager instance is created.
        RTCStreamingManager.init(getApplicationContext());

        // Service used to request the screen-capture permission intent.
        mManager = (MediaProjectionManager) getSystemService(MEDIA_PROJECTION_SERVICE);

        // Use the real screen density for the virtual display. Previously the
        // metrics were fetched but discarded, leaving dpi at the placeholder 1.
        DisplayMetrics metric = new DisplayMetrics();
        getWindowManager().getDefaultDisplay().getMetrics(metric);
        dpi = metric.densityDpi;

        bindViews();

        initStream();
    }

    /**
     * Configures the RTC/streaming SDK, the camera preview used for
     * verification, and the progress dialog. Call once from onCreate().
     */
    private void initStream() {
        // YUV frames are fed in manually (inputVideoFrame); audio is HW-encoded.
        mStreamingManager = new RTCStreamingManager(this, AVCodecType.HW_VIDEO_YUV_AS_INPUT_WITH_HW_AUDIO_CODEC);
        // Conference (RTC) parameters.
        RTCConferenceOptions options = new RTCConferenceOptions();
        options.setVideoEncodingSizeRatio(RTCConferenceOptions.VIDEO_ENCODING_SIZE_RATIO.RATIO_16_9);
        options.setVideoEncodingSizeLevel(RTCConferenceOptions.VIDEO_ENCODING_SIZE_HEIGHT_720);
        options.setVideoBitrateRange(300 * 1000, 800 * 1000);
        // 15 fps is enough
        options.setVideoEncodingFps(15);
        // Publisher (RTMP push) profile.
        mStreamingProfile = new StreamingProfile();
        mStreamingProfile
                .setVideoQuality(StreamingProfile.VIDEO_QUALITY_HIGH2) // video stream quality
                .setAudioQuality(StreamingProfile.AUDIO_QUALITY_HIGH1)
                .setEncoderRCMode(StreamingProfile.EncoderRCModes.QUALITY_PRIORITY)
                .setEncodingOrientation(StreamingProfile.ENCODING_ORIENTATION.PORT)
                .setPreferredVideoEncodingSize(width, height) // push size; keep consistent with the conference size
                .setYuvFilterMode(StreamingProfile.YuvFilterMode.Linear)
//                .setBitrateAdjustMode(StreamingProfile.BitrateAdjustMode.Auto)
//                .setFpsControllerEnable(true)
//                .setVideoAdaptiveBitrateRange(300 * 1024 * 8, 800 * 1024 * 8)

        ;
        mStreamingManager.setConferenceOptions(options);

        // prepare() finalizes the configuration above.
        mStreamingManager.prepare(mStreamingProfile);
        // Register streaming callbacks (this activity implements all three).
        mStreamingManager.setStreamingSessionListener(this);
        mStreamingManager.setStreamStatusCallback(this);
        mStreamingManager.setStreamingStateListener(this);

        mExtAudioCapture = new ExtAudioCapture();

        // Start the camera preview, used to verify that screen capture also
        // picks up SurfaceView content.
        if (checkCameraHardware(this)) {
            // Create an instance of Camera
            mCamera = getCameraInstance();
            // Create our Preview view and set it as the content of our activity.
            mPreview = new CameraPreview(this, mCamera);
            mFrameLayout.addView(mPreview);
        }

        // Progress dialog shown while joining / starting the stream.
        mDialog = new ProgressDialog(this);
        mAudioCapturer = new AudioCapturer();
    }


    /**
     * Resolves all view references from the layout and registers this
     * activity as the click listener of the three action buttons.
     */
    private void bindViews() {
        mEt_rtmp_address = (EditText) findViewById(R.id.et_rtmp_address);
        mStream_status = (TextView) findViewById(R.id.stream_status);
        mImageView = (ImageView) findViewById(R.id.iv_bitmap);
        mFrameLayout = (FrameLayout) findViewById(R.id.camera_preview);

        mPush = (Button) findViewById(R.id.push);
        mAddConference = (Button) findViewById(R.id.add_conference);
        mQuitConference = (Button) findViewById(R.id.quit_conference);
        for (Button action : new Button[]{mPush, mAddConference, mQuitConference}) {
            action.setOnClickListener(this);
        }
    }

    //------------------------ 验证截屏是否能截到SurfaceView的数据（通过相机来验证）  start--------------------------
    /**
     * Checks whether the device has any camera.
     *
     * @param context any context; used only to reach the PackageManager
     * @return {@code true} if a camera feature is reported, {@code false} otherwise
     */
    private boolean checkCameraHardware(Context context) {
        // The if/else returning literal true/false collapses to the expression.
        return context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA);
    }

    /**
     * Opens the default (rear-facing, id 0) camera.
     *
     * @return an open {@link Camera}, or {@code null} if the camera is
     *         unavailable (already in use or not present)
     */
    public static Camera getCameraInstance() {
        try {
            return Camera.open();
        } catch (Exception e) {
            // Don't swallow silently: record why the preview could not start.
            Log.e(TAG, "Failed to open camera", e);
            return null;
        }
    }

    //------------------------ end --------------------------

    /**
     * Dispatches the three action buttons: start screen-capture publishing,
     * join the audio conference, or leave the conference.
     */
    @Override
    public void onClick(View v) {
        final int id = v.getId();
        if (id == R.id.push) {
            mDialog.setMessage("开始推流！");
            mDialog.show();
            // Ask the user for screen-capture permission; the rest of the
            // pipeline starts in onActivityResult().
            startActivityForResult(mManager.createScreenCaptureIntent(), SCREEN_SHOT);
        } else if (id == R.id.add_conference) {
            mDialog.setMessage("正在加入连麦。。。");
            mDialog.show();
            mAudioCapturer.startCapture();
            // Joining the room does network work — keep it off the UI thread.
            new Thread(new Runnable() {
                @Override
                public void run() {
                    startConferenceInternal();
                }
            }).start();
        } else if (id == R.id.quit_conference) {
            stopConference();
        }
    }

    /**
     * Joins the RTC conference room. Runs on a worker thread, so every UI
     * interaction (toast / progress dialog) is posted to the main thread.
     */
    private void startConferenceInternal() {
        if (roomToken == null) {
            // Without a token the SDK call below cannot succeed; abort early.
            // (The original fell through and called startConference anyway,
            // and showed a Toast directly from this worker thread.)
            runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    showToast("无法获取房间信息 !");
                    mDialog.dismiss();
                }
            });
            return;
        }

        try {
            // Short grace period kept from the original demo flow.
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            // Preserve the interrupt status instead of swallowing it.
            Thread.currentThread().interrupt();
            return;
        }

        mStreamingManager.startConference(userId, roomId, roomToken, new RTCStartConferenceCallback() {
            @Override
            public void onStartConferenceSuccess() {
                if (mAudioCapturer.isCaptureStarted()) {
                    // Forward captured PCM frames into the conference stream.
                    mAudioCapturer.setOnAudioFrameCapturedListener(new AudioCapturer.OnAudioFrameCapturedListener() {
                        @Override
                        public void onAudioFrameCaptured(byte[] audioData, long ts) {
                            if (audioData != null) {
                                mStreamingManager.inputAudioFrame(audioData, ts);
                            } else {
                                // The audio callback runs off the UI thread;
                                // Toasts must be posted to the main looper.
                                runOnUiThread(new Runnable() {
                                    @Override
                                    public void run() {
                                        showToast("音频数据为空!");
                                    }
                                });
                            }
                        }
                    });
                }

                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        showToast(getString(R.string.add_audio));
                        mDialog.dismiss();
                    }
                });
            }

            @Override
            public void onStartConferenceFailed(final int errorCode) {
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        showToast(getString(R.string.failed_to_start_conference) + errorCode);
                        mDialog.dismiss();
                    }
                });
            }
        });
    }

    /**
     * Shows a short toast. Safe to call from any thread: the toast is always
     * posted to the main looper, because several callers (conference start,
     * stream teardown) run on worker threads where a direct
     * {@code Toast.show()} would throw.
     */
    private void showToast(final String s) {
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                Toast.makeText(ImageReaderActivity.this, s, Toast.LENGTH_SHORT).show();
            }
        });
    }


    /**
     * Receives the MediaProjection permission result; on success builds the
     * projection + virtual display and starts the frame-pumping thread.
     */
    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        // Only react to our own screen-capture permission request.
        if (requestCode != SCREEN_SHOT || resultCode != RESULT_OK) {
            return;
        }
        mResultCode = resultCode;
        mData = data;
        setUpMediaProjection();
        setUpVirtualDisplay();

        // Pump frames at ~25 fps (40 ms period) after a 1 s warm-up. The loop
        // exits once the activity is finishing, so the worker thread cannot
        // outlive the screen (the original looped forever).
        new Thread() {
            @Override
            public void run() {
                SystemClock.sleep(1000);
                while (!isFinishing()) {
                    try {
                        startCapture();
                    } catch (Exception e) {
                        Log.e(TAG, "run: " + e.getMessage(), e);
                    }
                    SystemClock.sleep(40);
                }
            }
        }.start();
    }

    // Most recent NV21 frame; re-sent when the ImageReader has no fresh image
    // so the encoder keeps receiving frames at a steady rate.
    private byte[] lastFrame;

    /**
     * Grabs the latest screen image, converts it RGBA -> NV21 and feeds it to
     * the streaming manager. Kicks off RTMP publishing on the first frame.
     * Called repeatedly from the capture thread started in onActivityResult().
     */
    private void startCapture() {
        Image image = mImageReader.acquireLatestImage();

        byte[] nv21Bytes;
        if (image == null) {
            if (lastFrame == null) {
                return; // nothing captured yet
            }
            nv21Bytes = lastFrame;
        } else {
            try {
                final Image.Plane[] planes = image.getPlanes();
                final ByteBuffer buffer = planes[0].getBuffer();
                int pixelStride = planes[0].getPixelStride();
                int rowStride = planes[0].getRowStride();
                int rowPadding = rowStride - pixelStride * width;
                // The bitmap is widened by the row padding; getNV21() reads
                // only the first `width` pixels of each row, cropping it off.
                Bitmap bitmap = Bitmap.createBitmap(width + rowPadding / pixelStride, height,
                        Bitmap.Config.ARGB_8888);
                bitmap.copyPixelsFromBuffer(buffer);

                nv21Bytes = getNV21(width, height, bitmap);
                lastFrame = nv21Bytes;
            } finally {
                // Always release the image; otherwise the ImageReader exhausts
                // its buffer pool (maxImages) and capture stalls permanently.
                image.close();
            }
        }
        mStreamingManager.inputVideoFrame(nv21Bytes, width, height, 0, false, PLFourCC.FOURCC_NV21, System.nanoTime());

        // compareAndSet makes the start-publishing trigger one-shot; the old
        // get()-then-set-later pattern could spawn several publish threads
        // before the first one flipped the flag.
        if (isStreamingStared.compareAndSet(false, true)) {
            new Thread(new Runnable() {
                @Override
                public void run() {
                    startPublishStreamingInternal();
                }
            }).start();
        }
    }

    private final AtomicBoolean isStreamingStared = new AtomicBoolean(false);

    /**
     * Converts an ARGB bitmap into an NV21 (YUV420SP) byte array.
     * The bitmap is recycled here; callers must not use it afterwards.
     */
    private byte[] getNV21(int inputWidth, int inputHeight, Bitmap scaled) {
        final int pixelCount = inputWidth * inputHeight;

        int[] pixels = new int[pixelCount];
        scaled.getPixels(pixels, 0, inputWidth, 0, 0, inputWidth, inputHeight);
        // Pixels are copied out above, so the bitmap can be released already.
        scaled.recycle();

        byte[] nv21 = new byte[pixelCount * 3 / 2];
        encodeYUV420SP(nv21, pixels, inputWidth, inputHeight);
        return nv21;
    }

    /**
     * Packs ARGB pixels into NV21: a full-resolution Y plane followed by a
     * half-resolution interleaved V/U plane (one V,U pair per 2x2 block).
     *
     * @param yuv420sp output buffer; must hold width * height * 3 / 2 bytes
     * @param argb     input pixels in ARGB int order, width * height entries
     * @param width    frame width in pixels
     * @param height   frame height in pixels
     */
    private void encodeYUV420SP(byte[] yuv420sp, int[] argb, int width, int height) {
        final int frameSize = width * height;

        int yIndex = 0;
        int uvIndex = frameSize;

        int index = 0;
        for (int j = 0; j < height; j++) {
            for (int i = 0; i < width; i++) {
                // Alpha is ignored: NV21 has no transparency channel.
                int R = (argb[index] & 0xff0000) >> 16;
                int G = (argb[index] & 0xff00) >> 8;
                int B = argb[index] & 0xff;

                // well known RGB to YUV (BT.601) conversion
                int Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
                int U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
                int V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;

                yuv420sp[yIndex++] = (byte) ((Y < 0) ? 0 : ((Y > 255) ? 255 : Y));
                // Subsample chroma on even rows and even columns. Testing the
                // column index i (instead of the flat `index % 2`) is also
                // correct for odd widths, where the flat index drifts out of
                // phase every row and could overrun the UV plane. For even
                // widths (this demo uses 1280) both forms are identical.
                if (j % 2 == 0 && i % 2 == 0) {
                    yuv420sp[uvIndex++] = (byte) ((V < 0) ? 0 : ((V > 255) ? 255 : V));
                    yuv420sp[uvIndex++] = (byte) ((U < 0) ? 0 : ((U > 255) ? 255 : U));
                }

                index++;
            }
        }
    }

    // Creates the ImageReader that receives screen frames and mirrors the
    // display into its surface via the MediaProjection.
    private void setUpVirtualDisplay() {
        // RGBA_8888 matches the single-plane read in startCapture(); up to 24
        // images may be in flight before acquireLatestImage() stalls.
        mImageReader = ImageReader.newInstance(width, height, PixelFormat.RGBA_8888, 24);
        // NOTE(review): the returned VirtualDisplay is not kept, so it can
        // never be released; consider storing it and releasing in onDestroy.
        mMediaProjection.createVirtualDisplay("ScreenShot",
                width, height, dpi,
                DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR,
                mImageReader.getSurface(), null, null);

    }


    // Exchanges the permission result saved in onActivityResult for a live
    // MediaProjection session used to mirror the screen.
    private void setUpMediaProjection() {
        mMediaProjection = mManager.getMediaProjection(mResultCode, mData);
    }


    @Override
    protected void onResume() {
        super.onResume();
        // Resume the RTC engine; the paired pause() call lives in onPause().
        mStreamingManager.resume();
        // NOTE(review): audio capture is not (re)started here — the listener
        // registration below was left commented out intentionally, it seems.
//        mAudioCapturer.setOnAudioFrameCapturedListener(mOnAudioFrameCapturedListener);
////        mExtAudioCapture.setOnAudioFrameCapturedListener(mOnAudioFrameCapturedListener);
////        mExtAudioCapture.startCapture();
//        mAudioCapturer.startCapture();

    }

    @Override
    protected void onPause() {
        super.onPause();
        mStreamingManager.pause();
        // Tear down both the conference and RTMP publishing when leaving the
        // screen; they are restarted manually by the user on return.
        stopConference();
        stopPublishStreaming();
        // Detach the audio callback so frames are no longer forwarded.
        mAudioCapturer.setOnAudioFrameCapturedListener(null);
//        mAudioCapturer.stopCapture();
    }

    /**
     * Stops RTMP publishing and re-arms the first-frame trigger so that a
     * later capture can start publishing again. (The flag was never reset
     * before, which made restarting the stream impossible.)
     */
    private void stopPublishStreaming() {
        mStreamingManager.stopStreaming();
        isStreamingStared.set(false);
        showToast(getString(R.string.stop_streaming));
    }

    // Stops microphone capture and leaves the RTC conference room.
    private void stopConference() {
        mAudioCapturer.stopCapture();
        mStreamingManager.stopConference();

        showToast(getString(R.string.stop_conference));
    }

    //    private ExtAudioCapture.OnAudioFrameCapturedListener mOnAudioFrameCapturedListener = new ExtAudioCapture.OnAudioFrameCapturedListener() {
//        @Override
//        public void onAudioFrameCaptured(byte[] audioData, long tsInNanoTime) {
//            // Log.d(TAG, "onAudioFrameCaptured: " + tsInNanoTime);
//            if (mStreamingManager.isStreamingStarted() || mStreamingManager.isConferenceStarted()) {
//                mStreamingManager.inputAudioFrame(audioData, System.nanoTime());
//            }
//        }
//    };
    // Forwards microphone PCM into the stream/conference, but only while one
    // of them is active. NOTE(review): registration of this listener is
    // commented out in onResume(), so it currently appears unused — confirm
    // before deleting.
    private AudioCapturer.OnAudioFrameCapturedListener mOnAudioFrameCapturedListener = new AudioCapturer.OnAudioFrameCapturedListener() {
        @Override
        public void onAudioFrameCaptured(byte[] audioData, long ts) {
            if (mStreamingManager.isStreamingStarted() || mStreamingManager.isConferenceStarted()) {
                mStreamingManager.inputAudioFrame(audioData, ts);
            }
//            mStreamingManager.inputAudioFrame(audioData,ts);
        }
    };

    // ---- SDK callback stubs: this demo accepts all defaults. ----

    // Returning false lets the SDK apply its default audio-failure handling.
    @Override
    public boolean onRecordAudioFailedHandled(int i) {
        return false;
    }

    // Returning false lets the SDK apply its default reconnect handling.
    @Override
    public boolean onRestartStreamingHandled(int i) {
        return false;
    }

    // Returning null lets the SDK pick the preview size.
    @Override
    public Camera.Size onPreviewSizeSelected(List<Camera.Size> list) {
        return null;
    }

    // Returning 0 lets the SDK pick the preview fps range.
    @Override
    public int onPreviewFpsSelected(List<int[]> list) {
        return 0;
    }

    // Stream statistics are ignored in this demo.
    @Override
    public void notifyStreamStatusChanged(StreamingProfile.StreamStatus streamStatus) {

    }

    // Streaming state transitions are ignored in this demo.
    @Override
    public void onStateChanged(StreamingState streamingState, Object o) {

    }


    /**
     * Callback for delivering a raw video frame.
     * NOTE(review): only referenced from commented-out code in this file.
     */
    public interface MediaCallBack {
        void onFrame(int width, int height, byte[] array, long timestamp);
    }


    /**
     * Configures the publish destination and starts RTMP streaming.
     * Runs on a worker thread; dialog updates are posted to the UI thread.
     *
     * @return {@code true} if streaming started, {@code false} otherwise
     */
    private boolean startPublishStreamingInternal() {
        if (pushUrl == null) {
            // No publish address -> nothing to start.
            return false;
        }

        try {
            if (StreamUtils.IS_USING_STREAMING_JSON) {
                mStreamingProfile.setStream(new StreamingProfile.Stream(new JSONObject(pushUrl)));
            } else {
                // A stray `new ScreenStreamingManager()` used to be created
                // (and leaked) here; it served no purpose and was removed.
                mStreamingProfile.setPublishUrl(pushUrl);
            }
        } catch (JSONException e) {
            Log.e(TAG, "invalid publish url", e);
            return false;
        } catch (URISyntaxException e) {
            Log.e(TAG, "invalid publish url", e);
            return false;
        }

        mStreamingManager.setStreamingProfile(mStreamingProfile);
        // Single attempt: the old code called startStreaming() twice on
        // failure and returned false even when the retry succeeded.
        if (!mStreamingManager.startStreaming()) {
            return false;
        }

        // NOTE(review): constructing ImpDataCollecter presumably has side
        // effects (data collection); kept as-is — confirm before removing.
        ImpDataCollecter impDataCollecter = new ImpDataCollecter();

        if (mStreamingManager.isStreamingStarted()) {
            // This method runs on a worker thread; dismiss the "starting"
            // dialog on the main thread.
            runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    mDialog.dismiss();
                }
            });
        }
        return true;
    }

    // True while the RecordTask loop should keep reading from the microphone.
    private boolean isRecording = false;

    // Audio capture parameters. Per the original author's note, 44100 Hz was
    // the only sample rate that worked reliably on their hardware (11025 Hz
    // failed to instantiate AudioRecord on some devices).
    private int frequence = 44100;
    private int channelConfig = AudioFormat.CHANNEL_IN_STEREO;
    private int audioEncoding = AudioFormat.ENCODING_PCM_16BIT;


    //录音线程
    class RecordTask extends AsyncTask<Void, Integer, Void> {

        @Override
        protected Void doInBackground(Void... voids) {
            try {
                //开通输出流到指定的文件
                //DataOutputStream dos = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(audioFile)));
                //根据定义好的几个配置，来获取合适的缓冲大小
                int bufferSize = AudioRecord.getMinBufferSize(frequence, channelConfig, audioEncoding);
                //实例化AudioRecord
                AudioRecord record = new AudioRecord(MediaRecorder.AudioSource.MIC, frequence, channelConfig, audioEncoding, bufferSize);

                //开始录制
                record.startRecording();

//                AacEncode aacMediaEncode = new AacEncode();
                //定义缓冲
                byte[] buffer = new byte[bufferSize];

                //定义循环，根据isRecording的值来判断是否继续录制
                while (isRecording) {

                    //从bufferSize中读取字节。
                    int bufferReadResult = record.read(buffer, 0, bufferSize);

                    //获取字节流
                    if (AudioRecord.ERROR_INVALID_OPERATION != bufferReadResult) {

//                        PWMediaCallBack
//                        //转成AAC编码
//                        byte[] ret = aacMediaEncode.offerEncoder(buffer);
//                        if (ret.length > 0) {
//
//                            byte[] out = aacDecode.offerDecoder(ret);
//
//                            //发送数据到VLC，这个方法在视频发送那篇文章有，这里就不重复了。需要的可以去看看
//                            netSendTask.pushBuf(ret, ret.length);
//
//                        }
                    }
                }
                //录制结束
                record.stop();
                //释放编码器
//                aacMediaEncode.close();
                // dos.close();
            } catch (Exception e) {
                e.printStackTrace();
            }
            return null;
        }
    }

}
