package com.vogue.live.activity;

import android.Manifest;
import android.content.pm.PackageManager;
import android.graphics.ImageFormat;
import android.hardware.Camera;
import android.media.AudioManager;
import android.media.CamcorderProfile;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.media.MediaRecorder;
import android.media.SoundPool;
import android.net.Uri;
import android.os.Environment;
import android.os.Handler;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.widget.FrameLayout;

import com.vogue.live.R;
import com.vogue.live.util.AudioLive;
import com.vogue.live.util.Common;
import com.vogue.live.util.FlvMuxer;
import com.vogue.live.view.CameraPreview;
import com.vogue.rtmp.RtmpHandler;

import java.io.File;
import java.io.IOException;
import java.net.SocketException;
import java.nio.ByteBuffer;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;

/**
 * Captures camera preview frames, encodes them to H.264 with MediaCodec,
 * muxes them (plus AAC audio from AudioLive) into FLV and publishes over
 * RTMP via FlvMuxer. Also supports local MP4 capture through MediaRecorder.
 */
public class RecorderActivity extends AppCompatActivity  implements RtmpHandler.RtmpListener{

    private static final String TAG = "RecorderActivity";

    // Type selectors for getOutputMediaFile()/getOutputMediaFileUri().
    private static final int MEDIA_TYPE_VIDEO = 2;
    private static final int MEDIA_TYPE_IMAGE = 1;
    private Camera mCamera = null;       // legacy android.hardware.Camera handle
    private Camera.Parameters mParam = null;
    private int mCameraId = 1;           // camera index passed to Camera.open() -- presumably front-facing; confirm per device
    public int mOrientation = 90;        // rotation passed to setDisplayOrientation()
    public static String dsturl = "rtmp://117.78.38.75:1935/stream/m2";  // RTMP publish endpoint
    private CameraPreview mPreview;      // preview surface added to the layout
    private MediaRecorder mMediaRecorder = null;  // used only for local MP4 recording
    public MediaCodec mEncoder = null;   // video encoder for Common.Vmimetype
    public MediaFormat mEncFormat= null; // format the encoder was configured with

    // Static so other components can reach the muxer and its track ids.
    public static FlvMuxer mFlvMuxer;
    public static int vtrack = -1;       // video track id in the FLV muxer
    public static int atrack = -1;       // audio track id in the FLV muxer

    public AudioLive mAL;                // audio capture/encode helper

    private SoundPool mSoundPool;        // start/stop cue sounds
    int mSoundID;
    int mSoundID1;

    public boolean isRuning = false;     // encoder-thread run flag (sic)
    private int TIMEOUT_USEC = 12000;    // dequeueOutputBuffer timeout, microseconds
    Thread EncoderThread;                // background video-encode loop

    /**
     * Requests the runtime permissions needed for capture and streaming
     * (camera, storage, network, microphone) when any of them is missing.
     *
     * Note: requestPermissions() is asynchronous; callers must not assume
     * the permissions are granted when this method returns.
     */
    private void CheckPermission(){
        // Bug fix: the original only checked CAMERA, so a missing
        // RECORD_AUDIO or storage permission was never re-requested.
        String[] needed = new String[]{Manifest.permission.CAMERA,
                Manifest.permission.WRITE_EXTERNAL_STORAGE,
                Manifest.permission.INTERNET,
                Manifest.permission.RECORD_AUDIO};
        for (String perm : needed) {
            if (ContextCompat.checkSelfPermission(this, perm)
                    != PackageManager.PERMISSION_GRANTED) {
                ActivityCompat.requestPermissions(this, needed, 1);
                return;
            }
        }
    }
    /**
     * Opens the camera selected by mCameraId, pins the preview to NV21
     * frames at Common.mResolutionX x Common.mResolutionY / Common.mFps,
     * and attaches a CameraPreview surface to the layout.
     */
    private void initCamera(){
        try{
            mCamera =  Camera.open(mCameraId);
        }catch(Exception e){
            e.printStackTrace();
            Log.e(TAG, "Camera.open"+e);
        }
        // Bug fix: Camera.open() can fail (permission denied, camera busy);
        // the original then NPE'd on getParameters() below.
        if (mCamera == null) {
            Log.e(TAG, "initCamera: camera unavailable");
            return;
        }

        mParam = mCamera.getParameters();
        // Bug fix: NV21 is a *preview* format. The encode pipeline
        // (onPreviewFrame -> NV21ToNV12) consumes preview-callback frames,
        // so the preview format must be set -- not the still-picture format.
        mParam.setPreviewFormat(ImageFormat.NV21);
        mParam.setPreviewSize(Common.mResolutionX,Common.mResolutionY);
        mParam.setPreviewFrameRate(Common.mFps);
        mCamera.setDisplayOrientation(mOrientation);
        mCamera.setParameters(mParam);

        mPreview = new CameraPreview(this, mCamera);
        FrameLayout preview = (FrameLayout) findViewById(R.id.mCameraPreview);
        preview.addView(mPreview);
    }
    /**
     * Detaches the preview surface and releases the camera. Safe to call
     * when the camera was never opened or was already released.
     */
    private void releaseCamera(){
        FrameLayout preview = (FrameLayout) findViewById(R.id.mCameraPreview);
        if (mPreview != null) {
            preview.removeView(mPreview);
            mPreview = null;
        }
        if (mCamera != null) {
            // Drop the frame callback first so no frame is delivered to a
            // released camera, then stop the preview and release.
            mCamera.setPreviewCallback(null);
            mCamera.stopPreview();
            mCamera.release();
            mCamera = null;  // prevent double-release / use-after-release
        }
    }

    /** Create a file Uri for saving an image or video, or null when the
     *  output file could not be created. */
    private static Uri getOutputMediaFileUri(int type){
        File f = getOutputMediaFile(type);
        // Bug fix: getOutputMediaFile() returns null on mkdirs() failure or
        // unknown type; Uri.fromFile(null) throws a NullPointerException.
        return (f == null) ? null : Uri.fromFile(f);
    }
    /** Create a File for saving an image or video */
    private static File getOutputMediaFile(int type){
        // To be safe, you should check that the SDCard is mounted
        // using Environment.getExternalStorageState() before doing this.

        File mediaStorageDir = new File(Environment.getExternalStoragePublicDirectory(
                Environment.DIRECTORY_MOVIES), "MyCameraApp");
        // This location works best if you want the created images to be shared
        // between applications and persist after your app has been uninstalled.

        // Create the storage directory if it does not exist
        if (! mediaStorageDir.exists()){
            if (! mediaStorageDir.mkdirs()){
                Log.d("MyCameraApp", "failed to create directory");
                return null;
            }
        }

        // Create a media file name
        String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
        File mediaFile;
        if (type == MEDIA_TYPE_IMAGE){
            mediaFile = new File(mediaStorageDir.getPath() + File.separator +
                    "IMG_"+ timeStamp + ".jpg");
        } else if(type == MEDIA_TYPE_VIDEO) {
            mediaFile = new File(mediaStorageDir.getPath() + File.separator +
                    "VID_"+ timeStamp + ".mp4");
        } else {
            return null;
        }

        return mediaFile;
    }
    /**
     * Scans installed encoders for one that supports Common.Vmimetype and
     * returns the first YUV420 color format that both this pipeline and
     * the codec understand, or -1 when no suitable encoder/format exists.
     */
    private int getSupportColorFormat() {
        int numCodecs = MediaCodecList.getCodecCount();
        MediaCodecInfo codecInfo = null;
        for (int i = 0; i < numCodecs && codecInfo == null; i++) {
            MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
            if (!info.isEncoder()) {
                continue;
            }
            for (String type : info.getSupportedTypes()) {
                if (type.equals(Common.Vmimetype)) {
                    codecInfo = info;
                    break;
                }
            }
        }

        // Bug fix: the original dereferenced codecInfo unconditionally and
        // crashed with an NPE on devices with no matching encoder.
        if (codecInfo == null) {
            Log.e("AvcEncoder", "No encoder found for " + Common.Vmimetype);
            return -1;
        }
        Log.e("AvcEncoder", "Found " + codecInfo.getName() + " supporting " + Common.Vmimetype);

        // Find a color profile that the codec supports.
        MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(Common.Vmimetype);
        Log.e("AvcEncoder",
                "length-" + capabilities.colorFormats.length + "==" + Arrays.toString(capabilities.colorFormats));

        for (int colorFormat : capabilities.colorFormats) {
            switch (colorFormat) {
                // Any common YUV420 layout is acceptable to the NV12
                // conversion path feeding the encoder.
                case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
                case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
                case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
                case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
                case MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar:
                case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
                    Log.e("AvcEncoder", "supported color format::" + colorFormat);
                    return colorFormat;
                default:
                    Log.e("AvcEncoder", "unsupported color format " + colorFormat);
                    break;
            }
        }

        return -1;
    }

    /**
     * Records camera audio/video to a local MP4 via MediaRecorder using
     * the 720p camcorder profile. On setup failure the recorder is
     * released and the camera relocked instead of calling start() on an
     * unprepared recorder (which throws IllegalStateException).
     */
    public void MediaRecorderStart(){
        File outFile = getOutputMediaFile(MEDIA_TYPE_VIDEO);
        // Bug fix: getOutputMediaFile() can return null; toString() on it NPE'd.
        if (outFile == null) {
            Log.e(TAG, "MediaRecorderStart: no output file available");
            return;
        }

        mMediaRecorder = new MediaRecorder();
        mCamera.unlock();                  // hand camera ownership to MediaRecorder
        mMediaRecorder.setCamera(mCamera);

        mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
        mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
        mMediaRecorder.setProfile(CamcorderProfile.get(CamcorderProfile.QUALITY_720P));

        mMediaRecorder.setOutputFile(outFile.toString());
        mMediaRecorder.setOrientationHint(270);

        try {
            mMediaRecorder.prepare();
        } catch (IllegalStateException | IOException e) {
            // Bug fix: the original fell through and called start() even
            // when prepare() failed. Clean up and bail out instead.
            e.printStackTrace();
            mMediaRecorder.release();
            mMediaRecorder = null;
            mCamera.lock();                // reclaim the camera
            return;
        }

        mMediaRecorder.start();
    }
    /**
     * Stops local MP4 recording (if active), frees the MediaRecorder and
     * returns camera ownership to this activity. No-op when idle.
     */
    public void MediaRecorderStop(){
        if (mMediaRecorder == null) {
            return;                    // nothing is recording
        }
        mMediaRecorder.stop();
        mMediaRecorder.reset();        // clear recorder configuration
        mMediaRecorder.release();      // release the recorder object
        mMediaRecorder = null;
        mCamera.lock();                // lock camera for later use
    }

    /**
     * Builds the full live-streaming pipeline: H.264 MediaCodec encoder,
     * AAC audio capture (AudioLive), FLV muxer + RTMP publisher, the
     * background encode thread, and the preview callback that feeds raw
     * NV21 frames into Common.YUVQueue.
     */
    public void MediaStreamStart(){
        /* init video encoder */
        try {
            mEncoder = MediaCodec.createEncoderByType(Common.Vmimetype);
        } catch (IOException e) {
            e.printStackTrace();
        }
        int supportColorFormat = getSupportColorFormat();
        // Bug fix: abort instead of continuing with a null encoder (after
        // createEncoderByType failed) or an invalid color format (-1) --
        // both crashed in configure() below.
        if (mEncoder == null || supportColorFormat == -1) {
            Log.e(TAG, "MediaStreamStart: video encoder unavailable");
            return;
        }
        mEncFormat = MediaFormat.createVideoFormat(Common.Vmimetype, Common.mResolutionX, Common.mResolutionY);
        mEncFormat.setInteger(MediaFormat.KEY_BIT_RATE, Common.mBitrate);
        mEncFormat.setInteger(MediaFormat.KEY_FRAME_RATE, Common.mFps);
        mEncFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);  // keyframe every second
        mEncFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, supportColorFormat);
        mEncoder.configure(mEncFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mEncoder.start();

        /* init audio encoder */
        mAL = new AudioLive(0, Common.AACQueue);

        /* init rtmp publisher */
        mFlvMuxer = new FlvMuxer(new RtmpHandler(this));
        vtrack = mFlvMuxer.addTrack(mEncFormat);
        atrack = mFlvMuxer.addTrack(mAL.getFormat());
        mFlvMuxer.setVideoResolution(Common.mResolutionX, Common.mResolutionY);
        mFlvMuxer.start(dsturl);

        StartEncoderThread();
        // Every preview frame goes into the YUV queue drained by the
        // encoder thread.
        mCamera.setPreviewCallback(new Camera.PreviewCallback(){
            @Override
            public void onPreviewFrame(byte[] data, Camera camera) {
                putYUVData(data, data.length);
            }
        });
    }
    /**
     * Tears the streaming pipeline down: stop the frame source first so
     * nothing feeds a dying encoder, then audio, the encode thread, the
     * muxer, and finally the codec itself. Null-safe so a partially
     * started pipeline (or a second call) does not crash.
     */
    public void MediaStreamStop(){
        // Bug fix: detach the preview callback *before* stopping the
        // encoder thread so no new frames are queued during teardown.
        if (mCamera != null) {
            mCamera.setPreviewCallback(null);
        }
        if (mAL != null) {
            mAL.stop();
        }
        StopEncoderThread();
        if (mFlvMuxer != null) {
            mFlvMuxer.stop();
            mFlvMuxer = null;
        }
        if (mEncoder != null) {
            mEncoder.stop();
            mEncoder.release();
            mEncoder = null;  // avoid use-after-release
        }
    }

    /**
     * Signals the encoder loop to exit and blocks until the thread dies.
     * No-op when the thread was never started.
     */
    public void StopEncoderThread(){
        isRuning = false;
        if (EncoderThread == null) {
            return;
        }
        try {
            EncoderThread.join();
        } catch (InterruptedException e) {
            // Bug fix: preserve the caller's interrupt status rather than
            // silently swallowing the interruption.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        }
        EncoderThread = null;
    }
    /**
     * Spawns the video encode loop and starts audio capture. The loop
     * drains Common.YUVQueue, converts NV21 -> NV12, submits frames to the
     * MediaCodec input, and forwards encoded output to the FLV muxer on
     * track {@code vtrack}.
     */
    public void StartEncoderThread(){
        EncoderThread = new Thread(new Runnable() {
            @Override
            public void run() {
                isRuning = true;
                byte[] input = null;
                long pts =  0;
                long generateIndex = 0;
                MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
                // Legacy (pre-API-21) buffer access: fetch the arrays once
                // and index them with the dequeue results.
                ByteBuffer[] inputBuffers = mEncoder.getInputBuffers();
                ByteBuffer[] outputBuffers = mEncoder.getOutputBuffers();

                // Scratch NV12 buffer, reused for every frame (YUV420 = 1.5 bytes/px).
                byte[] yuv420sp = new byte[Common.mResolutionX*Common.mResolutionY*3/2];

                while (isRuning) {
                    if (Common.YUVQueue.size() >0){
                        input = Common.YUVQueue.poll();
                        NV21ToNV12(input,yuv420sp,Common.mResolutionX,Common.mResolutionY);
                        input = yuv420sp;
                    }
                    // NOTE(review): 'input' keeps pointing at the last converted
                    // frame, so when the queue is empty the previous frame is
                    // re-encoded instead of waiting -- confirm this is intended.
                    if (input != null) {
                        try {
                            long startMs = System.currentTimeMillis();
                            // Block until an input buffer is free (-1 = no timeout).
                            int inputBufferIndex = mEncoder.dequeueInputBuffer(-1);
                            if (inputBufferIndex >= 0) {
                                // Wall-clock timestamp in microseconds.
                                pts = (System.nanoTime() / 1000);//computePresentationTime(generateIndex);
                                ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
                                inputBuffer.clear();
                                inputBuffer.put(input);
                                mEncoder.queueInputBuffer(inputBufferIndex, 0, input.length, pts, 0);
                                generateIndex += 1;
                            }

                            // Forward any ready encoded frame to the muxer.
                            int outputBufferIndex = mEncoder.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
                            if (outputBufferIndex >= 0) {
                                ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
                                mFlvMuxer.writeSampleData(vtrack, outputBuffer, bufferInfo);
                                mEncoder.releaseOutputBuffer(outputBufferIndex, false);
                            }

                        } catch (Throwable t) {
                            t.printStackTrace();
                        }
                    } else {
                        // No frame has arrived yet; idle briefly.
                        try {
                            Thread.sleep(500);
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                        }
                    }
                }
            }
        });
        EncoderThread.start();
        mAL.start();  // start AAC audio capture alongside video
    }


    /**
     * Maps a frame index to a presentation timestamp in microseconds at
     * the configured frame rate, with a small fixed initial offset.
     * Currently unused: the encoder loop stamps frames with wall-clock time.
     */
    private long computePresentationTime(long frameIndex) {
        long elapsedUs = frameIndex * 1000000 / Common.mFps;
        return 132 + elapsedUs;
    }
    /**
     * Queues a raw NV21 preview frame for the encoder thread. When the
     * queue is at capacity the oldest pending frame is discarded first
     * (drop-oldest keeps latency bounded).
     *
     * @param buffer raw frame bytes from onPreviewFrame
     * @param length frame size in bytes (currently unused)
     */
    public void putYUVData(byte[] buffer, int length) {
        boolean full = Common.YUVQueue.size() >= Common.YUVQ_num;
        if (full) {
            Common.YUVQueue.poll();  // drop the oldest frame
        }
        Common.YUVQueue.add(buffer);
    }
    /**
     * Converts an NV21 frame (Y plane followed by interleaved V/U) into
     * NV12 (Y plane followed by interleaved U/V) by copying the Y plane
     * and swapping each chroma byte pair.
     *
     * Bug fix: the original's off-by-one indices wrote nv12[framesize - 1]
     * (clobbering the last Y byte with chroma) and read nv21[framesize - 1]
     * (treating a Y byte as chroma); it also copied the Y plane twice
     * (System.arraycopy followed by an identical manual loop).
     */
    private void NV21ToNV12(byte[] nv21,byte[] nv12,int width,int height){
        if(nv21 == null || nv12 == null)return;
        int framesize = width*height;
        // Y plane is identical in both layouts.
        System.arraycopy(nv21, 0, nv12, 0, framesize);
        // Chroma plane: NV21 stores V,U pairs; NV12 stores U,V pairs.
        for (int j = 0; j < framesize/2; j += 2) {
            nv12[framesize + j]     = nv21[framesize + j + 1]; // U
            nv12[framesize + j + 1] = nv21[framesize + j];     // V
        }
    }
    /**
     * Kicks off live streaming: opens the camera, plays the start cue
     * sound, then starts the pipeline after a 500 ms delay.
     */
    public void RecordCmd(){
        initCamera();
        mSoundPool.play(mSoundID, 1, 1, 0, 0, 1);
        Runnable startStream = new Runnable(){
            public void run() {
                MediaStreamStart();
            }
        };
        // Defer pipeline start so the preview surface has time to come up.
        new Handler().postDelayed(startStream, 500);
    }
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_recorder);
        // Cue sounds for stream start / playback.
        mSoundPool = new SoundPool(10, AudioManager.STREAM_SYSTEM, 5);
        mSoundID = mSoundPool.load(this,R.raw.stream, 1);
        mSoundID1 = mSoundPool.load(this,R.raw.play, 1);
        CheckPermission();
        Common.type=2;  // NOTE(review): meaning of type=2 is defined in Common -- confirm ("recorder" mode?)
        // NOTE(review): requestPermissions() is asynchronous, so on first
        // launch RecordCmd() can run before CAMERA/RECORD_AUDIO are granted
        // and camera open will fail; consider deferring this to
        // onRequestPermissionsResult().
        RecordCmd();
    }
    // ------------------------------------------------------------------
    // RtmpHandler.RtmpListener callbacks. All intentionally left empty:
    // this activity does not surface RTMP connection state or errors to
    // the UI.
    // ------------------------------------------------------------------
    @Override
    public void onRtmpConnecting(String msg) {

    }

    @Override
    public void onRtmpConnected(String msg) {

    }

    @Override
    public void onRtmpVideoStreaming() {

    }

    @Override
    public void onRtmpAudioStreaming() {

    }

    @Override
    public void onRtmpStopped() {

    }

    @Override
    public void onRtmpDisconnected() {

    }

    @Override
    public void onRtmpVideoFpsChanged(double fps) {

    }

    @Override
    public void onRtmpVideoBitrateChanged(double bitrate) {

    }

    @Override
    public void onRtmpAudioBitrateChanged(double bitrate) {

    }

    @Override
    public void onRtmpSocketException(SocketException e) {

    }

    @Override
    public void onRtmpIOException(IOException e) {

    }

    @Override
    public void onRtmpIllegalArgumentException(IllegalArgumentException e) {

    }

    @Override
    public void onRtmpIllegalStateException(IllegalStateException e) {

    }
}
