package pushengine;
import android.hardware.display.DisplayManager;
import android.hardware.display.VirtualDisplay;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.media.MediaRecorder;
import android.media.projection.MediaProjection;
import android.util.Log;
import android.view.Surface;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Created by he jinghai.
 */
/**
 * Hardware A/V encoder feeding an {@link SrsFlvMuxer}.
 *
 * <p>Video is encoded to H.264 ({@code video/avc}) from either camera RGBA frames
 * (converted to YUV by the native libyuv helpers) or a {@link MediaProjection}
 * screen capture rendered onto the encoder's input surface. Audio PCM is encoded
 * to AAC ({@code audio/mp4a-latm}). Encoded samples are handed to the muxer with
 * a presentation timestamp relative to {@link #start}.
 *
 * <p>Thread-safety: {@link #onGetPcmFrame} and {@link #onGetRgbaFrame} are
 * expected to be called from capture threads while the screen-capture path runs
 * its own drain thread; only {@code mStopPushVideo}/{@code isReconnecting} are
 * shared flags and are therefore volatile.
 */
public class SrsEncoder {
    private static final String TAG = "SrsEncoder";

    public static final String VCODEC = "video/avc";
    public static final String ACODEC = "audio/mp4a-latm";
    // Note: the resolution stride must be a multiple of 16 for hardware encoding
    // on some chips (e.g. MTK). Since the Y plane is quadruple the size of the
    // U and V planes, the stride must then be a multiple of 32.
    public static int vEncodeWidth = 720;
    public static int vEncodeHeight = 1280;
    public static int vBitrate = 1200 * 1024;  // 1200 kbps
    public static final int VFPS = 24;
    public static final int VGOP = 48;         // key-frame interval = VGOP / VFPS seconds
    public static final int ASAMPLERATE = 44100;
    public static int aChannelConfig = AudioFormat.CHANNEL_IN_STEREO;
    public static final int ABITRATE = 128 * 1024;  // 128 kbps

    private YuerEncodeHandler mHandler;
    private Surface mInputSurface = null;
    private MediaProjection mMediaProjection = null;
    // FIX: keep the VirtualDisplay handle so stop() can release it; the original
    // discarded the return value of createVirtualDisplay(), leaking the display.
    private VirtualDisplay mVirtualDisplay = null;
    private int mScreenDensity = 480;
    private Thread mVideoPushThread = null;
    // FIX: volatile — written by the control thread (stopVideoPusher) and read
    // by the push thread's loop; without it the loop may never see the stop.
    private volatile boolean mStopPushVideo = false;

    private SrsFlvMuxer flvMuxer;

    private MediaCodecInfo vmci;
    private MediaCodec vencoder;
    private MediaCodec aencoder;
    private MediaCodec.BufferInfo vebi = new MediaCodec.BufferInfo();
    private MediaCodec.BufferInfo aebi = new MediaCodec.BufferInfo();

    private boolean networkWeakTriggered = false;
    // volatile: toggled from the reconnect logic, read on the capture/push threads.
    private volatile boolean isReconnecting = false;
    // Reference timestamp (microseconds) taken at start(); PTS are relative to it.
    private long mPresentTimeUs;

    private int mVideoColorFormat;

    private int videoFlvTrack;
    private int audioFlvTrack;
    // Scratch buffer reused for the native RGBA->NV12 conversion (w*h*3/2 bytes).
    byte[] yuvbuff;

    /**
     * @param handler callback sink for encode errors and network state changes
     * @throws IllegalStateException when no hardware AVC encoder is available
     */
    public SrsEncoder(YuerEncodeHandler handler) {
        mHandler = handler;
        mVideoColorFormat = chooseVideoEncoder();
    }

    /** Sets the muxer that receives encoded samples; must be set before start(). */
    public void setFlvMuxer(SrsFlvMuxer flvMuxer) {
        this.flvMuxer = flvMuxer;
    }

    /**
     * Configures the screen-capture source used when start(false) is called.
     *
     * @param mediaProjection projection granted by the user
     * @param screenDensity   density (dpi) for the virtual display
     */
    public void setScreenCaptureParameter(MediaProjection mediaProjection, int screenDensity) {
        mMediaProjection = mediaProjection;
        mScreenDensity = screenDensity;
    }

    /**
     * Creates, configures and starts both encoders and registers their tracks
     * with the muxer.
     *
     * @param useCamera true: the caller pushes RGBA frames via onGetRgbaFrame();
     *                  false: capture the screen through a virtual display
     *                  rendered into the encoder's input surface
     * @return false when the muxer is missing or an encoder cannot be created
     */
    public boolean start(boolean useCamera) {
        if (flvMuxer == null) {
            return false;
        }
        if (useCamera && yuvbuff == null) {
            // One YUV420 frame: Y plane (w*h) plus quarter-size U and V planes.
            yuvbuff = new byte[vEncodeWidth * vEncodeHeight * 3 / 2];
        }

        // The reference PTS for the video and audio encoders.
        mPresentTimeUs = System.nanoTime() / 1000;

        // Some chips (e.g. MTK) require a 32x-aligned stride; the assertion is
        // intentionally disabled — this block only documents the constraint.
        if (vEncodeWidth % 32 != 0 || vEncodeHeight % 32 != 0) {
            if (vmci.getName().contains("MTK")) {
                //throw new AssertionError("MTK encoding resolution stride must be 32x");
            }
        }

        setVideoResolution(vEncodeWidth, vEncodeHeight);

        // Audio encoder: PCM -> AAC raw stream. Requires SDK 16+ (Jelly Bean).
        try {
            aencoder = MediaCodec.createEncoderByType(ACODEC);
        } catch (IOException e) {
            Log.e(TAG, "create aencoder failed.", e);
            return false;
        }
        // @see https://developer.android.com/reference/android/media/MediaCodec.html
        int ach = aChannelConfig == AudioFormat.CHANNEL_IN_STEREO ? 2 : 1;
        MediaFormat audioFormat = MediaFormat.createAudioFormat(ACODEC, ASAMPLERATE, ach);
        audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, ABITRATE);
        audioFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 0);
        aencoder.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        // Register the audio track with the muxer.
        audioFlvTrack = flvMuxer.addTrack(audioFormat);

        // Video encoder: YUV (or input surface) -> H.264 elementary stream.
        try {
            vencoder = MediaCodec.createByCodecName(vmci.getName());
        } catch (IOException e) {
            Log.e(TAG, "create vencoder failed.", e);
            return false;
        }

        if (!useCamera) {
            // Screen capture renders directly into the encoder's input surface.
            Log.e(TAG, "CodecCapabilities.COLOR_FormatSurface");
            mVideoColorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface;
        }
        MediaFormat videoFormat = MediaFormat.createVideoFormat(VCODEC, vEncodeWidth, vEncodeHeight);
        videoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, mVideoColorFormat);
        videoFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 0);
        videoFormat.setInteger(MediaFormat.KEY_BIT_RATE, vBitrate);
        videoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, VFPS);
        videoFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, VGOP / VFPS);
        vencoder.configure(videoFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        // Register the video track with the muxer.
        videoFlvTrack = flvMuxer.addTrack(videoFormat);

        if (!useCamera) {
            // Must be called between configure() and start().
            mInputSurface = vencoder.createInputSurface();
        }
        vencoder.start();
        if (!useCamera) {
            // FIX: keep the display handle so stop() can release it.
            mVirtualDisplay = mMediaProjection.createVirtualDisplay("Recording Display",
                    vEncodeWidth, vEncodeHeight, mScreenDensity,
                    DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR /* flags */,
                    mInputSurface, null /* callback */, null /* handler */);
            startVideoPusher();
        }
        aencoder.start();
        return true;
    }

    /**
     * Stops and releases both encoders, the drain thread, the virtual display
     * and the input surface. Safe to call more than once.
     */
    public void stop() {
        if (aencoder != null) {
            Log.i(TAG, "stop aencoder");
            try {
                aencoder.stop();
            } catch (IllegalStateException e) {
                // The codec may already be in an error/released state; still release.
                Log.e(TAG, "stop aencoder failed", e);
            }
            aencoder.release();
            aencoder = null;
        }
        stopVideoPusher();
        if (vencoder != null) {
            Log.i(TAG, "stop vencoder");
            try {
                vencoder.stop();
            } catch (IllegalStateException e) {
                Log.e(TAG, "stop vencoder failed", e);
            }
            vencoder.release();
            vencoder = null;
        }
        if (mVirtualDisplay != null) {
            // FIX: the original leaked the virtual display.
            mVirtualDisplay.release();
            mVirtualDisplay = null;
        }
        if (mInputSurface != null) {
            mInputSurface.release();
            mInputSurface = null;
        }
        mMediaProjection = null;
    }

    /** While true, encoded samples are dropped instead of sent to the muxer. */
    public void setReconnectFlag(boolean flag) {
        isReconnecting = flag;
    }

    /** @return true while the video encoder exists (between start() and stop()). */
    public boolean isWorking() {
        return vencoder != null;
    }

    /** Updates the encode resolution/bitrate; takes effect on the next start(). */
    public void setVideoEncodeParameter(int width, int height, int bitrate) {
        vBitrate = bitrate;
        vEncodeWidth = width;
        vEncodeHeight = height;
    }

    /** @return microseconds elapsed since start() — the sample PTS. */
    protected long getPTSUs() {
        return System.nanoTime() / 1000L - mPresentTimeUs;
    }

    /**
     * Starts the thread that drains the video encoder's output (screen-capture
     * path only; the camera path drains inline in onProcessedYuvFrame()).
     */
    public void startVideoPusher() {
        Log.e(TAG, "startVideoPusher");
        // FIX: clear the flag before the thread starts so it cannot observe a
        // stale 'true' from a previous run.
        mStopPushVideo = false;
        mVideoPushThread = new Thread(new Runnable() {
            @Override
            public void run() {
                // FIX: isInterrupted() — Thread.interrupted() CLEARS the flag,
                // which would defeat the interrupt issued by stopVideoPusher().
                while (!mStopPushVideo && !Thread.currentThread().isInterrupted()) {
                    int bufferIndex = vencoder.dequeueOutputBuffer(vebi, 0);
                    if (bufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                        // nothing available yet
                    } else if (bufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                        // Happens once, before any encoded buffers: publish SPS/PPS.
                        // FIX: the csd ByteBuffers are typically direct, so
                        // array() would throw (and its length is the capacity,
                        // not the payload). Use remaining()/duplicate() instead,
                        // and tolerate a missing csd-1.
                        MediaFormat outFormat = vencoder.getOutputFormat();
                        ByteBuffer sps = outFormat.getByteBuffer("csd-0");
                        ByteBuffer pps = outFormat.getByteBuffer("csd-1");
                        int spsLen = sps == null ? 0 : sps.remaining();
                        int ppsLen = pps == null ? 0 : pps.remaining();
                        ByteBuffer spsppsBuffer = ByteBuffer.allocate(spsLen + ppsLen);
                        if (sps != null) {
                            spsppsBuffer.put(sps.duplicate());
                        }
                        if (pps != null) {
                            spsppsBuffer.put(pps.duplicate());
                        }
                        spsppsBuffer.flip();
                        vebi.offset = 0;
                        vebi.size = spsLen + ppsLen;
                        vebi.presentationTimeUs = getPTSUs();
                        vebi.flags = MediaCodec.BUFFER_FLAG_KEY_FRAME;
                        if (flvMuxer != null) {
                            flvMuxer.writeSampleData(videoFlvTrack, spsppsBuffer, vebi);
                        }
                    } else if (bufferIndex < 0) {
                        // unexpected negative index; ignore it
                    } else {
                        ByteBuffer encodedData = vencoder.getOutputBuffer(bufferIndex);
                        if (encodedData == null) {
                            throw new RuntimeException("couldn't fetch buffer at index " + bufferIndex);
                        }
                        // Codec config was already sent on the format change above.
                        if ((vebi.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                            vebi.size = 0;
                        }
                        if (vebi.size > 0) {
                            // Drop frames while reconnecting instead of queueing them.
                            if (flvMuxer != null && !isReconnecting) {
                                vebi.presentationTimeUs = getPTSUs();
                                flvMuxer.writeSampleData(videoFlvTrack, encodedData, vebi);
                            }
                        } else {
                            Log.e(TAG, "pushStream() mBufferInfo.size < 0 ");
                        }
                        vencoder.releaseOutputBuffer(bufferIndex, false);
                    }
                }
            }
        });
        mVideoPushThread.start();
    }

    /** Signals the drain thread to stop and joins it. */
    public void stopVideoPusher() {
        mStopPushVideo = true;
        if (mVideoPushThread != null) {
            mVideoPushThread.interrupt();
            try {
                mVideoPushThread.join();
            } catch (InterruptedException e) {
                // Re-assert our own interrupt status and give up waiting.
                Thread.currentThread().interrupt();
            }
            mVideoPushThread = null;
        }
    }

    /**
     * Camera path: feeds one converted YUV frame to the encoder and drains any
     * ready output into the muxer.
     */
    private void onProcessedYuvFrame(byte[] yuvFrame, long pts) {
        // Blocks until an input buffer is free (-1 timeout).
        int inBufferIndex = vencoder.dequeueInputBuffer(-1);
        if (inBufferIndex >= 0) {
            // API 21+ accessor (consistent with getOutputBuffer elsewhere);
            // replaces the deprecated getInputBuffers() array.
            ByteBuffer bb = vencoder.getInputBuffer(inBufferIndex);
            bb.clear();
            bb.put(yuvFrame, 0, yuvFrame.length);
            vencoder.queueInputBuffer(inBufferIndex, 0, yuvFrame.length, pts, 0);
        }

        for (; ; ) {
            int outBufferIndex = vencoder.dequeueOutputBuffer(vebi, 0);
            if (outBufferIndex < 0) {
                break;
            }
            ByteBuffer bb = vencoder.getOutputBuffer(outBufferIndex);
            flvMuxer.writeSampleData(videoFlvTrack, bb, vebi);
            vencoder.releaseOutputBuffer(outBufferIndex, false);
        }
    }

    /**
     * Encodes one PCM buffer to AAC and drains ready output into the muxer.
     * Dropped entirely while reconnecting.
     *
     * @param data PCM 16-bit samples
     * @param size number of valid bytes in {@code data}
     */
    public void onGetPcmFrame(byte[] data, int size) {
        if (isReconnecting) {
            return;
        }
        int inBufferIndex = aencoder.dequeueInputBuffer(-1);
        if (inBufferIndex >= 0) {
            ByteBuffer bb = aencoder.getInputBuffer(inBufferIndex);
            bb.clear();
            bb.put(data, 0, size);
            aencoder.queueInputBuffer(inBufferIndex, 0, size, 0, 0);
        }

        for (; ; ) {
            int outBufferIndex = aencoder.dequeueOutputBuffer(aebi, 0);
            if (outBufferIndex < 0) {
                break;
            }
            ByteBuffer bb = aencoder.getOutputBuffer(outBufferIndex);
            aebi.presentationTimeUs = getPTSUs();
            flvMuxer.writeSampleData(audioFlvTrack, bb, aebi);
            aencoder.releaseOutputBuffer(outBufferIndex, false);
        }
    }

    /**
     * Camera path entry: converts one RGBA frame to YUV and encodes it, with
     * back-pressure based on the muxer's frame cache depth.
     */
    public void onGetRgbaFrame(byte[] data, int width, int height) {
        if (isReconnecting) {
            return;
        }
        // Check the cached-frame count to judge the network situation; cache at
        // most VGOP frames (VGOP / VFPS seconds of latency).
        AtomicInteger videoFrameCacheNumber = flvMuxer.getVideoFrameCacheNumber();
        if (videoFrameCacheNumber != null && videoFrameCacheNumber.get() < VGOP) {
            long pts = getPTSUs();
            byte[] processData = hwRgbaFrame(data, yuvbuff, width, height);
            if (processData != null) {
                onProcessedYuvFrame(processData, pts);
            } else {
                mHandler.notifyEncodeIllegalArgumentException(new IllegalArgumentException("libyuv failure"));
            }

            if (networkWeakTriggered) {
                networkWeakTriggered = false;
                mHandler.notifyNetworkResume();
            }
        } else {
            mHandler.notifyNetworkWeak();
            networkWeakTriggered = true;
        }
    }

    /**
     * Converts RGBA to the YUV layout the chosen encoder expects (flip + 180°
     * rotation, matching the camera orientation).
     *
     * @throws IllegalStateException for an unsupported negotiated color format
     */
    private byte[] hwRgbaFrame(byte[] data, byte[] yuvbuff, int width, int height) {
        switch (mVideoColorFormat) {
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
                return RGBAToI420(data, width, height, true, 180);
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
                return RGBAToNV12(data, yuvbuff, width, height, true, 180);
            default:
                throw new IllegalStateException("Unsupported color format!");
        }
    }

    /**
     * Creates an AudioRecord, preferring stereo and falling back to mono, and
     * records the working channel config in {@link #aChannelConfig}.
     *
     * @return an initialized AudioRecord, or null when neither config works
     */
    public AudioRecord chooseAudioRecord() {
        AudioRecord mic = new AudioRecord(MediaRecorder.AudioSource.DEFAULT, SrsEncoder.ASAMPLERATE,
                AudioFormat.CHANNEL_IN_STEREO, AudioFormat.ENCODING_PCM_16BIT, getPcmBufferSize() * 4);
        if (mic.getState() != AudioRecord.STATE_INITIALIZED) {
            mic = new AudioRecord(MediaRecorder.AudioSource.DEFAULT, SrsEncoder.ASAMPLERATE,
                    AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, getPcmBufferSize() * 4);
            if (mic.getState() != AudioRecord.STATE_INITIALIZED) {
                mic = null;
            } else {
                SrsEncoder.aChannelConfig = AudioFormat.CHANNEL_IN_MONO;
            }
        } else {
            SrsEncoder.aChannelConfig = AudioFormat.CHANNEL_IN_STEREO;
        }

        return mic;
    }

    /** @return the minimum PCM buffer size rounded up to a multiple of 8192. */
    private int getPcmBufferSize() {
        int pcmBufSize = AudioRecord.getMinBufferSize(ASAMPLERATE, AudioFormat.CHANNEL_IN_STEREO,
                AudioFormat.ENCODING_PCM_16BIT) + 8191;
        return pcmBufSize - (pcmBufSize % 8192);
    }

    /**
     * Finds an AVC encoder, optionally filtered by codec name substring.
     *
     * @param name substring to match against the codec name, or null for the
     *             first AVC encoder found
     * @return the matching encoder info, or null when none exists
     */
    private MediaCodecInfo chooseVideoEncoder(String name) {
        int nbCodecs = MediaCodecList.getCodecCount();
        for (int i = 0; i < nbCodecs; i++) {
            MediaCodecInfo mci = MediaCodecList.getCodecInfoAt(i);
            if (!mci.isEncoder()) {
                continue;
            }

            String[] types = mci.getSupportedTypes();
            for (int j = 0; j < types.length; j++) {
                if (types[j].equalsIgnoreCase(VCODEC)) {
                    Log.i(TAG, String.format("vencoder %s types: %s", mci.getName(), types[j]));
                    if (name == null) {
                        return mci;
                    }
                    if (mci.getName().contains(name)) {
                        return mci;
                    }
                }
            }
        }

        return null;
    }

    /**
     * Chooses the encoder and the best supported YUV420 color format.
     *
     * @return the chosen color format constant (the larger of planar/semiplanar
     *         that the encoder advertises)
     * @throws IllegalStateException when no AVC encoder exists on the device
     */
    private int chooseVideoEncoder() {
        // choose the encoder "video/avc":
        //      1. select default one when type matched.
        //      2. google avc is unusable.
        //      3. choose qcom avc.
        vmci = chooseVideoEncoder(null);
        if (vmci == null) {
            // FIX: the original fell through to an opaque NPE below.
            throw new IllegalStateException("no hardware encoder found for " + VCODEC);
        }
        int matchedColorFormat = 0;
        MediaCodecInfo.CodecCapabilities cc = vmci.getCapabilitiesForType(VCODEC);
        for (int i = 0; i < cc.colorFormats.length; i++) {
            int cf = cc.colorFormats[i];
            Log.i(TAG, String.format("vencoder %s supports color fomart 0x%x(%d)", vmci.getName(), cf, cf));

            // Choose YUV for H.264, preferring the larger constant (corresponds
            // to the color-space transform done in onGetRgbaFrame).
            // FIX: access the static constants via the class, not the instance.
            if (cf >= MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar
                    && cf <= MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar) {
                if (cf > matchedColorFormat) {
                    matchedColorFormat = cf;
                }
            }
        }

        for (int i = 0; i < cc.profileLevels.length; i++) {
            MediaCodecInfo.CodecProfileLevel pl = cc.profileLevels[i];
            Log.i(TAG, String.format("vencoder %s support profile %d, level %d", vmci.getName(), pl.profile, pl.level));
        }

        Log.i(TAG, String.format("vencoder %s choose color format 0x%x(%d)", vmci.getName(), matchedColorFormat, matchedColorFormat));
        return matchedColorFormat;
    }

    // Native helpers from libyuv (see the static loader below).
    private native void setVideoResolution(int outWidth, int outHeight);
    private native byte[] RGBAToI420(byte[] rgbaFrame, int width, int height, boolean flip, int rotate);
    private native byte[] RGBAToNV12(byte[] rgbaFrame, byte[] yuvbuff, int width, int height, boolean flip, int rotate);

    static {
        System.loadLibrary("yuv");
    }
}
