package com.chenjim.glrecorder.util;

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.text.TextUtils;
import android.util.Log;
import android.view.Surface;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;

/**
 * Minimal MP4 video player: extracts the first video track of an MP4 file with
 * {@link MediaExtractor}, decodes it with {@link MediaCodec}, and renders the
 * decoded frames directly onto the supplied {@link Surface}, throttled to
 * roughly real-time playback speed.
 *
 * <p>Not thread-safe: one instance should drive a single playback at a time.
 */
public class SimpleDecodeVideoPlayer {

    private static final String TAG = "SimpleDecodeVideoPlayer";

    /**
     * Microsecond timeout for both input and output dequeue calls. Using a
     * finite timeout on the input side (instead of -1 = wait forever) keeps
     * the single decode loop from deadlocking when the codec cannot hand out
     * an input buffer until output buffers have been released.
     */
    private static final long DEQUEUE_TIMEOUT_US = 10_000L;

    /**
     * Starts asynchronous playback of {@code mp4Path} on {@code surface}.
     * Decoding runs on a freshly spawned background thread; failures are
     * logged, not propagated.
     *
     * @param mp4Path absolute path of the MP4 file to play; ignored if empty
     * @param surface render target for decoded frames
     */
    public void init(String mp4Path, Surface surface) {
        new Thread(() -> {
            try {
                initInternal(mp4Path, surface);
            } catch (IOException e) {
                Log.e(TAG, "playback failed for " + mp4Path, e);
            }
        }).start();
    }

    /**
     * Blocking decode loop. Finds the first video track, configures a decoder
     * bound to {@code surface}, then pumps extractor samples into the codec
     * and releases decoded buffers to the surface until end of stream.
     *
     * @param mp4Path MP4 file to decode
     * @param surface render target passed to {@link MediaCodec#configure}
     * @throws IOException if the data source cannot be opened or no decoder
     *                     exists for the track's MIME type
     */
    private void initInternal(String mp4Path, Surface surface) throws IOException {
        if (TextUtils.isEmpty(mp4Path)) {
            return;
        }
        MediaExtractor mediaExtractor = new MediaExtractor();
        MediaCodec videoCodec = null;
        try {
            mediaExtractor.setDataSource(mp4Path);

            // Locate the first video track.
            MediaFormat videoMediaFormat = null;
            int videoTrackIndex = -1;
            for (int i = 0; i < mediaExtractor.getTrackCount(); i++) {
                MediaFormat mediaFormat = mediaExtractor.getTrackFormat(i);
                String mime = mediaFormat.getString(MediaFormat.KEY_MIME);
                if (mime != null && mime.startsWith("video/")) {
                    videoMediaFormat = mediaFormat;
                    videoTrackIndex = i;
                    break; // first video track wins; no need to keep scanning
                }
            }
            if (videoMediaFormat == null) {
                Log.w(TAG, "no video track found in " + mp4Path);
                return;
            }

            int width = videoMediaFormat.getInteger(MediaFormat.KEY_WIDTH);
            int height = videoMediaFormat.getInteger(MediaFormat.KEY_HEIGHT);
            // KEY_DURATION is optional in a MediaFormat; guard the lookup so a
            // header without it doesn't throw.
            long durationUs = videoMediaFormat.containsKey(MediaFormat.KEY_DURATION)
                    ? videoMediaFormat.getLong(MediaFormat.KEY_DURATION)
                    : -1L;
            Log.i(TAG, "video " + width + "x" + height + ", duration(us)=" + durationUs);

            // After selectTrack, readSampleData only returns samples from this track.
            mediaExtractor.selectTrack(videoTrackIndex);

            String mime = videoMediaFormat.getString(MediaFormat.KEY_MIME);
            videoCodec = MediaCodec.createDecoderByType(mime);
            // Rendering straight to the Surface; output buffers are never read.
            videoCodec.configure(videoMediaFormat, surface, null, 0);
            videoCodec.start();

            MediaCodec.BufferInfo videoBufferInfo = new MediaCodec.BufferInfo();
            boolean inputDone = false;
            long startMs = System.currentTimeMillis();

            while (true) {
                // ---- Feed compressed samples to the decoder. ----
                if (!inputDone) {
                    // Finite timeout (not -1): never block forever while the
                    // output side still needs draining, or this loop deadlocks.
                    int inputBufferIndex = videoCodec.dequeueInputBuffer(DEQUEUE_TIMEOUT_US);
                    if (inputBufferIndex >= 0) {
                        ByteBuffer inputBuffer = videoCodec.getInputBuffer(inputBufferIndex);
                        // readSampleData returns -1 once the track is exhausted.
                        int sampleSize = mediaExtractor.readSampleData(inputBuffer, 0);
                        if (sampleSize < 0) {
                            // No more samples: signal end of stream to the codec.
                            videoCodec.queueInputBuffer(inputBufferIndex, 0, 0, 0,
                                    MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                            inputDone = true;
                        } else {
                            // Sample timestamps may arrive out of decode order;
                            // the codec reorders frames internally.
                            videoCodec.queueInputBuffer(inputBufferIndex, 0, sampleSize,
                                    mediaExtractor.getSampleTime(), 0);
                            // Advance to the next sample for the following read.
                            mediaExtractor.advance();
                        }
                    }
                }

                // ---- Drain decoded frames to the Surface. ----
                int outputBufferIndex =
                        videoCodec.dequeueOutputBuffer(videoBufferInfo, DEQUEUE_TIMEOUT_US);
                if (outputBufferIndex >= 0) {
                    // Pace playback: wait until wall clock reaches this frame's
                    // presentation time.
                    sleepRender(videoBufferInfo, startMs);
                    // true = render this buffer to the configured Surface.
                    videoCodec.releaseOutputBuffer(outputBufferIndex, true);
                    // BufferInfo is only valid for a real buffer index, so the
                    // EOS flag is checked here rather than on INFO_* results.
                    if ((videoBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        break;
                    }
                }
                // INFO_OUTPUT_FORMAT_CHANGED / INFO_TRY_AGAIN_LATER /
                // INFO_OUTPUT_BUFFERS_CHANGED need no handling for Surface output.
            }
        } finally {
            // Release native resources even if decoding throws mid-stream.
            mediaExtractor.release();
            if (videoCodec != null) {
                try {
                    videoCodec.stop();
                } catch (IllegalStateException ignored) {
                    // Codec was created but never started; release() suffices.
                }
                videoCodec.release();
            }
        }
    }

    /**
     * Sleeps in 16 ms slices until the wall clock catches up with the frame's
     * presentation timestamp, keeping playback near real-time speed.
     *
     * @param bufferInfo info of the video frame about to be rendered
     * @param startMs    wall-clock time (ms) at which playback started
     */
    private void sleepRender(MediaCodec.BufferInfo bufferInfo, long startMs) {
        while (bufferInfo.presentationTimeUs / 1000 > System.currentTimeMillis() - startMs) {
            try {
                Thread.sleep(16);
            } catch (InterruptedException e) {
                // Restore the interrupt flag and stop pacing this frame.
                Thread.currentThread().interrupt();
                return;
            }
        }
    }

    // Scratch buffer holding the most recently copied frame; also the
    // haystack searched by findFrame().
    byte[] outData = null;

    /**
     * Copies the contents of {@code outputBuffer} into {@link #outData} and
     * logs the position of the first Annex-B start code, if any.
     *
     * @param outputBuffer encoded frame data to copy; fully consumed on return
     * @param sps_pps      SPS/PPS header bytes for prefixing IDR frames;
     *                     currently unused (network send path disabled)
     */
    private void sendData(ByteBuffer outputBuffer, byte[] sps_pps) {
        int size = outputBuffer.remaining();
        Log.i(TAG, "outputBuffer.remaining()=" + size);
        // Re-allocate whenever the frame size changes: reusing a buffer of a
        // different size would throw BufferUnderflowException (frame smaller
        // than the buffer) or silently truncate (frame larger).
        if (outData == null || outData.length != size) {
            outData = new byte[size];
        }
        outputBuffer.get(outData);

        int frame = findFrame(0, outData.length);
        Log.i(TAG, "frame location=" + frame);
    }

    /**
     * Searches {@link #outData} for a 4-byte Annex-B start code (00 00 00 01).
     *
     * @param startIndex index to begin searching from (inclusive)
     * @param totalSize  number of valid bytes in {@link #outData}
     * @return index of the first start code at or after {@code startIndex},
     *         or -1 if none is found
     */
    private int findFrame(int startIndex, int totalSize) {
        // i <= totalSize - 4 (not <): the previous bound skipped a start code
        // located in the final four bytes of the buffer.
        for (int i = startIndex; i <= totalSize - 4; i++) {
            if (outData[i] == 0x00 && outData[i + 1] == 0x00
                    && outData[i + 2] == 0x00 && outData[i + 3] == 0x01) {
                return i;
            }
        }
        return -1;
    }
}
