package com.qmcy.demux;

import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible;
import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar;
import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar;

import android.Manifest;
import android.annotation.SuppressLint;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.Toast;

import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;

import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;

/**
 * Demo activity that pulls an RTSP stream through a native FFmpeg demuxer
 * ({@link FFDemuxJava}), feeds the raw H.264 packets into a {@link MediaCodec}
 * decoder, converts the decoded YUV output to NV21, and shows the result as a
 * JPEG-compressed {@link Bitmap} in an ImageView. Optionally dumps raw data to
 * external storage for offline inspection.
 *
 * <p>Threading: {@link #onPacketEvent(byte[])} / {@link #onMessageEvent(int, float)}
 * are invoked from the native demuxer's thread; UI updates are posted via
 * {@code runOnUiThread}.</p>
 */
public class FFDemuxActivity extends AppCompatActivity implements FFDemuxJava.EventCallback {
    // Codec info for "video/avc" selected by getMediaCodecList().
    // May remain null if the device reports no matching codec — all users must null-check.
    private MediaCodecInfo codecInfo = null;
    private static final String TAG = "FFDemuxActivityTag";
    private static final String[] REQUEST_PERMISSIONS = {
            Manifest.permission.READ_EXTERNAL_STORAGE,
            Manifest.permission.WRITE_EXTERNAL_STORAGE
    };
    private static final int PERMISSION_REQUEST_CODE = 1;
    private FFDemuxJava m_demuxer = null;

    private EditText editText;

    private boolean mIsTouch = false;
    private final String mVideoPath = "rtsp://admin:yjzn123456@192.168.2.64:554";
    private SurfaceView mSurfaceView;
    private ByteBuffer[] inputBuffers;
    private ImageView mIvShowImage;

    private int dstWidth = 1280;
    private int dstHeight = 720;
    private NV21ToBitmap nv21ToBitmap;

    @SuppressLint("MissingInflatedId")
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.demux);
        nv21ToBitmap = new NV21ToBitmap(this);
        getMediaCodecList();

        editText = findViewById(R.id.output);
        mIvShowImage = findViewById(R.id.mIvShowImage);
        mSurfaceView = findViewById(R.id.surfaceView);
        mSurfaceView.getHolder().addCallback(new SurfaceHolder.Callback() {
            @Override
            public void surfaceCreated(@NonNull SurfaceHolder holder) {
                // Set up the decoder first, then start the native demuxer which
                // will begin delivering packets via onPacketEvent().
                onGetNetVideoData(holder.getSurface());
                m_demuxer = new FFDemuxJava();
                m_demuxer.addEventCallback(FFDemuxActivity.this);
                m_demuxer.init(mVideoPath);
                m_demuxer.Start();
            }

            @Override
            public void surfaceChanged(@NonNull SurfaceHolder holder, int format, int width, int height) {

            }

            @Override
            public void surfaceDestroyed(@NonNull SurfaceHolder holder) {

            }
        });

    }

    @Override
    protected void onResume() {
        super.onResume();

        if (!hasPermissionsGranted(REQUEST_PERMISSIONS)) {
            ActivityCompat.requestPermissions(this, REQUEST_PERMISSIONS, PERMISSION_REQUEST_CODE);
        }

        // m_demuxer is null until surfaceCreated() has run.
        if (m_demuxer != null)
            m_demuxer.Start();
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
        if (requestCode == PERMISSION_REQUEST_CODE) {
            if (!hasPermissionsGranted(REQUEST_PERMISSIONS)) {
                Toast.makeText(this, "We need the permission: WRITE_EXTERNAL_STORAGE", Toast.LENGTH_SHORT).show();
            }
        } else {
            super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        }
    }

    @Override
    protected void onPause() {
        super.onPause();
        if (m_demuxer != null) {
            // TODO: pause/stop the demuxer here; currently intentionally left empty.
        }
    }

    @Override
    protected void onDestroy() {
        // FIX: m_demuxer may still be null if the surface was never created
        // (it is only instantiated in surfaceCreated()); the unguarded call
        // previously crashed with an NPE.
        if (m_demuxer != null) {
            m_demuxer.unInit();
        }
        // FIX: release the decoder and close the dump stream so the codec
        // instance and file descriptor are not leaked.
        if (mediaCodec != null) {
            try {
                mediaCodec.stop();
            } catch (IllegalStateException e) {
                e.printStackTrace();
            }
            mediaCodec.release();
            mediaCodec = null;
        }
        if (BufOs != null) {
            try {
                BufOs.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
            BufOs = null;
        }
        super.onDestroy();
    }

    private MediaCodec mediaCodec;

    /**
     * Creates and starts an H.264 ("video/avc") decoder configured for
     * 1280x720 / flexible YUV420 ByteBuffer output. No Surface is bound, so
     * decoded frames must be fetched via getOutputBuffer() in onFrame().
     *
     * @param surface currently unused — output goes to ByteBuffers, not the surface
     */
    private void onGetNetVideoData(Surface surface) {

        try {
            Log.d(TAG, "onGetNetVideoData: ");
            // Create the decoder for the H.264 MIME type.
            mediaCodec = MediaCodec.createDecoderByType("video/avc");
            MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", 1280, 720);
            // Expected frame rate in frames/second.
            mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 20);

            mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, COLOR_FormatYUV420Flexible);
//            byte[] headerSps = {0, 0, 0, 1, 103, 66, 0, 41, -115, -115, 64, 80, 30, -48, 15, 8, -124, 83, -128};
//            byte[] headerPps = {0, 0, 0, 1, 104, -54, 67, -56};
//
//            mediaFormat.setByteBuffer("csd-0", ByteBuffer.wrap(headerSps));
//            mediaFormat.setByteBuffer("csd-1", ByteBuffer.wrap(headerPps));
            // Configure without a Surface: output stays in ByteBuffers.
            mediaCodec.configure(mediaFormat, null, null, 0);
            mediaCodec.start();
            // FIX: moved inside the try block — previously this executed even
            // when createDecoderByType() threw, producing an NPE on a null codec.
            inputBuffers = mediaCodec.getInputBuffers();
        } catch (IOException e) {
            e.printStackTrace();
            Log.e(TAG, "创建解码失败");
        }

    }

    private int mCount;
    private boolean isPrintImage = true;

    /**
     * Feeds one demuxed H.264 access unit into the decoder and, if an output
     * buffer becomes available, converts the decoded YUV frame to NV21 and
     * displays it. Called from the native demuxer's callback thread.
     *
     * @param buf    encoded frame data (Annex-B)
     * @param offset start offset within buf (callers currently pass 0)
     * @param length number of valid bytes in buf
     */
    private void onFrame(byte[] buf, int offset, int length) {
        // FIX: bail out if decoder initialization failed; previously this
        // dereferenced a null mediaCodec/inputBuffers.
        if (mediaCodec == null || inputBuffers == null) {
            Log.e(TAG, "onFrame: decoder not initialized");
            return;
        }

        Log.d(TAG, "onFrame-buf: " + Arrays.toString(buf));
        Log.d(TAG, "onFrame-buf-length: " + buf.length);

        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        // Wait up to 10000 us for a free input buffer; -1 means none available.
        int inIndex = mediaCodec.dequeueInputBuffer(10000);
        Log.i(TAG, "inIndex: " + inIndex);
        if (inIndex >= 0) {
            // Fill the input buffer selected by the returned index.
            ByteBuffer byteBuffer = inputBuffers[inIndex];
            byteBuffer.clear();
            byteBuffer.put(buf);
            // Hand the buffer back to the codec. Presentation time is a simple
            // synthetic ramp (mCount * 20) — TODO: use real PTS from the demuxer.
            mediaCodec.queueInputBuffer(inIndex, 0, length, mCount * 20, 0);
            mCount++;
        } else {
            Log.i(TAG, "inIndex < 0");
            // No free input buffer; drop this packet and try again next time.
            return;
        }

        // Poll the output side for a decoded frame.
        int outIndex = mediaCodec.dequeueOutputBuffer(info, 10000);
        Log.e(TAG, "解码输出outIndex " + outIndex);
        if (outIndex >= 0) {
            // FIX: also require codecInfo != null — getCapabilitiesForType()
            // would NPE if getMediaCodecList() found no matching codec.
            if (isPrintImage && codecInfo != null) {
                // Copy the decoder output into a plain byte[]; the codec-owned
                // ByteBuffer cannot be used directly after release.
                ByteBuffer byteBuffer = mediaCodec.getOutputBuffer(outIndex);
                byteBuffer.position(info.offset);
                byteBuffer.limit(info.size + info.offset);

                byte[] ba = new byte[byteBuffer.remaining()];
                byteBuffer.get(ba);

                // Walk the codec's advertised color formats and convert
                // accordingly. NOTE(review): the original author was unsure of
                // the actual output format and tried each candidate in turn;
                // a device supporting several formats will convert (and
                // display) the same frame more than once per loop — confirm
                // the real output format via MediaFormat.KEY_COLOR_FORMAT.
                MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType("video/avc");
                Log.i(TAG, "=============capabilities.colorFormats.length=================" + capabilities.colorFormats.length);
                for (int i = 0; i < capabilities.colorFormats.length; i++) {
                    int format = capabilities.colorFormats[i];
                    Log.i(TAG, "============formatformat============" + format);
                    // Observed on test devices:
                    //   Huawei tablet: COLOR_FormatYUV420SemiPlanar, COLOR_FormatYUV420Planar
                    //   Meizu phone:   COLOR_FormatYUV420SemiPlanar
                    switch (format) {
                        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar: // I420 or YV12
                            Log.i("COLOR_Format_TAG", "=========COLOR_FormatYUV420Planar");
                            byte[] convertNv21 = new byte[ba.length];
                            // Uncertain which planar layout the device emits;
                            // alternatives were tried empirically:
//                            convertI420ToNV21(ba, convertNv21, 1280, 720);
//                            convertYV12toNV21(ba, convertNv21, 1280, 720);
                            convertNV12toNV21(ba, convertNv21, 1280, 720);
                            NV21Data(convertNv21);

                            continue;

                        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar: // NV12
                            Log.i("COLOR_Format_TAG", "=======COLOR_FormatYUV420SemiPlanar");
                            byte[] nv21 = new byte[ba.length];
                            convertNV12toNV21(ba, nv21, 1280, 720);
                            NV21Data(nv21);

                            continue;
                        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
                            Log.i("COLOR_Format_TAG", "=======COLOR_FormatYUV420PackedSemiPlanar");
                            continue;
                        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible:

                            Log.i("COLOR_Format_TAG", "=======COLOR_FormatYUV420Flexible");
                            continue;
                        default:
                            continue;
                    }
                }


            }

            try {
                // Crude pacing; TODO replace with PTS-driven rendering.
                Thread.sleep(25);
            } catch (InterruptedException e) {
                // Restore the interrupt flag instead of swallowing it.
                Thread.currentThread().interrupt();
                e.printStackTrace();
            }

            // No surface is bound, so render=false; just release the buffer.
            mediaCodec.releaseOutputBuffer(outIndex, false);
        } else {
            Log.e(TAG, "没有解码成功");
        }
    }

    /**
     * Compresses an NV21 frame (1280x720) to JPEG, shows it in the ImageView,
     * optionally appends the JPEG bytes to the dump file, and saves the very
     * first decoded frame to external storage.
     */
    private void NV21Data(byte[] nv21) {
        YuvImage yuvImage = new YuvImage(nv21, ImageFormat.NV21, 1280, 720, null);
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        yuvImage.compressToJpeg(new Rect(0, 0, 1280, 720), 100, baos);
        byte[] data = baos.toByteArray();

        wirte2file(data, data.length);
        Bitmap bitmap = BitmapFactory.decodeByteArray(data, 0, data.length);

        if (bitmap != null) {
            runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    mIvShowImage.setImageBitmap(bitmap);
                }
            });

            // Save only the first successfully decoded frame.
            if (printImageStatus == 0) {
                printImageStatus = 1;
                try {
                    // NOTE(review): the file is named "img.png" but the bytes
                    // written are JPEG — the misleading extension is kept to
                    // preserve the existing output path.
                    File myCaptureFile = new File(Environment.getExternalStorageDirectory(), "img.png");
                    BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(myCaptureFile));
                    bitmap.compress(Bitmap.CompressFormat.JPEG, 100, bos);
                    bos.flush();
                    bos.close();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
    }

    /**
     * Converts I420 (Y + U plane + V plane) to NV21 (Y + interleaved VU).
     * Buffers must both be width*height*3/2 bytes.
     */
    public static void convertI420ToNV21(byte[] i420, byte[] nv21, int width, int height) {
        System.arraycopy(i420, 0, nv21, 0, width * height);
        int offset = width * height;
        for (int i = 0; i < width * height / 4; i++) {
            nv21[offset + 2 * i] = i420[offset + i + width * height / 4];     // V (after U plane)
            nv21[offset + 2 * i + 1] = i420[offset + i];                      // U
        }
    }

    /**
     * Converts YV12 (Y + V plane + U plane) to NV21 (Y + interleaved VU).
     * Buffers must both be width*height*3/2 bytes.
     */
    public static void convertYV12toNV21(byte[] yv12, byte[] nv21, int width, int height) {
        int size = width * height;
        int vOffset = size;                  // V plane follows Y in YV12
        int uOffset = size + (size / 4);     // U plane follows V

        // Copy Y channel as it is
        System.arraycopy(yv12, 0, nv21, 0, size);

        for (int i = 0; i < size / 4; i++) {
            nv21[vOffset + (i * 2)] = yv12[vOffset + i];      // V
            nv21[vOffset + (i * 2) + 1] = yv12[uOffset + i];  // U
        }
    }


    /**
     * Converts NV12 (Y + interleaved UV) to NV21 (Y + interleaved VU) by
     * swapping each U/V pair. Buffers must both be width*height*3/2 bytes.
     */
    public static void convertNV12toNV21(byte[] nv12, byte[] nv21, int width, int height) {
        int size = width * height;
        int offset = size;

        // copy Y channel as it is
        System.arraycopy(nv12, 0, nv21, 0, offset);

        for (int i = 0; i < size / 4; i++) {
            nv21[offset + (i * 2) + 1] = nv12[offset + (i * 2)];       // U
            nv21[offset + (i * 2)] = nv12[offset + (i * 2) + 1];       // V
        }
    }


    @SuppressLint("NewApi")
    /**
     * Scans the device codec list for a "video/avc" codec and records it in
     * {@link #codecInfo}, logging its supported color formats.
     *
     * NOTE(review): this keeps ENCODERS (isEncoder() == true) and skips
     * decoders, yet codecInfo is later used in onFrame() to interpret
     * DECODER output. This looks inverted — confirm against the original
     * intent before changing, since the decoder's actual output format may
     * differ from the encoder's advertised formats.
     */
    public void getMediaCodecList() {
        int codecsNums = MediaCodecList.getCodecCount();

        for (int i = 0; i < codecsNums && codecInfo == null; i++) {
            MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
            if (info.isEncoder()) {
                Log.i(TAG, "========这是一个编码器==========");
            } else {
                Log.i(TAG, "========这是一个解码器==========");
                continue;
            }
            String[] types = info.getSupportedTypes();
            boolean found = false;
            for (int j = 0; j < types.length && !found; j++) {
                if (types[j].equals("video/avc")) {
                    found = true;
                }
            }
            if (!found) {
                continue;
            }
            codecInfo = info;
        }
        // FIX: guard against no matching codec — the unconditional
        // codecInfo.getName() call previously crashed with an NPE.
        if (codecInfo == null) {
            Log.e(TAG, "getMediaCodecList: no codec found for video/avc");
            return;
        }
        Log.d(TAG, codecInfo.getName() + "对应" + "video/avc");

        // Log every color format the selected codec advertises.
        MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType("video/avc");
        Log.i(TAG, "=============capabilities.colorFormats.length=================" + capabilities.colorFormats.length);
        for (int i = 0; i < capabilities.colorFormats.length; i++) {
            int format = capabilities.colorFormats[i];
            Log.i(TAG, "============formatformat==============" + format);
            switch (format) {
                case COLOR_FormatYUV420Planar:
                    Log.i("COLOR_FormatYUVTag", "=========COLOR_FormatYUV420Planar");
                    continue;
                case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
                    Log.i("COLOR_FormatYUVTag", "========COLOR_FormatYUV420PackedPlanar");
                    continue;
                case COLOR_FormatYUV420SemiPlanar:
                    Log.i("COLOR_FormatYUVTag", "=======COLOR_FormatYUV420SemiPlanar");
                    continue;
                case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
                    Log.i("COLOR_FormatYUVTag", "=======COLOR_FormatYUV420PackedSemiPlanar");
                    continue;
                case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
                    Log.i("COLOR_FormatYUVTag", "=======COLOR_TI_FormatYUV420PackedSemiPlanar");
                    continue;
                default:
                    continue;
            }
        }
    }


    private int printImageStatus;

    /**
     * Finds the next Annex-B start code (0x00 0x00 0x00 0x01) in bytes,
     * scanning from index {@code start} up to {@code totalSize}.
     *
     * @return index of the start code, or -1 if none found
     */
    private int findByFrame(byte[] bytes, int start, int totalSize) {
        for (int i = start; i < totalSize - 4; i++) {

            Log.d(TAG, "findByFrame--bytes[i]: " + bytes[i] + ", bytes[i + 1]:" + bytes[i + 1] + ", bytes[i + 2]:" + bytes[i + 2] + ", bytes[i + 3]:" + bytes[i + 3]);
            if (bytes[i] == 0x00 && bytes[i + 1] == 0x00 && bytes[i + 2] == 0x00 && bytes[i + 3] == 0x01) {
                return i;
            }

        }
        return -1;
    }


    /**
     * Native demuxer callback: one demuxed packet. Shows a short hex preview
     * in the EditText and forwards the packet to the decoder.
     * Called from the native thread.
     */
    @Override
    public void onPacketEvent(byte[] data) {
        Log.d(TAG, "onPacketEvent() called with: size = [" + data.length + "]" + "[" + data[0] + " " + data[1] + " " + data[2] + " " + data[3] + " " + data[4] + " " + data[5] + "]");

        String str = "onPacketEvent() called with: size = [" + data.length + "]" + "[" + data[0] + " " + data[1] + " " + data[2] + " " + data[3] + " " + data[4] + " " + data[5] + "]";
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                editText.setText(str);
            }
        });

        onFrame(data, 0, data.length);

//        wirte2file(data, data.length);

    }

    /**
     * Native demuxer callback for status messages. Currently only logs.
     */
    @Override
    public void onMessageEvent(final int msgType, final float msgValue) {
        Log.d(TAG, "onPlayerEvent() called with: msgType = [" + msgType + "], msgValue = [" + msgValue + "]");
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                switch (msgType) {

                    case 1:
                        break;
                    default:
                        break;
                }
            }
        });

    }


    /** Returns true iff every permission in the array is already granted. */
    protected boolean hasPermissionsGranted(String[] permissions) {
        for (String permission : permissions) {
            if (ActivityCompat.checkSelfPermission(this, permission)
                    != PackageManager.PERMISSION_GRANTED) {
                return false;
            }
        }
        return true;
    }


    private BufferedOutputStream BufOs = null;
    private File destfile = null;
    private FileOutputStream destfs = null;
    //    private String dsetfilePath = Environment.getExternalStorageDirectory() + "/" + "dest.h264";
    private String dsetfilePath = Environment.getExternalStorageDirectory() + "/" + "dest111.yuv";

    /**
     * Appends {@code length} bytes of {@code buf} to the dump file while
     * capture is enabled (see onStart/onStop). The stream is opened lazily on
     * first use and closed in onDestroy().
     */
    private void wirte2file(byte[] buf, int length) {
        if (isStart) {
            if (BufOs == null) {
                destfile = new File(dsetfilePath);
                try {
                    destfs = new FileOutputStream(destfile);
                    BufOs = new BufferedOutputStream(destfs);
                    Log.d(TAG, "wirte2file-new ");
                } catch (FileNotFoundException e) {
                    Log.i("TRACK", "initerro" + e.getMessage());
                    Log.d(TAG, "wirte2file-FileNotFoundException:" + e.getMessage());
                    e.printStackTrace();
                }
            }

            try {
                BufOs.write(buf, 0, length);
                BufOs.flush();
                Log.d(TAG, "wirte2file-write");
            } catch (Exception e) {
                Log.d(TAG, "wirte2file-e: " + e.getMessage());
            }

        }
    }

    private boolean isStart;

    /** Click handler (layout XML): stop writing packets to the dump file. */
    public void onStop(View view) {
        isStart = false;
        Toast.makeText(this, "停止保存", Toast.LENGTH_SHORT).show();
    }

    /** Click handler (layout XML): start writing packets to the dump file. */
    public void onStart(View view) {
        isStart = true;
        Toast.makeText(this, "开始保存", Toast.LENGTH_SHORT).show();
    }


}
