package com.yfjin.gl.filter;

import android.content.Context;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.opengl.EGLContext;
import android.os.Handler;
import android.os.HandlerThread;
import android.view.Surface;

import java.io.IOException;
import java.nio.ByteBuffer;

/**
 * Records an OpenGL texture stream into a raw H.264 elementary stream.
 *
 * <p>A shared {@link EGLContext} is used to draw each frame onto the encoder's
 * input {@link Surface} on a dedicated codec thread; encoded output is drained
 * and handed to {@code FileUtils}. Note there is no muxer here — the output is
 * bare H.264, not an MP4 container.
 */
class MediaRecord_H264 {

    /** Timeout for dequeueOutputBuffer, in microseconds. */
    private static final long DEQUEUE_TIMEOUT_US = 10_000L;

    private MediaCodec mMediaCodec;
    private int mWidth;
    private int mHeight;
    private String mPath;
    private Surface mSurface;
    private Handler mHandler;

    private EGLEnv eglEnv;
    private boolean isStart;
    private EGLContext mGlContext;
    private Context mContext;
    private float mSpeed;

    // Timestamp (ms) of the first encoded buffer; captured once for reference.
    private long startTime;

    /**
     * @param context   any context; the application context is retained to avoid leaks
     * @param glContext the GL context to share textures with the recording surface
     * @param path      output file path (written by FileUtils)
     * @param width     encoded video width in pixels
     * @param height    encoded video height in pixels
     */
    public MediaRecord_H264(Context context, EGLContext glContext, String path, int width, int height) {
        mContext = context.getApplicationContext();
        mGlContext = glContext;
        mPath = path;
        mWidth = width;
        mHeight = height;
    }

    /**
     * Configures and starts the H.264 encoder, then binds a recording EGL
     * environment to a dedicated handler thread.
     *
     * @param speed playback speed factor (stored for timestamp scaling)
     * @throws IOException if the encoder cannot be created
     */
    public void start(float speed) throws IOException {
        mSpeed = speed;

        MediaFormat format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC,
                mWidth, mHeight);
        // Color format: frames arrive via the input Surface, not byte buffers.
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities
                .COLOR_FormatSurface);
        // Bit rate (bits per second).
        format.setInteger(MediaFormat.KEY_BIT_RATE, 1500_000);
        // Frame rate.
        format.setInteger(MediaFormat.KEY_FRAME_RATE, 25);
        // Key-frame (I-frame) interval in seconds.
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 10);

        mMediaCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
        mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);

        // Input comes from the GPU: we render onto this Surface instead of
        // queueing byte[] input buffers.
        mSurface = mMediaCodec.createInputSurface();

        mMediaCodec.start();

        // The recording EGL context must live on a single thread; all GL work
        // for the encoder is posted to this handler.
        HandlerThread handlerThread = new HandlerThread("codec-gl");
        handlerThread.start();
        mHandler = new Handler(handlerThread.getLooper());
        mHandler.post(() -> {
            eglEnv = new EGLEnv(mContext, mGlContext, mSurface, mWidth, mHeight);
            isStart = true;
        });
    }

    /**
     * Submits one frame for encoding. Safe to call from the render thread;
     * the actual GL draw happens on the codec thread bound to the recording
     * EGL context.
     *
     * @param textureId GL texture containing the frame
     * @param timestamp frame presentation time in nanoseconds
     */
    public void fireFrame(int textureId, long timestamp) {
        if (!isStart) {
            return;
        }
        // The recording OpenGL context is bound to the handler's thread, so
        // all drawing into the encoder surface must be posted there.
        mHandler.post(new Runnable() {
            @Override
            public void run() {
                // Render the texture into the encoder's input Surface...
                eglEnv.draw(textureId, timestamp);
                // ...then drain whatever the encoder has produced so far.
                codec(false);
            }
        });
    }

    /**
     * Drains pending H.264 output from the encoder and hands it to FileUtils.
     *
     * @param endOfStream true when recording stops; the encoder is signalled
     *                    end-of-stream and drained until BUFFER_FLAG_END_OF_STREAM
     *                    so no buffered frames are lost.
     */
    private void codec(boolean endOfStream) {
        if (endOfStream) {
            // Surface-input encoders must be told explicitly that input has
            // ended, otherwise they never flush their last frames.
            mMediaCodec.signalEndOfInputStream();
        }
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        while (true) {
            int index = mMediaCodec.dequeueOutputBuffer(bufferInfo, DEQUEUE_TIMEOUT_US);
            if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
                if (!endOfStream) {
                    // Nothing ready yet; try again on the next frame.
                    break;
                }
                // Draining at end-of-stream: keep waiting for the flush.
            } else if (index >= 0) {
                ByteBuffer byteBuffer = mMediaCodec.getOutputBuffer(index);
                // Only bufferInfo.offset..offset+size is valid output data.
                byteBuffer.position(bufferInfo.offset);
                byteBuffer.limit(bufferInfo.offset + bufferInfo.size);
                byte[] outData = new byte[bufferInfo.size];
                byteBuffer.get(outData);
                if (startTime == 0) {
                    startTime = bufferInfo.presentationTimeUs / 1000;
                }
                FileUtils.writeContent(outData);
                FileUtils.writeBytes(outData);
                mMediaCodec.releaseOutputBuffer(index, false);
                if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    // Encoder fully drained; stop looping.
                    break;
                }
            }
            // Other negative codes (e.g. INFO_OUTPUT_FORMAT_CHANGED) need no
            // handling for a raw elementary stream; just loop again.
        }
    }

    /**
     * Stops recording: drains the encoder, then releases the codec, the EGL
     * environment, the input surface, and the codec thread.
     */
    public void stop() {
        isStart = false;
        mHandler.post(() -> {
            codec(true); // signal EOS and drain remaining frames
            mMediaCodec.stop();
            mMediaCodec.release();
            mMediaCodec = null;
            eglEnv.release();
            eglEnv = null;
            // Release the input surface obtained from createInputSurface().
            mSurface.release();
            mSurface = null;
            mHandler.getLooper().quitSafely();
            mHandler = null;
        });
    }
}
