package it.jack.practicertmp;

import android.hardware.display.DisplayManager;
import android.hardware.display.VirtualDisplay;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.projection.MediaProjection;
import android.os.Bundle;
import android.view.Surface;

import java.io.IOException;
import java.nio.ByteBuffer;

/**
 * Captures the screen via a {@link MediaProjection}, encodes it to H.264 (AVC)
 * with a Surface-input {@link MediaCodec}, and forwards each encoded frame to a
 * {@link ScreenLive} as an RTMP video packet.
 *
 * <p>Lifecycle: {@link #startLive(MediaProjection)} configures the encoder and
 * starts this thread; {@link #stopLive()} asks the loop to exit, after which
 * the codec and virtual display are released.
 *
 * @author Jack
 * @since 2022/7/6 9:53
 */
public class VideoCodec extends Thread {

    /** How often (ms) to manually request a sync (I) frame from the encoder. */
    private static final long I_FRAME_REQUEST_INTERVAL_MS = 2000;
    /** Timeout (microseconds) for draining one output buffer. */
    private static final long DEQUEUE_TIMEOUT_US = 10_000;

    // Source of the raw screen frames.
    private MediaProjection mediaProjection;
    // Virtual display that the projection renders into (backed by the codec's input Surface).
    private VirtualDisplay  virtualDisplay;
    // H.264 encoder.
    private MediaCodec      mediaCodec;
    // Sink that queues encoded packets for RTMP publishing.
    private ScreenLive      screenLive;
    // Loop flag: written by the caller thread (stopLive), read by the encoder
    // thread, hence volatile.
    private volatile boolean isLiving = false;

    private int width;
    private int height;

    public VideoCodec(ScreenLive screenLive) {
        this.screenLive = screenLive;
        this.width = 720;
        this.height = 1280;
    }

    /**
     * Configures the AVC encoder and virtual display, then starts the encoding
     * thread. If encoder creation or configuration fails, any partially created
     * resources are released and the thread is NOT started.
     *
     * @param mediaProjection projection granted by the user; supplies the frames
     */
    public void startLive(MediaProjection mediaProjection) {
        this.mediaProjection = mediaProjection;
        // The encoder needs the output dimensions up front.
        MediaFormat format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC,
                width, height);
        // Frames arrive via a Surface, so the encoder reads GPU buffers directly.
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        // Target bitrate (bits per second).
        format.setInteger(MediaFormat.KEY_BIT_RATE, 400_000);
        // Frame rate (frames per second).
        format.setInteger(MediaFormat.KEY_FRAME_RATE, 15);
        // Key-frame interval in seconds.
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);

        try {
            mediaCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
            // arg2 = null: no display surface needed (we encode, not render);
            // arg3 = null: no DRM; arg4: configure as an encoder.
            mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            // The codec supplies the input Surface; the virtual display draws into it.
            Surface surface = mediaCodec.createInputSurface();
            virtualDisplay = mediaProjection.createVirtualDisplay(
                    "rtmp",
                    width, height, 1,
                    DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC,
                    surface, null, null);
        } catch (IOException e) {
            // Do NOT fall through and start the thread: run() would NPE on a
            // null/unconfigured codec. Clean up and bail out instead.
            e.printStackTrace();
            release();
            return;
        }
        isLiving = true;
        start();
    }

    /**
     * Asks the encoding loop to stop. Resources are released by the encoder
     * thread once the loop exits; safe to call from any thread.
     */
    public void stopLive() {
        isLiving = false;
    }

    // Wall-clock time of the last manual sync-frame request (ms).
    private long timeStamp;
    // First frame's presentation time in ms; origin for relative timestamps.
    private long startTime;

    @Override
    public void run() {
        super.run();
        mediaCodec.start();
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        while (isLiving) {

            // Periodically request a sync (I) frame so late-joining viewers can
            // start decoding without waiting for the natural key-frame interval.
            if (System.currentTimeMillis() - timeStamp >= I_FRAME_REQUEST_INTERVAL_MS) {
                Bundle params = new Bundle();
                // Value 0 == "request sync frame now" per the MediaCodec API.
                params.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
                mediaCodec.setParameters(params);
                timeStamp = System.currentTimeMillis();
            }

            // Input is fed through the Surface; we only drain the output side.
            int index = mediaCodec.dequeueOutputBuffer(bufferInfo, DEQUEUE_TIMEOUT_US);
            if (index >= 0) {
                if (startTime == 0) {
                    // Convert the first PTS from microseconds to milliseconds
                    // and use it as the stream's time origin.
                    startTime = bufferInfo.presentationTimeUs / 1000;
                }
                ByteBuffer byteBuffer = mediaCodec.getOutputBuffer(index);
                // Honor BufferInfo.offset/size: per the MediaCodec contract they
                // frame the valid encoded bytes within the output buffer.
                byteBuffer.position(bufferInfo.offset);
                byteBuffer.limit(bufferInfo.offset + bufferInfo.size);
                byte[] outData = new byte[byteBuffer.remaining()];
                byteBuffer.get(outData);

                // Relative timestamp in milliseconds (PTS is in microseconds).
                RTMPPackage rtmpPackage = new RTMPPackage(outData, (bufferInfo.presentationTimeUs / 1000) - startTime);
                rtmpPackage.setType(RTMPPackage.RTMP_PACKET_TYPE_VIDEO);
                screenLive.addPackage(rtmpPackage);

                // Return the buffer to the codec; false = do not render.
                mediaCodec.releaseOutputBuffer(index, false);
            }
        }
        release();
    }

    /** Frees the encoder and virtual display; idempotent. */
    private void release() {
        if (mediaCodec != null) {
            try {
                mediaCodec.stop();
            } catch (IllegalStateException ignored) {
                // Codec may never have been started (e.g. configure failed).
            }
            mediaCodec.release();
            mediaCodec = null;
        }
        if (virtualDisplay != null) {
            virtualDisplay.release();
            virtualDisplay = null;
        }
    }
}
