package com.breeze.breezelive.record;

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.util.Log;

import com.breeze.breezelive.record.bean.MutexData;

import java.io.IOException;
import java.lang.ref.WeakReference;
import java.nio.ByteBuffer;
import java.util.LinkedList;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;

/**
 * Encoder thread: pulls raw I420 frames from a queue, encodes them to H.264
 * (AVC) with {@link MediaCodec}, and hands the encoded packets to a
 * {@link MediaMuxerThread} (held weakly so this thread never keeps the muxer
 * alive on its own).
 *
 * <p>Threading contract: {@link #frame(byte[])} is called from a producer
 * thread (e.g. the camera callback); the encode loop runs on this thread.
 * The queue is therefore a {@link ConcurrentLinkedQueue} and the stop flag
 * is {@code volatile}.
 */
public class VideoRecordThread extends Thread {
    private static final String TAG = "VideoRecordThread";
    // Timeout passed to dequeueInputBuffer/dequeueOutputBuffer — the
    // MediaCodec API takes MICROSECONDS, so this is 10 ms.
    private static final int DEQUEUE_TIMEOUT_US = 10000;
    // Pause between polls when the queue is empty, to avoid busy-spinning.
    private static final long IDLE_SLEEP_MS = 5;
    private int mFrameRate = 30;
    private int mBitRate;
    private int mIFrameInterval = 10;
    private long generateIndex = 0;
    // Pending raw frames in I420 layout. Concurrent: offered by the producer
    // thread in frame(), polled by this thread's run loop.
    public Queue<byte[]> dataQueue;
    // volatile: written by end() on another thread, read by the run loop.
    private volatile boolean isRecording;
    private MediaCodec mMediaCodec;
    private int width, height;
    // Weak so a dangling encoder thread cannot leak the muxer.
    private WeakReference<MediaMuxerThread> mMutex;

    /**
     * @param mMutex muxer that will receive the encoded packets
     * @param width  frame width in pixels
     * @param height frame height in pixels
     */
    public VideoRecordThread(MediaMuxerThread mMutex, int width, int height) {
        this.mMutex = new WeakReference<>(mMutex);
        this.width = width;
        this.height = height;
        this.dataQueue = new ConcurrentLinkedQueue<>();
        this.isRecording = false;
        // Heuristic bitrate derived from raw throughput:
        // pixels * 3 bytes * 8 bits * fps, scaled down by 256.
        this.mBitRate = height * width * 3 * 8 * mFrameRate / 256;
    }

    /**
     * Creates, configures and starts the H.264 encoder.
     *
     * @return true on success, false if codec creation failed
     */
    private boolean initMediaCodec(int width, int height) {
        try {
            MediaFormat mediaFormat = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, width, height);
            mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
            mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
            mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, mFrameRate);
            mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, mIFrameInterval);
            mMediaCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
            mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            mMediaCodec.start();
        } catch (IOException e) {
            Log.e(TAG, "initMediaCodec: failed to create AVC encoder", e);
            return false;
        }
        return true;
    }

    /**
     * Queues one raw I420 frame for encoding. Ignored while not recording.
     * NOTE(review): the byte[] is queued by reference — the caller must not
     * reuse/recycle the buffer after handing it in; confirm against the
     * camera callback's buffer ownership.
     */
    public void frame(byte[] data) {
        if (isRecording) {
            dataQueue.offer(data);
        }
    }

    /** Initializes the encoder; call before {@link #begin()}. */
    public void prepare() {
        if (!initMediaCodec(width, height)) {
            Log.e(TAG, "prepare: encoder init failed");
        }
    }

    /** Starts recording: clears stale frames and launches this thread. */
    public void begin() {
        dataQueue.clear();
        isRecording = true;
        generateIndex = 0;
        start();
    }

    /**
     * Requests a stop. The run loop finishes draining any frames already
     * queued, then releases the codec.
     */
    public void end() {
        isRecording = false;
    }

    @Override
    public void run() {
        // Keep encoding until stop is requested AND the backlog is drained,
        // so frames queued just before end() are not silently dropped.
        while (isRecording || !dataQueue.isEmpty()) {
            byte[] input = dataQueue.poll();
            if (input == null) {
                try {
                    Thread.sleep(IDLE_SLEEP_MS); // avoid busy-spinning on an empty queue
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    break;
                }
            } else {
                encode(input);
            }
        }
        release();
    }

    /** Stops the codec and frees its resources; safe to call once at exit. */
    private void release() {
        try {
            if (mMediaCodec != null) {
                mMediaCodec.stop();
                mMediaCodec.release();
            }
        } catch (Exception e) {
            Log.e(TAG, "release: codec shutdown failed", e);
        }
    }

    /**
     * Feeds one raw frame into the encoder and forwards every encoded packet
     * that becomes available to the muxer.
     */
    private void encode(byte[] input) {
        if (input != null) {
            try {
                int inputBufferIndex = mMediaCodec.dequeueInputBuffer(DEQUEUE_TIMEOUT_US);
                long pts = getPts();
                if (inputBufferIndex >= 0) {
                    ByteBuffer inputBuffer = mMediaCodec.getInputBuffer(inputBufferIndex);
                    inputBuffer.clear();
                    inputBuffer.put(input);
                    // Hand the buffer to the codec; it encodes asynchronously.
                    mMediaCodec.queueInputBuffer(inputBufferIndex, 0, input.length, pts, 0);
                    generateIndex += 1;
                }

                MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
                int outputBufferIndex = mMediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT_US);
                if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    Log.w(TAG, "run: INFO_OUTPUT_FORMAT_CHANGED");
                    // The muxer track must be added exactly once, from the
                    // codec's negotiated output format (carries SPS/PPS).
                    MediaMuxerThread mediaMutex = mMutex.get();
                    if (mediaMutex != null && !mediaMutex.isVideoTrackExist()) {
                        mediaMutex.addVideoTrack(mMediaCodec.getOutputFormat());
                    }
                }
                // >= 0 means an encoded packet is ready; drain them all.
                while (outputBufferIndex >= 0) {
                    ByteBuffer outputBuffer = mMediaCodec.getOutputBuffer(outputBufferIndex);
                    if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                        // Codec-config (SPS/PPS) is delivered via the track
                        // format; do not write it as a sample.
                        Log.w(TAG, "run: BUFFER_FLAG_CODEC_CONFIG");
                        info.size = 0;
                    }

                    if (info.size > 0) {
                        MediaMuxerThread mediaMuxer = mMutex.get();
                        if (mediaMuxer != null) {
                            // Window the buffer to the valid region BEFORE
                            // copying, then hand the packet to the muxer.
                            // info.presentationTimeUs is kept as assigned by
                            // the encoder — it belongs to the frame actually
                            // produced, not to "now".
                            outputBuffer.position(info.offset);
                            outputBuffer.limit(info.offset + info.size);
                            byte[] outData = new byte[info.size];
                            outputBuffer.get(outData);
                            mediaMuxer.putMutexData(new MutexData(true, outData, info));
                            Log.i(TAG, "video inputPTS=" + pts + ", outputPTS:" + info.presentationTimeUs);
                        }
                    }
                    mMediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                    info = new MediaCodec.BufferInfo();
                    outputBufferIndex = mMediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT_US);
                }

            } catch (Throwable t) {
                Log.e(TAG, "encode: failure", t);
            }
        }
    }

    /** Monotonic-ish presentation timestamp in microseconds. */
    private long getPts() {
        return System.nanoTime() / 1000L;
    }
}