package com.nan.gbd.library.codec.livecodec;

import android.media.MediaCodec;
import android.opengl.EGLContext;
import android.opengl.GLSurfaceView;
import android.util.Log;

import com.nan.gbd.library.JNIBridge;
import com.nan.gbd.library.codec.FinishCallback;
import com.nan.gbd.library.codec.MediaCodecConstant;
import com.nan.gbd.library.codec.configuration.VideoConfiguration;
import com.nan.gbd.library.codec.mediacodec.VideoMediaCodec;
import com.nan.gbd.library.codec.audio.AudioCapture;
import com.nan.gbd.library.codec.surface.EglRenderThread;
import com.nan.gbd.library.codec.surface.EglSurfaceView;
import com.nan.gbd.library.utils.Logger;

import java.nio.ByteBuffer;


/**
 * Live-stream codec management class: wires together the video hardware
 * encoder (MediaCodec), the EGL render thread, and the audio capture,
 * and forwards encoded/captured frames to the native layer via JNIBridge.
 */
public class MediaLiveManager implements MediaLiveChangeListener {
    // Kept consistent with the class name (was the stale "MediaEncodeManager").
    private static final String TAG = "MediaLiveManager";

    private EglSurfaceView.Render eglSurfaceRender;

    /* Video hardware-encoder state. */
    private MediaCodec videoCodec;
    private MediaCodec.BufferInfo videoBuffer;
    private FinishCallback finishCallback;

    private AudioCapture audioCapture;

    private VideoCodecLiveThread videoThread;
    private EglRenderThread eglThread;
    private VideoConfiguration videoConfig;
    private EGLContext eglContext;
    private int renderMode;

    // Cached codec-config buffer (e.g. H.264 SPS/PPS) that gets prepended to
    // every key frame so a decoder can start mid-stream.
    byte[] mConfigByte;

    /**
     * @param eglSurfaceRender renderer that draws camera/preview frames onto the encoder surface
     * @param eglContext       shared EGL context the render thread attaches to
     * @param finishCallback   notified (with code 1) once encoding has fully torn down
     */
    public MediaLiveManager(EglSurfaceView.Render eglSurfaceRender, EGLContext eglContext, FinishCallback finishCallback) {
        this.eglSurfaceRender = eglSurfaceRender;
        this.eglContext = eglContext;
        this.finishCallback = finishCallback;
        this.renderMode = GLSurfaceView.RENDERMODE_CONTINUOUSLY;
    }

    /** Creates the video encoder and the microphone capture. Call before {@link #initThread()}. */
    public void initMediaCodec() {
        initVideoCodec();
        initAudioCapture();
    }

    /**
     * Creates the EGL render thread (feeding the encoder's input surface) and the
     * encoder drain thread. Requires {@link #initMediaCodec()} to have succeeded.
     */
    public void initThread() {
        eglThread = new EglRenderThread(videoCodec.createInputSurface(), eglContext, eglSurfaceRender, renderMode, videoConfig.width, videoConfig.height);
        videoThread = new VideoCodecLiveThread(videoCodec, videoBuffer, this);
    }

    /**
     * Initializes the microphone capture.
     */
    private void initAudioCapture() {
        audioCapture = new AudioCapture();
    }

    /** Builds the encoder from the live-streaming preset configuration. */
    private void initVideoCodec() {
        videoConfig = VideoConfiguration.getLivingConfig();
        videoCodec = VideoMediaCodec.getVideoMediaCodec(videoConfig);
        videoBuffer = new MediaCodec.BufferInfo();
    }

    /**
     * Starts encoding: launches the render, encoder-drain and audio-capture threads
     * and flips the shared state flags. No-op (with an error log) if the codec or
     * threads were never initialized.
     */
    public void startEncode() {
        if (videoCodec == null) {
            // Message fixed: the failing precondition here is a missing codec,
            // not a failed createInputSurface() call.
            Log.e(TAG, "startEncode: videoCodec is null, call initMediaCodec() first");
            return;
        }
        if (eglThread == null || videoThread == null) {
            Log.e(TAG, "startEncode: threads not initialized, call initThread() first");
            return;
        }
        eglThread.start();
        videoThread.start();
        audioCapture.start();

        MediaCodecConstant.surfaceCreate = true;
        MediaCodecConstant.surfaceChange = true;

        MediaCodecConstant.videoStop = false;
    }

    /**
     * Requests encoding to stop. Threads shut down asynchronously; final cleanup
     * happens in {@link #destroy()} once MUXER_STOP is delivered.
     */
    public void stop() {
        if (videoThread != null)
            videoThread.stopVideoCodec();
        if (eglThread != null)
            eglThread.stopEglRender();
        if (audioCapture != null)
            audioCapture.stop();
    }

    /**
     * Releases references and notifies the finish callback exactly once.
     * NOTE(review): videoCodec itself is not released here — presumably
     * VideoCodecLiveThread releases it when it stops; verify to avoid a codec leak.
     */
    private void destroy() {
        videoThread = null;
        eglThread = null;
        audioCapture = null;
        MediaCodecConstant.surfaceCreate = false;
        MediaCodecConstant.surfaceChange = false;
        if (finishCallback != null) {
            finishCallback.onFinished(1);
            finishCallback = null; // nulled so a repeated MUXER_STOP cannot fire it twice
        }
    }

    @Override
    public void onMediaLiveChangeListener(int type) {
        if (type == MediaCodecConstant.MUXER_START) {
            setPcmRecordListener();
        }

        if (type == MediaCodecConstant.MUXER_STOP) {
            destroy();
        }
    }

    // Audio-capture thread data callback: forwards raw PCM to the native layer.
    private void setPcmRecordListener() {
        if (audioCapture.getCaptureListener() == null)
            audioCapture.setCaptureListener(new AudioCapture.AudioCaptureListener() {
                @Override
                public void onCaptureListener(byte[] audioSource, int audioReadSize) {
                    if (MediaCodecConstant.videoStop) {
                        return;
                    }
                    long curTs = System.currentTimeMillis();
                    JNIBridge.nanPushAudioFrame(audioSource, curTs);
                }
            });
    }

    @Override
    public void onMediaInfoListener(int time) {
        Logger.w("直播播放时长 ：" + time);
    }

    /**
     * Drains one encoded video buffer from the codec and pushes it to the native layer.
     * Codec-config buffers (SPS/PPS) are cached instead of pushed; key frames get the
     * cached config prepended so decoders can join mid-stream.
     *
     * FIX: {@code bufferInfo.flags} is a bitmask, so it must be tested with {@code &}
     * rather than {@code ==} — the original comparisons missed buffers carrying
     * combined flags (e.g. KEY_FRAME | END_OF_STREAM).
     */
    @Override
    public void pushVideoPacket(ByteBuffer byteBuffer, MediaCodec.BufferInfo bufferInfo) {
        if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
            mConfigByte = new byte[bufferInfo.size];
            byteBuffer.get(mConfigByte);
            Logger.e("配置贞来了:" + mConfigByte.length);
            return;
        }
        final byte[] bytes = new byte[bufferInfo.size];
        byteBuffer.get(bytes);
        final boolean keyframe = (bufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0;
        byte[] framebytes;
        // FIX: guard against mConfigByte being null if a key frame somehow arrives
        // before the codec-config buffer (the original would throw an NPE here).
        if (keyframe && mConfigByte != null) {
            framebytes = new byte[mConfigByte.length + bytes.length];
            System.arraycopy(mConfigByte, 0, framebytes, 0, mConfigByte.length);
            System.arraycopy(bytes, 0, framebytes, mConfigByte.length, bytes.length);
            Logger.e("关键贞:" + framebytes.length);
        } else {
            framebytes = bytes;
            Logger.d("B/P贞贞来了:" + framebytes.length);
        }
        long nt = System.currentTimeMillis();
        JNIBridge.nanPushVideoFrame(framebytes, nt, keyframe);
    }

}
