package com.yfjin.webrtc.ws;

import android.graphics.SurfaceTexture;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.util.Size;
import android.view.TextureView;
import android.view.ViewGroup;

import androidx.camera.core.CameraX;
import androidx.camera.core.ImageAnalysis;
import androidx.camera.core.ImageAnalysisConfig;
import androidx.camera.core.ImageProxy;
import androidx.camera.core.Preview;
import androidx.camera.core.PreviewConfig;
import androidx.lifecycle.LifecycleOwner;

import com.yfjin.webrtc.util.ImageUtil;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;

/**
 * Captures camera frames with CameraX, converts them to NV12, encodes them to
 * H.264 with a hardware {@link MediaCodec} encoder and pushes each encoded
 * access unit over a {@link SocketLive} connection.
 *
 * <p>Per-frame pipeline (runs on the dedicated "Analyze-thread" handler thread):
 * YUV_420_888 planes -> NV21 -> rotate 90 degrees -> NV12 -> encoder.
 *
 * <p>Threading: {@link #analyze} and all encoder state are confined to the
 * analyzer thread; {@link #startCapture}/{@link #stopCapture} may be called
 * from any thread (the start flag is {@code volatile}).
 */
public class EncoderPlayerLiveH264 implements Preview.OnPreviewOutputUpdateListener, ImageAnalysis.Analyzer {
    private static final String TAG = "123123";

    /** NAL unit type of an IDR (key) frame. */
    public static final int NAL_I = 5;
    /** NAL unit type of a sequence parameter set. */
    public static final int NAL_SPS = 7;

    /** Encoder frame rate; also drives presentation-timestamp generation. */
    private static final int FRAME_RATE = 15;
    /** Encoder target bitrate in bits per second. */
    private static final int BIT_RATE = 8_000_000;
    /** Key-frame interval in seconds. */
    private static final int I_FRAME_INTERVAL_SEC = 2;
    /** Timeout (microseconds) for dequeueing encoder input/output buffers. */
    private static final long CODEC_TIMEOUT_US = 100_000L;

    /** Requested capture resolution (portrait: 720x1280). */
    private int mWidth = 720, mHeight = 1280;
    private TextureView mTextureView;
    private HandlerThread mHandlerThread;
    private CameraX.LensFacing mCurrentFacing = CameraX.LensFacing.BACK;
    private MediaCodec mMediaCodec;
    private SocketLive mSocketLive;

    // Scratch buffers, lazily sized from the first analyzed frame and reused
    // for every subsequent frame to avoid per-frame allocation.
    private byte[] y;
    private byte[] u;
    private byte[] v;
    private byte[] nv21;
    private byte[] nv21_rotated;
    // NOTE(review): nv12 is never allocated here; ImageUtil.nv21toNV12 is
    // presumably expected to allocate when passed null — confirm its contract.
    private byte[] nv12;

    // volatile: written by startCapture()/stopCapture() on a caller thread,
    // read by analyze() on the analyzer thread.
    private volatile boolean isStart = false;
    private int mFrameIndex;

    /** Cached SPS/PPS codec-config NAL, prepended to every key frame. */
    private byte[] sps_pps_buf;

    /**
     * Binds preview and analysis use cases to the given lifecycle and starts
     * the analyzer thread.
     *
     * @param lifecycleOwner lifecycle that scopes the CameraX use cases
     * @param mTextureView   view that displays the camera preview
     */
    public EncoderPlayerLiveH264(LifecycleOwner lifecycleOwner, TextureView mTextureView) {
        this.mTextureView = mTextureView;
        mHandlerThread = new HandlerThread("Analyze-thread");
        mHandlerThread.start();
        CameraX.bindToLifecycle(lifecycleOwner, getPreView(), getAnalysis());
    }

    /** Builds the preview use case and registers this class for surface updates. */
    private Preview getPreView() {
        PreviewConfig previewConfig = new PreviewConfig.Builder()
                .setTargetResolution(new Size(mWidth, mHeight))
                .setLensFacing(mCurrentFacing).build();
        Preview preview = new Preview(previewConfig);
        preview.setOnPreviewOutputUpdateListener(this);
        return preview;
    }

    /**
     * Builds the image-analysis use case that delivers frames to
     * {@link #analyze} on the analyzer thread, keeping only the latest frame.
     */
    private ImageAnalysis getAnalysis() {
        ImageAnalysisConfig imageAnalysisConfig = new ImageAnalysisConfig.Builder()
                .setCallbackHandler(new Handler(mHandlerThread.getLooper()))
                .setLensFacing(mCurrentFacing)
                .setImageReaderMode(ImageAnalysis.ImageReaderMode.ACQUIRE_LATEST_IMAGE)
                .setTargetResolution(new Size(mWidth, mHeight))
                .build();
        ImageAnalysis imageAnalysis = new ImageAnalysis(imageAnalysisConfig);
        imageAnalysis.setAnalyzer(this);
        return imageAnalysis;
    }

    /**
     * Re-attaches the camera's {@link SurfaceTexture} to the preview view.
     * When the view already has a surface (e.g. after switching cameras) it
     * must be removed and re-added so the new surface takes effect.
     */
    @Override
    public void onUpdated(Preview.PreviewOutput output) {
        SurfaceTexture surfaceTexture = output.getSurfaceTexture();
        if (mTextureView.getSurfaceTexture() != surfaceTexture) {
            if (mTextureView.isAvailable()) {
                // Detach/re-attach, otherwise setSurfaceTexture throws when a
                // surface is already set (seen when switching cameras).
                ViewGroup parent = (ViewGroup) mTextureView.getParent();
                parent.removeView(mTextureView);
                parent.addView(mTextureView, 0);
                parent.requestLayout();
            }
            mTextureView.setSurfaceTexture(surfaceTexture);
        }
    }

    /**
     * Converts one camera frame to NV12, feeds it to the encoder and forwards
     * any encoded output. Runs on the analyzer thread; the framework closes
     * {@code image} after this method returns (old two-arg Analyzer API).
     */
    @Override
    public void analyze(ImageProxy image, int rotationDegrees) {
        if (!isStart) {
            return;
        }
        Log.i(TAG, "analyze: " + image.getPlanes()[0].getRowStride());
        ImageProxy.PlaneProxy[] planes = image.getPlanes();
        // Lazily size the plane buffers from the first frame. The lock only
        // guards first-time allocation; subsequent reads stay on this thread.
        synchronized (this) {
            if (y == null) {
                y = new byte[planes[0].getBuffer().remaining()];
                u = new byte[planes[1].getBuffer().remaining()];
                v = new byte[planes[2].getBuffer().remaining()];
            }
        }
        planes[0].getBuffer().get(y);
        planes[1].getBuffer().get(u);
        planes[2].getBuffer().get(v);

        Size size = new Size(image.getWidth(), image.getHeight());
        int frameWidth = image.getWidth();
        int frameHeight = image.getHeight();
        if (nv21 == null) {
            nv21 = new byte[frameWidth * frameHeight * 3 / 2];
            nv21_rotated = new byte[frameWidth * frameHeight * 3 / 2];
        }

        ImageUtil.yuvToNv21(y, u, v, nv21, frameWidth, frameHeight);
        // Rotate 90 degrees so the encoded stream is upright; this swaps the
        // effective width/height, which initCodec() accounts for.
        ImageUtil.nv21_rotate_to_90(nv21, nv21_rotated, frameWidth, frameHeight);
        byte[] temp = ImageUtil.nv21toNV12(nv21_rotated, nv12);

        Log.i(TAG, "analyze: 帧大小" + temp.length);

        if (mMediaCodec == null) {
            initCodec(size);
        }

        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();

        int inIndex = mMediaCodec.dequeueInputBuffer(CODEC_TIMEOUT_US);
        if (inIndex >= 0) {
            ByteBuffer byteBuffer = mMediaCodec.getInputBuffer(inIndex);
            byteBuffer.clear();
            byteBuffer.put(temp, 0, temp.length);
            long pts = computePresentationTime(mFrameIndex);
            // Hand the raw frame to the encoder (DSP).
            mMediaCodec.queueInputBuffer(inIndex, 0, temp.length, pts, 0);
            mFrameIndex++;
        }
        int outIndex = mMediaCodec.dequeueOutputBuffer(info, CODEC_TIMEOUT_US);
        if (outIndex >= 0) {
            // Encoded H.264 data is available here.
            ByteBuffer byteBuffer = mMediaCodec.getOutputBuffer(outIndex);
            dealFrame(byteBuffer, info);
            mMediaCodec.releaseOutputBuffer(outIndex, false);
        }
    }

    /**
     * Routes one encoded NAL unit: caches SPS/PPS, prepends the cached config
     * to key frames, and sends everything else through as-is.
     *
     * <p>Assumes a 4-byte Annex-B start code (00 00 00 01), so the NAL header
     * is the 5th byte. NOTE(review): a 3-byte start code would mis-parse here.
     */
    private void dealFrame(ByteBuffer bb, MediaCodec.BufferInfo bufferInfo) {
        int type = bb.get(4) & 0x1F;
        if (type == NAL_SPS) {
            // Cache the codec config; it is not sent on its own.
            sps_pps_buf = new byte[bufferInfo.size];
            bb.get(sps_pps_buf);
            return;
        }
        byte[] bytes = new byte[bufferInfo.size];
        bb.get(bytes);
        if (type == NAL_I) {
            if (sps_pps_buf == null) {
                // Fix: the original dereferenced sps_pps_buf here and crashed
                // with an NPE when an IDR frame arrived before the codec
                // config. Drop the frame — the decoder could not use it anyway.
                Log.w(TAG, "dealFrame: key frame before SPS/PPS, dropping");
                return;
            }
            // Key frames are sent as SPS/PPS + IDR so a receiver can start
            // decoding from any key frame.
            byte[] newBuf = new byte[sps_pps_buf.length + bytes.length];
            System.arraycopy(sps_pps_buf, 0, newBuf, 0, sps_pps_buf.length);
            System.arraycopy(bytes, 0, newBuf, sps_pps_buf.length, bytes.length);
            bytes = newBuf;
        }
        mSocketLive.sendData(bytes);
        // Fix: the original logged Arrays.toString(bytes) of the whole frame,
        // allocating a huge string per frame; log only type and size.
        Log.v(TAG, "sendData: NAL type=" + type + " size=" + bytes.length);
    }

    /**
     * Returns the presentation timestamp (microseconds) of the given frame at
     * the configured frame rate.
     */
    private long computePresentationTime(long frameIndex) {
        return frameIndex * 1_000_000L / FRAME_RATE;
    }

    /**
     * Creates, configures and starts the H.264 encoder. Width and height are
     * swapped relative to the camera frame because frames are rotated 90
     * degrees before encoding.
     */
    private void initCodec(Size size) {
        try {
            mMediaCodec = MediaCodec.createEncoderByType("video/avc");
            final MediaFormat format = MediaFormat.createVideoFormat("video/avc", size.getHeight(), size.getWidth());
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
            format.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, I_FRAME_INTERVAL_SEC);
            mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            mMediaCodec.start();
        } catch (IOException e) {
            Log.e(TAG, "initCodec: failed to create video/avc encoder", e);
        }
    }

    /**
     * Starts streaming encoded frames to the given socket.
     *
     * @param socketLive destination for encoded H.264 data
     */
    public void startCapture(SocketLive socketLive) {
        // Assign the socket BEFORE raising the flag so analyze() can never
        // observe isStart == true with a null socket.
        this.mSocketLive = socketLive;
        isStart = true;
    }

    /** Stops feeding frames to the encoder/socket; capture can be restarted. */
    public void stopCapture() {
        isStart = false;
    }

    /**
     * Releases the encoder and quits the analyzer thread. Call when this
     * instance is no longer needed; previously these resources leaked.
     */
    public void release() {
        stopCapture();
        mHandlerThread.quitSafely();
        if (mMediaCodec != null) {
            try {
                mMediaCodec.stop();
            } catch (IllegalStateException e) {
                Log.w(TAG, "release: encoder already stopped", e);
            }
            mMediaCodec.release();
            mMediaCodec = null;
        }
    }
}
