package com.example.cameraxpush;

import android.content.Context;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.util.Log;
import android.util.Size;
import android.view.TextureView;


import androidx.annotation.NonNull;
import androidx.camera.core.CameraSelector;
import androidx.camera.core.CameraX;
import androidx.camera.core.ImageAnalysis;
import androidx.camera.core.ImageProxy;
import androidx.camera.core.Preview;
import androidx.camera.lifecycle.ProcessCameraProvider;
import androidx.camera.view.PreviewView;
import androidx.core.content.ContextCompat;
import androidx.lifecycle.LifecycleOwner;

import com.google.common.util.concurrent.ListenableFuture;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantLock;


/**
 * Binds a back-facing CameraX camera to a lifecycle, shows a preview, and
 * pushes every analysis frame through an H.264 (AVC) hardware encoder.
 * Encoded output is appended to {@code savePath/screen.h264}.
 *
 * <p>Thread-safety: {@link #myAnalyze(ImageProxy)} runs on a pool thread and is
 * serialized with a {@link ReentrantLock} because it mutates shared buffers and
 * the codec.
 */
public class CameraXHelper {

    private static final String TAG = "CameraXHelper";

    // Shared executor for analyzer callbacks; bounded queue caps the backlog
    // if analysis falls behind the camera.
    private static final ThreadPoolExecutor threadPoolExecutor = new ThreadPoolExecutor(
            2, 10, 10L, TimeUnit.SECONDS, new LinkedBlockingQueue<>(100));

    private PreviewView previewView;
    int width = 1280;
    int height = 720;

    // Per-plane copies of the current frame; sized lazily from the first frame
    // and reused for every subsequent one.
    private byte[] y;
    private byte[] u;
    private byte[] v;

    private byte[] nv21;
    byte[] nv21_rotated;
    // Frame repacked as NV12 (all Y bytes, then interleaved UV) — the layout
    // the encoder is configured for (COLOR_FormatYUV420SemiPlanar).
    byte[] nv12;

    MediaCodec mediaCodec;

    // Directory the encoded H.264 stream is written into.
    String savePath;

    ListenableFuture<ProcessCameraProvider> cameraProviderFuture;
    LifecycleOwner lifecycleOwner;

    /**
     * Starts the camera as soon as the process camera provider is available.
     *
     * @param lifecycleOwner owner the camera is bound to; must also be a
     *                       {@link Context} (e.g. an Activity) — it is cast below
     * @param previewView    view that receives the camera preview
     * @param width          target analysis/preview width in pixels
     * @param height         target analysis/preview height in pixels
     */
    public CameraXHelper(LifecycleOwner lifecycleOwner, PreviewView previewView, int width, int height) {
        this.previewView = previewView;
        this.width = width;
        this.height = height;
        this.lifecycleOwner = lifecycleOwner;
        cameraProviderFuture = ProcessCameraProvider.getInstance((Context) lifecycleOwner);
        cameraProviderFuture.addListener(() -> {
            try {
                ProcessCameraProvider cameraProvider = cameraProviderFuture.get();
                bind(cameraProvider);
            } catch (InterruptedException e) {
                // Restore interrupt status instead of swallowing it.
                Thread.currentThread().interrupt();
                Log.e(TAG, "Interrupted while waiting for camera provider", e);
            } catch (ExecutionException e) {
                Log.e(TAG, "Failed to obtain camera provider", e);
            }
        }, ContextCompat.getMainExecutor((Context) lifecycleOwner));
    }

    /** Sets the output directory for the encoded stream. */
    public void setSavePath(String savePath) {
        this.savePath = savePath;
    }

    /**
     * Binds preview + image-analysis use cases to the lifecycle, using the
     * back-facing lens. Frames are delivered on {@link #threadPoolExecutor};
     * KEEP_ONLY_LATEST drops stale frames rather than queuing them.
     */
    private void bind(ProcessCameraProvider cameraProvider) {
        ImageAnalysis imageAnalysis =
                new ImageAnalysis.Builder()
                        // enable the following line if RGBA output is needed.
                        //.setOutputImageFormat(ImageAnalysis.OUTPUT_IMAGE_FORMAT_RGBA_8888)
                        .setTargetResolution(new Size(width, height))
                        .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
                        .build();

        imageAnalysis.setAnalyzer(threadPoolExecutor, new ImageAnalysis.Analyzer() {
            @Override
            public void analyze(@NonNull ImageProxy imageProxy) {
                myAnalyze(imageProxy);
                // Must close, or CameraX stops delivering frames.
                imageProxy.close();
            }
        });

        Preview preview = new Preview.Builder()
                .setTargetResolution(new Size(width, height))
                .build();
        preview.setSurfaceProvider(previewView.getSurfaceProvider());

        CameraSelector cameraSelector = new CameraSelector.Builder()
                .requireLensFacing(CameraSelector.LENS_FACING_BACK)
                .build();
        cameraProvider.bindToLifecycle(lifecycleOwner, cameraSelector, imageAnalysis, preview);
    }

    private ReentrantLock lock = new ReentrantLock();

    /**
     * Copies the frame's Y/U/V planes, repacks them as NV12, feeds them to the
     * encoder, and drains any ready encoded output to disk.
     *
     * <p>NOTE(review): the UV interleaving below ignores the planes' row/pixel
     * strides and assumes tightly packed planes — confirm on target devices
     * (CameraX YUV_420_888 U/V planes often have pixelStride == 2).
     */
    private void myAnalyze(ImageProxy imageProxy) {
        lock.lock();
        try {
            ImageProxy.PlaneProxy[] planes = imageProxy.getPlanes();

            // Size plane copies from the first frame; reuse afterwards.
            if (y == null) {
                y = new byte[planes[0].getBuffer().limit() - planes[0].getBuffer().position()];
                u = new byte[planes[1].getBuffer().limit() - planes[1].getBuffer().position()];
                v = new byte[planes[2].getBuffer().limit() - planes[2].getBuffer().position()];
            }
            // Drop frames whose plane sizes differ from the first frame's.
            if (planes[0].getBuffer().remaining() != y.length) {
                return;
            }
            planes[0].getBuffer().get(y);
            planes[1].getBuffer().get(u);
            planes[2].getBuffer().get(v);

            Size size = new Size(imageProxy.getWidth(), imageProxy.getHeight());
            int w = size.getWidth();
            int h = size.getHeight();
            if (nv12 == null) {
                nv12 = new byte[w * h * 3 / 2]; // Y plane + half-size interleaved UV
            }

            // NV12 layout: all Y bytes first ...
            System.arraycopy(y, 0, nv12, 0, y.length);
            // ... then U and V interleaved (uvuvuv...).
            int uvIndex = 0;
            for (int i = y.length; i < w * h * 3 / 2; i += 2) {
                nv12[i] = u[uvIndex];
                nv12[i + 1] = v[uvIndex];
                uvIndex++;
            }

            if (mediaCodec == null) {
                initCodec(size);
                if (mediaCodec == null) {
                    return; // encoder creation failed — drop the frame instead of NPE-ing
                }
            }

            int inIndex = mediaCodec.dequeueInputBuffer(10000);
            if (inIndex >= 0) {
                ByteBuffer inputBuffer = mediaCodec.getInputBuffer(inIndex);
                inputBuffer.clear();
                inputBuffer.put(nv12, 0, nv12.length);
                // Real presentation timestamp in microseconds; the encoder's
                // rate control misbehaves when every frame claims pts == 0.
                long ptsUs = System.nanoTime() / 1000;
                mediaCodec.queueInputBuffer(inIndex, 0, nv12.length, ptsUs, 0);
            }

            // Drain every ready output buffer, not just the first — the codec
            // may emit the config frame plus encoded frames in one pass.
            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
            int outIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 10000);
            while (outIndex >= 0) {
                ByteBuffer outputBuffer = mediaCodec.getOutputBuffer(outIndex);
                byte[] bytes = new byte[outputBuffer.remaining()];
                outputBuffer.get(bytes);
                writeFile(bytes);
                FileUtils.writeContent(bytes); // secondary dump via project helper
                mediaCodec.releaseOutputBuffer(outIndex, false);
                outIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
            }
        } finally {
            // Previously not in finally: any exception left the lock held
            // forever and wedged the analyzer thread pool.
            lock.unlock();
        }
    }

    /**
     * Creates and starts an H.264 encoder for the given frame size.
     * On any failure the half-built codec is released and {@link #mediaCodec}
     * is left {@code null}; the caller drops frames until it succeeds.
     */
    private void initCodec(Size size) {
        try {
            mediaCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);

            MediaFormat videoFormat = MediaFormat.createVideoFormat(
                    MediaFormat.MIMETYPE_VIDEO_AVC, size.getWidth(), size.getHeight());
            videoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 15);
            videoFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 2); // key frame every 2 s
            videoFormat.setInteger(MediaFormat.KEY_BIT_RATE, width * height);
            videoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar); // NV12 input
            mediaCodec.configure(videoFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            mediaCodec.start();
        } catch (IOException | IllegalArgumentException | IllegalStateException e) {
            Log.e(TAG, "Failed to initialize AVC encoder", e);
            if (mediaCodec != null) {
                mediaCodec.release();
                mediaCodec = null;
            }
        }
    }

    /**
     * Appends an encoded chunk to {@code savePath/screen.h264}, creating the
     * output directory on first use.
     *
     * @param buff encoded H.264 bytes from one output buffer
     */
    void writeFile(byte[] buff) {
        File dir = new File(savePath);
        if (!dir.exists()) {
            boolean mkdir = dir.mkdirs();
            Log.d(TAG, "created output dir " + savePath + ": " + mkdir);
        }
        // try-with-resources: the previous version leaked one stream per chunk.
        try (FileOutputStream fileOutputStream = new FileOutputStream(new File(dir, "screen.h264"), true)) {
            fileOutputStream.write(buff);
        } catch (IOException e) {
            Log.e(TAG, "Failed to write encoded frame", e);
        }
    }
}
