package com.example.myx264test;

import android.Manifest;
import android.app.Activity;
import android.content.Context;
import android.content.pm.PackageManager;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.Point;
import android.graphics.RectF;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.os.Environment;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.util.Range;
import android.util.Size;
import android.view.Surface;

import androidx.annotation.NonNull;
import androidx.core.app.ActivityCompat;

import java.io.FileWriter;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.concurrent.locks.ReentrantLock;

/**
 * Camera2 helper that enumerates cameras, picks a preview size and AE FPS range,
 * opens the selected device, and streams YUV_420_888 frames to a native x264 encoder.
 *
 * Threading: frames are delivered on {@code mCameraHandler}'s thread; capture-session
 * callbacks run on {@code mBackgroundHandler}'s thread (created lazily in
 * {@link #createCameraPreviewSession()}).
 */
public class CameraUtil {
    private final String TAG = "CameraUtil";
    private CameraManager mCameraManager;
    // Camera ids grouped by lens facing; LENS_FACING_EXTERNAL ids are not grouped.
    private ArrayList<String> mFront_facing_Ids;
    private ArrayList<String> mBack_facing_Ids;
    // Per-camera-id supported preview sizes, stored portrait-oriented (w/h swapped).
    private HashMap<String, ArrayList<Size>> mId_Sizes_Map;
    // Per-camera-id supported AE target FPS ranges as {lower, upper} pairs.
    private HashMap<String, List<int[]>> mId_FpsRange_Map;

    /** Which lens this util should open. */
    public enum CameraFaceing {
        LENS_FACING_FRONT, LENS_FACING_BACK
    }

    private CameraFaceing mCameraFacing = CameraFaceing.LENS_FACING_BACK;
    private int[] mFpsRange;   // selected {min, max} AE FPS range
    private Size mSize;        // selected preview size (portrait orientation)
    // Target aspect ratio is hard-coded to 9:16; other screen shapes are not considered.
    private double mTargetRatio = 9.0 / 16.0;
    private Size mPhoneSize;
    private ImageReader mImageReader;
    private HandlerThread mBackgroundThread;
    private Handler mBackgroundHandler;
    private CameraDevice mCameraDevice;
    private Context mContext;
    private AutoFitTextureView mTextureView;
    private SurfaceTexture mSurfaceTexture;
    private CaptureRequest.Builder mPreviewRequestBuilder;
    private HandlerThread mHandlerThread;
    private Handler mCameraHandler;

    /**
     * @param cameraManager       system camera service handle
     * @param phoneSize           screen size used to cap the preview resolution
     * @param context             context used for the permission check
     * @param autoFitTextureView  view the preview is rendered into
     */
    public CameraUtil(CameraManager cameraManager, Size phoneSize, Context context, AutoFitTextureView autoFitTextureView) {
        mCameraManager = cameraManager;
        mPhoneSize = phoneSize;
        mContext = context;
        mTextureView = autoFitTextureView;
        init();
    }

    /**
     * Enumerates all cameras, caching their ids by facing plus their supported
     * preview sizes and AE FPS ranges, then selects the FPS range and size for
     * the current (default: back) facing.
     */
    public void init() {
        mFront_facing_Ids = new ArrayList<>();
        mBack_facing_Ids = new ArrayList<>();
        mId_Sizes_Map = new HashMap<>();
        mId_FpsRange_Map = new HashMap<>();
        mFpsRange = new int[2];
        try {
            for (String id : mCameraManager.getCameraIdList()) {
                CameraCharacteristics characteristics = mCameraManager.getCameraCharacteristics(id);
                // LENS_FACING may legitimately be absent; guard the unboxing (was an NPE risk).
                Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
                if (facing != null) {
                    if (facing == CameraCharacteristics.LENS_FACING_FRONT) {
                        mFront_facing_Ids.add(id);
                    } else if (facing == CameraCharacteristics.LENS_FACING_BACK) {
                        mBack_facing_Ids.add(id);
                    }
                    // LENS_FACING_EXTERNAL is intentionally ignored.
                }
                StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
                if (map == null) {
                    continue; // no stream configuration advertised for this id
                }
                // The sensor reports landscape sizes (width > height); swap each one to
                // portrait so it compares directly against the screen size later.
                ArrayList<Size> portraitSizes = new ArrayList<Size>();
                for (Size size : map.getOutputSizes(SurfaceTexture.class)) {
                    Log.e(TAG, "init: " + size.getWidth() + " " + size.getHeight());
                    portraitSizes.add(new Size(size.getHeight(), size.getWidth()));
                }
                mId_Sizes_Map.put(id, portraitSizes);
                // Cache the supported AE FPS ranges as plain {lower, upper} pairs.
                List<int[]> fpsRanges = new ArrayList<>();
                for (Range<Integer> r : characteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES)) {
                    fpsRanges.add(new int[]{r.getLower(), r.getUpper()});
                }
                mId_FpsRange_Map.put(id, fpsRanges);
            }
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
        initFps();
        initSize();
        Log.e(TAG, "init: mSize width = "+mSize.getWidth()+", height = "+mSize.getHeight() );
        Log.e(TAG, "init: "+mPhoneSize.getWidth()+", "+mPhoneSize.getHeight() );
    }

    /** Returns the first cached camera id for the given facing. */
    private String idForFacing(CameraFaceing facing) {
        return facing == CameraFaceing.LENS_FACING_FRONT
                ? mFront_facing_Ids.get(0)
                : mBack_facing_Ids.get(0);
    }

    /**
     * Opens the camera for the requested facing and starts frame delivery.
     * Fix: the {@code facing} parameter was previously ignored (the field was
     * consulted instead); it is now honored and recorded in {@code mCameraFacing}.
     */
    public void openCamera(CameraFaceing facing) {
        mCameraFacing = facing;
        Log.e(TAG, "openCamera: facing = "+facing+", mSize.getHeight() = "+mSize.getHeight()+", mSize.getWidth() = "+mSize.getWidth());
        // Reader dimensions are swapped back to the sensor's landscape orientation.
        mImageReader = ImageReader.newInstance(mSize.getHeight(), mSize.getWidth(), ImageFormat.YUV_420_888, 2);
        mHandlerThread = new HandlerThread("camera2222222");
        mHandlerThread.start();
        mCameraHandler = new Handler(mHandlerThread.getLooper());
        mImageReader.setOnImageAvailableListener(new MyImageAvailableListener(), mCameraHandler);
        if (ActivityCompat.checkSelfPermission(mContext, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
            Log.e(TAG, "openCamera: 权限不足！！！" );
            releaseFrameDelivery();
            return;
        }
        try {
            // NOTE(review): mBackgroundHandler is still null on the first call (it is
            // created in createCameraPreviewSession), so device callbacks arrive on the
            // calling thread's looper — confirm this is intended.
            mCameraManager.openCamera(idForFacing(facing), mDeviceStateCallback, mBackgroundHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    /** Tears down the frame-delivery thread and ImageReader (permission-denied path). */
    private void releaseFrameDelivery() {
        mCameraHandler.removeCallbacksAndMessages(null);
        mHandlerThread.quit();
        mImageReader.close();
        mImageReader = null;
        mHandlerThread = null;
        mCameraHandler = null;
    }

    /** Picks the smallest advertised FPS range of the current facing's first camera. */
    private void initFps(){
        List<int[]> ranges = mId_FpsRange_Map.get(idForFacing(mCameraFacing));
        Collections.sort(ranges, new RangeSorter());
        mFpsRange = ranges.get(0); // smallest range after ascending sort
        Log.e(TAG, "initFps: "+mFpsRange[0]+ ", "+mFpsRange[1]);
    }

    /** Orders {lower, upper} FPS ranges ascending; overflow-safe via Integer.compare. */
    static class RangeSorter implements Comparator<int[]>, Serializable {
        private static final long serialVersionUID = 5802214721073728212L;
        @Override
        public int compare(int[] o1, int[] o2) {
            int byLower = Integer.compare(o1[0], o2[0]);
            return byLower != 0 ? byLower : Integer.compare(o1[1], o2[1]);
        }
    }

    /** Selects the preview size for the current facing's first camera. */
    private void initSize(){
        mSize = bestSizeFor(idForFacing(mCameraFacing));
        // NOTE(review): this override discards the computed optimal size above, and
        // 760x960 is not a 9:16 ratio — looks like a leftover debug value; confirm.
        mSize = new Size(760, 960);
        Log.e(TAG, "initSize: Size = "+mSize.getWidth()+", "+mSize.getHeight());
    }

    /**
     * Returns the supported size (strictly smaller than the screen in both
     * dimensions) whose aspect ratio is closest to {@code mTargetRatio}.
     * May return null when no candidate fits.
     */
    private Size bestSizeFor(String id) {
        ArrayList<Size> sizes = mId_Sizes_Map.get(id);
        ArrayList<Size> candidates = new ArrayList<Size>();
        // Drop sizes that meet or exceed the screen in either dimension.
        for (Size size : sizes){
            if (size.getWidth() < mPhoneSize.getWidth() && size.getHeight() < mPhoneSize.getHeight()){
                candidates.add(size);
            }
        }
        double minDiff = Double.MAX_VALUE;
        Size optimalSize = null;
        for (Size size : candidates) {
            double ratio = (double)size.getWidth() / (double)size.getHeight();
            double diff = Math.abs(ratio - mTargetRatio);
            Log.e(TAG, "initSize: size = "+size.getWidth()+", "+size.getHeight() + ", ratio = "+ratio+", diff = "+diff);
            if (diff < minDiff) {
                optimalSize = size;
                minDiff = diff;
            }
        }
        return optimalSize;
    }

    public int mSurfaceTextureWidth = 0;
    public int mSurfaceTextureHeight = 0;

    /**
     * Applies a center-crop style transform to the TextureView, mapping the
     * view rect onto the buffer rect and scaling up to fill.
     */
    public void configureTransform() {
        Log.e(TAG, "configureTransform: mSurfaceTextureWidth = "+mSurfaceTextureWidth+", mSurfaceTextureHeight = "+mSurfaceTextureHeight );
        Log.e(TAG, "configureTransform: mSize.getWidth() = "+mSize.getWidth()+", mSize.getHeight() = "+mSize.getHeight() );

        Matrix matrix = new Matrix();
        RectF viewRect = new RectF(0, 0, mSurfaceTextureWidth, mSurfaceTextureHeight);
        RectF bufferRect = new RectF(0, 0, mSize.getWidth(), mSize.getHeight());
        float centerX = viewRect.centerX();
        float centerY = viewRect.centerY();
        bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
        matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
        // Scale uniformly by the larger axis ratio so the buffer fills the view.
        float scale = Math.max(
                (float) mSurfaceTextureHeight / (float) mSize.getHeight(),
                (float) mSurfaceTextureWidth / (float) mSize.getWidth());
        Log.e(TAG, "configureTransform: scale = "+scale );
        matrix.postScale(scale, scale, centerX, centerY);
        mTextureView.setTransform(matrix);
    }

    /**
     * Alternative transform: scales one axis so the preview keeps the camera's
     * aspect ratio inside the view, then centers it.
     */
    public void setTransform() {
        // Size of the preview view (e.g. the TextureView).
        int viewHeight = mSurfaceTextureHeight;
        int viewWidth = mSurfaceTextureWidth;
        // Preview size chosen for the camera.
        int cameraHeight = mSize.getHeight();
        int cameraWidth = mSize.getWidth();
        // Compute the scale factors that map the camera size onto the view size.
        float ratioPreview = (float) cameraWidth / cameraHeight;
        float ratioView = (float) viewWidth / viewHeight;
        float scaleX, scaleY;
        if (ratioView < ratioPreview) {
            scaleX = ratioPreview / ratioView;
            scaleY = 1;
        } else {
            scaleX = 1;
            scaleY = ratioView / ratioPreview;
        }
        // Offsets that re-center the scaled preview inside the view.
        float scaledWidth = viewWidth * scaleX;
        float scaledHeight = viewHeight * scaleY;
        float dx = (viewWidth - scaledWidth) / 2;
        float dy = (viewHeight - scaledHeight) / 2;

        Matrix matrix = new Matrix();
        matrix.postScale(scaleX, scaleY);
        matrix.postTranslate(dx, dy);
        mTextureView.setTransform(matrix);
    }

    /** Switches the facing and reopens the camera when it actually changed. */
    private void setCameraFacing(CameraFaceing facing){
        if (facing != mCameraFacing){
            mCameraFacing = facing;
            openCamera(mCameraFacing);
        }
    }

    /** @return the currently selected preview size. */
    public Size getSize(){
        return mSize;
    }

    private Surface mTextureSurface = null;
    private boolean mIsCameraOpened = false;

    public boolean isCameraOpened(){
        return mIsCameraOpened;
    }

    /**
     * Builds the repeating preview request (TextureView + ImageReader targets),
     * spins up the background session thread, and creates the capture session.
     */
    private void createCameraPreviewSession() {
        mSurfaceTexture = mTextureView.getSurfaceTexture();
        mTextureSurface = new Surface(mSurfaceTexture);
        try {
            mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
                    CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
            Range<Integer> ae_fps_range = new Range<>(mFpsRange[0], mFpsRange[1]);
            mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, ae_fps_range);
            mPreviewRequestBuilder.addTarget(mTextureSurface);
            mPreviewRequestBuilder.addTarget(mImageReader.getSurface());

            mBackgroundThread = new HandlerThread("CameraBackground");
            mBackgroundThread.start();
            mBackgroundHandler = new Handler(mBackgroundThread.getLooper());

            mCameraDevice.createCaptureSession(Arrays.asList(mTextureSurface, mImageReader.getSurface()), mCaptureStateCallback, mBackgroundHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    private CameraCaptureSession mCaptureSession;
    private CameraCaptureSession.StateCallback mCaptureStateCallback = new CameraCaptureSession.StateCallback() {
        @Override
        public void onConfigured(@NonNull CameraCaptureSession session) {
            // The device may have been closed while the session was configuring.
            if (null == mCameraDevice) {
                return;
            }
            Log.e(TAG, "onConfigured: setRepeatingRequest");
            mCaptureSession = session;
            try {
                mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(),
                        new CameraCaptureSession.CaptureCallback() {
                        }, mBackgroundHandler);
            } catch (CameraAccessException e) {
                e.printStackTrace();
            }
        }

        @Override
        public void onConfigureFailed(@NonNull CameraCaptureSession session) {
            // Intentionally empty: failure is ignored.
        }
    };

    // One-shot flag: dumps the first frame's U/V planes to disk for debugging.
    boolean flag = true;

    /** Receives YUV_420_888 frames and forwards the planes to the native encoder. */
    private class MyImageAvailableListener implements ImageReader.OnImageAvailableListener{
        // Buffers are reused across frames to reduce GC pressure.
        private byte[] y;
        private byte[] u;
        private byte[] v;
        private ReentrantLock lock = new ReentrantLock();

        @Override
        public void onImageAvailable(ImageReader reader) {
            Log.e(TAG, "onImageAvailable: 2222222222" );

            Image image = reader.acquireNextImage();
            if (image == null) {
                return; // no frame available — documented behavior of acquireNextImage
            }
            try {
                if (image.getFormat() == ImageFormat.YUV_420_888) {
                    Log.e(TAG, "onImageAvailable: 3333333" );

                    Image.Plane[] planes = image.getPlanes();
                    // The lock guarantees y/u/v all come from the same Image.
                    lock.lock();
                    try {
                        if (y == null) {
                            Log.e(TAG, "onImageAvailable: 4444444444" );

                            y = new byte[planes[0].getBuffer().remaining()];
                            u = new byte[planes[1].getBuffer().remaining()];
                            v = new byte[planes[2].getBuffer().remaining()];
                            native_setVideoEncInfo(image.getHeight(), image.getWidth(), mFpsRange[0], (image.getHeight() * image.getWidth()));
                            Log.e(TAG, "onImageAvailable: 4444444444  end" );
                        }
                        // Skip frames whose plane layout changed mid-stream.
                        if (planes[0].getBuffer().remaining() == y.length) {
                            Log.e(TAG, "onImageAvailable: 5555555" );
                            Log.e(TAG, "onImageAvailable: y.length = "+y.length+", u.length = "+u.length+", v.length = "+v.length );
                            planes[0].getBuffer().get(y);
                            planes[1].getBuffer().get(u);
                            planes[2].getBuffer().get(v);
                            Log.e(TAG, "onImageAvailable: image.getHeight() = "+image.getHeight()+", image.getWidth() = "+image.getWidth() );
                            if (flag){
                                writeContent(u, true);
                                writeContent(v, false);
                                flag = false;
                            }
                            // Pass the actual plane lengths — they may not match what
                            // image.getWidth()/getHeight() alone would suggest (row stride).
                            native_sendYUVData(y, u, v, y.length, u.length, v.length);
                        }
                    } finally {
                        lock.unlock(); // always release, even if a plane read throws
                    }
                }
            } finally {
                // Always close, or the reader stalls at maxImages (2).
                image.close();
            }
        }
    }

    private void postData(byte[] data) {
        Log.e(TAG, "postData: data : "+data );
    }

    /** Passes stream parameters (portrait dimensions, fps, bitrate) to the native encoder. */
    private native void native_setVideoEncInfo(int width, int height, int fps, int bitrate);
    /** Sends one frame's Y/U/V planes to the native encoder. */
    private native void native_sendYUVData(byte[] y, byte[] u, byte[] v, int ylength, int ulength, int vlength);

    private CameraDevice.StateCallback mDeviceStateCallback = new CameraDevice.StateCallback() {
        @Override
        public void onOpened(@NonNull CameraDevice cameraDevice) {
            Log.e(TAG, "onOpened: " );
            mIsCameraOpened = true;
            mCameraDevice = cameraDevice;
            createCameraPreviewSession();
        }

        @Override
        public void onDisconnected(@NonNull CameraDevice cameraDevice) {
            mIsCameraOpened = false;
        }

        @Override
        public void onError(@NonNull CameraDevice cameraDevice, int error) {
            mIsCameraOpened = false;
        }
    };

    /**
     * Stops the camera, worker threads, and reader, then drops all references.
     * Null-guarded so it is safe to call even if opening never happened or the
     * permission-denied path already released some resources.
     */
    public void close(){
        mIsCameraOpened = false;

        if (mCameraDevice != null) {
            mCameraDevice.close();
        }
        if (mBackgroundHandler != null) {
            mBackgroundHandler.removeCallbacksAndMessages(null);
        }
        if (mBackgroundThread != null) {
            mBackgroundThread.quit();
        }
        if (mCameraHandler != null) {
            mCameraHandler.removeCallbacksAndMessages(null);
        }
        if (mHandlerThread != null) {
            mHandlerThread.quit();
        }
        if (mImageReader != null) {
            mImageReader.close();
        }
        mImageReader = null;
        mHandlerThread = null;
        mCameraHandler = null;
        mSurfaceTexture = null;
        mTextureView = null;
        mCameraManager = null;
        mCameraDevice = null;
        mBackgroundThread = null;
        mBackgroundHandler = null;
    }

    /**
     * Hex-dumps {@code array} and appends it (plus a newline) to u.txt or v.txt
     * under the app's files dir. Debug helper for inspecting chroma planes.
     *
     * @param array bytes to dump
     * @param isU   true → u.txt, false → v.txt
     * @return the hex string that was written
     */
    public  static String writeContent(byte[] array, boolean isU) {
        char[] HEX_CHAR_TABLE = {
                '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F'
        };
        // Two hex chars per byte; presize to avoid repeated growth.
        StringBuilder sb = new StringBuilder(array.length * 2);
        for (byte b : array) {
            sb.append(HEX_CHAR_TABLE[(b & 0xf0) >> 4]);
            sb.append(HEX_CHAR_TABLE[b & 0x0f]);
        }
        Log.i("TAG", "writeContent: " + sb.toString());
        String path = isU
                ? "/data/data/com.example.myx264test/files/u.txt"
                : "/data/data/com.example.myx264test/files/v.txt";
        // Second FileWriter argument 'true' means append.
        try (FileWriter writer = new FileWriter(path, true)) {
            writer.write(sb.toString());
            writer.write("\n");
        } catch (IOException e) {
            e.printStackTrace();
        }
        return sb.toString();
    }
}
