package gqz.avdemo.cam;

import android.Manifest;
import android.content.Context;
import android.content.pm.PackageManager;
import android.content.res.Configuration;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.graphics.YuvImage;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.media.MediaRecorder;
import android.nfc.cardemulation.CardEmulation;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.util.Size;
import android.view.OrientationEventListener;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.TextureView;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;

import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Collections;
import java.util.concurrent.Semaphore;
import java.util.concurrent.atomic.AtomicInteger;

import gqz.avdemo.R;

import static android.hardware.camera2.CameraCharacteristics.SENSOR_ORIENTATION;
import static gqz.avdemo.cam.CamUtils.*;

/**
 * Description
 *
 * @author gqz
 * @date 2020/12/22
 */
public class CamAct extends AppCompatActivity implements SurfaceHolder.Callback {
   private final String TAG = "CamAct-->";

   // TextureView subclass that keeps the preview's aspect ratio.
   AutoFitTextureView surfaceView;

   private Semaphore mCameraOpenCloseLock = new Semaphore(1);//semaphore guarding camera open/close (a counter plus a lock-like primitive)
   CameraDevice cameraDevice;
   ImageReader reader;
   CaptureRequest.Builder previewBuild;
   private CameraCaptureSession previewSession;
   private HandlerThread handlerThread;
   Handler handler;
   private Integer mSensorOrientation;
   private Size mPreviewSize;
   private SurfaceTexture mPreviewSurfaceTexture;
   private Surface previewSuface;
   // 0 = idle; 1 = the capture button was pressed and the next frame should be saved.
   AtomicInteger atomicInt = new AtomicInteger(0);
   // Receives every preview frame; when atomicInt == 1 it converts one
   // YUV_420_888 frame to NV21 and saves it as a JPEG, then re-arms to 0.
   private ImageReader.OnImageAvailableListener imgReaderListener =
           new ImageReader.OnImageAvailableListener() {
              @Override
              public void onImageAvailable(ImageReader reader) {
                 // NOTE: acquireLatestImage() and close() MUST always be called,
                 // otherwise the reader runs out of buffers and the preview freezes.
                 Image image = reader.acquireLatestImage();

                 if (atomicInt.get() == 1) {
                    if (image != null) {
                       long s = System.currentTimeMillis();

                       Image.Plane[] planes = image.getPlanes();
                       Image.Plane yPlane = planes[0];
                       Image.Plane uPlane = planes[1];
                       Image.Plane vPlane = planes[2];
                       ByteBuffer yBuffer = yPlane.getBuffer();// Data from Y channel
                       ByteBuffer uBuffer = uPlane.getBuffer();// Data from U channel
                       ByteBuffer vBuffer = vPlane.getBuffer();// Data from V channel
                       byte[] ydata = new byte[yBuffer.remaining()];
                       byte[] udata = new byte[uBuffer.remaining()];
                       byte[] vdata = new byte[vBuffer.remaining()];
                       yBuffer.get(ydata);
                       uBuffer.get(udata);
                       vBuffer.get(vdata);

                       // I420 layout: full-size Y plane, then quarter-size U, then quarter-size V.
                       byte[] yuv = new byte[ydata.length * 3 / 2];

                       System.arraycopy(ydata, 0, yuv, 0, ydata.length);
                       // Extract the U/V samples.
                       // NOTE(review): indexing udata/vdata with i * 2 assumes the U/V
                       // planes have pixelStride == 2 (semi-planar) and ignores any
                       // rowStride padding — TODO confirm on devices with packed or
                       // padded planes.
                       for (int i = 0; i < ydata.length / 4; i++) {
                          yuv[ydata.length + i] = udata[i * 2];//u
//                       if (i < ydata.length / 4 - 1)
//                          yuv[ydata.length + ydata.length / 4 + i] = vdata[i * 2 + 1];//v
                          yuv[ydata.length + ydata.length / 4 + i] = vdata[i * 2];//v
                       }
                       Log.d(TAG, "onImageAvailable: timeDiff getYUVData = " +
                               (System.currentTimeMillis() - s));
                       s = System.currentTimeMillis();

                       // Re-split the packed buffer into separate U and V arrays for rotation.
                       udata = new byte[ydata.length / 4];
                       vdata = new byte[ydata.length / 4];
                       System.arraycopy(yuv, ydata.length, udata, 0, udata.length);
                       System.arraycopy(yuv, ydata.length + udata.length, vdata, 0, vdata.length);

                       // Rotate the planes; width/height are swapped from here on.
                       yuv = CamUtils.rotateYuvData(ydata, udata, vdata, image.getWidth(),
                               image.getHeight());
                       Log.d(TAG, "onImageAvailable: timeDiff rotateYuvData()=" +
                               (System.currentTimeMillis() - s));
                       s = System.currentTimeMillis();

                       // I420 (planar) -> NV21 (interleaved VU), as required by YuvImage.
                       yuv = I420Tonv21(yuv, image.getHeight(), image.getWidth());
                       Log.d(TAG, "onImageAvailable: timeDiff convertYuvData()=" +
                               (System.currentTimeMillis() - s));
                       s = System.currentTimeMillis();

                       // Note the swapped dimensions: the frame was rotated above.
                       YuvImage yuvImage = new YuvImage(yuv, ImageFormat.NV21,
                               image.getHeight(), image.getWidth(), null);
                       saveJPEG(yuvImage);
                       Log.d(TAG, "onImageAvailable: timeDiff saveJPG()=" +
                               (System.currentTimeMillis() - s));
                       atomicInt.set(0);
                    }
                 }

                 if (image != null)
                    image.close();

              }
           };

   // Natural device orientation: portrait for phones, landscape for tablets.
   // Local coordinate system: fixed relative to the natural orientation and does
   // not change as the device rotates — y points up along the screen, x points
   // right, z points out of the screen.
   // Display-orientation listener: the display orientation is the clockwise angle
   // between what is shown on screen and the local y axis.
   // NOTE(review): this listener is never instantiated anywhere in this class —
   // see onDestroy().
   private OrientationEventListener orientationEventListener;
   private CameraCharacteristics characteristics;

   //***************************************************************************************************************************
   //***************************************************************************************************************************

   /**
    * Requests the runtime permissions this screen needs (camera and external
    * storage) on Android M+ when any of them is missing.
    *
    * <p>Fix: the original only tested WRITE_EXTERNAL_STORAGE, so a device with
    * storage granted but CAMERA denied was never prompted for the camera
    * permission and the preview silently failed to start.</p>
    */
   public void checkPermission() {
      if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
         return; // install-time permissions only before M
      }
      String[] needed = {Manifest.permission.READ_EXTERNAL_STORAGE,
              Manifest.permission.WRITE_EXTERNAL_STORAGE, Manifest.permission.CAMERA};
      for (String permission : needed) {
         if (checkSelfPermission(permission) != PackageManager.PERMISSION_GRANTED) {
            requestPermissions(needed, 1);
            return;
         }
      }
   }

   /**
    * Inflates the layout, triggers the runtime-permission check and wires the
    * capture button: tapping it arms {@code atomicInt} so the image listener
    * saves the next available frame.
    */
   @Override
   protected void onCreate(@Nullable Bundle savedInstanceState) {
      super.onCreate(savedInstanceState);
      setContentView(R.layout.act_cam);
      checkPermission();
      surfaceView = findViewById(R.id.cam_surface);
      findViewById(R.id.cam_btn).setOnClickListener(v -> atomicInt.set(1));
   }

   /**
    * Starts the background camera thread and opens the camera — immediately if
    * the TextureView already has a SurfaceTexture, otherwise once it becomes
    * available.
    */
   @Override
   protected void onResume() {
      super.onResume();
      // All camera2 callbacks are delivered on this background thread.
      handlerThread = new HandlerThread("*Camera2");
      handlerThread.start();
      handler = new Handler(handlerThread.getLooper());
      if (surfaceView.isAvailable()) {
         // Fix: remember the SurfaceTexture on this path too. The original only
         // assigned mPreviewSurfaceTexture in onSurfaceTextureAvailable(), so
         // re-opening the camera after a pause (texture already available)
         // crashed with an NPE inside openCam().
         mPreviewSurfaceTexture = surfaceView.getSurfaceTexture();
         openCam(surfaceView.getWidth(), surfaceView.getHeight());
      } else {
         surfaceView.setSurfaceTextureListener(new TextureView.SurfaceTextureListener() {
            @Override
            public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
               mPreviewSurfaceTexture = surface;
               openCam(width, height);
            }

            @Override
            public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
               // Keep the preview transform in sync with the new view size.
               configureTransform(CamAct.this, surfaceView, width, height, mPreviewSize);
            }

            @Override
            public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
               return false;
            }

            @Override
            public void onSurfaceTextureUpdated(SurfaceTexture surface) {
               // No-op: invoked for every new preview frame.
            }
         });
      }
   }

   @Override
   protected void onPause() {
      // Order matters: release the camera before quitting the HandlerThread
      // that services its callbacks.
      closeCamera();
      stopBackgroundThread();
      super.onPause();
   }

   @Override
   protected void onDestroy() {
      super.onDestroy();
      // Fix: orientationEventListener is never assigned anywhere in this class,
      // so the unconditional disable() call was a guaranteed NPE on destroy.
      if (orientationEventListener != null) {
         orientationEventListener.disable();
      }
   }

   /**
    * Closes the preview session, the camera device and the ImageReader under
    * {@code mCameraOpenCloseLock} so close cannot race an in-flight open.
    *
    * @throws RuntimeException if interrupted while waiting for the lock
    */
   private void closeCamera() {
      try {
         mCameraOpenCloseLock.acquire();
      } catch (InterruptedException e) {
         Thread.currentThread().interrupt(); // restore the interrupt flag
         // Fix: keep the original exception as the cause instead of dropping it,
         // and do NOT release a permit we never acquired (the original released
         // in finally even when acquire() threw, inflating the semaphore).
         throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
      }
      try {
         closePreviewSession();
         if (null != cameraDevice) {
            cameraDevice.close();
            cameraDevice = null;
         }
         // Fix: the ImageReader was never released, leaking its native buffers
         // on every pause/resume cycle.
         if (null != reader) {
            reader.close();
            reader = null;
         }
      } finally {
         mCameraOpenCloseLock.release();
      }
   }

   /**
    * Stops the repeating preview request and tears down the capture session.
    * Safe to call when no session exists.
    */
   private void closePreviewSession() {
      if (previewSession == null) {
         return;
      }
      try {
         // Stop the repeating (preview) capture before closing the session.
         previewSession.stopRepeating();
      } catch (CameraAccessException e) {
         e.printStackTrace();
      }
      previewSession.close();
      previewSession = null;
   }

   /**
    * Quits the camera {@link HandlerThread} and waits for it to terminate.
    */
   private void stopBackgroundThread() {
      if (handlerThread == null) {
         return; // already stopped (or never started)
      }
      handlerThread.quitSafely();
      try {
         handlerThread.join();
      } catch (InterruptedException e) {
         // Fix: restore the interrupt flag instead of swallowing it.
         Thread.currentThread().interrupt();
         e.printStackTrace();
      } finally {
         // Fix: the original cleared these only when join() succeeded, leaving
         // stale references behind after an interrupt.
         handlerThread = null;
         handler = null;
      }
   }

   /**
    * Finds the back-facing camera, chooses the preview and ImageReader sizes,
    * and opens the device; the preview session is started from the device
    * StateCallback once the camera is open.
    *
    * @param w requested preview width in view pixels
    * @param h requested preview height in view pixels
    */
   private void openCam(int w, int h) {

      //1. Obtain the camera manager.
      CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
      try {
         //2. Enumerate the camera ids.
         for (String id : cameraManager.getCameraIdList()) {
            Log.v(TAG, "openCam: cam.id = " + id);
            //3. Read each camera's characteristics.
            characteristics = cameraManager.getCameraCharacteristics(id);
            Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            if (facing == null || facing != CameraCharacteristics.LENS_FACING_BACK) {
               continue; // only the back-facing camera is used
            }
            if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
                    != PackageManager.PERMISSION_GRANTED) {
               Log.w(TAG, "openCam: no camera permission!");
               return;
            }

            // Fix: the original unboxed the Boolean directly and could NPE when
            // the key value is null.
            if (Boolean.TRUE.equals(
                    characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE))) {
               Log.i(TAG, "openCam: 闪光灯可用");
            }

            int[] afModes = characteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
            // Fix: guard against a null key value before iterating.
            if (afModes != null) {
               Log.v(TAG, "openCam: af modes: " + Arrays.toString(afModes));
               logAfModes(afModes);
            }

            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            mSensorOrientation = characteristics.get(SENSOR_ORIENTATION);// sensor rotation
            if (map == null) {
               throw new RuntimeException("Cannot get available preview/video sizes");
            }

            // Pick the preview size closest to the view, capped at 1920x1080.
            mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
                    w, h, new Size(1920, 1080));

            // Create the ImageReader that receives full frames for capture.
            int imageFormat = ImageFormat.YUV_420_888;
            if (map.isOutputSupportedFor(imageFormat)) {
               Log.i(TAG, "openCam: YUV_420_888 is supported");
               Size largest = Collections.max(Arrays.asList(map.getOutputSizes(imageFormat)),
                       new CompareSizesByArea());
               reader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(),
                       imageFormat, 1);
               reader.setOnImageAvailableListener(imgReaderListener, handler);
            } else {
               Log.e(TAG, "openCam: not support yuv_420_888");
            }

            // Keep the TextureView's aspect ratio consistent with the chosen size.
            int orientation = getResources().getConfiguration().orientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
               surfaceView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            } else {
               surfaceView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
            }
            mPreviewSurfaceTexture.setDefaultBufferSize(mPreviewSize.getWidth(),
                    mPreviewSize.getHeight());
            previewSuface = new Surface(mPreviewSurfaceTexture);

            // Apply the preview transform for the current view size.
            configureTransform(this, surfaceView, w, h, mPreviewSize);

            // Fix: the StateCallback below releases mCameraOpenCloseLock, but the
            // original never acquired it before openCamera(), so the semaphore's
            // permit count grew on every open and no longer guarded anything.
            if (!mCameraOpenCloseLock.tryAcquire()) {
               Log.w(TAG, "openCam: another camera open/close is in progress");
               return;
            }
            //4. Open the camera with its id and a device state callback.
            cameraManager.openCamera(id, new CameraDevice.StateCallback() {
               @Override
               public void onOpened(@NonNull CameraDevice camera) {
                  cameraDevice = camera;
                  //5. One session per device: start the preview session.
                  createPreviewSession();
                  mCameraOpenCloseLock.release();
                  if (null != surfaceView) {
                     configureTransform(CamAct.this, surfaceView,
                             surfaceView.getWidth(), surfaceView.getHeight(), mPreviewSize);
                  }
               }

               @Override
               public void onDisconnected(@NonNull CameraDevice camera) {
                  Log.w(TAG, "onDisconnected: camera disconnected");
                  mCameraOpenCloseLock.release();
               }

               @Override
               public void onError(@NonNull CameraDevice camera, int error) {
                  Log.e(TAG, "onError: open camera get errors, close camera device");
                  mCameraOpenCloseLock.release();
                  cameraDevice = camera;
                  closeCamera();
               }
            }, handler);
            break;
         }
      } catch (CameraAccessException e) {
         e.printStackTrace();
      }
   }

   /** Logs each auto-focus mode advertised by the camera (one line per mode). */
   private void logAfModes(int[] afModes) {
      for (int af_mode : afModes) {
         switch (af_mode) {
            case CameraCharacteristics.CONTROL_AF_MODE_OFF:
               Log.i(TAG, "openCam: 自动聚焦-关闭");
               break;
            case CameraCharacteristics.CONTROL_AF_MODE_AUTO:
               Log.i(TAG, "openCam: 自动聚焦-自动");
               break;
            case CameraCharacteristics.CONTROL_AF_MODE_MACRO:
               Log.i(TAG, "openCam: 自动聚焦-MACRO");
               break;
            case CameraCharacteristics.CONTROL_AF_MODE_CONTINUOUS_VIDEO:
               Log.i(TAG, "openCam: 自动聚焦-CONTINUOUS_VIDEO");
               break;
            case CameraCharacteristics.CONTROL_AF_MODE_CONTINUOUS_PICTURE:
               Log.i(TAG, "openCam: 自动聚焦-CONTINUOUS_PICTURE");
               break;
            case CameraCharacteristics.CONTROL_AF_MODE_EDOF:
               Log.i(TAG, "openCam: 自动聚焦-EDOF");
               break;
            default:
         }
      }
   }

   /**
    * Creates the CameraCaptureSession for preview (outputs: the TextureView
    * surface and the ImageReader surface) and submits a repeating preview
    * request. Must only be called once {@code cameraDevice} has been opened.
    */
   private void createPreviewSession() {
      try {

         // arg 1: outputs — the surfaces that will receive image data; arg 2: state callback
         cameraDevice.createCaptureSession(Arrays.asList(previewSuface, reader.getSurface()),

                 new CameraCaptureSession.StateCallback() {
                    @Override
                    public void onConfigured(@NonNull CameraCaptureSession session) {
                       if (cameraDevice != null) {
                          previewSession = session;

                          /**
                           6. A CaptureRequest is the parameter carrier for a capture request;
                           in camera2 every camera operation is modelled as a capture. The
                           camera is driven by submitting CaptureRequests to the session —
                           toggling the flash, auto-focus (AF), auto-exposure (AE), white
                           balance (AWB) and so on. A night-time photo, for example, needs at
                           least three CaptureRequests: one to turn on the flash, one to
                           focus, and one to take the picture.

                           By execution mode a Capture is one of three kinds:

                           One-shot: executed exactly once, e.g. setting the flash mode, the
                           focus mode, or taking a single photo. Multiple one-shot Captures
                           queue up and run in order.

                           Burst: a series executed back to back; unlike several one-shots,
                           no other Capture may be interleaved while the burst runs (e.g.
                           shooting 100 photos in a row — any new request waits until all 100
                           are done). Multiple bursts queue up and run in order.

                           Repeating: executed over and over, e.g. the preview capturing every
                           frame. Captures of other modes temporarily pause it, and it
                           resumes automatically once they finish. The repeating Capture is
                           globally unique — a newly submitted one replaces the old one.

                           The preview implemented here is such a repeating capture: other
                           operations interrupt it and it resumes when they complete.
                           */
                          try {
                             /**
                              Create the CaptureRequest from a template:

                              TEMPLATE_PREVIEW: template for configuring a preview.
                              TEMPLATE_RECORD: template for video recording.
                              TEMPLATE_STILL_CAPTURE: template for taking a photo.
                              TEMPLATE_VIDEO_SNAPSHOT: template for taking photos while recording.
                              TEMPLATE_MANUAL: template for configuring most parameters manually.
                              */
                             previewBuild =
                                     cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
//                             if (android.os.Build.VERSION.SDK_INT >= android.os.Build
//                             .VERSION_CODES.Q) {
//                                CaptureRequest.Key<Integer> key = new CaptureRequest
//                                .Key<Integer>();
//                             }
//                             previewBuild.set(SENSOR_ORIENTATION,
//                                     getDisplayRotation(characteristics, CamAct.this));
                          } catch (CameraAccessException e) {
                             e.printStackTrace();
                          }

                          if (previewBuild == null) return;

                          /**
                           Besides its many parameters, a CaptureRequest must be given at
                           least one Surface (every camera operation ultimately captures an
                           image), and that Surface must be among those added when the
                           Session was created. Use CaptureRequest.Builder.addTarget(); it
                           can be called multiple times to add multiple Surfaces.
                           */
                          // SurfaceTexture texture = surfaceView.getSurfaceTexture();
                          // assert texture != null;
                          // texture.setDefaultBufferSize(surfaceView.getHeight(), surfaceView
                          // .getWidth());
                          // Surface surface = new Surface(texture);

                          previewBuild.addTarget(previewSuface);// surface that shows the preview
                          previewBuild.addTarget(reader.getSurface());// surface that receives frame data
//                          previewBuild.set(CaptureRequest.CONTROL_AF_MODE,
//                                  CaptureRequest.CONTROL_AF_MODE_AUTO);
                          previewBuild.set(CaptureRequest.CONTROL_MODE,
                                  CameraMetadata.CONTROL_MODE_AUTO);
//                           previewBuild.set(CaptureRequest.FLASH_MODE,CaptureRequest
//                           .CONTROL_AE_MODE_ON_AUTO_FLASH);
                          try {
                             //7. Start the preview (repeating request).
                             previewSession.setRepeatingRequest(previewBuild.build(),
                                     new CameraCaptureSession.CaptureCallback() {
                                        @Override
                                        public void onCaptureStarted(@NonNull CameraCaptureSession session,
                                                                     @NonNull CaptureRequest request,
                                                                     long timestamp,
                                                                     long frameNumber) {
                                           super.onCaptureStarted(session, request, timestamp,
                                                   frameNumber);
                                           // one capture started
                                        }

                                        @Override
                                        public void onCaptureProgressed(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull CaptureResult partialResult) {
                                           super.onCaptureProgressed(session, request,
                                                   partialResult);
                                        }

                                        @Override
                                        public void onCaptureCompleted(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
                                           super.onCaptureCompleted(session, request, result);
                                           // one capture finished
                                        }

                                        @Override
                                        public void onCaptureFailed(@NonNull CameraCaptureSession session
                                                , @NonNull CaptureRequest request,
                                                                    @NonNull CaptureFailure failure) {
                                           super.onCaptureFailed(session, request, failure);
                                           Log.e(TAG, "onCaptureFailed: " + failure.toString());
                                        }
                                     }, handler);
                          } catch (CameraAccessException e) {
                             e.printStackTrace();
                          }
                       }
                    }

                    @Override
                    public void onConfigureFailed(@NonNull CameraCaptureSession session) {
                       Log.e(TAG, "onConfigureFailed: CameraCaptureSession configure failed");
                       previewSession = session;
                       closePreviewSession();
                    }
                 }, handler
         );
      } catch (CameraAccessException e) {
         e.printStackTrace();
      }
   }

   // The SurfaceHolder.Callback methods below are intentionally empty: this
   // activity renders through a TextureView (see onResume), so the
   // SurfaceHolder callbacks never fire. NOTE(review): the interface
   // implementation looks vestigial — consider removing it.
   @Override
   public void surfaceCreated(SurfaceHolder holder) {
   }

   @Override
   public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
   }

   @Override
   public void surfaceDestroyed(SurfaceHolder holder) {
   }
}