package com.zhangchi.cameraphotograph;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.appcompat.app.AppCompatActivity;
import androidx.appcompat.widget.AppCompatImageView;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;

import android.Manifest;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.content.ContentResolver;
import android.content.ContentUris;
import android.content.ContentValues;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Point;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.biometrics.BiometricManager;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.MeteringRectangle;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.media.MediaCodec;
import android.media.MediaRecorder;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.os.HandlerThread;
import android.os.SystemClock;
import android.provider.DocumentsContract;
import android.provider.MediaStore;
import android.util.AttributeSet;
import android.util.DisplayMetrics;
import android.util.Log;
import android.util.Size;
import android.view.Display;
import android.view.MotionEvent;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.TextureView;
import android.view.View;
import android.widget.Button;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.SeekBar;
import android.widget.Toast;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.text.SimpleDateFormat;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.concurrent.Semaphore;
import java.util.logging.Handler;
import java.util.logging.LogRecord;

public class MainActivity extends AppCompatActivity {

    private static final String TAG = "CameraActivity";

//    private static Context mContext;//global application context
//    //getter for the global context
//    public static Context getContext(){
//        return mContext;
//    }

    // --- Preview ---
    TextureView textureView;
    TextureView.SurfaceTextureListener surfaceTextureListener;
    CameraManager cameraManager;
    CameraDevice.StateCallback cam_stateCallback;
    CameraDevice mCameraDevice;
    Surface texture_surface;
    CameraCaptureSession.StateCallback cam_session_stateCallback;
    CameraCaptureSession.CaptureCallback still_capture_callback;
    CameraCaptureSession cameraCaptureSession;  // a camera can only have one open session at a time
    CaptureRequest.Builder requestBuilder;
    CaptureRequest request;
    Point screenSize;
    // --- Still capture ---
    ImageView takephoto_imageView;  // shows the captured photo
    Button takephoto_btn, change;   // shutter button / switch-camera button
    Surface imageReaderSurface;
    CaptureRequest.Builder requestBuilder_image_reader;
    CaptureRequest takephoto_request;
    ImageReader imageReader;    // ImageReader lets the app directly access image data rendered into its surface
    Bitmap bitmap;
    // --- Camera switching ---
    String cameraId = String.valueOf(CameraCharacteristics.LENS_FACING_FRONT);
    ;// app convention: "0" = front camera, "1" = back camera
    // NOTE(review): the stray ';' above is a harmless empty declaration but should be removed.
    // The id is derived from LENS_FACING_* constants, which are not guaranteed to match the
    // device's real camera-id strings — verify on target hardware.
    // --- Video recording ---
    Button mVideo;
    MediaRecorder mediaRecorder;
    boolean isRecording = false;    // one button toggles start/stop recording, so the current state is tracked here
    //    CaptureRequest.Builder requestBuilder_video;
//    Size mPreviewSize, mVideoSize;
//    HandlerThread mBackgroundThread;
//    Handler mBackgroundHandler;
//    Semaphore mCameraOpenCloseLock = new Semaphore(1);
    Surface recorderSurface;
    private String format;  // timestamp fragment used in output file names (populated by dataConfig(), not shown)
    private File path;      // output directory for captures (populated by dataConfig(), not shown)

    // --- Flash ---
    Button flash_open;
    boolean isFlash = true;
    private Size selectSize;    // preview resolution chosen by getMatchingSize2() (not shown)

    // --- Focus ---

    // --- Zoom ---
    CameraCharacteristics cameraCharacteristics;
    private int mZoom = 0; // zoom level
    private int mDisplayRotate = 0;
    private float mOldDistance;
    private float maxZoom;

    // --- SeekBar (zoom slider) ---
    SeekBar seekBar;

    ImageView albumsPicture;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
//        mContext = getApplicationContext();
        // Look up all views first, then prepare the TextureView listener.
        textureView = findViewById(R.id.texture_view_camera2);
        takephoto_btn = (Button) findViewById(R.id.btn_camera2_takephoto);
        takephoto_imageView = findViewById(R.id.image_view_preview_image);
        change = findViewById(R.id.change);
        mVideo = findViewById(R.id.video);
        flash_open = findViewById(R.id.btn_flash_open);
        seekBar = findViewById(R.id.seek_bar);
        albumsPicture = findViewById(R.id.iv_photo);
//        change.setOnClickListener(this);
//        takephoto_btn.setOnClickListener(this);
        // TextureView listener: fires once the preview surface exists, at which point
        // the camera can safely be opened.
        surfaceTextureListener = new TextureView.SurfaceTextureListener() {
            @Override
            public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
                texture_surface = new Surface(textureView.getSurfaceTexture());
                // Approach 1: size the preview buffer from the screen dimensions
                // (ways to read screen width/height on Android)
//                screenSize=new Point();
////                Display display = getWindowManager().getDefaultDisplay();
////                display.getSize(screenSize);
//                getWindowManager().getDefaultDisplay().getSize(screenSize);
//                Log.d("MainActivity",  "识别宽高 "+screenSize.x +", " + screenSize.y);
//                surface.setDefaultBufferSize(screenSize.y, screenSize.x);

                // Approach 2: query the legacy Camera API for supported sizes
//                Camera mCamera = Camera.open(Integer.parseInt(cameraId));
//                Camera.Parameters params = mCamera.getParameters();
//                //list the picture resolutions the device supports
//                List<Camera.Size> pictureSizes = params.getSupportedPictureSizes();
//                int length = pictureSizes.size();
//                for (int i = 0; i < length; i++) {
//                    Log.d("SupportedPictureSizes","SupportedPictureSizes : " + pictureSizes.get(i).width + "x" + pictureSizes.get(i).height);
//                }
//                //list the preview resolutions the device supports
//                List<Camera.Size> previewSizes = params.getSupportedPreviewSizes();
//                length = previewSizes.size();
//                for (int i = 0; i < length; i++) {
//                    Log.d("SupportedPreviewSizes","SupportedPreviewSizes : " + previewSizes.get(i).width + "x" + previewSizes.get(i).height);
//                }

                // Approach 3 (the one used): pick the best-matching camera resolution
                getMatchingSize2();
                // Apply the chosen preview resolution to the surface buffer
                surface.setDefaultBufferSize(selectSize.getWidth(), selectSize.getHeight());
                Log.d(TAG, "选择的分辨率宽度:" + selectSize.getWidth() + "，高度：" + selectSize.getHeight());   //1400, 720

                openCamera();
            }

            @Override
            public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
            }

            @Override
            public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
                return false;
            }

            @Override
            public void onSurfaceTextureUpdated(SurfaceTexture surface) {
            }
        };
        // Attach the listener to the TextureView
        textureView.setSurfaceTextureListener(surfaceTextureListener);
        // B1. Setup: initialize the ImageReader.
        // Parameters: resolution width and height / image format / number of images to buffer
        // (2 here means it can hold two pending captures).
        imageReader = ImageReader.newInstance(1920, 1080, ImageFormat.JPEG, 2);
//        imageReader = ImageReader.newInstance(screenSize.y  ,screenSize.x, ImageFormat.JPEG,2); //cannot be obtained here
        // B2. Setup: register the callback invoked when the ImageReader has a frame.
        // OnImageAvailable runs whenever image data is available in the ImageReader.
        imageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
            @Override
            public void onImageAvailable(ImageReader reader) {
                //image.acquireLatestImage();// take the newest image from the ImageReader queue, dropping older ones
                //image.acquireNextImage();// take the next image from the queue; returns null when no new image is available

                // B2.1 Receive the image: acquireLatestImage() reads the most recent frame, later converted to a Bitmap.
                // NOTE(review): acquireLatestImage() can return null; the dereference below would
                // then NPE — consider guarding. Confirm against capture rate vs. buffer count.
                Image image = reader.acquireLatestImage();

                try {

                    // Build the output file name/location
                    dataConfig();
                    String fName = "picture_" + format + ".jpg";
                    File file = new File(path + File.separator + fName);     // File.separator is "/"

                    // Open the output stream now; the bytes are written further below.
                    // NOTE(review): this stream is not closed on exception paths (no
                    // try-with-resources / finally) — resource-leak risk.
                    FileOutputStream fileOutputStream = new FileOutputStream(file);

                    // image.getPlanes()[0] is the first image plane; JPEG data arrives in a single
                    // plane, so index 0 holds everything. Other formats may expose multiple planes.
                    // The ByteBuffer is a buffer object carrying the pixel data plus its configuration.
                    ByteBuffer buffer = image.getPlanes()[0].getBuffer();
//                    Log.d("buffer", "image.getPlanes()[0]" + image.getPlanes()[0]); //android.media.ImageReader$SurfaceImage$SurfacePlane@54f3b06
//                    Log.d("buffer", "buffer" + buffer); //java.nio.DirectByteBuffer[pos=0 lim=476769 cap=476769]
                    //                int length= buffer.remaining();
                    //                byte[] bytes= new byte[length];
                    // remaining() is the number of readable bytes left; sized to hold the whole payload.
                    // Allocate a byte array to act as the staging buffer.
                    byte[] bytes = new byte[buffer.remaining()];
//                    Log.d("buffer", "buffer.remaining()" + buffer.remaining()); //476769
//                    Log.d("buffer", "bytes" + bytes);   //[B@35c25c7
                    // Copy the bytes out of the ByteBuffer into the target array.
                    buffer.get(bytes);
//                    Log.d("buffer", "buffer.remaining()" + buffer.remaining()); //0
//                    Log.d("buffer", "buffer.get(bytes)" + buffer.get(bytes)); //throws
//                    Log.d("buffer", "bytes" + bytes);   //[B@35c25c7

                    //                bitmap = BitmapFactory.decodeByteArray(bytes,0,length);
                    // Decode an immutable bitmap from the byte array. Parameters:
                    // data   - byte array of compressed image data
                    // offset - position in the data where the decoder should start parsing
                    // length - number of bytes to parse, starting at offset
                    // Returns the decoded bitmap, or null if the image could not be decoded.
                    bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);

                    // Mirror the image when the id is "1" (per this app's id convention)
                    if ("1".equals(cameraId)) {
                        Matrix matrix = new Matrix();
                        matrix.postScale(-1, 1);// flip across the y-axis via the matrix (postScale scales)
                        /*
                        source  - source bitmap the sub-bitmap is taken from
                        x       - x coordinate of the first pixel in the source
                        y       - y coordinate of the first pixel in the source
                        width   - number of pixels per row
                        height  - number of rows
                        m       - optional matrix applied to the pixels
                        filter  - true to filter the source; only relevant when the matrix is more than a translation
                         */
                        // Returns an immutable subset of the source bitmap, transformed by the optional
                        // matrix. The new bitmap may be the same object as the source, or a copy.
                        bitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true);// apply the matrix to the bitmap
                    }
                    // B2.2 Show the result in the thumbnail view
                    takephoto_imageView.setImageBitmap(bitmap);
                    image.close();

                    // ByteArrayOutputStream buffers byte data in memory before it is written
                    // to a file (or another OutputStream). Re-encode the bitmap back to JPEG bytes.
                    ByteArrayOutputStream byteArray = new ByteArrayOutputStream();  // byte output stream; the bitmap is converted back to a byte array below
                    bitmap.compress(Bitmap.CompressFormat.JPEG, 100, byteArray);    // write a compressed version of the bitmap into the stream
                    byte[] bytes1 = byteArray.toByteArray();// toByteArray() allocates a new byte array sized to the stream's current content and copies the buffer into it

//                    insertToDB(file.getAbsolutePath());

                    /**
                     * File write: flush the re-compressed JPEG to disk
                     */
                    fileOutputStream.write(bytes1);
                    fileOutputStream.flush();
                    fileOutputStream.close();


                    // Then insert the file into the system gallery.
                    // NOTE(review): MediaStore.Images.Media.insertImage is deprecated (API 29+);
                    // consider MediaStore inserts via ContentValues.
                    try {
                        MediaStore.Images.Media.insertImage(getContentResolver(),
                                file.getAbsolutePath(), fName, null);
                    } catch (FileNotFoundException e) {
                        e.printStackTrace();
                    }
                    //sendBroadcast(new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE, Uri.parse(file.getAbsolutePath())));
                    // NOTE(review): Uri.fromFile in a broadcast throws FileUriExposedException on
                    // API 24+ (targetSdk >= 24) — a FileProvider URI is the supported route.
                    Intent intent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
                    Uri uri = Uri.fromFile(file);
                    intent.setData(uri);
                    sendBroadcast(intent); // broadcast so the gallery refreshes


                } catch (FileNotFoundException e) {
                    e.printStackTrace();
                } catch (IOException e) {
                    e.printStackTrace();
                }

            }
        }, null);
        // B3 Setup: grab the ImageReader's Surface (capture target)
        imageReaderSurface = imageReader.getSurface();

        // B4. Shutter click handler
        takephoto_btn.setOnClickListener(new View.OnClickListener() {

            @Override
            public void onClick(View v) {
                // B4.1 Build the still-capture request. This needs the opened camera
                // (mCameraDevice), so it cannot be done outside the click handler.
                // NOTE(review): if createCaptureRequest throws, requestBuilder_image_reader
                // stays null and the .set(...) calls below NPE — consider returning early.
                try {
                    requestBuilder_image_reader = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
                } catch (CameraAccessException e) {
                    e.printStackTrace();
                }

                // JPEG orientation: 90 for one facing, 270 for the other (per the app's id convention)
                if ("0".equals(cameraId)) {
                    // The sensor is landscape by default, so rotate 90 degrees
                    requestBuilder_image_reader.set(CaptureRequest.JPEG_ORIENTATION, 90);
                    mDisplayRotate = 90;
                } else if ("1".equals(cameraId)) {
                    requestBuilder_image_reader.set(CaptureRequest.JPEG_ORIENTATION, 270);
                    mDisplayRotate = 270;
                }

                // Enable auto-focus for the still capture
                requestBuilder_image_reader.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
                requestBuilder_image_reader.set(CaptureRequest.CONTROL_AF_MODE, CameraMetadata.CONTROL_AF_MODE_AUTO);
                // Fire the flash when enabled
                if (isFlash) {
                    requestBuilder_image_reader.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH);
                }
                // If the preview is zoomed, carry the same crop into the still capture.
                // SCALER_CROP_REGION is the requested crop rectangle.
                // NOTE(review): requestBuilder is null until the preview session has started —
                // tapping the shutter before then would NPE here.
                Rect zoomRect = requestBuilder.get(CaptureRequest.SCALER_CROP_REGION);
                if (zoomRect != null){
                    requestBuilder_image_reader.set(CaptureRequest.SCALER_CROP_REGION, zoomRect);
                }

                // B4.2 Set the capture target (the ImageReader's surface)
                requestBuilder_image_reader.addTarget(imageReaderSurface);
                takephoto_request = requestBuilder_image_reader.build();
                try {
                    // B4.3 Trigger the capture.
                    // capture() submits a single-frame capture request — the photo-taking path.
                    cameraCaptureSession.capture(takephoto_request, null, null);
                } catch (CameraAccessException e) {
                    e.printStackTrace();
                }
            }
        });

        // Switch-camera click handler
        change.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                switchCamera();
            }
        });

        // Record-video click handler (toggles start/stop)
        mVideo.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {

                if (isRecording) {
                    // Stop recording
                    stopRecord();
                    // Tear down the recording preview
                    stopPreview();
                    // Start a fresh preview session
                    startPreviewSession();
                    // Flip the button artwork back
                    mVideo.setBackgroundResource(R.drawable.video);
                    isRecording = false;
                    return;
                }
                startRecord();
            }
        });

        // Flash-toggle click handler
        flash_open.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                if (isFlash == true) {
                    Log.d("zzzz1", "isFlash" + isFlash);
                    flash_open.setBackgroundResource(R.drawable.flash_close);
                    isFlash = false;
                    Log.d("zzzz2", "isFlash" + isFlash);
                } else {
                    Log.d("zzzz3", "isFlash" + isFlash);
                    flash_open.setBackgroundResource(R.drawable.flash_open);
                    isFlash = true;
                }
            }
        });

        // Tap-to-focus handler: re-trigger AF on every touch of the preview
        textureView.setOnTouchListener(new View.OnTouchListener() {
            @Override
            public boolean onTouch(View v, MotionEvent event) {
                requestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
                requestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
                try {
                    cameraCaptureSession.setRepeatingRequest(requestBuilder.build(), null, null);
                } catch (CameraAccessException e) {
                    e.printStackTrace();
                }
                return false;
            }
        });

        // SeekBar (digital zoom) handler
        seekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
            @Override
            public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {

                maxZoom = cameraCharacteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
//                seekBar.setProgress(0);
                int factor = 100; // zoom smoothing factor: larger = smoother but slower zoom
//                seekBar.setMax (factor);
                // Used to compute the cropped frame size after zooming.
                // SENSOR_INFO_ACTIVE_ARRAY_SIZE is the rectangle of the sensor's active area
                // (the region actually receiving light from the scene); after geometric
                // correction it is the maximum pixel size for any non-RAW output format.
                Rect rect = cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
                Log.d(TAG, rect + "rect");
                // Both sides shrink symmetrically when zooming, hence the divide by 2
                int minW = (int) ((rect.width() - rect.width() / maxZoom) / (2 * factor));
                int minH = (int) ((rect.height() - rect.height() / maxZoom) / (2 * factor));
                // Crop grows proportionally with the slider progress
                int cropW = minW * progress;   // x-axis inset
                int cropH = minH * progress;   // y-axis inset
                Log.d(TAG, "handleZoom: cropW: " + cropW + ", cropH: " + cropH);
                Rect zoomRect = new Rect(cropW, cropH, rect.width() - cropW, rect.height() - cropH);
                // Request the crop region (this is what performs the digital zoom)
                requestBuilder.set(CaptureRequest.SCALER_CROP_REGION, zoomRect);    // apply the zoom
                restartPreview(); // the preview must be restarted for the crop to take effect

//                float minimumLens = cameraCharacteristics.get(CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE);
//                float num = (((float)progress) * minimumLens/100);
//                requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, num);
            }

            @Override   // invoked when the user starts dragging
            public void onStartTrackingTouch(SeekBar seekBar) {

            }
            @Override   // invoked when the user stops dragging
            public void onStopTrackingTouch(SeekBar seekBar) {
            }
        });

        Intent intent=new Intent(MainActivity.this, Albums.class);// Intent that opens the Albums gallery screen

        // Thumbnail click handler: open the Albums screen
        takephoto_imageView.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // picks an image from the gallery
//                Intent intent = new Intent(Intent.ACTION_PICK, MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
//                startActivity(intent);

//                picks from all local images
//                Intent intent = new Intent("android.intent.action.GET_CONTENT");
////                    Intent intent = new Intent(Intent.ACTION_GET_CONTENT);
//                intent.setType("image/*");
//                startActivityForResult(intent, 1);//open the gallery
//                if (ContextCompat.checkSelfPermission(MainActivity.this, Manifest.permission.WRITE_EXTERNAL_STORAGE)!= PackageManager.PERMISSION_GRANTED){
//                    ActivityCompat.requestPermissions(MainActivity.this,new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE},1);
//                }else {
//
//                    Intent intent;
//                    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
//                        intent = new Intent();
//                        intent.setType("image/*");
//                        intent.setAction(Intent.ACTION_GET_CONTENT);
//                    } else {
//                        intent = new Intent(Intent.ACTION_PICK, android.provider.MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
//                    }
//                    MainActivity.startActivityForResult(intent, 1);
//                }

                startActivity(intent);
            }
        });


    }

//    @Override
//    protected void onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {
//        switch (requestCode) {
//            case 1://选择相册照片返回
//                if (requestCode == 1 && resultCode == RESULT_OK && null != data) {
//                    if (Build.VERSION.SDK_INT >= 19) {
//                        handleImageOnKitkat(data);
//                    } else {
//                        handleImageBeforeKitKat(data);
//                    }
//                }
//                break;
//        }
//        super.onActivityResult(requestCode, resultCode, data);
//
//    }
//    private void handleImageOnKitkat(Intent data) {
//        String imagePath = null;
//        Uri uri = data.getData();
//        if (DocumentsContract.isDocumentUri(this, uri)) {
//            //如果是document类型的uri，则通过document id处理
//            String docId = DocumentsContract.getDocumentId(uri);
//            if ("com.android.providers.media.documents".equals(uri.getAuthority())) {
//                String id = docId.split(":")[1];
//                String selection = MediaStore.Images.Media._ID + "=" + id;
//                imagePath = getImagePath( MediaStore.Images.Media.EXTERNAL_CONTENT_URI, selection);
//            } else if ("com.android.providers.downloads.documents".equals(uri.getAuthority())) {
//                Uri contentUri = ContentUris.withAppendedId(Uri.parse("content:" +
//                        "//downloads/public_downloads"), Long.valueOf(docId));
//                imagePath = getImagePath(contentUri, null);
//            }
//        } else if ("content".equalsIgnoreCase(uri.getScheme())) {
//            //如果是content类型的uri，则使用普通方式处理
//            imagePath = getImagePath(uri, null);
//        } else if ("file".equalsIgnoreCase(uri.getScheme())) {
//            //如果是File类型的uri，直接获取图片路径即可
//            imagePath = uri.getPath();
//        }
//        //根据图片路径显示图片
//        displayImage(imagePath);
//    }
//    private void handleImageBeforeKitKat(Intent data){
//        Uri uri=data.getData();
//        String imagePath=getImagePath(uri,null);
//        displayImage(imagePath);
//    }
//    private void displayImage(String imagePath){
//        if(imagePath!=null){
//            Bitmap bitmap=BitmapFactory.decodeFile(imagePath);
//            albumsPicture.setImageBitmap(bitmap);//将图片放置在控件上
//        }else {
//            Toast.makeText(this,"得到图片失败",Toast.LENGTH_SHORT).show();
//        }
//    }
//    @SuppressLint("Range")
//    private String getImagePath(Uri uri, String selection){
//        String path=null;
//        Cursor cursor=getContentResolver().query(uri,null,selection,null,null);
//        if(cursor!=null){
//            if(cursor.moveToFirst()){
//                path=cursor.getString(cursor.getColumnIndex(MediaStore.Images.Media.DATA));
//            }
//            cursor.close();
//        }
//        return path;
//    }

//    public static void insertToDB(String picturePath) {
//        ContentValues values = new ContentValues();
//        ContentResolver resolver = mContext.getContentResolver();
//        values.put(MediaStore.Images.ImageColumns.DATA, picturePath);
//        values.put(MediaStore.Images.ImageColumns.TITLE, picturePath.substring(picturePath.lastIndexOf("/") + 1));
//        values.put(MediaStore.Images.ImageColumns.DATE_TAKEN, System.currentTimeMillis());
//        values.put(MediaStore.Images.ImageColumns.MIME_TYPE, "image/jpeg");
//        resolver.insert(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, values);
//    }


    /**
     * Opens the camera identified by {@link #cameraId}.
     *
     * Acquires the CameraManager, installs a state callback that starts the
     * preview session once the device opens, verifies runtime permissions,
     * and then asks the system to open the device. The callback thread is
     * {@code null}, so callbacks run on the current (main) thread.
     *
     * FIX: the original left onDisconnected()/onError() empty, which leaked
     * the camera device (it was never closed) and silently swallowed open
     * failures. Both callbacks now release the device and clear the cached
     * reference, per the CameraDevice.StateCallback contract.
     */
    private void openCamera() {
        // 1. Obtain the system camera service
        cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
        // 2. Prepare the device state callback used below
        cam_stateCallback = new CameraDevice.StateCallback() {
            /**
             * Called when the camera has opened successfully.
             * @param camera the now-open device
             */
            @Override
            public void onOpened(@NonNull CameraDevice camera) {
                // 2.1 Keep the opened device and start previewing
                mCameraDevice = camera;
                startPreviewSession();
            }

            /**
             * Called when the camera is no longer available (e.g. another app
             * grabbed it). The device must be closed here to release the hardware.
             * @param camera the disconnected device
             */
            @Override
            public void onDisconnected(@NonNull CameraDevice camera) {
                camera.close();
                mCameraDevice = null;
            }

            /**
             * Called on a fatal device error. Release the device and log the
             * error code instead of ignoring it.
             * @param camera the failed device
             * @param error  one of CameraDevice.StateCallback.ERROR_*
             */
            @Override
            public void onError(@NonNull CameraDevice camera, int error) {
                camera.close();
                mCameraDevice = null;
                Log.e("cameraId", "openCamera failed for id " + cameraId + ", error=" + error);
            }
        };
        // 4. Verify (and if needed request) the camera permission
        checkPermission();
        // 5. Open the camera (id to open + state callback; null handler = current thread)
        try {
            Log.d("cameraId", "openCamera_cameraId:" + cameraId);
            cameraManager.openCamera(cameraId, cam_stateCallback, null);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    /**
     * Checks the runtime permissions this activity needs (CAMERA,
     * RECORD_AUDIO, WRITE_EXTERNAL_STORAGE) and requests every missing one
     * in a single system dialog.
     *
     * FIX: the original else-if chain could only ever handle the FIRST
     * missing permission per call — RECORD_AUDIO was checked only once
     * CAMERA was granted, and storage only after both. All three are now
     * evaluated on every call and requested together.
     *
     * As before, if the user previously denied a permission
     * (shouldShowRequestPermissionRationale == true) we only show a toast
     * rather than re-prompting.
     */
    private void checkPermission() {
        String[] wanted = {
                Manifest.permission.CAMERA,
                Manifest.permission.RECORD_AUDIO,
                Manifest.permission.WRITE_EXTERNAL_STORAGE,
        };
        List<String> missing = new ArrayList<>();
        for (String permission : wanted) {
            if (ContextCompat.checkSelfPermission(this, permission) != PackageManager.PERMISSION_GRANTED) {
                if (ActivityCompat.shouldShowRequestPermissionRationale(this, permission)) {
                    // Previously denied: explain instead of prompting again
                    // (same toast text as before: "<PERMISSION>没有权限").
                    String shortName = permission.substring(permission.lastIndexOf('.') + 1);
                    Toast.makeText(this, shortName + "没有权限", Toast.LENGTH_SHORT).show();
                } else {
                    missing.add(permission);
                }
            }
        }
        if (!missing.isEmpty()) {
            // Request everything still missing in one dialog (request code 1, as before).
            ActivityCompat.requestPermissions(this, missing.toArray(new String[0]), 1);
        }
    }

    @Override
    protected void onResume() {
        super.onResume();
        // Returning to the foreground: if the TextureView has no surface yet,
        // wait for it via the availability listener; otherwise reopen now.
        if (!textureView.isAvailable()) {
            textureView.setSurfaceTextureListener(surfaceTextureListener);
            return;
        }
        openCamera();
    }
//    @Override
//    protected void onPause() {
//        // 先把相机的session关掉
//        if(cameraCaptureSession!=null){
//            cameraCaptureSession.close();
//        }
//        // 再关闭相机
//        if(null!=mCameraDevice){
//            mCameraDevice.close();
//        }
//        // 最后关闭ImageReader
//        if(null!=imageReader){
//            imageReader.close();
//        }
//        // 最后交给父View去处理
//        super.onPause();
//    }

    /**
     * Toggles between the front- and back-facing camera.
     *
     * The app encodes its camera id as the string value of the
     * LENS_FACING_* constants (see the cameraId field). We scan the device's
     * camera list for a camera whose lens facing is opposite to the current
     * id, then close the running device and reopen with the other id.
     *
     * FIX: the two branches duplicated the close/reopen sequence (extracted
     * to {@link #reopenCamera}), and CameraCharacteristics.get(LENS_FACING)
     * returns a nullable Integer that the original unboxed directly — a
     * potential NullPointerException for external cameras; a null guard now
     * skips such entries.
     */
    public void switchCamera() {
        Log.d("switch", "test");
        String frontId = String.valueOf(CameraCharacteristics.LENS_FACING_FRONT);
        String backId = String.valueOf(CameraCharacteristics.LENS_FACING_BACK);
        try {
            // Walk the device's camera list to find a lens facing the other way
            for (String id : cameraManager.getCameraIdList()) {
                // Characteristics describe each camera: facing, resolutions, etc.
                cameraCharacteristics = cameraManager.getCameraCharacteristics(id);
                Integer facing = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING);
                if (facing == null) {
                    continue; // e.g. external camera with no declared facing
                }
                Log.d("change", "cameraId:" + cameraId + " candidate id:" + id + " facing:" + facing);
                // Currently "back": switch to the front-facing id (and vice versa).
                // The re-open must stay inside the loop so the ids don't flip repeatedly.
                if (cameraId.equals(backId) && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                    reopenCamera(frontId);
                    break;
                } else if (cameraId.equals(frontId) && facing == CameraCharacteristics.LENS_FACING_BACK) {
                    reopenCamera(backId);
                    break;
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Closes the current camera device and reopens with {@code newId}
     * (deduplication of the two switch branches above).
     */
    private void reopenCamera(String newId) {
        cameraId = newId;       // remember the new id
        mCameraDevice.close();  // release the previous device first
        checkPermission();
        try {
            // Reopen directly via the manager rather than re-running openCamera()
            cameraManager.openCamera(cameraId, cam_stateCallback, null);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    //开启预览
    /**
     * Builds a preview capture request targeting the TextureView surface and
     * opens a capture session that repeats it indefinitely.
     *
     * The session's output list also includes {@code imageReaderSurface} so a
     * still capture can later be taken without reconfiguring the session.
     */
    private void startPreviewSession() {
        try {
            // 2.2 Build the request: TEMPLATE_PREVIEW prioritizes frame rate over quality.
            requestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            requestBuilder.addTarget(texture_surface); // bind the preview Surface as the output target
            request = requestBuilder.build();
            // 2.3 Session state callback, used when creating the session below.
            cam_session_stateCallback = new CameraCaptureSession.StateCallback() {
                // 2.3.1 The session is configured and ready to process capture requests.
                @Override
                public void onConfigured(@NonNull CameraCaptureSession session) {
                    cameraCaptureSession = session;
                    try {
                        // 2.3.2 setRepeatingRequest keeps capturing frames continuously —
                        // the standard mode for preview (or burst) scenarios.
                        cameraCaptureSession.setRepeatingRequest(request, null, null);
                    } catch (CameraAccessException e) {
                        e.printStackTrace();
                    }
                }

                // Session configuration failed.
                @Override
                public void onConfigureFailed(@NonNull CameraCaptureSession session) {
                    // Previously swallowed silently; log so failures are diagnosable.
                    Log.e(TAG, "startPreviewSession: capture session configuration failed");
                }
            };
            // 2.3 Create the session. Args: the list of output Surfaces, the state
            // callback (onConfigured fires when ready), and the Handler thread for
            // the callback (null = current thread).
            mCameraDevice.createCaptureSession(Arrays.asList(texture_surface, imageReaderSurface), cam_session_stateCallback, null);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    //录像发送AF请求
    /**
     * Switches the active request to continuous-video autofocus and restarts
     * the repeating request so the new AF mode takes effect while recording.
     */
    private void updatePreview() {
        // CONTROL_AF_MODE_CONTINUOUS_VIDEO: the camera keeps trying to focus
        // continuously — the recommended AF mode while capturing video.
        requestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
        CaptureRequest videoRequest = requestBuilder.build();
        try {
            // Resume the repeating (preview/recording) request with the new mode.
            cameraCaptureSession.setRepeatingRequest(videoRequest, null, null);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    //开始录像
    /**
     * Starts video recording: configures the MediaRecorder, closes the
     * preview-only session, and opens a new session that feeds both the
     * on-screen preview surface and the recorder's input surface.
     */
    private void startRecord() {
        try {
            setupMediaRecorder();
        } catch (IOException e) {
            e.printStackTrace();
        }
        // Tear down the preview-only session; a new one including the recorder
        // surface is created below.
        stopPreview();
        try {
            // TEMPLATE_RECORD is the request template intended for steady video
            // recording. (The original used TEMPLATE_VIDEO_SNAPSHOT, which is
            // meant for still captures taken *during* recording.)
            requestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
            requestBuilder.addTarget(texture_surface);      // keep the on-screen preview
            recorderSurface = mediaRecorder.getSurface();   // MediaRecorder's input surface
            requestBuilder.addTarget(recorderSurface);
            // New session outputting to preview, recorder, and the ImageReader.
            mCameraDevice.createCaptureSession(Arrays.asList(texture_surface, recorderSurface, imageReaderSurface), new CameraCaptureSession.StateCallback() {
                @Override
                public void onConfigured(@NonNull CameraCaptureSession session) {
                    cameraCaptureSession = session;
                    // Switch to continuous-video AF and restart the repeating request.
                    updatePreview();
                    // Begin writing to the output file.
                    mediaRecorder.start();
                    // UI state must be updated on the view's thread.
                    mVideo.post(new Runnable() {
                        @Override
                        public void run() {
                            isRecording = true;
                            mVideo.setBackgroundResource(R.drawable.video_red);
                        }
                    });
                }

                @Override
                public void onConfigureFailed(@NonNull CameraCaptureSession session) {
                    Log.e(TAG, "startRecord: capture session configuration failed");
                }
            }, null);
        } catch (Exception exception) {
            // Previously swallowed silently; log so recording failures are diagnosable.
            Log.e(TAG, "startRecord: failed to start recording", exception);
        }
    }

    //关闭预览
    /**
     * Closes the active capture session (if any) and clears the reference,
     * stopping the repeating preview request.
     */
    private void stopPreview() {
        CameraCaptureSession session = cameraCaptureSession;
        if (session == null) {
            return;
        }
        cameraCaptureSession = null;
        session.close();
    }

    //构造MediaRecorder
    /**
     * Builds and configures a fresh MediaRecorder for recording H.264/AAC
     * video into an MP4 file named "video_&lt;timestamp&gt;.mp4".
     *
     * MediaRecorder setters are strictly order-sensitive: sources first, then
     * output format, then encoders and parameters, then output file, and
     * finally prepare(). Do not reorder the calls below.
     *
     * @throws IOException declared for callers; note that prepare() failures
     *                     are currently caught and only logged at the bottom
     */
    private void setupMediaRecorder() throws IOException {
        // Would create a persistent input surface usable by encoders (unused).
//        recorderSurface = MediaCodec.createPersistentInputSurface();
        // Fresh recorder instance.
        mediaRecorder = new MediaRecorder();
        // Audio source: device microphone; moves recorder to Initialized state.
        // Must precede setAudioEncoder, and the encoder must match (MIC -> AAC).
        mediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);    // MIC: the main microphone
        // Video source: frames are pulled from a Surface (Camera2 pipeline).
        mediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
        // Container format MP4; enters DataSourceConfigured state.
        // setOutputFormat() must be called before setting the A/V encoders.
        mediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
        // Audio codec: AAC (Advanced Audio Coding).
        mediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
        // Video codec: H.264.
        mediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
        // Optional cap on recording duration, in milliseconds (disabled).
        //mediaRecorder.setMaxDuration(60 * 1000);
        // Optional cap on file size (60 MB), in bytes (disabled).
//        mediaRecorder.setMaxFileSize(60 * 1024 * 1024);
        // Capture frame rate (frames/second). Must be hardware-supported;
        // call after setVideoSource()/setOutputFormat() and before prepare().
        mediaRecorder.setVideoFrameRate(30);
        // setCaptureRate() would enable time-lapse/slow-motion relative to the
        // frame rate above (disabled).
//        if (delayflage) {mediaRecorder.setCaptureRate(10);}
        // Video bitrate in bits/second; higher bitrate -> clearer picture.
//        mediaRecorder.setVideoEncodingBitRate(screenSize.y * screenSize.x * 24);
        mediaRecorder.setVideoEncodingBitRate(selectSize.getWidth() * selectSize.getHeight() * 24);
        // Recorded frame size; pairs with the bitrate to control quality.
//        mediaRecorder.setVideoSize(screenSize.y,screenSize.x);
        mediaRecorder.setVideoSize(selectSize.getWidth(), selectSize.getHeight());
//        mediaRecorder.setInputSurface(texture_surface);
        // Preview surface. NOTE(review): with VideoSource.SURFACE a preview
        // display is normally unnecessary — confirm this call is needed.
        mediaRecorder.setPreviewDisplay(texture_surface);
        // Orientation hint written into the MP4 metadata.
//        mediaRecorder.setOrientationHint(90);
        // NOTE(review): the value of LENS_FACING_BACK ("1") is compared against
        // the camera-id string; per the inline hints, id "1" is treated as the
        // front camera here — confirm the id/facing mapping on target devices.
        if (cameraId.equals(String.valueOf(CameraCharacteristics.LENS_FACING_BACK))) {
            mediaRecorder.setOrientationHint(270);  // front camera
            mDisplayRotate = 270;
        } else {
            mediaRecorder.setOrientationHint(90);   // back camera
            mDisplayRotate = 90;
        }
        // Ensure the output directory exists and refresh the timestamp string.
        dataConfig();
        String fName = "video_" + format + ".mp4";
        File file = new File(path + File.separator + fName);     // File.separator is "/"
//        Log.d("xxxxxx", "startRecord: "+file.getAbsolutePath());    //  /storage/emulated/0/DCIM/data/video_20220714_070511.mp4
        // Output destination. NOTE(review): the File overload of setOutputFile
        // requires API 26+ — confirm against the app's minSdkVersion.
        mediaRecorder.setOutputFile(file);

        try {
            // Transition the recorder to the Prepared state; start() may follow.
            mediaRecorder.prepare();
        } catch (IOException | IllegalStateException e) {
            e.printStackTrace();
        }

    }

    //停止录像
    /**
     * Stops the active recording and resets the recorder for reuse.
     *
     * Per the Android documentation, MediaRecorder.stop() intentionally throws
     * a RuntimeException when no valid audio/video data has been received
     * (e.g. stop called immediately after start). In that case the output
     * file is invalid; we catch the exception so the app does not crash.
     */
    private void stopRecord() {
        if (mediaRecorder == null) {
            return;
        }
        try {
            mediaRecorder.stop();
        } catch (RuntimeException e) {
            // No valid data was recorded; the output file (if any) is unusable.
            Log.w(TAG, "stopRecord: stop() failed, no valid recording data", e);
        }
        mediaRecorder.reset();
    }

    /**
     * 文件存储
     * Prepares the shared output directory (DCIM/Camera) and computes the
     * timestamp string ({@code format}) used to name new photo/video files.
     * Files in the public DCIM directory survive app uninstall.
     */
    public void dataConfig() {
        // Public DCIM/Camera directory (shared with the system gallery).
        path = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM) + File.separator + "Camera"); //DCIM/Camera
        Log.d(TAG, "dataConfig: path" + path);
        Toast.makeText(this, "路径："+ path, Toast.LENGTH_SHORT).show();

        // Create the directory on first use.
        if (path.exists()) {
            Log.d("MainActivity", "onImageAvailable: 路径存在");
        } else {
            Log.d("MainActivity", "onImageAvailable: 路径不存在");
            path.mkdirs();
        }

        // Locale.US keeps the timestamp ASCII regardless of the device locale.
        SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd_HHmmss", Locale.US);
        format = sdf.format(new Date());
    }

    //获取最适应屏幕分辨率
    /**
     * Finds the JPEG output size that best matches the TextureView's measured
     * dimensions: among sizes whose height fits within the view width, picks
     * the one whose width is closest to the view height.
     *
     * Side effect: updates the {@code cameraCharacteristics} field to the
     * characteristics of the camera whose size list was used (other methods,
     * e.g. handleZoom, read that field).
     *
     * @return the selected size, or null if no camera offered a fitting size
     */
    private Size getMatchingSize2() {
        selectSize = null;
        try {
            CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
            // Renamed loop variable: the original shadowed the cameraId field.
            for (String id : manager.getCameraIdList()) {
                cameraCharacteristics = manager.getCameraCharacteristics(id);
                // SCALER_STREAM_CONFIGURATION_MAP: the camera's supported stream
                // configurations (formats, sizes, frame durations).
                StreamConfigurationMap streamConfigurationMap = cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
                if (streamConfigurationMap == null) {
                    continue; // guard: avoid NPE when no configuration map is exposed
                }
                // All JPEG output sizes supported by this camera.
                Size[] sizes = streamConfigurationMap.getOutputSizes(ImageFormat.JPEG);
                // The preview fills the TextureView, so match against its measured size.
                int measuredWidth = textureView.getMeasuredWidth();
                int measuredHeight = textureView.getMeasuredHeight();
                Log.d(TAG, "measuredWidth: 屏幕密度宽度=" + measuredWidth);
                Log.d(TAG, "measuredHeight: 屏幕密度高度=" + measuredHeight);
                for (Size itemSize : sizes) {
                    Log.d(TAG, "当前itemSize 宽=" + itemSize.getWidth() + "高=" + itemSize.getHeight());
                    // Candidate must fit the view width; keep whichever candidate's
                    // width is closest (absolute difference) to the view height.
                    if (itemSize.getHeight() <= measuredWidth) {
                        if (selectSize == null
                                || Math.abs(measuredHeight - itemSize.getWidth()) < Math.abs(measuredHeight - selectSize.getWidth())) {
                            selectSize = itemSize;
                        }
                    }
                }
                if (selectSize != null) {
                    break; // found a match for this camera; stop scanning
                }
            }
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
        return selectSize;
    }


    /**
     * Re-issues the repeating preview request so changes made to
     * {@code requestBuilder} (zoom, focus, ...) take effect.
     */
    public void restartPreview() {
        Log.v(TAG, "restartPreview");
        boolean ready = cameraCaptureSession != null && requestBuilder != null;
        if (!ready) {
            Log.w(TAG, "restartPreview: cameraCaptureSession or requestBuilder is null");
            return;
        }
        try {
            // Keep sending the (possibly updated) preview request repeatedly.
            cameraCaptureSession.setRepeatingRequest(requestBuilder.build(), null, null);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    //预览缩放
    /**
     * Steps the digital zoom one notch in or out by shrinking/growing the
     * sensor crop region, then restarts the preview to apply it.
     *
     * Fixes vs. original: zooming in while already at max no longer falls into
     * the zoom-out branch (the old else-if decremented mZoom), and the nullable
     * characteristics values are guarded against NPE on unboxing.
     *
     * @param isZoomIn true to zoom in one step, false to zoom out one step
     */
    public void handleZoom(boolean isZoomIn) {
        if (mCameraDevice == null || cameraCharacteristics == null || requestBuilder == null) {
            return;
        }
        // Max digital zoom = active array width / smallest crop width.
        Float maxDigitalZoom = cameraCharacteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
        // Active sensor area: the largest usable pixel region after corrections.
        Rect rect = cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
        if (maxDigitalZoom == null || rect == null) {
            Log.w(TAG, "handleZoom: zoom characteristics unavailable");
            return;
        }
        maxZoom = maxDigitalZoom;
        Log.d(TAG, "handleZoom: maxZoom: " + maxZoom);
        int factor = 100; // steps between no zoom and max zoom; larger = smoother but slower
        if (isZoomIn && mZoom < factor) {
            mZoom++;
        } else if (!isZoomIn && mZoom > 0) {
            mZoom--;
        }
        Log.d(TAG, "handleZoom: mZoom: " + mZoom); // 0..factor
        Log.d(TAG, rect + "rect");
        // Per-step crop amount per side (the crop shrinks from both edges, hence /2).
        int minW = (int) ((rect.width() - rect.width() / maxZoom) / (2 * factor));
        int minH = (int) ((rect.height() - rect.height() / maxZoom) / (2 * factor));
        // Total inset scales linearly with the current step.
        int cropW = minW * mZoom;
        int cropH = minH * mZoom;
        Log.d(TAG, "handleZoom: cropW: " + cropW + ", cropH: " + cropH);
        Rect zoomRect = new Rect(cropW, cropH, rect.width() - cropW, rect.height() - cropH);
        requestBuilder.set(CaptureRequest.SCALER_CROP_REGION, zoomRect); // apply the crop (zoom)
        restartPreview(); // the new crop only takes effect on the next repeating request
    }

    //单击对焦
//    private final CameraCaptureSession.CaptureCallback mAfCaptureCallback = new CameraCaptureSession.CaptureCallback() {
//
//        private void process(CaptureResult result) {
//            Integer state = result.get(CaptureResult.CONTROL_AF_STATE);
//            if (null == state) {
//                return;
//            }
//            Log.d(TAG, "process: CONTROL_AF_STATE: " + state);
//            if (state == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED || state == CaptureResult
//                    .CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) {
//                Log.d(TAG, "process: start normal preview");
//                requestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
//                        CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
//                requestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest
//                        .CONTROL_AF_MODE_CONTINUOUS_PICTURE);
//                requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.FLASH_MODE_OFF);
//                Log.d(TAG, "focusOnPoint: 333333333333333333333333333333333");
//                startPreview();
//            }
//        }
//
//        @Override
//        public void onCaptureProgressed(@NonNull CameraCaptureSession session,
//                                        @NonNull CaptureRequest request,
//                                        @NonNull CaptureResult partialResult) {
//            process(partialResult);
//        }
//
//        @Override
//        public void onCaptureCompleted(@NonNull CameraCaptureSession session,
//                                       @NonNull CaptureRequest request,
//                                       @NonNull TotalCaptureResult result) {
//            process(result);
//        }
//    };
//    public void focusOnPoint(double x, double y, int width, int height) {
//        if (mCameraDevice == null || requestBuilder == null) {
//            return;
//        }
//        // 1. 先取相对于view上面的坐标
//        int previewWidth = selectSize.getWidth();
//        int previewHeight = selectSize.getHeight();
//        if (mDisplayRotate == 90 || mDisplayRotate == 270) {
//            previewWidth = selectSize.getHeight();
//            previewHeight = selectSize.getWidth();
//        }
//        // 2. 计算摄像头取出的图像相对于view放大了多少，以及有多少偏移
//        double tmp;
//        double imgScale;
//        double verticalOffset = 0;
//        double horizontalOffset = 0;
//        if (previewHeight * width > previewWidth * height) {
//            imgScale = width * 1.0 / previewWidth;
//            verticalOffset = (previewHeight - height / imgScale) / 2;
//        } else {
//            imgScale = height * 1.0 / previewHeight;
//            horizontalOffset = (previewWidth - width / imgScale) / 2;
//        }
//        // 3. 将点击的坐标转换为图像上的坐标
//        x = x / imgScale + horizontalOffset;
//        y = y / imgScale + verticalOffset;
//        if (90 == mDisplayRotate) {
//            tmp = x;
//            x = y;
//            y = selectSize.getHeight() - tmp;
//        } else if (270 == mDisplayRotate) {
//            tmp = x;
//            x = selectSize.getWidth() - y;
//            y = tmp;
//        }
//        // 4. 计算取到的图像相对于裁剪区域的缩放系数，以及位移
//        Rect cropRegion = requestBuilder.get(CaptureRequest.SCALER_CROP_REGION);
//        if (cropRegion == null) {
//            Log.w(TAG, "can't get crop region");
//            cropRegion = cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
//        }
//        int cropWidth = cropRegion.width();
//        int cropHeight = cropRegion.height();
//        if (selectSize.getHeight() * cropWidth > selectSize.getWidth() * cropHeight) {
//            imgScale = cropHeight * 1.0 / selectSize.getHeight();
//            verticalOffset = 0;
//            horizontalOffset = (cropWidth - imgScale * selectSize.getWidth()) / 2;
//        } else {
//            imgScale = cropWidth * 1.0 / selectSize.getWidth();
//            horizontalOffset = 0;
//            verticalOffset = (cropHeight - imgScale * selectSize.getHeight()) / 2;
//        }
//        // 5. 将点击区域相对于图像的坐标，转化为相对于成像区域的坐标
//        x = x * imgScale + horizontalOffset + cropRegion.left;
//        y = y * imgScale + verticalOffset + cropRegion.top;
//        double tapAreaRatio = 0.1;
//        Rect rect = new Rect();
//        rect.left = clamp((int) (x - tapAreaRatio / 2 * cropRegion.width()), 0, cropRegion.width());
//        rect.right = clamp((int) (x + tapAreaRatio / 2 * cropRegion.width()), 0, cropRegion.width());
//        rect.top = clamp((int) (y - tapAreaRatio / 2 * cropRegion.height()), 0, cropRegion.height());
//        rect.bottom = clamp((int) (y + tapAreaRatio / 2 * cropRegion.height()), 0, cropRegion.height());
//        // 6. 设置 AF、AE 的测光区域，即上述得到的 rect        设置聚焦的区域
//        requestBuilder.set(CaptureRequest.CONTROL_AF_REGIONS,
//                new MeteringRectangle[]{new MeteringRectangle
//                (rect, 1000)});
//        requestBuilder.set(CaptureRequest.CONTROL_AE_REGIONS, new MeteringRectangle[]{new MeteringRectangle
//                (rect, 1000)});
//        requestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
//        requestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
//        requestBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CameraMetadata
//                .CONTROL_AE_PRECAPTURE_TRIGGER_START);
//        Log.d(TAG, "focusOnPoint: 11111111111111111111111111111111");
//        try {
//            // 7. 发送上述设置的对焦请求，并监听回调
//            cameraCaptureSession.setRepeatingRequest(requestBuilder.build(), mAfCaptureCallback
//                    , null);
//        } catch (CameraAccessException e) {
//            e.printStackTrace();
//        }
//    }
//    private int clamp(int x, int min, int max) {
//        if (x > max) return max;
//        if (x < min) return min;
//        return x;
//    }


    /**
     * Pinch-to-zoom: tracks the spacing between two pointers and zooms the
     * preview in or out as the spacing grows or shrinks.
     */
    @Override
    public boolean onTouchEvent(MotionEvent event) {
        int action = event.getAction() & MotionEvent.ACTION_MASK; // handle multi-touch actions
        if (action == MotionEvent.ACTION_POINTER_DOWN) {
            // Second finger down: record the initial spacing as the baseline.
            mOldDistance = getFingerSpacing(event);
        } else if (action == MotionEvent.ACTION_MOVE) {
            float newDistance = getFingerSpacing(event);
            // Spacing grew -> zoom in; shrank -> zoom out; unchanged -> no-op.
            if (newDistance > mOldDistance) {
                handleZoom(true);
            } else if (newDistance < mOldDistance) {
                handleZoom(false);
            }
            // New baseline for the next move event.
            mOldDistance = newDistance;
        }
        return super.onTouchEvent(event);
    }

    // Returns the distance in pixels between the first two pointers of the event.
    private static float getFingerSpacing(MotionEvent event) {
        float dx = event.getX(0) - event.getX(1);
        float dy = event.getY(0) - event.getY(1);
        // Euclidean distance (length of the hypotenuse).
        return (float) Math.sqrt(dx * dx + dy * dy);
    }

}