//package com.example.emotiondetector;
//
//import android.annotation.SuppressLint;
//import android.content.pm.PackageManager;
//import android.graphics.Bitmap;
//import android.graphics.PointF;
//import android.media.Image;
//import android.os.Bundle;
//import android.util.Log;
//import android.util.Size;
//import android.view.OrientationEventListener;
//import android.view.Surface;
//import android.view.View;
//import android.widget.ImageButton;
//import android.widget.ImageView;
//
//import androidx.annotation.NonNull;
//import androidx.appcompat.app.AppCompatActivity;
//import androidx.camera.core.Camera;
//import androidx.camera.core.CameraSelector;
//import androidx.camera.core.ImageAnalysis;
//import androidx.camera.core.ImageCapture;
//import androidx.camera.core.ImageProxy;
//import androidx.camera.core.Preview;
//import androidx.camera.lifecycle.ProcessCameraProvider;
//import androidx.camera.view.PreviewView;
//import androidx.core.app.ActivityCompat;
//import androidx.core.content.ContextCompat;
//import androidx.lifecycle.LifecycleOwner;
//
//import com.example.emotiondetector.Utils.FaceGraphic;
//import com.example.emotiondetector.Utils.GraphicOverlay;
//import com.google.android.gms.tasks.OnFailureListener;
//import com.google.android.gms.tasks.OnSuccessListener;
//import com.google.android.gms.tasks.Task;
//import com.google.common.util.concurrent.ListenableFuture;
//import com.google.mlkit.vision.common.InputImage;
//import com.google.mlkit.vision.face.Face;
//import com.google.mlkit.vision.face.FaceDetection;
//import com.google.mlkit.vision.face.FaceDetector;
//import com.google.mlkit.vision.face.FaceDetectorOptions;
//import com.google.mlkit.vision.face.FaceLandmark;
//
//import java.util.List;
//import java.util.Locale;
//import java.util.concurrent.ExecutionException;
//import java.util.concurrent.Executor;
//import java.util.concurrent.Executors;
//
//public class FaceRecognition extends AppCompatActivity {
//
//    //mobile_facenet
//    private static final int TF_OD_API_INPUT_SIZE = 112;
//    private static final boolean TF_OD_API_IS_QUANTIZED = false;
//    private static final String TF_OD_API_MODEL_FILE = "";
//
//    private static final String TF_OD_API_LABELS_FILE = "";
//    private Bitmap rgbFrameBitmap = null;
//    private Bitmap croppedBitmap = null;
//    private Bitmap cropCopyBitmap = null;
//
//    private static final DetectorMode MODE = DetectorMode.TF_OD_API;
//    // Minimum detection confidence to track a detection.
//    private static final float MINIMUM_CONFIDENCE_TF_OD_API = 0.5f;
//    private static final boolean MAINTAIN_ASPECT = false;
//
//    private static final Size DESIRED_PREVIEW_SIZE = new Size(640, 480);
//    //private static final int CROP_SIZE = 320;
//    //private static final Size CROP_SIZE = new Size(320, 320);
//
//
//    //CameraX
//    private static final String MANUAL_TESTING_LOG = "manual testing";
//    private Executor executor = Executors.newSingleThreadExecutor();
//    private int REQUEST_CODE_PERMISSIONS = 1001;
//    private final String[] REQUIRED_PERMISSIONS = new String[]{"android.permission.CAMERA", "android.permission.WRITE_EXTERNAL_STORAGE"};
//    private int lensFacing = CameraSelector.LENS_FACING_FRONT;
//    private ListenableFuture<ProcessCameraProvider> cameraProviderFuture;
//    ProcessCameraProvider cameraProvider;
//
//
//
//
//    // activity views
//    PreviewView mPreviewView;
//    ImageView captureImage;
//    ImageButton toggle;
//    private GraphicOverlay graphicOverlay;
//
//    @Override
//    protected void onCreate(Bundle savedInstanceState) {
//        super.onCreate(savedInstanceState);
//        setContentView(R.layout.activity_face_recognition);
//
//
//        mPreviewView = findViewById(R.id.view_finder);
//        graphicOverlay = findViewById(R.id.graphic_overlay);
//        toggle = findViewById(R.id.toggle_button);
//
//        toggle.setOnClickListener(new View.OnClickListener() {
//            @Override
//            public void onClick(View v) {
////                lensFacing = if (CameraX.LensFacing.FRONT == lensFacing) {
////                    CameraX.getCameraWithCameraSelector(ca)
////                } else {
////                    CameraX.LensFacing.FRONT
////                }
////                try {
////                    // Only bind use cases if we can query a camera with this orientation
////                    CameraX.getCameraWithLensFacing(lensFacing)
////                    bindCameraUseCases()
////                } catch (exc: Exception) {
////                    // Do nothing
////                }
//            }
//        });
//
//        if(allPermissionsGranted()){
//            startCamera();
//        } else{
//            ActivityCompat.requestPermissions(this, REQUIRED_PERMISSIONS, REQUEST_CODE_PERMISSIONS);
//        }
//
//
//
//    }
//
//    private void startCamera() {
//
//        cameraProviderFuture = ProcessCameraProvider.getInstance(FaceRecognition.this);
//        cameraProviderFuture.addListener(() -> {
//            try {
//                cameraProvider = cameraProviderFuture.get();
//                bindPreview(cameraProvider);
//
//            } catch (ExecutionException | InterruptedException e) {
//                // No errors need to be handled for this Future.
//                // This should never be reached.
//            }
//        }, ContextCompat.getMainExecutor(this));
//    }
//
//    //end of onCreate method
//
//    private void bindPreview(ProcessCameraProvider cameraProvider) {
//        cameraProvider.unbindAll();
//
//        //Size targetResolution = new Size(mPreviewView.getWidth(), mPreviewView.getHeight());
//        ImageAnalysis imageAnalysis =
//                new ImageAnalysis.Builder()
//                        .setTargetResolution(DESIRED_PREVIEW_SIZE)
//                        .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
//                        .build();
//
//        imageAnalysis.setAnalyzer(executor, new ImageAnalysis.Analyzer() {
//            @SuppressLint("UnsafeExperimentalUsageError")
//            @Override
//            public void analyze(@NonNull ImageProxy imageProxy) {
//                Image mediaImage = imageProxy.getImage();
//                if (mediaImage != null) {
//                    InputImage image = InputImage.fromMediaImage(mediaImage, imageProxy.getImageInfo().getRotationDegrees());
//                    Bitmap bitmap = Bitmap.createBitmap(image.getWidth(), image.getHeight(), Bitmap.Config.ARGB_8888);
//                    //define opts for faceDetector obj
//                    FaceDetectorOptions options =
//                            new FaceDetectorOptions.Builder()
//                                    .setPerformanceMode(FaceDetectorOptions.PERFORMANCE_MODE_ACCURATE)
//                                    .setLandmarkMode(FaceDetectorOptions.LANDMARK_MODE_NONE)
//                                    .setClassificationMode(FaceDetectorOptions.CLASSIFICATION_MODE_NONE)
//                                    .enableTracking()
//                                    .build();
//                    // initialize the face detector with the options defined above
//                    FaceDetector faceDetector = FaceDetection.getClient(options);
//                    Task<List<Face>> task = faceDetector.process(image);
//                    task.addOnSuccessListener(new OnSuccessListener<List<Face>>() {
//                        @Override
//                        public void onSuccess(List<Face> faces) {
//                            //clear overlay on each frame
//                            graphicOverlay.clear();
//                            //call analyzer func to process face
//                            analyzer(faces);
//                            //close face detector
//                            faceDetector.close();
//                        }
//                    }).addOnFailureListener(new OnFailureListener() {
//                        @Override
//                        public void onFailure(@NonNull Exception e) {
//                            //Toast.makeText(CatchSmile.this, "Error: " + e.getMessage(), Toast.LENGTH_LONG);
//                        }
//                    }).addOnCompleteListener(results -> imageProxy.close());
//                }
//            }
//        });
//
//
//       Preview preview = new Preview.Builder()
//                .build();
//
//        CameraSelector cameraSelector = new CameraSelector.Builder()
//                .requireLensFacing(lensFacing)
//                .build();
//
//        preview.setSurfaceProvider(mPreviewView.getSurfaceProvider());
//
//
//
//        //orientation
//        ImageCapture imageCapture = new ImageCapture.Builder().build();
//
//        OrientationEventListener orientationEventListener = new OrientationEventListener(FaceRecognition.this) {
//            @Override
//            public void onOrientationChanged(int orientation) {
//                int rotation;
//
//                // Monitors orientation values to determine the target rotation value
//                if (orientation >= 45 && orientation < 135) {
//                    rotation = Surface.ROTATION_270;
//                } else if (orientation >= 135 && orientation < 225) {
//                    rotation = Surface.ROTATION_180;
//                } else if (orientation >= 225 && orientation < 315) {
//                    rotation = Surface.ROTATION_90;
//                } else {
//                    rotation = Surface.ROTATION_0;
//                }
//
//                imageCapture.setTargetRotation(rotation);
//            }
//        };
//
//        orientationEventListener.enable();
//
//
//        Camera camera = cameraProvider.bindToLifecycle((LifecycleOwner)this, cameraSelector,imageAnalysis, preview, imageCapture);
//    }
//
////    private void analyzer(){
////        try {
////            MobileFaceNet model = MobileFaceNet.newInstance(FaceRecognition.this);
////
////            // Creates inputs for reference.
////            TensorBuffer inputFeature0 = TensorBuffer.createFixedSize(new int[]{1, 112, 112, 3}, DataType.FLOAT32);
////            //inputFeature0.loadBuffer(byteBuffer);
////
////            // Runs model inference and gets result.
////            MobileFaceNet.Outputs outputs = model.process(inputFeature0);
////            TensorBuffer outputFeature0 = outputs.getOutputFeature0AsTensorBuffer();
////
////            // Releases model resources if no longer used.
////            model.close();
////        } catch (IOException e) {
////        }
////    }
//
//
//
//    //processFace
//    private void analyzer(List<Face> faces) {
//
//        for(Face face : faces){
//            Log.i("box size", String.valueOf(face.getBoundingBox()));
//            graphicOverlay.add(new FaceGraphic(graphicOverlay, face));
//
//        }
//    }
//
//
//
//    private static void logExtrasForTesting(Face face) {
//
//        if (face != null) {
//            Log.v(MANUAL_TESTING_LOG, "face bounding box: " + face.getBoundingBox().flattenToString());
//            Log.v(MANUAL_TESTING_LOG, "face Euler Angle X: " + face.getHeadEulerAngleX());
//            Log.v(MANUAL_TESTING_LOG, "face Euler Angle Y: " + face.getHeadEulerAngleY());
//            Log.v(MANUAL_TESTING_LOG, "face Euler Angle Z: " + face.getHeadEulerAngleZ());
//
//            // All landmarks
//            int[] landMarkTypes =
//                    new int[] {
//                            FaceLandmark.MOUTH_BOTTOM,
//                            FaceLandmark.MOUTH_RIGHT,
//                            FaceLandmark.MOUTH_LEFT,
//                            FaceLandmark.RIGHT_EYE,
//                            FaceLandmark.LEFT_EYE,
//                            FaceLandmark.RIGHT_EAR,
//                            FaceLandmark.LEFT_EAR,
//                            FaceLandmark.RIGHT_CHEEK,
//                            FaceLandmark.LEFT_CHEEK,
//                            FaceLandmark.NOSE_BASE
//                    };
//            String[] landMarkTypesStrings =
//                    new String[] {
//                            "MOUTH_BOTTOM",
//                            "MOUTH_RIGHT",
//                            "MOUTH_LEFT",
//                            "RIGHT_EYE",
//                            "LEFT_EYE",
//                            "RIGHT_EAR",
//                            "LEFT_EAR",
//                            "RIGHT_CHEEK",
//                            "LEFT_CHEEK",
//                            "NOSE_BASE"
//                    };
//            for (int i = 0; i < landMarkTypes.length; i++) {
//                FaceLandmark landmark = face.getLandmark(landMarkTypes[i]);
//                if (landmark == null) {
//                    Log.v(
//                            MANUAL_TESTING_LOG,
//                            "No landmark of type: " + landMarkTypesStrings[i] + " has been detected");
//                } else {
//                    PointF landmarkPosition = landmark.getPosition();
//                    String landmarkPositionStr =
//                            String.format(Locale.US, "x: %f , y: %f", landmarkPosition.x, landmarkPosition.y);
//                    Log.v(
//                            MANUAL_TESTING_LOG,
//                            "Position for face landmark: "
//                                    + landMarkTypesStrings[i]
//                                    + " is :"
//                                    + landmarkPositionStr);
//                }
//            }
//            Log.v(
//                    MANUAL_TESTING_LOG,
//                    "face left eye open probability: " + face.getLeftEyeOpenProbability());
//            Log.v(
//                    MANUAL_TESTING_LOG,
//                    "face right eye open probability: " + face.getRightEyeOpenProbability());
//            Log.v(MANUAL_TESTING_LOG, "face smiling probability: " + face.getSmilingProbability());
//            Log.v(MANUAL_TESTING_LOG, "face tracking id: " + face.getTrackingId());
//        }
//    }
//
//    private boolean allPermissionsGranted(){
//        //check whether every required permission has been granted
//        for(String permission : REQUIRED_PERMISSIONS){
//            if(ContextCompat.checkSelfPermission(this, permission) != PackageManager.PERMISSION_GRANTED){
//                return false;
//            }
//        }
//        return true;
//    }
//
//    // Which detection model to use: by default uses Tensorflow Object Detection API frozen
//    // checkpoints.
//    private enum DetectorMode {
//        TF_OD_API;
//    }
//
//
//}
