package com.app.facerecognition;

import android.Manifest;
import android.annotation.SuppressLint;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.RectF;
import android.media.Image;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.text.Html;
import android.text.InputType;
import android.util.Log;
import android.util.Pair;
import android.widget.EditText;
import android.widget.Toast;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.appcompat.app.AlertDialog;
import androidx.appcompat.app.AppCompatActivity;
import androidx.camera.core.CameraSelector;
import androidx.camera.core.ImageAnalysis;
import androidx.camera.core.Preview;
import androidx.camera.lifecycle.ProcessCameraProvider;
import androidx.core.content.ContextCompat;
import androidx.databinding.DataBindingUtil;
import androidx.lifecycle.LifecycleOwner;
import androidx.lifecycle.ViewModelProvider;

import com.app.facerecognition.callBack.MainCallBack;
import com.app.facerecognition.classifier.SimilarityClassifier;
import com.app.facerecognition.databinding.ActivityMainBinding;
import com.app.facerecognition.dto.EditUserRes;
import com.app.facerecognition.dto.UserInfo;
import com.app.facerecognition.dto.UserListResponse;
import com.app.facerecognition.utils.BitmapUtils;
import com.app.facerecognition.utils.ImageRecognitionUtils;
import com.app.facerecognition.viewModel.MainViewModel;
import com.bumptech.glide.Glide;
import com.bumptech.glide.request.target.SimpleTarget;
import com.bumptech.glide.request.transition.Transition;
import com.google.android.gms.tasks.OnCompleteListener;
import com.google.android.gms.tasks.OnFailureListener;
import com.google.android.gms.tasks.OnSuccessListener;
import com.google.android.gms.tasks.Task;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.gson.Gson;
import com.google.mlkit.vision.common.InputImage;
import com.google.mlkit.vision.face.Face;
import com.google.mlkit.vision.face.FaceDetector;

import org.tensorflow.lite.Interpreter;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;

import okhttp3.Call;
import okhttp3.Callback;
import okhttp3.HttpUrl;
import okhttp3.MediaType;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.RequestBody;
import okhttp3.Response;


public class MainActivity extends AppCompatActivity implements MainCallBack {

    private static final String TAG = "MainActivity";

    // View binding and view-model for the main screen.
    private ActivityMainBinding binding;
    private MainViewModel viewModel;
    // TFLite interpreter producing face embeddings; loaded in onCreate().
    private Interpreter tfLite;
    // ML Kit face detector; obtained from the view-model in onCreate().
    private FaceDetector detector;

    private ListenableFuture<ProcessCameraProvider> cameraProviderFuture;
    // Set asynchronously by cameraBind(); may still be null early in the lifecycle.
    private ProcessCameraProvider cameraProvider;
    // Currently selected lens; toggled by onCameraSwitchClick().
    private int cam_face = CameraSelector.LENS_FACING_BACK;
    private static final int MY_CAMERA_REQUEST_CODE = 100;

    // Known faces, keyed by display name (or face-card id for server records).
    // NOTE(review): written from OkHttp callback threads and read from the camera
    // analyzer thread without synchronization — confirm this is acceptable.
    private HashMap<String, SimilarityClassifier.Recognition> registered = new HashMap<>();
    // True while the front camera is active: frames are mirrored and must be flipped.
    private boolean flipX = false;
    // Gate for the live-recognition loop; paused while the add-face dialog is open.
    private boolean start = true;

    // Base URL of the backend API. NOTE(review): plain HTTP — confirm cleartext traffic is intended.
    private String apiRootPath="http://work.ii999.live:19002/";

    // Last embedding produced by recognizeImage(); consumed by addFace().
    private float[][] embeddings;

    // Shared HTTP client and JSON mapper, reused across requests.
    private OkHttpClient client = new OkHttpClient();
    private Gson gson = new Gson();


    /**
     * Wires up data binding, the view-model, camera permission, persisted
     * recognitions, the TFLite model and the face detector, then binds the camera.
     */
    @RequiresApi(api = Build.VERSION_CODES.M)
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        binding = DataBindingUtil.setContentView(this, R.layout.activity_main);
        // Colored action-bar title rendered from an HTML span.
        Objects.requireNonNull(getSupportActionBar()).setTitle(Html.fromHtml("<font color='#FB4E00'>Face Recognition App</font>"));
        viewModel = new ViewModelProvider(this).get(MainViewModel.class);
        viewModel.init(this);
        binding.setContract(this);
        binding.setViewModel(viewModel);

        checkCameraPermission();
        // Restore previously saved recognitions from SharedPreferences.
        registered = viewModel.readFromSP();
        try {
            tfLite = viewModel.getModel(MainActivity.this);
        } catch (IOException e) {
            // The model is mandatory for recognition; surface the failure instead
            // of silently continuing with a null interpreter (which would NPE later).
            Log.e(TAG, "Failed to load TFLite model", e);
            Toast.makeText(this, "Failed to load recognition model", Toast.LENGTH_LONG).show();
        }

        detector = viewModel.getDetector();
        cameraBind();
    }


    // Handler for messages carrying a UserInfo JSON payload (msg.what == 1).
    // NOTE(review): no visible sender in this file — confirm this handler is
    // still used; an anonymous Handler holds an implicit Activity reference
    // (hence the HandlerLeak suppression).
    @SuppressLint("HandlerLeak")
    private Handler mHandler = new Handler(){
        @Override
        public void handleMessage(Message msg) {
            switch (msg.what){
                case 1:
                    Log.d("handleMessage","收到网络请求返回的信息");
                    // Reuse the shared Gson instance instead of allocating one per message.
                    UserInfo user = gson.fromJson((String) msg.obj, UserInfo.class);
                    Log.d("handleMessage",user.toString());
                    break;
                default:
                    break;
            }
        }
    };

    /** Shows the action picker dialog for list / save / load / clear / remote operations. */
    @Override
    public void onActionClick() {
        final String[] actions = {"View Recognition List","Save Recognitions","Load Recognitions","Clear All Recognitions","请求远程数据","获取所有人脸特征码"};

        AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this);
        builder.setTitle("Select Action:");
        builder.setItems(actions, (dialog, which) -> {
            if (which == 0) {
                displayNameListView();
            } else if (which == 1) {
                viewModel.insertToSP(registered, false, viewModel.readFromSP());
            } else if (which == 2) {
                registered.putAll(viewModel.readFromSP());
            } else if (which == 3) {
                clearNameList();
            } else if (which == 4) {
                Log.d("onActionClick","开始请求远程数据");
                fetchUserList();
            } else if (which == 5) {
                registerAllUser();
            }
        });
        builder.create().show();
    }

    /**
     * Toggles between the back and front lens and rebinds the camera. Front
     * camera frames are mirrored, so flipX is raised for later un-mirroring.
     */
    @Override
    public void onCameraSwitchClick() {
        if (cam_face == CameraSelector.LENS_FACING_BACK) {
            cam_face = CameraSelector.LENS_FACING_FRONT;
            flipX = true;
        } else {
            cam_face = CameraSelector.LENS_FACING_BACK;
            flipX = false;
        }
        // cameraProvider is assigned asynchronously in cameraBind(); guard against
        // a switch tapped before the provider future has resolved (was an NPE).
        if (cameraProvider != null) {
            cameraProvider.unbindAll();
        }
        cameraBind();
    }

    /** Toggles live recognition; the button label doubles as the current mode. */
    @SuppressLint("SetTextI18n")
    @Override
    public void onRecognizeClick() {
        boolean enteringRecognizeMode = binding.recognize.getText().toString().equals("Recognize");
        if (enteringRecognizeMode) {
            start = true;
            viewModel.view_call(1);
            return;
        }
        viewModel.view_call(2);
    }

    @Override
    public void onAddFaceClick() {
        // Delegates to the add-face dialog flow.
        addFace();
    }


    // Paging state for registerAllUser(); mutated from OkHttp callback threads.
    int hasFeature = 1; // 1 = only users that already have a feature code
    int pageNo = 1; // current page of the server-side face list
    int pageSize = 100; // records requested per page

    /**
     * Fetches every user that already has a face feature code from the server
     * (page by page) and registers them locally for recognition.
     */
    private void registerAllUser() {
        // Always start from the first page; without this reset a second click
        // would resume from whatever page the previous run stopped at.
        pageNo = 1;
        fetchFeaturePage();
    }

    /** Requests one page of users and recurses until a short page is returned. */
    private void fetchFeaturePage() {
        String apiUrl = apiRootPath + "znzq/onguard/face/info/list";
        HttpUrl base = HttpUrl.parse(apiUrl);
        if (base == null) {
            // Malformed base URL — previously an unguarded NPE on newBuilder().
            Log.e(TAG, "Invalid API url: " + apiUrl);
            return;
        }
        // Build the URL with query parameters.
        String url = base.newBuilder()
                .addQueryParameter("hasFeature", String.valueOf(hasFeature))
                .addQueryParameter("pageNo", String.valueOf(pageNo))
                .addQueryParameter("pageSize", String.valueOf(pageSize))
                .build()
                .toString();

        Request request = new Request.Builder()
                .url(url)
                .get()
                .build();

        client.newCall(request).enqueue(new Callback() {
            @Override
            public void onFailure(@NonNull Call call, @NonNull IOException e) {
                Log.e(TAG, "Face list request failed", e);
            }

            @Override
            public void onResponse(@NonNull Call call, @NonNull Response response) throws IOException {
                if (!response.isSuccessful()) {
                    Log.i(TAG, "Error: " + response.code() + " " + response.message());
                    return;
                }
                String bodyString = response.body().string();
                Log.i(TAG, "Response Body: " + bodyString);
                UserListResponse userListResponse = gson.fromJson(bodyString, UserListResponse.class);
                Log.i(TAG, "onResponse: userListResponse = " + userListResponse);

                if (userListResponse.isSuccess()) {
                    addServerRecordFace(userListResponse.getResult().getRecords());
                    // A full page implies more records may follow; request the next one.
                    if (userListResponse.getResult().getRecords().size() == pageSize) {
                        pageNo++;
                        fetchFeaturePage();
                    }
                }
            }
        });
    }

    /**
     * Registers each server-side face record locally, keyed by its face card id.
     * The server stores the embedding as a comma-separated string.
     */
    private void addServerRecordFace(List<UserInfo.UserInfoResult> records) {
        for (UserInfo.UserInfoResult record : records) {
            SimilarityClassifier.Recognition recognition = new SimilarityClassifier.Recognition(
                    "0", "", -1f);

            // Decode the persisted feature code into the 1xN embedding shape
            // expected by the classifier.
            float[][] feature = new float[1][];
            feature[0] = stringToFloatArray(record.getFeatureCode());
            recognition.setExtra(feature);

            Log.i(TAG, "addServerRecordFace: id = " + record.getId());
            String imageUrl = "http://webgl.jszy.xyz/face_img/data/" + record.getFacePicurl();
            Log.i(TAG, "addServerRecordFace: imageUrl = " + imageUrl);
            registered.put(record.getFaceCardId(), recognition);
        }
    }

    /** Requests one random user from the server and kicks off image download + recognition. */
    private void fetchUserList() {
        String url = apiRootPath + "znzq/onguard/face/info/getRandomInfo";

        Request request = new Request.Builder().url(url).build();

        client.newCall(request).enqueue(new Callback() {
            @Override
            public void onFailure(@NonNull Call call, @NonNull IOException e) {
                e.printStackTrace();
            }

            @Override
            public void onResponse(@NonNull Call call, @NonNull Response response) throws IOException {
                if (!response.isSuccessful()) {
                    Log.i(TAG, "Error: " + response.code() + " " + response.message());
                    return;
                }
                String bodyString = response.body().string();
                Log.i(TAG, "Response Body: " + bodyString);
                downloadUserImageAndRecognize(gson.fromJson(bodyString, UserInfo.class));
            }
        });
    }


    // Downloads the user's face picture with Glide and hands the bitmap to the
    // ML Kit detection pipeline (detectBitmap), which then uploads the feature code.
    private void downloadUserImageAndRecognize(UserInfo userInfo) {
        Log.i(TAG, "onResponse: userInfo = " + userInfo.getResult().getFacePicurl());

        String imageUrl = "http://webgl.jszy.xyz/face_img/data/"+userInfo.getResult().getFacePicurl();

        Log.i(TAG, "updateUserFeatureCode: imageUrl = " + imageUrl);

        // NOTE(review): SimpleTarget is deprecated in current Glide releases;
        // consider CustomTarget (requires implementing onLoadCleared) when upgrading.
        Glide.with(MainActivity.this)
                .asBitmap()
                .load(imageUrl)
                .into(new SimpleTarget<Bitmap>() {
                    @Override
                    public void onResourceReady(@NonNull Bitmap resource, @Nullable @org.jetbrains.annotations.Nullable Transition<? super Bitmap> transition) {
                        // Bitmap scaled = BitmapUtils.getResizedBitmap(resource, 112, 112);
                        detectBitmap(userInfo, resource);
                    }
                });
    }

    /**
     * Uploads the freshly computed feature code for one user, then — success or
     * failure — fetches the next random user so the batch keeps flowing.
     *
     * @param userInfo    user whose record is being edited
     * @param featureCode comma-separated embedding, or empty when no face was found
     */
    private void updateUserFeatureCode(UserInfo userInfo, String featureCode) {
        String apiUrl = apiRootPath + "znzq/onguard/face/info/edit";

        Map<String, Object> params = new HashMap<>();
        params.put("id", userInfo.getResult().getId());

        boolean hasCode = featureCode != null && !featureCode.isEmpty();
        params.put("featureCode", hasCode ? featureCode : "");
        params.put("hasFeature", hasCode ? 1 : 2); // 1 = feature extracted, 2 = no face found

        String jsonRequestBody = gson.toJson(params);
        Log.i(TAG, "updateUserFeatureCode: jsonRequestBody = " + jsonRequestBody);

        Request request = new Request.Builder()
                .url(apiUrl)
                .put(RequestBody.create(MediaType.parse("application/json"), jsonRequestBody))
                .build();

        client.newCall(request).enqueue(new Callback() {
            @Override
            public void onFailure(@NonNull Call call, @NonNull IOException e) {
                Log.e(TAG, "updateUserFeatureCode failed", e);
                // Continue with the next user regardless of outcome.
                fetchUserList();
            }

            @Override
            public void onResponse(@NonNull Call call, @NonNull Response response) throws IOException {
                if (response.isSuccessful()) {
                    String bodyString = response.body().string();
                    Log.i(TAG, "Response Body: " + bodyString);
                    // e.g. {"success":true,"message":"操作成功！","code":200,"result":"编辑成功!","timestamp":1700321301973}
                    EditUserRes editUserRes = gson.fromJson(bodyString, EditUserRes.class);
                    Log.i(TAG, "updateUserFeatureCode: editUserRes = " + editUserRes);
                } else {
                    Log.i(TAG, "Error: " + response.code() + " " + response.message());
                }
                // Continue with the next user regardless of outcome (single exit,
                // replacing the three duplicated fetchUserList() calls).
                fetchUserList();
            }
        });
    }

    /** Asynchronously obtains the process camera provider and binds the preview. */
    private void cameraBind() {
        cameraProviderFuture = ProcessCameraProvider.getInstance(this);
        cameraProviderFuture.addListener(() -> {
            try {
                cameraProvider = cameraProviderFuture.get();
                bindPreview(cameraProvider);
            } catch (ExecutionException e) {
                // Camera is unusable without a provider; at least record why
                // (the original swallowed both exceptions silently).
                Log.e(TAG, "Failed to obtain camera provider", e);
            } catch (InterruptedException e) {
                Log.e(TAG, "Interrupted while obtaining camera provider", e);
                Thread.currentThread().interrupt(); // restore interrupt status
            }
        }, ContextCompat.getMainExecutor(this));
    }

    /**
     * Binds preview + image analysis to the lifecycle. Each analyzed frame is
     * face-detected; the first face is cropped, resized to the model input size
     * and (while recognition is enabled) matched against registered faces.
     */
    private void bindPreview(@NonNull ProcessCameraProvider cameraProvider) {
        Preview preview = new Preview.Builder().build();
        CameraSelector cameraSelector = new CameraSelector.Builder()
                .requireLensFacing(cam_face)
                .build();

        ImageAnalysis imageAnalysis = ImageRecognitionUtils.getImageAnalysis(preview, binding);
        Executor executor = Executors.newSingleThreadExecutor();

        imageAnalysis.setAnalyzer(executor, imageProxy -> {
            @SuppressLint("UnsafeExperimentalUsageError")
            Image mediaImage = imageProxy.getImage();

            // A proxy without a backing image cannot be analyzed, but it must still
            // be closed or the analysis pipeline stalls. The original relied on
            // `assert image != null`, which is disabled at runtime and would have
            // NPE'd without ever closing the proxy.
            if (mediaImage == null) {
                imageProxy.close();
                return;
            }

            InputImage image = InputImage.fromMediaImage(mediaImage, imageProxy.getImageInfo().getRotationDegrees());

            detector.process(image)
                    .addOnSuccessListener(faces -> {
                        if (faces.size() != 0) {
                            // Use the first detected face.
                            Face face = faces.get(0);
                            Bitmap frame_bmp = BitmapUtils.toBitmap(mediaImage);
                            int rot = imageProxy.getImageInfo().getRotationDegrees();
                            Bitmap frame_bmp1 = BitmapUtils.rotateBitmap(frame_bmp, rot, false, false);

                            RectF boundingBox = new RectF(face.getBoundingBox());
                            Bitmap cropped_face = BitmapUtils.getCropBitmapByCPU(frame_bmp1, boundingBox);

                            // Front-camera frames are mirrored; un-mirror before embedding.
                            if (flipX) {
                                cropped_face = BitmapUtils.rotateBitmap(cropped_face, 0, true, false);
                            }
                            // Model expects 112x112 input.
                            Bitmap scaled = BitmapUtils.getResizedBitmap(cropped_face, 112, 112);
                            if (start) {
                                recognizeImage(scaled, binding, tfLite, registered);
                            }
                            try {
                                Thread.sleep(10); // slight throttle of the analysis loop
                            } catch (InterruptedException e) {
                                Thread.currentThread().interrupt();
                            }
                        } else {
                            if (registered.isEmpty()) {
                                viewModel.getRecoNameText().set("Add Face");
                            } else {
                                viewModel.getRecoNameText().set("No Face Detected");
                            }
                        }
                    })
                    // Always release the frame so the camera can deliver the next one.
                    .addOnCompleteListener(task -> imageProxy.close());
        });
        cameraProvider.bindToLifecycle((LifecycleOwner) this, cameraSelector, imageAnalysis, preview);
    }

    /**
     * Runs ML Kit face detection on a downloaded bitmap; on success the cropped
     * face is embedded and the resulting feature code is pushed to the server.
     * When no face is found an empty feature code is uploaded instead.
     */
    public void detectBitmap(UserInfo userInfo, final Bitmap bitmap) {
        InputImage inputImage = InputImage.fromBitmap(bitmap, 0);

        detector.process(inputImage)
                .addOnSuccessListener(faces -> {
                    Log.i(TAG, "detector.process onSuccess: faces.size = " + faces.size());
                    if (faces.size() != 0) {
                        Face face = faces.get(0);

                        RectF box = new RectF(face.getBoundingBox());
                        Bitmap cropped = BitmapUtils.getCropBitmapByCPU(bitmap, box);
                        Bitmap scaled = BitmapUtils.getResizedBitmap(cropped, 112, 112);

                        binding.facePreview.setImageBitmap(scaled);

                        String recognizeResult = recognizeBitmap(scaled);
                        updateUserFeatureCode(userInfo, recognizeResult);
                    } else {
                        // No face detected in the image.
                        updateUserFeatureCode(userInfo, "");
                    }
                })
                .addOnFailureListener(e -> {
                    Log.i(TAG, "detector.process onFailure: ");
                    e.printStackTrace();
                })
                .addOnCompleteListener(task -> Log.i(TAG, "detector.process onComplete: "));
    }

    /**
     * Computes the 192-dimension embedding for a 112x112 face bitmap.
     *
     * @return the embedding serialized as a comma-separated string
     */
    public String recognizeBitmap(final Bitmap bitmap) {
        final int inputSize = 112;
        final int outputSize = 192;
        final float imageMean = 128.0f;
        final float imageStd = 128.0f;

        ByteBuffer imgData = ImageRecognitionUtils.getImgData(inputSize, bitmap, imageMean, imageStd);
        float[][] embedding = new float[1][outputSize];
        Map<Integer, Object> outputs = new HashMap<>();
        outputs.put(0, embedding);

        tfLite.runForMultipleInputsOutputs(new Object[]{imgData}, outputs);

        String result = floatArrayToString(embedding[0]);
        Log.i(TAG, "recognizeBitmap: " + result);
        return result;
    }



    /**
     * Serializes a float array to a comma-separated string (inverse of
     * {@code stringToFloatArray}).
     *
     * @param floatArray values to encode; may be empty
     * @return comma-joined values, or "" for an empty array (the original
     *         deleteCharAt threw StringIndexOutOfBoundsException on empty input)
     */
    private String floatArrayToString(float[] floatArray) {
        StringBuilder stringBuilder = new StringBuilder();
        for (float value : floatArray) {
            if (stringBuilder.length() > 0) {
                stringBuilder.append(",");
            }
            stringBuilder.append(value);
        }
        return stringBuilder.toString();
    }

    /**
     * Parses a comma-separated feature-code string back into a float array
     * (inverse of {@code floatArrayToString}).
     *
     * @param arrayAsString comma-joined floats; null or empty yields an empty
     *                      array instead of the NumberFormatException the
     *                      original threw on "" (split("") returns {""})
     */
    private float[] stringToFloatArray(String arrayAsString) {
        if (arrayAsString == null || arrayAsString.isEmpty()) {
            return new float[0];
        }
        String[] stringValues = arrayAsString.split(",");
        float[] floatArray = new float[stringValues.length];
        for (int i = 0; i < stringValues.length; i++) {
            // trim() tolerates incidental whitespace around server-supplied values
            floatArray[i] = Float.parseFloat(stringValues[i].trim());
        }
        return floatArray;
    }

    /**
     * Embeds a face bitmap with the TFLite model, compares it against the
     * registered recognitions and publishes the nearest name (or "Unknown").
     * The embedding is stored in the {@code embeddings} field so addFace()
     * can register it afterwards.
     */
    public void recognizeImage(final Bitmap bitmap, ActivityMainBinding binding, Interpreter tfLite, HashMap<String, SimilarityClassifier.Recognition> registered){
        final int inputSize = 112;
        final int outputSize = 192;
        final float imageMean = 128.0f;
        final float imageStd = 128.0f;

        binding.facePreview.setImageBitmap(bitmap);

        ByteBuffer imgData = ImageRecognitionUtils.getImgData(inputSize, bitmap, imageMean, imageStd);
        Map<Integer, Object> outputs = new HashMap<>();
        embeddings = new float[1][outputSize];
        outputs.put(0, embeddings);
        tfLite.runForMultipleInputsOutputs(new Object[]{imgData}, outputs);

        if (registered.size() > 0) {
            final Pair<String, Float> nearest = ImageRecognitionUtils.findNearest(embeddings[0], registered);
            if (nearest != null) {
                // Distances below 1.0 are treated as a confident match.
                if (nearest.second < 1.000f) {
                    viewModel.getRecoNameText().set(nearest.first);
                } else {
                    viewModel.getRecoNameText().set("Unknown");
                }
            }
        }
    }

    /**
     * Pauses live recognition and prompts for a name under which the most
     * recent embedding (from recognizeImage) is registered.
     */
    private void addFace() {
        start = false; // pause recognition so `embeddings` stays fixed while the dialog is open
        AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this);
        builder.setTitle("Enter Name");
        final EditText input = new EditText(MainActivity.this);
        input.setInputType(InputType.TYPE_CLASS_TEXT );
        builder.setView(input);

        builder.setPositiveButton("ADD", (dialog, which) -> {
            String name = input.getText().toString().trim();
            // Reject blank names — they would create an unusable entry.
            if (name.isEmpty()) {
                Toast.makeText(MainActivity.this, "Name cannot be empty", Toast.LENGTH_SHORT).show();
            } else {
                SimilarityClassifier.Recognition result = new SimilarityClassifier.Recognition(
                        "0", "", -1f);
                result.setExtra(embeddings);
                registered.put(name, result);
            }
            start = true;
        });
        builder.setNegativeButton("Cancel", (dialog, which) -> {
            start = true;
            dialog.cancel();
        });
        builder.show();
    }

    /** Requests the CAMERA runtime permission if it has not been granted yet. */
    @RequiresApi(api = Build.VERSION_CODES.M)
    private void checkCameraPermission() {
        boolean granted = checkSelfPermission(Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED;
        if (!granted) {
            requestPermissions(new String[]{Manifest.permission.CAMERA}, MY_CAMERA_REQUEST_CODE);
        }
    }

    /** Asks for confirmation, then wipes both in-memory and persisted recognitions. */
    private void clearNameList() {
        AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this);
        builder.setTitle("Do you want to delete all Recognitions?");
        builder.setPositiveButton("Delete All", (dialog, which) -> {
            registered.clear();
            // Persist the wipe only after the user confirms. The original called
            // insertToSP(..., true, ...) while merely building the dialog, so the
            // saved recognitions were lost even when the user pressed Cancel.
            viewModel.insertToSP(registered, true, viewModel.readFromSP());
            Toast.makeText(MainActivity.this, "Recognitions Cleared", Toast.LENGTH_SHORT).show();
        });
        builder.setNegativeButton("Cancel", null);
        AlertDialog dialog = builder.create();
        dialog.show();
    }

    /** Shows a simple list dialog with the names of all registered faces. */
    private void displayNameListView() {
        AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this);
        if (registered.isEmpty()) {
            builder.setTitle("No Faces Added!!");
        } else {
            builder.setTitle("Recognitions:");
        }
        // The original also built a checkedItems boolean[] that was never used;
        // the manual copy loop is replaced by a direct key-set snapshot.
        String[] names = registered.keySet().toArray(new String[0]);
        builder.setItems(names, null);
        AlertDialog dialog = builder.create();
        dialog.show();
    }


    /** Reports the outcome of the CAMERA permission request to the user. */
    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        if (requestCode == MY_CAMERA_REQUEST_CODE) {
            // grantResults is empty when the request is interrupted (e.g. the user
            // dismisses the dialog); indexing [0] unguarded would crash.
            if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
                Toast.makeText(this, "Camera permission granted", Toast.LENGTH_LONG).show();
            } else {
                Toast.makeText(this, "Camera permission denied", Toast.LENGTH_LONG).show();
            }
        }
    }





}