package com.example.emotiondetector.Utils;

/*
 * Copyright 2020 Google LLC. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * Abstract base class for ML Kit frame processors. Subclasses need to implement
 * {@link #onSuccess(Object, GraphicOverlay)} to define what they want to do with the detection
 * results and {@link #detectInImage(InputImage)} to specify the detector object.
 *
 * @param <T> The type of the detected feature.
 */

import android.app.ActivityManager;
import android.content.Context;
import android.graphics.Bitmap;
import android.os.SystemClock;
import android.util.Log;
import android.widget.Toast;

import androidx.annotation.GuardedBy;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;

import com.example.emotiondetector.interfaces.VisionImageProcessor;
import com.google.android.gms.tasks.Task;
import com.google.mlkit.vision.common.InputImage;

import java.nio.ByteBuffer;
import java.util.Timer;
import java.util.TimerTask;

///**
// * Abstract base class for ML Kit frame processors. Subclasses need to implement {@link
// * #onSuccess(T, FrameMetadata, GraphicOverlay)} to define what they want to with the detection
// * results and {@link #detectInImage(VisionImage)} to specify the detector object.
// *
// * @param <T> The type of the detected feature.
// */
/**
 * Abstract base class for ML Kit frame processors. Subclasses implement
 * {@link #detectInImage(InputImage)} to supply the detector, and
 * {@link #onSuccess(Object, GraphicOverlay)} / {@link #onFailure(Exception)} to consume the
 * detection results.
 *
 * <p>Also tracks per-frame latency statistics and frames-per-second, logging a diagnostics
 * summary at most once per second.
 *
 * @param <T> the type of the detected feature
 */
public abstract class VisionProcessorBase<T> implements VisionImageProcessor {

    protected static final String MANUAL_TESTING_LOG = "LogTagForTest";
    private static final String TAG = "VisionProcessorBase";

    // Used to query available system memory for the once-per-second diagnostics log.
    private final ActivityManager activityManager;
    // Fires every second to publish the frame counter as the current FPS value.
    private final Timer fpsTimer = new Timer();

    // Whether this processor has been shut down via stop(); once true, no new work is accepted.
    private boolean isShutdown;

    // Latency statistics. Updated only from the task-callback thread, so no sync needed.
    private int numRuns = 0;
    private long totalRunMs = 0;
    private long maxRunMs = 0;
    private long minRunMs = Long.MAX_VALUE;

    // Frames processed in the current one-second window; snapshotted into framesPerSecond
    // by the fpsTimer tick.
    private int frameProcessedInOneSecondInterval = 0;
    private int framesPerSecond = 0;

    // NOTE(review): the four fields below are never read or written anywhere in this class;
    // they appear to be remnants of the camera-stream path in the original ML Kit sample.
    // Kept as-is to avoid breaking anything outside this view — confirm before removing.
    @GuardedBy("this")
    private ByteBuffer latestImage;

    @GuardedBy("this")
    private FrameMetadata latestImageMetaData;

    @GuardedBy("this")
    private ByteBuffer processingImage;

    @GuardedBy("this")
    private FrameMetadata processingMetaData;

    VisionProcessorBase(Context context) {
        activityManager = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
        // Every second: snapshot the frame counter into framesPerSecond, then reset it.
        fpsTimer.scheduleAtFixedRate(
                new TimerTask() {
                    @Override
                    public void run() {
                        framesPerSecond = frameProcessedInOneSecondInterval;
                        frameProcessedInOneSecondInterval = 0;
                    }
                },
                /* delay= */ 0,
                /* period= */ 1000);
    }

    // -----------------Code for processing single still image----------------------------------------

    /**
     * Runs detection on a single still image and renders the results onto the overlay.
     *
     * @param bitmap         the image to process
     * @param graphicOverlay overlay that receives the detection graphics
     */
    @Override
    public void processBitmap(Bitmap bitmap, final GraphicOverlay graphicOverlay) {
        // Fix: isShutdown was set by stop() but never consulted, so frames could still be
        // dispatched to a detector after shutdown.
        if (isShutdown) {
            return;
        }
        requestDetectInImage(
                InputImage.fromBitmap(bitmap, 0),
                graphicOverlay,
                /* originalCameraImage= */ null,
                /* shouldShowFps= */ false);
    }

    // -----------------Common processing logic-------------------------------------------------------

    /**
     * Dispatches {@code image} to the detector, records latency/FPS statistics, and draws the
     * results (or reports the failure) on {@code graphicOverlay}.
     *
     * @param image               the image handed to the detector
     * @param graphicOverlay      overlay that receives result graphics
     * @param originalCameraImage if non-null, drawn underneath the detection graphics
     * @param shouldShowFps       whether the inference-info graphic should display FPS
     * @return the detector task, with success/failure listeners attached
     */
    private Task<T> requestDetectInImage(
            final InputImage image,
            final GraphicOverlay graphicOverlay,
            @Nullable final Bitmap originalCameraImage,
            boolean shouldShowFps) {
        final long startMs = SystemClock.elapsedRealtime();
        return detectInImage(image)
                .addOnSuccessListener(
                        results -> {
                            long currentLatencyMs = SystemClock.elapsedRealtime() - startMs;
                            numRuns++;
                            frameProcessedInOneSecondInterval++;
                            totalRunMs += currentLatencyMs;
                            maxRunMs = Math.max(currentLatencyMs, maxRunMs);
                            minRunMs = Math.min(currentLatencyMs, minRunMs);

                            // Only log inference info once per second: when the counter is 1,
                            // this is the first frame of the current one-second window.
                            if (frameProcessedInOneSecondInterval == 1) {
                                Log.d(TAG, "Max latency is: " + maxRunMs);
                                Log.d(TAG, "Min latency is: " + minRunMs);
                                Log.d(TAG, "Num of Runs: " + numRuns + ", Avg latency is: " + totalRunMs / numRuns);
                                ActivityManager.MemoryInfo mi = new ActivityManager.MemoryInfo();
                                activityManager.getMemoryInfo(mi);
                                long availableMegs = mi.availMem / 0x100000L;
                                Log.d(TAG, "Memory available in system: " + availableMegs + " MB");
                            }

                            graphicOverlay.clear();
                            if (originalCameraImage != null) {
                                graphicOverlay.add(new CameraImageGraphic(graphicOverlay, originalCameraImage));
                            }
                            graphicOverlay.add(
                                    new InferenceInfoGraphic(
                                            graphicOverlay, currentLatencyMs, shouldShowFps ? framesPerSecond : null));
                            VisionProcessorBase.this.onSuccess(results, graphicOverlay);
                            graphicOverlay.postInvalidate();
                        })
                .addOnFailureListener(
                        e -> {
                            graphicOverlay.clear();
                            graphicOverlay.postInvalidate();
                            String error = "Failed to process. Error: " + e.getLocalizedMessage();
                            Toast.makeText(
                                    graphicOverlay.getContext(),
                                    error + "\nCause: " + e.getCause(),
                                    Toast.LENGTH_SHORT)
                                    .show();
                            // Fix: log at ERROR level with the throwable attached, instead of
                            // Log.d + printStackTrace (which bypasses the Android logger).
                            Log.e(TAG, error, e);
                            VisionProcessorBase.this.onFailure(e);
                        });
    }

    /** Shuts this processor down: stops the FPS timer and resets all statistics. */
    @Override
    public void stop() {
        isShutdown = true;
        numRuns = 0;
        totalRunMs = 0;
        // Fix: also reset min/max latency and the FPS counters so a processor reused after
        // stop() does not report stale statistics.
        maxRunMs = 0;
        minRunMs = Long.MAX_VALUE;
        frameProcessedInOneSecondInterval = 0;
        framesPerSecond = 0;
        fpsTimer.cancel();
    }

    /** Runs the concrete detector on {@code image}; supplied by subclasses. */
    protected abstract Task<T> detectInImage(InputImage image);

    /** Called with the detection results for one frame; subclasses render/consume them here. */
    protected abstract void onSuccess(@NonNull T results, @NonNull GraphicOverlay graphicOverlay);

    /** Called when detection of a frame fails. */
    protected abstract void onFailure(@NonNull Exception e);

}
