/*  _____ _
 * |_   _| |_  _ _ ___ ___ _ __  __ _
 *   | | | ' \| '_/ -_) -_) '  \/ _` |_
 *   |_| |_||_|_| \___\___|_|_|_\__,_(_)
 *
 * Threema for Android
 * Copyright (c) 2019-2020 Threema GmbH
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License, version 3,
 * as published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with this program. If not, see <https://www.gnu.org/licenses/>.
 */

/*
 * Copyright (C) 2008 ZXing authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ch.threema.app.qrscanner.camera;

import android.content.Context;
import android.graphics.Point;
import android.graphics.Rect;
import android.hardware.Camera;
import android.os.Handler;
import android.util.DisplayMetrics;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.widget.FrameLayout;

import com.google.zxing.PlanarYUVLuminanceSource;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;

import ch.threema.app.qrscanner.camera.open.OpenCamera;
import ch.threema.app.qrscanner.camera.open.OpenCameraInterface;

/**
 * This object wraps the Camera service object and expects to be the only one talking to it. The
 * implementation encapsulates the steps needed to take preview-sized images, which are used for
 * both preview and decoding.
 *
 * @author dswitkin@google.com (Daniel Switkin)
 */

/**
 * @date 2016-11-18 16:17
 * @author GuoJinyu
 * @description modified
 */
public final class CameraManager {

	private static final Logger logger = LoggerFactory.getLogger(CameraManager.class);
	private static final int MIN_FRAME_WIDTH = 240;
	private static final int MIN_FRAME_HEIGHT = 240;
	//    private static final int MAX_FRAME_WIDTH = 1200; // = 5/8 * 1920
//    private static final int MAX_FRAME_HEIGHT = 675; // = 5/8 * 1080
	private static final int MAX_FRAME_WIDTH = 1920; // = 3/4 * 2560
	private static final int MAX_FRAME_HEIGHT = 1080; // = 3/4 * 1440

	private final CameraConfigurationManager configManager;
	/**
	 * Preview frames are delivered here, which we pass on to the registered handler. Make sure to
	 * clear the handler so it will only receive one message.
	 */
	private final PreviewCallback previewCallback;
	private OpenCamera camera;
	private AutoFocusManager autoFocusManager;
	private Rect framingRect;
	private Rect framingRectInPreview;
	private boolean initialized;
	private boolean previewing;
	private int requestedFramingRectWidth;
	private int requestedFramingRectHeight;
	private boolean needFullScreen;
	private DisplayMetrics displayMetrics;

	public CameraManager(Context context, DisplayMetrics displayMetrics, boolean needExposure, boolean needFullScreen) {
		this.configManager = new CameraConfigurationManager(context, needExposure);
		previewCallback = new PreviewCallback(configManager);
		this.needFullScreen = needFullScreen;
		this.displayMetrics = displayMetrics;
	}

	private static int findDesiredDimensionInRange(int resolution, int hardMin, int hardMax) {
		//int dim = 5 * resolution / 8; // Target 5/8 of each dimension
		int dim = 3 * resolution / 4; // Target 3/4 of each dimension
		if (dim < hardMin) {
			return hardMin;
		}
		if (dim > hardMax) {
			return hardMax;
		}
		return dim;
	}

	/**
	 * Opens the camera driver and initializes the hardware parameters.
	 *
	 * @param holder The surface object which the camera will draw preview frames into.
	 * @throws IOException Indicates the camera driver failed to open.
	 */
	public synchronized void openDriver(SurfaceHolder holder, SurfaceView surfaceView) throws IOException {
		OpenCamera theCamera = camera;
		if (theCamera == null) {
			theCamera = OpenCameraInterface.open(OpenCameraInterface.NO_REQUESTED_CAMERA);
			if (theCamera == null) {
				throw new IOException("Camera.open() failed to return object from driver");
			}
			camera = theCamera;
		}

		if (!initialized) {
			initialized = true;
			configManager.initFromCameraParameters(theCamera);
			if (requestedFramingRectWidth > 0 && requestedFramingRectHeight > 0) {
				setManualFramingRect(requestedFramingRectWidth, requestedFramingRectHeight);
				requestedFramingRectWidth = 0;
				requestedFramingRectHeight = 0;
			}
		}

		Camera cameraObject = theCamera.getCamera();
		Camera.Parameters parameters = cameraObject.getParameters();
		String parametersFlattened = parameters == null ? null : parameters.flatten(); // Save these, temporarily
		try {
			configManager.setDesiredCameraParameters(theCamera, false);
		} catch (RuntimeException re) {
			// Driver failed
			logger.info("Camera rejected parameters. Setting only minimal safe-mode parameters");
			logger.info("Resetting to saved camera params: " + parametersFlattened);
			// Reset:
			if (parametersFlattened != null) {
				parameters = cameraObject.getParameters();
				parameters.unflatten(parametersFlattened);
				try {
					cameraObject.setParameters(parameters);
					configManager.setDesiredCameraParameters(theCamera, true);
				} catch (RuntimeException re2) {
					// Well, darn. Give up
					logger.info("Camera rejected even safe-mode parameters! No configuration");
				}
			}
		}
		if (parameters != null) {
			// adjust surface view to match aspect ratio

			Camera.Size previewSize = cameraObject.getParameters().getPreviewSize();

			boolean rotated = theCamera.getOrientation() == 90 || theCamera.getOrientation() == 270;

			int previewWidth = rotated ? previewSize.height : previewSize.width;
			int previewHeight = rotated ? previewSize.width : previewSize.height;

			int containerWidth = ((FrameLayout) surfaceView.getParent()).getWidth();
			int containerHeight = ((FrameLayout) surfaceView.getParent()).getHeight();

			float aspectRatio = Math.min((float) containerWidth / (float) previewWidth, (float) containerHeight / (float) previewHeight);

			// adjust the bounds of the preview to fully match at least one edge of the container by keeping the original aspect ratio of the camera image
			if (aspectRatio >= 1) {
				previewHeight = Math.round((float) previewHeight * aspectRatio);
				previewWidth = Math.round((float) previewWidth * aspectRatio);
			}

			android.widget.FrameLayout.LayoutParams params = new android.widget.FrameLayout.LayoutParams(previewWidth, previewHeight);
			surfaceView.setLayoutParams(params);
			surfaceView.setX((float) (containerWidth - previewWidth) / 2);
			surfaceView.setY((float) (containerHeight -previewHeight) / 2);
		}
		cameraObject.setPreviewDisplay(holder);
	}

	public synchronized boolean isOpen() {
		return camera != null;
	}

	/**
	 * Closes the camera driver if still in use.
	 */
	public synchronized void closeDriver() {
		if (camera != null) {
			camera.getCamera().release();
			camera = null;
			// Make sure to clear these each time we close the camera, so that any scanning rect
			// requested by intent is forgotten.
			framingRect = null;
			framingRectInPreview = null;
		}
	}

	/**
	 * Asks the camera hardware to begin drawing preview frames to the screen.
	 */
	public synchronized void startPreview() {
		OpenCamera theCamera = camera;
		if (theCamera != null && !previewing) {
			theCamera.getCamera().startPreview();
			previewing = true;
			autoFocusManager = new AutoFocusManager(theCamera.getCamera());
		}
	}

	/**
	 * Tells the camera to stop drawing preview frames.
	 */
	public synchronized void stopPreview() {
		if (autoFocusManager != null) {
			autoFocusManager.stop();
			autoFocusManager = null;
		}
		if (camera != null && previewing) {
			camera.getCamera().stopPreview();
			previewCallback.setHandler(null, 0);
			previewing = false;
		}
	}

	/**
	 * Convenience method for {@link ch.threema.app.qrscanner.activity.CaptureActivity}
	 *
	 * @param newSetting if {@code true}, light should be turned on if currently off. And vice versa.
	 */
	public synchronized void setTorch(boolean newSetting) {
		OpenCamera theCamera = camera;
		if (theCamera != null) {
			if (newSetting != configManager.getTorchState(theCamera.getCamera())) {
				boolean wasAutoFocusManager = autoFocusManager != null;
				if (wasAutoFocusManager) {
					autoFocusManager.stop();
					autoFocusManager = null;
				}
				configManager.setTorch(theCamera.getCamera(), newSetting);
				if (wasAutoFocusManager) {
					autoFocusManager = new AutoFocusManager(theCamera.getCamera());
					autoFocusManager.start();
				}
			}
		}
	}

	/**
	 * A single preview frame will be returned to the handler supplied. The data will arrive as byte[]
	 * in the message.obj field, with width and height encoded as message.arg1 and message.arg2,
	 * respectively.
	 *
	 * @param handler The handler to send the message to.
	 * @param message The what field of the message to be sent.
	 */
	public synchronized void requestPreviewFrame(Handler handler, int message) {
		OpenCamera theCamera = camera;
		if (theCamera != null && previewing) {
			previewCallback.setHandler(handler, message);
			theCamera.getCamera().setOneShotPreviewCallback(previewCallback);
		}
	}

	/**
	 * Calculates the framing rect which the UI should draw to show the user where to place the
	 * barcode. This target helps with alignment as well as forces the user to hold the device
	 * far enough away to ensure the image will be in focus.
	 *
	 * @return The rectangle to draw on screen in window coordinates.
	 */
	public synchronized Rect getFramingRect() {

		if (displayMetrics == null) {
			// Called early, before init even finished
			return null;
		}

		if (framingRect == null){
			int width = findDesiredDimensionInRange(this.displayMetrics.widthPixels, MIN_FRAME_WIDTH, MAX_FRAME_WIDTH);
			int height = findDesiredDimensionInRange(this.displayMetrics.heightPixels, MIN_FRAME_HEIGHT, width);
			int leftOffset = (this.displayMetrics.widthPixels - width) / 2;
			int topOffset = (this.displayMetrics.heightPixels - height) / 2;
			framingRect = new Rect(leftOffset, topOffset, leftOffset + width, topOffset + height);
		}

		return framingRect;
	}


	/**
	 * Like {@link #getFramingRect} but coordinates are in terms of the preview frame,
	 * not UI / screen.
	 *
	 * @return {@link Rect} expressing barcode scan area in terms of the preview size
	 */
	public synchronized Rect getFramingRectInPreview() {
		if (framingRectInPreview == null) {
			Rect framingRect = getFramingRect();
			if (framingRect == null) {
				return null;
			}
			Rect rect = new Rect(framingRect);
			Point cameraResolution = configManager.getCameraResolution();
			Point screenResolution = configManager.getScreenResolution();

			if (cameraResolution == null || screenResolution == null) {
				// Called early, before init even finished
				return null;
			}
			if (screenResolution.x < screenResolution.y) {
				// portrait
				rect.left = rect.left * cameraResolution.y / screenResolution.x;
				rect.right = rect.right * cameraResolution.y / screenResolution.x;
				rect.top = rect.top * cameraResolution.x / screenResolution.y;
				rect.bottom = rect.bottom * cameraResolution.x / screenResolution.y;
			} else {
				// landscape
				rect.left = rect.left * cameraResolution.x / screenResolution.x;
				rect.right = rect.right * cameraResolution.x / screenResolution.x;
				rect.top = rect.top * cameraResolution.y / screenResolution.y;
				rect.bottom = rect.bottom * cameraResolution.y / screenResolution.y;
			}
			framingRectInPreview = rect;

		}

		return framingRectInPreview;
	}

	/**
	 * Allows third party apps to specify the scanning rectangle dimensions, rather than determine
	 * them automatically based on screen resolution.
	 *
	 * @param width  The width in pixels to scan.
	 * @param height The height in pixels to scan.
	 */
	private synchronized void setManualFramingRect(int width, int height) {
		if (initialized) {
			Point screenResolution = configManager.getScreenResolution();
			if (width > screenResolution.x) {
				width = screenResolution.x;
			}
			if (height > screenResolution.y) {
				height = screenResolution.y;
			}
			int leftOffset = (screenResolution.x - width) / 2;
			int topOffset = (screenResolution.y - height) / 2;
			framingRect = new Rect(leftOffset, topOffset, leftOffset + width, topOffset + height);
			//Log.d(TAG, "Calculated manual framing rect: " + framingRect);
			framingRectInPreview = null;
		} else {
			requestedFramingRectWidth = width;
			requestedFramingRectHeight = height;
		}
	}

	/**
	 * A factory method to build the appropriate LuminanceSource object based on the format
	 * of the preview buffers, as described by Camera.Parameters.
	 *
	 * @param data   A preview frame.
	 * @param width  The width of the image.
	 * @param height The height of the image.
	 * @return A PlanarYUVLuminanceSource instance.
	 */
	public PlanarYUVLuminanceSource buildLuminanceSource(byte[] data, int width, int height) {
		if (needFullScreen) {
			return new PlanarYUVLuminanceSource(data, width, height, 0, 0, width, height, false);
		} else {
			Rect rect = getFramingRectInPreview();
			if (rect == null) {
				return null;
			}
			// Go ahead and assume it's YUV rather than die.
			return new PlanarYUVLuminanceSource(data, width, height, rect.left, rect.top,
					rect.width(), rect.height(), false);
		}
	}

}
