/*
 * Copyright (C) 2008 ZXing authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.zxing.camera;

import com.zxing.slice.CodeUtils;
import com.zxing.utils.Log;
import com.zxing.view.ViewfinderView;
import ohos.agp.components.surfaceprovider.SurfaceProvider;
import ohos.agp.graphics.Surface;
import ohos.agp.utils.Point;
import ohos.agp.utils.Rect;
import ohos.app.Context;
import ohos.app.Environment;
import ohos.eventhandler.EventHandler;
import ohos.media.camera.CameraKit;
import ohos.media.camera.device.Camera;
import ohos.media.camera.device.CameraConfig;
import ohos.media.camera.device.CameraInfo;
import ohos.media.camera.device.CameraStateCallback;
import ohos.media.camera.device.FrameConfig;
import ohos.media.image.Image;
import ohos.media.image.ImageReceiver;
import ohos.media.image.ImageSource;
import ohos.media.image.PixelMap;
import ohos.media.image.common.ImageFormat;
import ohos.media.image.common.Size;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.UUID;

import static ohos.media.camera.device.Camera.FrameConfigType.FRAME_CONFIG_PREVIEW;
import static ohos.media.camera.params.Metadata.FlashMode.FLASH_ALWAYS_OPEN;
import static ohos.media.camera.params.Metadata.FlashMode.FLASH_CLOSE;
import static ohos.media.camera.params.Metadata.FlashMode.FLASH_OPEN;


/**
 * Wraps the ohos camera service and expects to be the only object talking to it.
 *
 * <p>Responsibilities visible in this class: open the back-facing camera, start a looping
 * preview capture that feeds both the on-screen {@code SurfaceProvider} and an
 * {@code ImageReceiver}, decode arriving JPEG frames via {@link CodeUtils}, and manage the
 * flashlight. Access it through the {@link #init(Context)} / {@link #get()} singleton pair.
 */
public final class CameraManager implements ImageReceiver.IImageArrivalListener {

    private static final String TAG = CameraManager.class.getSimpleName();

    // Dimensions requested for the JPEG stream handed to the ImageReceiver.
    private static final int SCREEN_WIDTH = 2340;
    private static final int SCREEN_HEIGHT = 1080;
    // How many frames the ImageReceiver may buffer before old ones are dropped.
    private static final int IMAGE_RCV_CAPACITY = 5;

    private FrameConfig.Builder framePreviewConfigBuilder;
    private ImageReceiver imageReceiver;
    // The open camera device; assigned in CameraStateCallbackImpl.onCreated, cleared in closeDriver.
    private Camera cameraDevice;
    private SurfaceProvider surfaceProvider = null;

    private static CameraManager cameraManager;

    private final Context context;
    private final CameraConfigurationManager configManager;
    private Surface previewSurface;
    private Rect framingRect;
    private boolean previewing;
    private final boolean useOneShotPreviewCallback;
    // True while a frame is being decoded (or after a successful decode); gates onImageArrival.
    private boolean isAnalyze = false;
    private ViewfinderView viewfinderView;

    private static final String IMG_FILE_PREFIX = "IMG_";

    private static final String IMG_FILE_TYPE = ".jpg";

    /**
     * Preview frames are delivered here, which we pass on to the registered handler. Make sure to
     * clear the handler so it will only receive one message.
     */
    private final PreviewCallback previewCallback;
    /**
     * Autofocus callbacks arrive here, and are dispatched to the Handler which requested them.
     */
    private final AutoFocusCallback autoFocusCallback;
    private ResultCallback resultCallback;

    /**
     * Initializes this static object with the Context of the calling Activity.
     * Synchronized so concurrent first calls cannot create two instances.
     *
     * @param context The Activity which wants to use the camera.
     */
    public static synchronized void init(Context context) {
        if (cameraManager == null) {
            cameraManager = new CameraManager(context);
        }
    }

    /**
     * Gets the CameraManager singleton instance.
     *
     * @return A reference to the CameraManager singleton, or null if {@link #init} was never called.
     */
    public static CameraManager get() {
        return cameraManager;
    }

    private CameraManager(Context context) {
        this.context = context;
        this.configManager = new CameraConfigurationManager(context);

        // Historical ZXing/Android flag kept because PreviewCallback's constructor requires it;
        // on this (ohos) port it is always true.
        useOneShotPreviewCallback = true;

        previewCallback = new PreviewCallback(configManager, useOneShotPreviewCallback);
        autoFocusCallback = new AutoFocusCallback();
    }

    /**
     * Opens the back-facing camera driver and starts delivering frames.
     *
     * @param holder            The surface provider which the camera will draw preview frames into.
     * @param creamEventHandler Handler on which camera state callbacks are dispatched.
     * @param viewfinderView    View supplying the framing rectangle used to crop decode regions.
     * @param resultCallback    Callback invoked with the decoded text and bitmap on success.
     * @throws IOException Indicates the camera driver failed to open.
     */
    public void openDriver(SurfaceProvider holder, EventHandler creamEventHandler, ViewfinderView viewfinderView, ResultCallback resultCallback) throws IOException {
        this.surfaceProvider = holder;
        this.resultCallback = resultCallback;
        this.viewfinderView = viewfinderView;
        imageReceiver = ImageReceiver.create(SCREEN_WIDTH, SCREEN_HEIGHT, ImageFormat.JPEG, IMAGE_RCV_CAPACITY);
        imageReceiver.setImageArrivalListener(this);

        CameraKit cameraKit = CameraKit.getInstance(context.getApplicationContext());
        String[] cameraList = cameraKit.getCameraIds();
        String cameraId = null;
        for (String id : cameraList) {
            if (cameraKit.getCameraInfo(id).getFacingType() == CameraInfo.FacingType.CAMERA_FACING_BACK) {
                cameraId = id;
            }
        }
        if (cameraId == null) {
            // Device has no back-facing camera; nothing to open.
            return;
        }
        CameraStateCallbackImpl cameraStateCallback = new CameraStateCallbackImpl();
        cameraKit.createCamera(cameraId, cameraStateCallback, creamEventHandler);

        //FIXME
        FlashlightManager.enableFlashlight();
    }

    /**
     * Decodes the latest JPEG frame from the receiver. Only one frame is analyzed at a time;
     * after a successful decode, analysis stays disabled so no further results are delivered.
     */
    @Override
    public void onImageArrival(ImageReceiver imageReceiver) {
        Image image = null;
        try {
            image = imageReceiver.readLatestImage();
            if (image == null) {
                return;
            }
            if (isAnalyze) {
                // A frame is already being analyzed (or a result was found); drop this one.
                return;
            }
            isAnalyze = true;
            Image.Component component = image.getComponent(ImageFormat.ComponentType.JPEG);
            byte[] bytes = new byte[component.remaining()];
            ByteBuffer buffer = component.getBuffer();
            buffer.get(bytes);
            ImageSource imageSource = ImageSource.create(bytes, new ImageSource.SourceOptions());

            ImageSource.DecodingOptions options = new ImageSource.DecodingOptions();
            // The sensor image is landscape; rotate to match the portrait viewfinder.
            options.rotateDegrees = 90f;
            // NOTE(review): arguments are passed as (top, left, height, width) — verify this
            // matches ohos.media.image.common.Rect's expected parameter order.
            options.desiredRegion =
                    new ohos.media.image.common.Rect(
                            viewfinderView.getFramingRect().top,
                            viewfinderView.getFramingRect().left,
                            viewfinderView.getFramingRect().getHeight(),
                            viewfinderView.getFramingRect().getWidth());
            PixelMap map = imageSource.createPixelmap(options);
            // NOTE(review): persists every analyzed frame to external storage — debug aid?
            saveImage(bytes);
            String result = CodeUtils.parseInfoFromBitmap(map);
            Log.debug("result=" + result);
            if (result != null) {
                resultCallback.resultDecode(result, map);
                // Intentionally leave isAnalyze == true: stop decoding after the first hit.
                return;
            }
            isAnalyze = false;
        } catch (Exception e) {
            Log.debug(e.getMessage());
            // Bug fix: previously isAnalyze stayed true after an exception, permanently
            // freezing the scanner. Re-enable analysis for the next frame.
            isAnalyze = false;
        } finally {
            // Bug fix: the Image was leaked on the success and exception paths; always release.
            if (image != null) {
                image.release();
            }
        }
    }

    /**
     * Writes the raw JPEG bytes of a frame to the app's external pictures directory
     * under a random file name. Errors are logged and swallowed (best effort).
     *
     * @param bitmapPixels raw JPEG bytes of the frame
     */
    private void saveImage(byte[] bitmapPixels) {
        String fileName = IMG_FILE_PREFIX + UUID.randomUUID() + IMG_FILE_TYPE;
        File targetFile = new File(context.getExternalFilesDir(Environment.DIRECTORY_PICTURES), fileName);
        try (FileOutputStream output = new FileOutputStream(targetFile)) {
            output.write(bitmapPixels);
        } catch (IOException e) {
            Log.debug("saveImage ERROR =" + e.getMessage());
        }
    }

    /**
     * Camera state callback: wires the preview surface and the image receiver into the
     * camera's looping capture once the device is created.
     */
    public class CameraStateCallbackImpl extends CameraStateCallback {
        CameraStateCallbackImpl() {
        }

        @Override
        public void onCreated(Camera camera) {
            Log.debug("create camera onCreated");
            Log.debug("surfaceProvider == null: " + (surfaceProvider == null));
            if (surfaceProvider == null) {
                return;
            }
            previewSurface = surfaceProvider.getSurfaceOps().get().getSurface();
            if (previewSurface == null) {
                Log.debug("create camera filed, preview surface is null");
                return;
            }
            // Wait until the preview surface is created.
            try {
                Thread.sleep(200);
            } catch (InterruptedException e) {
                Log.debug("Waiting to be interrupted");
                // Restore the interrupt status so callers can observe it.
                Thread.currentThread().interrupt();
            }
            configManager.initFromCameraParameters(camera, previewSurface, imageReceiver);
            cameraDevice = camera;

            framePreviewConfigBuilder = camera.getFrameConfigBuilder(FRAME_CONFIG_PREVIEW);
            framePreviewConfigBuilder.addSurface(imageReceiver.getRecevingSurface());
            framePreviewConfigBuilder.addSurface(previewSurface);
            camera.triggerLoopingCapture(framePreviewConfigBuilder.build());
        }

        @Override
        public void onConfigured(Camera camera) {
            Log.debug("onConfigured....");

            framePreviewConfigBuilder.setFlashMode(FLASH_OPEN);
            // NOTE(review): previewSurface was already added in onCreated; confirm the builder
            // tolerates adding the same surface twice.
            framePreviewConfigBuilder.addSurface(previewSurface);

            camera.triggerLoopingCapture(framePreviewConfigBuilder.build());
        }
    }

    /**
     * Picks the supported preview size whose aspect ratio best matches the screen and whose
     * height is closest to the target. Falls back to closest height when no size matches the
     * aspect ratio within tolerance.
     *
     * <p>Bug fix: the previous version compared the aspect ratio against the target height
     * (a pixel count), so the tolerance filter never applied and the selection was arbitrary.
     *
     * @param cameraKit    camera service entry point
     * @param camId        id of the camera being queried
     * @param screenWidth  screen width in pixels
     * @param screenHeight screen height in pixels
     * @return the best matching size, or null if the camera reports no supported sizes
     */
    private Size getOptimalSize(CameraKit cameraKit, String camId, int screenWidth, int screenHeight) {
        List<Size> sizes = cameraKit.getCameraAbility(camId).getSupportedSizes(ImageFormat.YUV420_888);
        final double aspectTolerance = 0.1;
        // Portrait: target ratio is screenHeight/screenWidth; landscape would be the inverse.
        double targetRatio = (double) screenHeight / screenWidth;
        int targetHeight = screenWidth;
        Size optimalSize = null;
        double minDiff = Double.MAX_VALUE;
        for (Size size : sizes) {
            double ratio = (double) size.width / size.height;
            if (Math.abs(ratio - targetRatio) > aspectTolerance) {
                continue; // aspect ratio too far from the screen's; skip
            }
            if (Math.abs(size.height - targetHeight) < minDiff) {
                optimalSize = size;
                minDiff = Math.abs(size.height - targetHeight);
            }
        }
        if (optimalSize == null) {
            // No size matched the aspect ratio; ignore it and pick the closest height.
            minDiff = Double.MAX_VALUE;
            for (Size size : sizes) {
                if (Math.abs(size.height - targetHeight) < minDiff) {
                    optimalSize = size;
                    minDiff = Math.abs(size.height - targetHeight);
                }
            }
        }
        return optimalSize;
    }

    /**
     * 开关闪光灯 (switches the flashlight by rebuilding the looping capture with a new flash mode).
     *
     * @param open true to turn the flash on, false to turn it off
     * @return the requested state (true if asked to open, false otherwise)
     */
    public boolean switchLight(boolean open) {
        if (framePreviewConfigBuilder != null && cameraDevice != null) {
            framePreviewConfigBuilder.setFlashMode(open ? FLASH_ALWAYS_OPEN : FLASH_CLOSE);
            cameraDevice.triggerLoopingCapture(framePreviewConfigBuilder.build());
        }
        return open;
    }

    /**
     * Closes the camera driver if still in use and detaches the preview surface.
     *
     * <p>Bug fix: the previous version guarded on a field that was never assigned, so the
     * camera device was never actually released.
     */
    public void closeDriver() {
        FlashlightManager.disableFlashlight();
        if (cameraDevice != null) {
            framePreviewConfigBuilder = null;
            try {
                cameraDevice.release();
            } catch (Exception e) {
                Log.debug(e.getMessage());
            } finally {
                cameraDevice = null;
            }
        }
        if (surfaceProvider != null) {
            try {
                surfaceProvider.clearFocus();
                surfaceProvider.removeFromWindow();
            } catch (Exception e) {
                Log.debug(e.getMessage());
            } finally {
                surfaceProvider = null;
            }
        }
        previewing = false;
    }

    /**
     * Asks the camera hardware to begin drawing preview frames to the screen.
     * On this port the looping capture is started in onCreated; this only tracks state.
     */
    public void startPreview() {
        if (cameraDevice != null && !previewing) {
            previewing = true;
        }
    }

    /**
     * Tells the camera to stop drawing preview frames and clears pending callbacks.
     */
    public void stopPreview() {
        if (cameraDevice != null && previewing) {
            previewCallback.setHandler(null, 0);
            autoFocusCallback.setHandler(null, 0);
            previewing = false;
        }
    }

    /**
     * A single preview frame will be returned to the handler supplied. The data will arrive as byte[]
     * in the message.obj field, with width and height encoded as message.arg1 and message.arg2,
     * respectively.
     *
     * @param handler The handler to send the message to.
     * @param message The what field of the message to be sent.
     */
    public void requestPreviewFrame(EventHandler handler, int message) {
        if (cameraDevice != null && previewing) {
            previewCallback.setHandler(handler, message);
        }
    }

    /**
     * Asks the camera hardware to perform an autofocus.
     *
     * @param handler The Handler to notify when the autofocus completes.
     * @param message The message to deliver.
     */
    public void requestAutoFocus(EventHandler handler, int message) {
        if (cameraDevice != null && previewing) {
            autoFocusCallback.setHandler(handler, message);
        }
    }

    /**
     * Calculates the framing rect which the UI should draw to show the user where to place the
     * barcode.
     *
     * <p>NOTE(review): the rect is never computed in this class — {@code framingRect} is never
     * assigned, so this currently returns null; the viewfinder rect comes from
     * {@link ViewfinderView#getFramingRect()} instead.
     *
     * @return The rectangle to draw on screen in window coordinates, or null.
     */
    public Rect getFramingRect() {
        Point screenResolution = configManager.getScreenResolution();
        if (framingRect == null) {
            if (cameraDevice == null) {
                return null;
            }
            Log.debug(TAG, "Calculated framing rect: " + framingRect);
        }
        return framingRect;
    }

    /**
     * Like {@link #getFramingRect} but coordinates are in terms of the preview frame,
     * not UI / screen.
     *
     * <p>NOTE(review): arguments are passed as (top, left, height, width) — verify this matches
     * the ohos {@code Rect} constructor's expected order.
     */
    public Rect getFramingRectInPreview(ViewfinderView viewfinderView) {
        return new Rect(
                viewfinderView.getFramingRect().top,
                viewfinderView.getFramingRect().left,
                viewfinderView.getFramingRect().getHeight(),
                viewfinderView.getFramingRect().getWidth());
    }

    /**
     * A factory method to build the appropriate LuminanceSource object based on the format
     * of the preview buffers.
     *
     * <p>NOTE(review): the crop rectangle is passed as all zeros — confirm
     * {@code PlanarYUVLuminanceSource} treats 0/0/0/0 as "full frame".
     *
     * @param data   A preview frame.
     * @param width  The width of the image.
     * @param height The height of the image.
     * @return A PlanarYUVLuminanceSource instance.
     */
    public PlanarYUVLuminanceSource buildLuminanceSource(byte[] data, int width, int height) {
        return new PlanarYUVLuminanceSource(data, width, height, 0, 0,
                0, 0);
    }

    /**
     * @return the Context this manager was initialized with.
     */
    public Context getContext() {
        return context;
    }

}
