package com.ibaodashi.gxing.activity;

import android.content.res.Resources;
import android.graphics.Rect;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.support.annotation.Nullable;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.SurfaceView;
import android.view.WindowManager;

import com.google.zxing.BarcodeFormat;
import com.google.zxing.BinaryBitmap;
import com.google.zxing.DecodeHintType;
import com.google.zxing.MultiFormatReader;
import com.google.zxing.PlanarYUVLuminanceSource;
import com.google.zxing.ReaderException;
import com.google.zxing.Result;
import com.google.zxing.common.HybridBinarizer;
import com.ibaodashi.gxing.R;

import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Mat;
import org.opencv.imgproc.Imgproc;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.EnumMap;
import java.util.EnumSet;
import java.util.Map;
import java.util.Timer;
import java.util.TimerTask;

/**
 * desc   :
 * author : guowenlong
 * Email  : guowenlong20000@gmail.com
 * time   : 2018年11月30日16 : 16
 */
public class CaptureActivity extends AppCompatActivity implements CameraBridgeViewBase.CvCameraViewListener2 {
    private static final String TAG = "QRCodeActivity ";

    /** Interval (ms) between decode attempts on the latest camera frame. */
    private static final long DECODE_INTERVAL_MS = 1000;

    private CameraBridgeViewBase mOpenCvCameraView;
    private LoopHandler mLoopHandler;
    private Rect mCropRect = null;
    public int cameraPreviewHeight;
    public int cameraPreviewWidth;
    private final Map<DecodeHintType, Object> hints = new EnumMap<>(DecodeHintType.class);
    private MultiFormatReader multiFormatReader;

    /**
     * Guards {@link #mLatestFrame}: it is written on the camera thread in
     * {@link #onCameraFrame} and read on the decode timer thread.
     * (BUG FIX: the old code shared an unsynchronized Mat between threads
     * while OpenCV reuses the frame's native buffer.)
     */
    private final Object mFrameLock = new Object();
    private Mat mLatestFrame;

    /**
     * Recreated for every camera session. (BUG FIX: a cancelled
     * {@link Timer} throws IllegalStateException on re-schedule, and a used
     * {@link TimerTask} cannot be rescheduled, so the old single field-level
     * timer broke when the camera view restarted after onStop().)
     */
    private Timer mDecodeTimer;

    /** Returns the region of the preview that the decoder scans. */
    public Rect getCropRect() {
        return mCropRect;
    }

    /** Enables the camera view once the OpenCV runtime has been loaded. */
    private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            switch (status) {
                case LoaderCallbackInterface.SUCCESS: {
                    Log.i(TAG, "OpenCV loaded successfully");
                    mOpenCvCameraView.enableView();
                }
                break;
                default: {
                    super.onManagerConnected(status);
                }
                break;
            }
        }
    };

    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        setContentView(R.layout.activity_qrcode);
        mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.jcv);
        mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
        mOpenCvCameraView.setCvCameraViewListener(this);

        // Restrict ZXing to the formats this app cares about: AZTEC, PDF417,
        // plus the project's configured 1D and QR families.
        Collection<BarcodeFormat> decodeFormats = new ArrayList<>();
        decodeFormats.addAll(EnumSet.of(BarcodeFormat.AZTEC));
        decodeFormats.addAll(EnumSet.of(BarcodeFormat.PDF_417));
        decodeFormats.addAll(DecodeFormatManager.getBarCodeFormats());
        decodeFormats.addAll(DecodeFormatManager.getQrCodeFormats());
        hints.put(DecodeHintType.POSSIBLE_FORMATS, decodeFormats);
        multiFormatReader = new MultiFormatReader();
        multiFormatReader.setHints(hints);
    }

    @Override
    public void onPause() {
        super.onPause();
        if (mOpenCvCameraView != null)
            mOpenCvCameraView.disableView();
    }

    @Override
    public void onResume() {
        super.onResume();
        if (!OpenCVLoader.initDebug()) {
            Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
            OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
        } else {
            Log.d(TAG, "OpenCV library found inside package. Using it!");
            mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
        }
    }

    @Override // BUG FIX: @Override was missing on the original onDestroy().
    public void onDestroy() {
        super.onDestroy();
        stopDecodeTimer();
        releaseLatestFrame();
        if (mOpenCvCameraView != null)
            mOpenCvCameraView.disableView();
    }

    @Override
    public void onCameraViewStarted(int width, int height) {
        cameraPreviewHeight = height;
        cameraPreviewWidth = width;
        mLoopHandler = new LoopHandler(width, height);

        // Start a fresh periodic decode; see mDecodeTimer for why a new
        // Timer is created per session instead of reusing one.
        stopDecodeTimer();
        mDecodeTimer = new Timer("qr-decode");
        mDecodeTimer.schedule(new TimerTask() {
            @Override
            public void run() {
                // BUG FIX: the old code did `new MyThread().run()`, which
                // never started a thread and just ran synchronously here
                // anyway — call the decode routine directly.
                decodeLatestFrame();
            }
        }, 0, DECODE_INTERVAL_MS);
    }

    @Override
    public void onCameraViewStopped() {
        stopDecodeTimer();
        releaseLatestFrame();
    }

    @Override
    public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
        Mat gray = inputFrame.gray();
        // Snapshot the frame for the decode thread: the Mat handed out by
        // inputFrame is reused on the next frame, so the decoder must work
        // on its own copy taken under the lock.
        synchronized (mFrameLock) {
            if (mLatestFrame != null) {
                mLatestFrame.release();
            }
            mLatestFrame = gray.clone();
        }
        initCrop();
        return gray;
    }

    @Override
    protected void onStop() {
        super.onStop();
        stopDecodeTimer();
    }

    /** Cancels and drops the periodic decode timer, if any. */
    private void stopDecodeTimer() {
        if (mDecodeTimer != null) {
            mDecodeTimer.cancel();
            mDecodeTimer = null;
        }
    }

    /** Frees the native buffer of the cached frame, if any. */
    private void releaseLatestFrame() {
        synchronized (mFrameLock) {
            if (mLatestFrame != null) {
                mLatestFrame.release();
                mLatestFrame = null;
            }
        }
    }

    /**
     * Binarizes the most recent camera frame and hands it to ZXing.
     * Runs on the decode timer thread.
     */
    private void decodeLatestFrame() {
        Mat snapshot;
        synchronized (mFrameLock) {
            if (mLatestFrame == null) return;
            snapshot = mLatestFrame.clone();
        }
        Mat dst = new Mat();
        try {
            // Adaptive threshold makes the 1-bit decode more robust under
            // uneven lighting (block size 85, constant 3 as tuned upstream).
            Imgproc.adaptiveThreshold(snapshot, dst, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C, Imgproc.THRESH_BINARY, 85, 3);
            byte[] mBytes = new byte[dst.cols() * dst.rows()];
            dst.get(0, 0, mBytes);
            decode(mBytes, cameraPreviewWidth, cameraPreviewHeight, cameraPreviewWidth, cameraPreviewHeight);
        } finally {
            // BUG FIX: Mats hold native memory that the GC does not manage;
            // the old code leaked one `dst` Mat per decode attempt.
            snapshot.release();
            dst.release();
        }
    }

    /** Height of the system status bar in pixels, or 0 if unresolvable. */
    private int getStatusBarHeight() {
        Resources resources = getResources();
        int resourceId = resources.getIdentifier("status_bar_height", "dimen", "android");
        // BUG FIX: getIdentifier() returns 0 when the resource is missing,
        // and getDimensionPixelSize(0) would throw NotFoundException.
        return resourceId > 0 ? resources.getDimensionPixelSize(resourceId) : 0;
    }

    /**
     * Sets the crop rectangle used by the decoder to the full preview.
     *
     * NOTE(review): the original code first computed a rectangle mapped from
     * the camera view's on-screen position (using getLocationInWindow and
     * getStatusBarHeight) and then unconditionally overwrote it with the
     * full-preview rect — that dead computation has been removed; restore the
     * view-relative mapping if partial-frame scanning is ever wanted.
     */
    private void initCrop() {
        mCropRect = new Rect(0, 0, cameraPreviewWidth, cameraPreviewHeight);
    }

    /**
     * Rotates the landscape luminance buffer to portrait and runs ZXing on it.
     *
     * @param data           binarized grayscale bytes, one byte per pixel
     * @param width          unused legacy parameter (kept for compatibility)
     * @param height         unused legacy parameter (kept for compatibility)
     * @param _previewWidth  camera preview width in pixels
     * @param _previewHeight camera preview height in pixels
     */
    private void decode(byte[] data, int width, int height, int _previewWidth, int _previewHeight) {
        int previewWidth = _previewWidth;
        int previewHeight = _previewHeight;

        // The camera delivers landscape data; rotate 90° so ZXing sees the
        // portrait orientation the user is holding.
        byte[] rotatedData = new byte[data.length];
        for (int y = 0; y < previewHeight; y++) {
            for (int x = 0; x < previewWidth; x++)
                rotatedData[x * previewHeight + previewHeight - y - 1] = data[x + y * previewWidth];
        }

        // Width and height swap along with the pixels.
        int tmp = previewWidth;
        previewWidth = previewHeight;
        previewHeight = tmp;

        Result rawResult = null;
        PlanarYUVLuminanceSource source = buildLuminanceSource(rotatedData, previewWidth, previewHeight);
        if (source != null) {
            BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
            try {
                rawResult = multiFormatReader.decodeWithState(bitmap);
                Log.e("LoopHandler", rawResult.getText());
            } catch (ReaderException ignored) {
                // No barcode in this frame — expected most of the time; the
                // next timer tick will try again.
            } finally {
                multiFormatReader.reset();
            }
        }
    }

    /**
     * Wraps a raw luminance buffer for ZXing, scanning the full frame.
     * Assumes the data is a plain Y (luminance) plane.
     */
    public PlanarYUVLuminanceSource buildLuminanceSource(byte[] data, int width, int height) {
        // Go ahead and assume it's YUV rather than die.
        return new PlanarYUVLuminanceSource(data, width, height, 0, 0, width, height, false);
    }
}