package com.eqgis.eqr.utils;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Matrix;
import android.media.Image;
import android.os.Environment;
import android.util.Log;

import java.io.File;
import java.io.FileOutputStream;
import java.nio.ByteBuffer;

/**
 * Static utility class for bitmap operations: converting camera {@link Image}s to
 * {@link Bitmap}s, saving bitmaps to disk for analysis, and building frame
 * transformation matrices.
 *
 * @author tanyx 2021/12/08
 */
public class BitmapUtils {
    // This value is 2 ^ 18 - 1, and is used to clamp the RGB values before their ranges
    // are normalized to eight bits.
    static final int kMaxChannelValue = 262143;

    /**
     * Converts a camera image to a bitmap, choosing the conversion path from the current
     * screen orientation.
     *
     * @param context context used to read the screen dimensions
     * @param image camera image with three YUV planes
     * @return the converted bitmap
     */
    public static Bitmap convertToBitmap(Context context,Image image){
        // Screen width greater than height (in dp) => landscape.
        if (ScreenPointTool.getScreenWidth_DP(context) > ScreenPointTool.getScreenHeight_DP(context)) {
            return convertToBitmap(image,true);
        }else {
            return convertToBitmap(image,false);
        }
    }

    /**
     * Converts a YUV camera image to an ARGB_8888 bitmap.
     *
     * <p>In landscape mode the stride-aware {@link #convertYUV420ToARGB8888} conversion is
     * used and the bitmap keeps the image's own {@code width x height}. In portrait mode the
     * pixels are rotated 90 degrees during conversion, so the resulting bitmap is
     * {@code height x width}.
     *
     * @param image camera image with three YUV planes
     * @param isLandScape true to keep the sensor orientation, false to rotate for portrait
     * @return the converted bitmap
     */
    public static Bitmap convertToBitmap(Image image,boolean isLandScape){

        // 1. Copy the three YUV planes into byte arrays.
        ByteBuffer y_buffer = image.getPlanes()[0].getBuffer();
        byte[] y_bytes = new byte[y_buffer.remaining()];
        y_buffer.get(y_bytes);
        ByteBuffer u_buffer = image.getPlanes()[1].getBuffer();
        byte[] u_bytes = new byte[u_buffer.remaining()];
        u_buffer.get(u_bytes);
        ByteBuffer v_buffer = image.getPlanes()[2].getBuffer();
        byte[] v_bytes = new byte[v_buffer.remaining()];
        v_buffer.get(v_bytes);


        int width = image.getWidth();
        int height = image.getHeight();
        if (isLandScape){
            // 2. Create the output array and convert respecting row/pixel strides.
            int[] outPut = new int[width * height];
            convertYUV420ToARGB8888(y_bytes,
                    u_bytes,
                    v_bytes,
                    width,
                    height,
                    image.getPlanes()[0].getRowStride(),
                    image.getPlanes()[1].getRowStride(),
                    image.getPlanes()[1].getPixelStride(),
                    outPut);
            return Bitmap.createBitmap(outPut, width,height, Bitmap.Config.ARGB_8888);
        }else {
            // NOTE(review): unlike the landscape path, this branch ignores the planes'
            // row/pixel strides and assumes rowStride == width with tightly packed chroma
            // bytes — TODO verify on devices where getPixelStride() == 2.
            int[] argb = new int[width * height];
            // 3. Walk the YUV byte arrays row by row.
            int Y = 0, U = 0, V = 0;
            for (int row = 0; row < height; row++) {
                for (int col = 0; col < width; col++) {
                    // 3.1 Fetch YUV. The Y array is tightly packed.
                    Y = y_bytes[row * width + col] & 0xff;
                    if ((col & 0x1) == 0) { // Chroma is subsampled 2x horizontally; refresh on even columns only.
                        U = u_bytes[row / 2 * width + col] & 0xff; // U covers two rows per chroma row.
                        V = v_bytes[row / 2 * width + col] & 0xff; // V is laid out the same as U.
                    }
                    // 3.2 YUV -> RGB conversion formula.
                    int facter=128;
                    int r= (int) (Y+1.4022*(V-facter));
                    int g= (int) (Y-0.3456*(U-facter)-0.7145*(V-facter));
                    int b= (int) (Y+1.771*(U-facter));
                    // 3.3 Clamp to [0, 255].
                    r=r<0?0:(Math.min(r, 255));
                    g=g<0?0:(Math.min(g, 255));
                    b=b<0?0:(Math.min(b, 255));

                    // 3.5 Pack the three bytes into one 32-bit ARGB int while rotating the
                    // pixel 90 degrees (row/col are transposed in the destination index).
                    argb[col * height + height - 1 - row] =0xff000000
                            |((r<<16)&0xff0000)
                            | ((g <<8) & 0xff00)
                            |((b)&0xff)
                    ;
                }

            }
            // 4. Create the bitmap; dimensions are swapped because of the rotation above.
            return  Bitmap.createBitmap(argb,height,width, Bitmap.Config.ARGB_8888);
        }
    }

    /**
     * Utility method to compute the allocated size in bytes of a YUV420SP image of the given
     * dimensions.
     *
     * @param width frame width in pixels
     * @param height frame height in pixels
     * @return the total byte size of the Y plane plus the interleaved UV plane
     */
    public static int getYUVByteSize(final int width, final int height) {
        // The luminance plane requires 1 byte per pixel.
        final int ySize = width * height;

        // The UV plane works on 2x2 blocks, so dimensions with odd size must be rounded up.
        // Each 2x2 block takes 2 bytes to encode, one each for U and V.
        final int uvSize = ((width + 1) / 2) * ((height + 1) / 2) * 2;

        return ySize + uvSize;
    }

    /**
     * Saves a Bitmap object to disk for analysis, under the default name {@code preview.png}.
     *
     * @param bitmap The bitmap to save.
     */
    public static void saveBitmap(final Bitmap bitmap) {
        saveBitmap(bitmap, "preview.png");
    }

    /**
     * Saves a Bitmap object to disk for analysis, as a PNG under
     * {@code <external storage>/tensorflow/<filename>}.
     *
     * @param bitmap The bitmap to save.
     * @param filename The location to save the bitmap to.
     * @return true if the file was written successfully, false otherwise
     */
    public static boolean saveBitmap(final Bitmap bitmap, final String filename) {
        final String root =
                Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator + "tensorflow";
        final File myDir = new File(root);

        // BUGFIX: mkdirs() returns false when the directory already exists, which made every
        // save after the first one fail. Only fail when the directory is genuinely missing.
        if (!myDir.isDirectory() && !myDir.mkdirs()) {
            Log.e(BitmapUtils.class.getSimpleName(), "Make dir failed");
            return false;
        }

        final File file = new File(myDir, filename);
        if (file.exists()) {
            file.delete();
        }
        // try-with-resources closes the stream even if compress() throws; the original
        // leaked the FileOutputStream on failure.
        try (FileOutputStream out = new FileOutputStream(file)) {
            // PNG is lossless, so the quality argument is ignored; 100 states the intent.
            bitmap.compress(Bitmap.CompressFormat.PNG, 100, out);
            out.flush();
        } catch (final Exception e) {
            // Keep the original message but preserve the cause in the log.
            Log.e(BitmapUtils.class.getSimpleName(), "saveBitmap: failed", e);
            return false;
        }
        return true;
    }

    /**
     * Converts an interleaved YUV420SP frame to ARGB_8888 pixels.
     *
     * <p>The chroma bytes following the luminance plane are read in V-then-U order
     * (NOTE(review): this matches NV21 layout — verify against the actual frame producer).
     *
     * @param input YUV420SP byte array: width*height luminance bytes followed by interleaved chroma
     * @param width frame width in pixels
     * @param height frame height in pixels
     * @param output receives one ARGB int per pixel; must hold at least width*height entries
     */
    public static void convertYUV420SPToARGB8888(byte[] input, int width, int height, int[] output) {
        final int frameSize = width * height;
        for (int j = 0, yp = 0; j < height; j++) {
            // Each chroma row serves two luminance rows.
            int uvp = frameSize + (j >> 1) * width;
            int u = 0;
            int v = 0;

            for (int i = 0; i < width; i++, yp++) {
                int y = 0xff & input[yp];
                if ((i & 1) == 0) { // Chroma is subsampled 2x horizontally.
                    v = 0xff & input[uvp++];
                    u = 0xff & input[uvp++];
                }

                output[yp] = YUV2RGB(y, u, v);
            }
        }
    }

    /**
     * Converts a single YUV pixel to a packed ARGB int using fixed-point arithmetic.
     *
     * @param y luminance in [0, 255]
     * @param u chroma U in [0, 255]
     * @param v chroma V in [0, 255]
     * @return the pixel as 0xAARRGGBB with alpha forced to 0xff
     */
    private static int YUV2RGB(int y, int u, int v) {
        // Adjust and check YUV values
        y = (y - 16) < 0 ? 0 : (y - 16);
        u -= 128;
        v -= 128;

        // This is the floating point equivalent. We do the conversion in integer
        // because some Android devices do not have floating point in hardware.
        // nR = (int)(1.164 * nY + 2.018 * nU);
        // nG = (int)(1.164 * nY - 0.813 * nV - 0.391 * nU);
        // nB = (int)(1.164 * nY + 1.596 * nV);
        int y1192 = 1192 * y;
        int r = (y1192 + 1634 * v);
        int g = (y1192 - 833 * v - 400 * u);
        int b = (y1192 + 2066 * u);

        // Clipping RGB values to be inside boundaries [ 0 , kMaxChannelValue ]
        r = r > kMaxChannelValue ? kMaxChannelValue : (r < 0 ? 0 : r);
        g = g > kMaxChannelValue ? kMaxChannelValue : (g < 0 ? 0 : g);
        b = b > kMaxChannelValue ? kMaxChannelValue : (b < 0 ? 0 : b);

        // Shift each 18-bit channel back down to 8 bits and pack into ARGB.
        return 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
    }

    /**
     * Converts a planar YUV_420_888 frame to ARGB_8888, honoring the planes' row and pixel
     * strides.
     *
     * <p>Example:
     * <pre>
     *      Image image = arEffectView.getArSceneView().getArFrame().acquireCameraImage();
     *      int[] outPut = new int[image.getWidth() * image.getHeight()];
     *      // 1. Get the byte arrays of the three YUV planes.
     *      ByteBuffer y_buffer = image.getPlanes()[0].getBuffer();
     *      byte[] y_bytes = new byte[y_buffer.remaining()];
     *      y_buffer.get(y_bytes);
     *      ByteBuffer u_buffer = image.getPlanes()[1].getBuffer();
     *      byte[] u_bytes = new byte[u_buffer.remaining()];
     *      u_buffer.get(u_bytes);
     *      ByteBuffer v_buffer = image.getPlanes()[2].getBuffer();
     *      byte[] v_bytes = new byte[v_buffer.remaining()];
     *      v_buffer.get(v_bytes);
     *      // 2. Convert.
     *      BitmapUtils.convertYUV420ToARGB8888(y_bytes,u_bytes,v_bytes,image.getWidth(),image.getHeight(),
     *          image.getPlanes()[0].getRowStride(),image.getPlanes()[1].getRowStride(),image.getPlanes()[1].getPixelStride(),
     *          outPut);
     *      // 3. Output the bitmap.
     *      Bitmap bitmap = Bitmap.createBitmap(outPut, image.getWidth(), image.getHeight(), Bitmap.Config.ARGB_8888);
     * </pre>
     *
     * @param yData luminance plane bytes
     * @param uData chroma U plane bytes
     * @param vData chroma V plane bytes
     * @param width frame width in pixels
     * @param height frame height in pixels
     * @param yRowStride row stride of the Y plane
     * @param uvRowStride row stride of the U/V planes
     * @param uvPixelStride pixel stride of the U/V planes
     * @param out receives one ARGB int per pixel; must hold at least width*height entries
     */
    public static void convertYUV420ToARGB8888(
            byte[] yData,
            byte[] uData,
            byte[] vData,
            int width,
            int height,
            int yRowStride,
            int uvRowStride,
            int uvPixelStride,
            int[] out) {
        int yp = 0;
        for (int j = 0; j < height; j++) {
            int pY = yRowStride * j;
            // Each chroma row serves two luminance rows.
            int pUV = uvRowStride * (j >> 1);

            for (int i = 0; i < width; i++) {
                // Chroma is subsampled 2x horizontally; pixel stride skips interleaved bytes.
                int uv_offset = pUV + (i >> 1) * uvPixelStride;

                out[yp++] = YUV2RGB(0xff & yData[pY + i], 0xff & uData[uv_offset], 0xff & vData[uv_offset]);
            }
        }
    }

    /**
     * Returns a transformation matrix from one reference frame into another. Handles cropping (if
     * maintaining aspect ratio is desired) and rotation.
     *
     * @param srcWidth Width of source frame.
     * @param srcHeight Height of source frame.
     * @param dstWidth Width of destination frame.
     * @param dstHeight Height of destination frame.
     * @param applyRotation Amount of rotation to apply from one frame to another. Must be a multiple
     *     of 90.
     * @param maintainAspectRatio If true, will ensure that scaling in x and y remains constant,
     *     cropping the image if necessary.
     * @return The transformation fulfilling the desired requirements.
     */
    public static Matrix getTransformationMatrix(
            final int srcWidth,
            final int srcHeight,
            final int dstWidth,
            final int dstHeight,
            final int applyRotation,
            final boolean maintainAspectRatio) {
        final Matrix matrix = new Matrix();

        if (applyRotation != 0) {
            if (applyRotation % 90 != 0) {
                // BUGFIX: the original logged the raw format string "Rotation of %d % 90 != 0"
                // without ever substituting the value; log the actual rotation instead.
                Log.e(BitmapUtils.class.getSimpleName(),
                        "Rotation of " + applyRotation + " % 90 != 0");
            }

            // Translate so center of image is at origin.
            matrix.postTranslate(-srcWidth / 2.0f, -srcHeight / 2.0f);

            // Rotate around origin.
            matrix.postRotate(applyRotation);
        }

        // Account for the already applied rotation, if any, and then determine how
        // much scaling is needed for each axis.
        final boolean transpose = (Math.abs(applyRotation) + 90) % 180 == 0;

        final int inWidth = transpose ? srcHeight : srcWidth;
        final int inHeight = transpose ? srcWidth : srcHeight;

        // Apply scaling if necessary.
        if (inWidth != dstWidth || inHeight != dstHeight) {
            final float scaleFactorX = dstWidth / (float) inWidth;
            final float scaleFactorY = dstHeight / (float) inHeight;

            if (maintainAspectRatio) {
                // Scale by minimum factor so that dst is filled completely while
                // maintaining the aspect ratio. Some image may fall off the edge.
                final float scaleFactor = Math.max(scaleFactorX, scaleFactorY);
                matrix.postScale(scaleFactor, scaleFactor);
            } else {
                // Scale exactly to fill dst from src.
                matrix.postScale(scaleFactorX, scaleFactorY);
            }
        }

        if (applyRotation != 0) {
            // Translate back from origin centered reference to destination frame.
            matrix.postTranslate(dstWidth / 2.0f, dstHeight / 2.0f);
        }

        return matrix;
    }
}
