package hyl.base.medium;

import java.awt.AlphaComposite;
import java.awt.Graphics2D;
import java.awt.image.BufferedImage;
import java.awt.image.DataBufferByte;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.IntBuffer;
import java.util.Base64;
import java.util.Base64.Encoder;
import static org.bytedeco.ffmpeg.global.avutil.*;
import javax.imageio.ImageIO;
import org.bytedeco.opencv.global.opencv_core;
//import org.opencv.core.Mat;
//import org.opencv.core.MatOfByte;
import org.opencv.highgui.HighGui;
//import org.opencv.imgcodecs.Imgcodecs;
import org.bytedeco.ffmpeg.avutil.AVFrame;
import org.bytedeco.ffmpeg.global.avutil;
import org.bytedeco.ffmpeg.swscale.SwsContext;
import org.bytedeco.javacpp.BytePointer;
import org.bytedeco.javacpp.DoublePointer;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.Java2DFrameConverter;
import org.bytedeco.javacv.OpenCVFrameConverter;

import static org.bytedeco.ffmpeg.global.swscale.*;

/**
 * Image conversion and transformation utilities: packed-pixel encode/decode,
 * gamma correction, and conversions between BufferedImage, JavaCV Frame,
 * OpenCV Mat (both bytedeco and org.opencv flavors) and FFmpeg AVFrame.
 * 
 * @author 37798955@qq.com
 *
 */
public class MyImage {
	/**
	 * Unpacks a 32-bit ARGB pixel into its four 8-bit components.
	 * Bit layout: 31~24 alpha, 23~16 red, 15~8 green, 7~0 blue.
	 * 
	 * @param 像素 packed ARGB pixel value
	 * @param argb output array of length >= 4, filled as [alpha, red, green, blue]
	 */
	public static void decodeARGB(int 像素, int argb[]) {
		// Walk the four bytes from most significant (alpha) to least (blue).
		for (int 位置 = 0; 位置 < 4; 位置++) {
			argb[位置] = (像素 >>> (24 - 8 * 位置)) & 0xff;
		}
	}

	/**
	 * Packs [alpha, red, green, blue] components into one 32-bit ARGB pixel.
	 * Inverse of {@code decodeARGB}.
	 * 
	 * @param argb component array of length >= 4: [alpha, red, green, blue]
	 * @return packed ARGB pixel
	 */
	public static int encodeARGB(int argb[]) {
		int 像素 = 0;
		// Fold the first four components in, one byte position at a time.
		for (int 索引 = 0; 索引 < 4; 索引++) {
			像素 = (像素 << 8) | argb[索引];
		}
		return 像素;
	}

	/**
	 * Unpacks a 24-bit RGB pixel into its three 8-bit components.
	 * Any alpha bits above bit 23 are ignored.
	 * 
	 * @param 像素 packed RGB pixel value
	 * @param rgb output array of length >= 3, filled as [red, green, blue]
	 */
	public static void decodeRGB(int 像素, int rgb[]) {
		// Shift each channel down to the low byte, then mask.
		rgb[0] = (像素 >> 16) & 0xff;
		rgb[1] = (像素 >> 8) & 0xff;
		rgb[2] = 像素 & 0xff;
	}

	/**
	 * Packs [red, green, blue] components into one 24-bit RGB pixel
	 * (alpha bits left zero). Inverse of {@code decodeRGB}.
	 * 
	 * @param rgb component array of length >= 3: [red, green, blue]
	 * @return packed RGB pixel
	 */
	public static int encodeRGB(int rgb[]) {
		int 像素 = rgb[0];
		像素 = (像素 << 8) | rgb[1];
		像素 = (像素 << 8) | rgb[2];
		return 像素;
	}

	// Gamma (power-law) transform for a grayscale image. The power function
	// compresses or expands gray levels, increasing contrast.
	// Typical gamma range is about 0.05~5; the mapping applied is
	// (255.0 / Math.pow(fmax + 1, gamma)) * Math.pow(value + 1, gamma).
	/**
	 * Applies gamma correction to a grayscale image.
	 * 
	 * @param 源图 source image (expected grayscale, i.e. R == G == B per pixel)
	 * @param 伽马 gamma exponent (roughly 0.05~5)
	 * @return a new TYPE_INT_RGB image containing the corrected pixels
	 */
	public static BufferedImage 灰度伽马补偿(BufferedImage 源图, float 伽马) {
		int width = 源图.getWidth();
		int height = 源图.getHeight();
		int srcRGBs[] = 源图.getRGB(0, 0, width, height, null, 0, width);
		int rgb[] = new int[3];
		BufferedImage 目标图像 = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
		// First pass: find the maximum green value. On a grayscale image the
		// channels are equal, so green stands in for the gray level.
		// (Bug fix: the original seeded the maximum from the BLUE channel of
		// the first pixel and also tracked a minimum it never used.)
		int fmax = 0;
		for (int j = 0; j < height; j++) {
			for (int i = 0; i < width; i++) {
				decodeRGB(srcRGBs[j * width + i], rgb);
				if (rgb[1] > fmax) {
					fmax = rgb[1];
				}
			}
		}
		// +1 keeps the scale finite for an all-black image and matches the
		// +1 offset applied to every channel value below.
		fmax++;
		double k = 255.0 / Math.pow(fmax, 伽马);
		// Second pass: apply the power-law mapping to each channel.
		for (int j = 0; j < height; j++) {
			for (int i = 0; i < width; i++) {
				decodeRGB(srcRGBs[j * width + i], rgb);
				rgb[0] = (int) (k * Math.pow(rgb[0] + 1, 伽马));
				rgb[1] = (int) (k * Math.pow(rgb[1] + 1, 伽马));
				rgb[2] = (int) (k * Math.pow(rgb[2] + 1, 伽马));
				目标图像.setRGB(i, j, encodeRGB(rgb));
			}
		}
		return 目标图像;
	}

	/**
	 * Applies gamma correction to a single color channel of an image,
	 * leaving the other two channels untouched.
	 * 
	 * @param 源图 source image
	 * @param 通道 channel index: 0 = red, 1 = green, 2 = blue
	 * @param 伽马 gamma exponent
	 * @return a new TYPE_INT_RGB image with the chosen channel corrected
	 */
	public static BufferedImage 单通道伽马补偿(BufferedImage 源图, int 通道, float 伽马) {
		int width = 源图.getWidth();
		int height = 源图.getHeight();
		int srcRGBs[] = 源图.getRGB(0, 0, width, height, null, 0, width);
		int rgb[] = new int[3];
		BufferedImage 目标图像 = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
		// (Bug fix: the original ran a first pass collecting fmin/fmax for the
		// channel — resetting fmin unconditionally on every pixel — but never
		// used either value. The dead, buggy pass has been removed; the output
		// is identical since get单通道伽马变化 works on a fixed 0~255 scale.)
		for (int j = 0; j < height; j++) {
			for (int i = 0; i < width; i++) {
				decodeRGB(srcRGBs[j * width + i], rgb);
				rgb[通道] = get单通道伽马变化(rgb[通道], 伽马);
				目标图像.setRGB(i, j, encodeRGB(rgb));
			}
		}
		return 目标图像;
	}

	/**
	 * Gamma-corrects a single 8-bit channel value against a full scale of 255.
	 * 
	 * @param 入参 channel value (expected 0~255)
	 * @param 伽马 gamma exponent
	 * @return corrected channel value
	 */
	private static int get单通道伽马变化(int 入参, double 伽马) {
		final int 满量程 = 255;
		return get伽马变化1(入参, 满量程, 伽马);
	}

	/**
	 * Core gamma mapping: normalize 入参 to [0,1], raise to the 伽马 power,
	 * then rescale to 最大值.
	 * 
	 * @param 入参  input value (0~最大值)
	 * @param 最大值 full-scale value; 0 is treated as 255
	 * @param 伽马  gamma exponent
	 * @return gamma-corrected value, truncated to int
	 */
	private static int get伽马变化1(int 入参, int 最大值, double 伽马) {
		if (最大值 == 0)
			最大值 = 255;
		// Bug fix: the original computed exp(log(入参 / 最大值) * 伽马) with
		// INTEGER division, so 入参/最大值 truncated to 0 for every input below
		// 最大值 and the whole curve collapsed to 0. Use floating-point
		// division (pow form is equivalent to the exp/log form and also
		// handles 入参 == 0 without log(0)).
		return (int) (Math.pow((double) 入参 / 最大值, 伽马) * 最大值);
	}

	/**
	 * Alternative gamma mapping that rounds instead of truncating.
	 * Currently unused; kept as a reference implementation.
	 * 
	 * @param 入参  input value (0~最大值)
	 * @param 最大值 full-scale value; 0 is treated as Integer.MAX_VALUE
	 * @param 伽马  gamma exponent
	 * @return gamma-corrected value, rounded to the nearest int
	 */
	@SuppressWarnings("unused")
	private static int get伽马变化2(int 入参, int 最大值, double 伽马) {
		if (最大值 == 0)
			最大值 = Integer.MAX_VALUE;
		double 归一化 = (double) 入参 / 最大值;
		return (int) Math.round(Math.pow(归一化, 伽马) * 最大值);
	}

	/**
	 * Encodes a BufferedImage as a base64 string in the given image format.
	 * 
	 * @param image  image to encode
	 * @param format image format name (jpg, png, bmp, gif, jpeg, ...)
	 * @return base64-encoded bytes of the image in the requested format
	 * @throws IOException if encoding fails, or no ImageIO writer exists for
	 *                     {@code format} (the original silently returned an
	 *                     empty string in that case, because ImageIO.write's
	 *                     boolean result was ignored)
	 */
	public static String bufferedImage2Base64(BufferedImage image, String format) throws IOException {
		ByteArrayOutputStream baos = new ByteArrayOutputStream();
		// ImageIO.write returns false when no writer handles the format;
		// treat that as an error instead of encoding zero bytes.
		if (!ImageIO.write(image, format, baos)) {
			throw new IOException("No ImageIO writer for format: " + format);
		}
		return Base64.getEncoder().encodeToString(baos.toByteArray());
	}

	/**
	 * Converts an FFmpeg AVFrame into a bytedeco OpenCV Mat with BGR24 pixels
	 * (CV_8UC3), using libswscale for the pixel-format conversion.
	 * 
	 * NOTE(review): dst (av_frame_alloc) and buffer (av_malloc) are never
	 * freed, so this leaks native memory on every call — confirm and fix.
	 * 
	 * @param frame source frame (any pixel format sws_scale supports)
	 * @return a height x width Mat whose buffer sws_scale wrote into
	 */
	public static org.bytedeco.opencv.opencv_core.Mat avframe2Mat(AVFrame frame) {
		AVFrame dst = av_frame_alloc();
		org.bytedeco.opencv.opencv_core.Mat m;
		int w = frame.width(), h = frame.height();
		m = new org.bytedeco.opencv.opencv_core.Mat(h, w, opencv_core.CV_8UC3);

		// Point dst's first data plane at the Mat's own pixel buffer so
		// sws_scale writes directly into the Mat.
		// NOTE(review): dst.linesize() is never set (stays 0 after
		// av_frame_alloc) — verify sws_scale really produces a correctly
		// strided image with a zero destination stride.
		dst.data(0, m.data());
		int i图像输出格式 = AV_PIX_FMT_BGR24;

		// Buffer size needed for one w x h BGR24 image (alignment 1).
		int numBytes = av_image_get_buffer_size(i图像输出格式, w, h, 1);
		/*
		 * av_malloc returns a block aligned for all memory accesses,
		 * including any SIMD vector loads available on the CPU.
		 */
		BytePointer buffer = new BytePointer(av_malloc(numBytes));
		// Attach the buffer to the frame's image planes (modern replacement
		// for the deprecated avpicture_fill()).
		// NOTE(review): this overwrites the CALLER's frame.data()/linesize()
		// with the freshly allocated empty buffer just before sws_scale reads
		// from them — looks like it should target a separate frame; confirm.
		avutil.av_image_fill_arrays(frame.data(), frame.linesize(), buffer, i图像输出格式, w, h, 1);
		/*
		 * SwsContext is libswscale's converter handle for scaling and pixel
		 * format conversion.
		 * 
		 * sws_getContext() allocates and returns a SwsContext.
		 * 
		 * sws_scale() performs the actual scale/convert operation.
		 */
		SwsContext 图像变形器 = sws_getContext(w, h, frame.format(), w, h, i图像输出格式, SWS_FAST_BILINEAR, null, null,
				(DoublePointer) null);

		sws_scale(图像变形器, frame.data(), frame.linesize(), 0, h, dst.data(), dst.linesize());
		sws_freeContext(图像变形器);

		return m;
	}

	/**
	 * Converts an OpenCV Mat into the supplied AVFrame using libswscale.
	 * Assumes the Mat holds packed BGR24 pixels with stride mat.step1().
	 * 
	 * NOTE(review): marked "unverified" by the original author — exercise
	 * with care.
	 * 
	 * @param mat   source image (assumed BGR24)
	 * @param frame destination frame; its format() selects the output pixel
	 *              format
	 */
	public static void Mat2Avframe(org.bytedeco.opencv.opencv_core.Mat mat, AVFrame frame) {
		int w = mat.cols();
		int h = mat.rows();
		// Source stride in bytes, wrapped as the one-element stride array
		// sws_scale expects.
		int cvLinesizes[] = new int[1];
		cvLinesizes[0] = (int) mat.step1();
		if (frame == null) {
			// NOTE(review): reassigning the parameter only rebinds the local
			// reference — the caller still sees null, so the frame allocated
			// here (and its av_image_alloc'd planes) is lost/leaked. Also
			// frame.format() below then reads the freshly zeroed frame.
			frame = av_frame_alloc();
			av_image_alloc(frame.data(), frame.linesize(), w, h, AV_PIX_FMT_YUV420P, 1);
		}
		IntBuffer ipot = IntBuffer.wrap(cvLinesizes);
		SwsContext conversion = sws_getContext(w, h, AV_PIX_FMT_BGR24, w, h, frame.format(), SWS_FAST_BILINEAR, null,
				null, (DoublePointer) null);
		sws_scale(conversion, mat.data().asByteBuffer(), ipot, 0, h, frame.data().asByteBuffer(),
				frame.linesize().asBuffer());
		sws_freeContext(conversion);
	}

	// Shared Frame <-> Mat converter, reused across calls to avoid
	// re-allocating a converter per conversion.
	static OpenCVFrameConverter.ToMat CV2MAT = new OpenCVFrameConverter.ToMat();

	/**
	 * Converts a BufferedImage into a JavaCV Frame.
	 * 
	 * @param bi source image
	 * @return the converted Frame
	 */
	public static Frame Img2Frame(BufferedImage bi) {
		return new Java2DFrameConverter().convert(bi);
	}

	/**
	 * Converts a JavaCV Frame into a BufferedImage.
	 * 
	 * @param frame source frame
	 * @return the converted BufferedImage
	 */
	public static BufferedImage Frame2Img(Frame frame) {
		return new Java2DFrameConverter().convert(frame);
	}

	/**
	 * Converts a BufferedImage into a bytedeco OpenCV Mat.
	 * 
	 * @param bi source image
	 * @return the converted Mat
	 */
	public static org.bytedeco.opencv.opencv_core.Mat Img2Mat(BufferedImage bi) {
		Frame 中间帧 = Img2Frame(bi);
		return CV2MAT.convertToMat(中间帧);
	}

	/**
	 * Converts a bytedeco OpenCV Mat into a BufferedImage.
	 * 
	 * @param mat source Mat
	 * @return the converted BufferedImage
	 */
	public static BufferedImage Mat2Img(org.bytedeco.opencv.opencv_core.Mat mat) {
		return Frame2Img(CV2MAT.convert(mat));
	}

	/**
	 * Wraps the pixel data of an AVFrame's first plane in a ByteBuffer
	 * (no copy is made; the buffer aliases the frame's native memory).
	 * 
	 * NOTE(review): assumes a tightly packed 3-bytes-per-pixel format
	 * (e.g. BGR24 with linesize == width * 3 and no row padding) — confirm
	 * for frames whose linesize is aligned/padded.
	 * 
	 * @param pFrame frame whose data plane 0 is wrapped
	 * @return buffer spanning width * height * 3 bytes of the frame's data
	 */
	public static ByteBuffer frame2ByteBuffer(AVFrame pFrame) {
		BytePointer data = pFrame.data(0);
		int size = pFrame.width() * pFrame.height() * 3;
		// position/limit are set on the shared BytePointer itself before the
		// NIO view is taken.
		ByteBuffer buf = data.position(0).limit(size).asBuffer();
		return buf;
	}

	/**
	 * Converts an AVFrame into a BufferedImage by wrapping its pixel bytes
	 * and handing them to MyUI's BGR decoder.
	 * 
	 * @param pFrame source frame (packed 3-byte pixels assumed, see
	 *               {@code frame2ByteBuffer})
	 * @return the converted BufferedImage
	 */
	public static BufferedImage avFrame2Img(AVFrame pFrame) {
		return MyUI.byteBuffer2BGR(frame2ByteBuffer(pFrame), pFrame.width(), pFrame.height());
	}

	/**
	 * Converts an org.opencv.core.Mat into a BufferedImage via HighGui.
	 * 
	 * @param mat source Mat
	 * @return the rendered BufferedImage
	 */
	public static BufferedImage OMat2Img(org.opencv.core.Mat mat) {
		java.awt.Image 图像 = HighGui.toBufferedImage(mat);
		return (BufferedImage) 图像;
	}

	/**
	 * Converts a BufferedImage into an org.opencv.core.Mat.
	 * 
	 * @param original 要转换的BufferedImage (source image)
	 * @param imgType  BufferedImage type, e.g. BufferedImage.TYPE_3BYTE_BGR —
	 *                 must be a byte-backed type so the raster exposes a
	 *                 DataBufferByte
	 * @param matType  target Mat type, e.g. CvType.CV_8UC3
	 * @return a new Mat filled with the image's pixel bytes
	 * @throws IllegalArgumentException if original is null
	 */
	public static org.opencv.core.Mat Img2OMat(BufferedImage original, int imgType, int matType) {
		if (original == null) {
			throw new IllegalArgumentException("original == null");
		}

		// Don't convert if it already has the correct type.
		BufferedImage source = original;
		if (original.getType() != imgType) {

			// Redraw into a buffer of the requested type.
			BufferedImage image = new BufferedImage(original.getWidth(), original.getHeight(), imgType);
			Graphics2D g = image.createGraphics();
			try {
				g.setComposite(AlphaComposite.Src);
				g.drawImage(original, 0, 0, null);
			} finally {
				g.dispose();
			}
			// Bug fix: the original read pixels from `original` below even
			// after converting, discarding this conversion entirely (and the
			// DataBufferByte cast then failed for int-backed source types).
			source = image;
		}

		byte[] pixels = ((DataBufferByte) source.getRaster().getDataBuffer()).getData();
		// Plain constructor instead of Mat.eye: the identity matrix it built
		// was immediately overwritten by put() anyway.
		org.opencv.core.Mat mat = new org.opencv.core.Mat(source.getHeight(), source.getWidth(), matType);

		mat.put(0, 0, pixels);
		return mat;
	}

}
