package com.feishi.core.media.javacv;


import org.bytedeco.javacpp.avcodec;
import org.bytedeco.javacpp.opencv_core.IplImage;
import org.bytedeco.javacpp.opencv_core.Mat;
import org.bytedeco.javacpp.tools.Logger;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.FrameGrabber;
import org.bytedeco.javacv.OpenCVFrameConverter.ToIplImage;
import org.bytedeco.javacv.OpenCVFrameGrabber;

import javax.imageio.ImageIO;
import javax.sound.sampled.*;
import java.awt.image.BufferedImage;
import java.awt.image.DataBufferByte;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.Locale;

import static org.bytedeco.javacpp.FlyCapture2.FRAME_RATE;
import static org.bytedeco.javacpp.opencv_imgcodecs.imencode;

/**
 * @author wenyz
 */
/**
 * Static utility helpers around JavaCV / OpenCV: camera (frame grabber)
 * lifecycle, microphone capture lines, FFmpeg recorder configuration, and
 * conversions between {@code Mat}, {@code IplImage}, {@code BufferedImage}
 * and raw {@code byte[]} image data.
 *
 * <p>NOTE(review): the lazily initialised static singletons ({@code grabber},
 * {@code audioFormat}, {@code audioDataLine}) are not synchronised — confirm
 * this class is only used from a single thread.
 *
 * @author wenyz
 */
public class CvUtil {

    private static final Logger logger = Logger.create(CvUtil.class);

    /**
     * Recording frame rate in frames per second.
     *
     * <p>Fix: the previous revision used the statically imported
     * {@code FlyCapture2.FRAME_RATE}, which is a FlyCapture camera
     * property-type id — not a frame rate — so recorders were configured with
     * a nonsensical fps and GOP size. 25 fps is the minimum recommended by the
     * original in-line comments (below 25 the video visibly flickers).
     */
    private static final int FRAME_RATE = 25;

    /** Shared Frame <-> Mat/IplImage converter (JavaCV converters are not thread-safe). */
    private static final ToIplImage converter = new ToIplImage();
    /** Lazily created default camera grabber; see getOpenCVFrameGrabber(). */
    private static OpenCVFrameGrabber grabber;
    /** Lazily created capture format; see getAudioFormat(). */
    private static AudioFormat audioFormat;
    /** Lazily opened microphone line; see startAudioDevice()/getDataLine(). */
    private static TargetDataLine audioDataLine;

    /**
     * Demonstrates every available conversion between {@link Frame},
     * {@link Mat} and {@link IplImage}. All results are discarded; this method
     * exists only as living documentation of the converter API.
     *
     * <p>NOTE(review): none of the created native images are released — do not
     * call this in a loop.
     *
     * @param frame source frame to convert
     */
    public static void converter(Frame frame) {
        // Frame -> Mat
        Mat mat = converter.convertToMat(frame);

        // Mat -> Frame
        Frame convertFrame1 = converter.convert(mat);

        // Frame -> IplImage (two equivalent entry points)
        IplImage image1 = converter.convertToIplImage(frame);
        IplImage image2 = converter.convert(frame);

        // IplImage -> Frame
        Frame convertFrame2 = converter.convert(image1);

        // Mat -> IplImage
        IplImage matImage = new IplImage(mat);

        // IplImage -> Mat
        Mat mat2 = new Mat(matImage);
    }

    /**
     * Starts the given grabber, retrying once with {@code restart()} on
     * failure. If the retry also fails, the grabber is stopped (best effort)
     * so native resources are released.
     *
     * <p>Fixes over the previous revision: no more {@code return} inside a
     * {@code finally} (which silently swallowed exceptions), no empty catch,
     * and the null check now happens before the first dereference.
     *
     * @param grabber camera grabber to start; may be null
     * @return true when the grabber was started successfully
     */
    public static boolean openCamera(OpenCVFrameGrabber grabber) {
        if (grabber == null) {
            return false;
        }
        try {
            grabber.start();
            return true;
        } catch (FrameGrabber.Exception startFailure) {
            logger.error("OpenCVFrameGrabber start failed: " + startFailure.getMessage());
        }
        try {
            grabber.restart();
            return true;
        } catch (FrameGrabber.Exception restartFailure) {
            logger.error("OpenCVFrameGrabber restart failed: " + restartFailure.getMessage());
            try {
                grabber.stop();
            } catch (FrameGrabber.Exception stopFailure) {
                // Best-effort cleanup only; the failure is already logged above.
            }
            return false;
        }
    }

    /**
     * Stops the given grabber, logging (rather than propagating) any failure.
     *
     * @param grabber camera grabber to stop; must not be null
     */
    public static void closeCamera(OpenCVFrameGrabber grabber) {
        try {
            grabber.stop();
        } catch (FrameGrabber.Exception e) {
            logger.error("OpenCVFrameGrabber stop failed: " + e.getMessage());
        }
    }

    /**
     * Opens (lazily) and starts the shared microphone capture line.
     *
     * @return the started line, or null when no capture line is available
     *         (fix: previously this threw a NullPointerException in that case)
     */
    public static TargetDataLine startAudioDevice() {
        if (audioDataLine == null && getDataLine() == null) {
            return null;
        }
        audioDataLine.start();
        return audioDataLine;
    }

    /**
     * Stops the shared capture line if one was opened. Safe to call when the
     * device was never started.
     */
    public static void stopAudioDevice() {
        if (audioDataLine != null) {
            // Fix: stop first, then discard queued data. Flushing before
            // stopping (as before) lets the still-running line queue new data.
            audioDataLine.stop();
            audioDataLine.flush();
        }
    }

    /**
     * Returns the shared capture format: 44.1 kHz, 16-bit signed PCM, stereo,
     * little-endian byte order (see {@link java.nio.ByteOrder}). Prefer a
     * format the system supports, otherwise {@code AudioSystem.getLine()}
     * fails.
     *
     * @return the lazily created, cached AudioFormat
     */
    public static AudioFormat getAudioFormat() {
        if (audioFormat == null) {
            audioFormat = new AudioFormat(44100.0F, 16, 2, true, false);
        }
        return audioFormat;
    }

    /**
     * Opens (but does not start) a microphone TargetDataLine matching
     * {@link #getAudioFormat()}; the result is cached in {@code audioDataLine}.
     *
     * <p>To target a specific device instead of the default mixer, use
     * {@code AudioSystem.getMixer(AudioSystem.getMixerInfo()[i]).getLine(info)}.
     *
     * @return the opened line, or null when no matching line is available
     */
    private static TargetDataLine getDataLine() {
        if (audioFormat == null) {
            audioFormat = getAudioFormat();
        }
        // Describe the capture line we want; the default mixer picks a device.
        DataLine.Info dataLineInfo = new DataLine.Info(TargetDataLine.class, audioFormat);
        try {
            audioDataLine = (TargetDataLine) AudioSystem.getLine(dataLineInfo);
            audioDataLine.open(audioFormat);
            return audioDataLine;
        } catch (Exception e) {
            // LineUnavailableException, or no capture device on this machine.
            logger.error("Unable to open audio capture line: " + e);
        }
        return null;
    }

    /**
     * Builds an H.264/AAC MP4 recorder tuned for low-latency capture. The
     * caller is responsible for {@code start()}ing and {@code stop()}ping the
     * returned recorder.
     *
     * @param filePath output file path
     * @param width    video width in pixels
     * @param height   video height in pixels
     * @return a configured, not-yet-started recorder
     */
    public static FFmpegFrameRecorder createFrameRecorder(String filePath, int width, int height) {
        FFmpegFrameRecorder recorder = new FFmpegFrameRecorder(filePath, width, height, 2);
        recorder.setInterleaved(true);
        // Reduce encoder latency.
        recorder.setVideoOption("tune", "zerolatency");
        // "ultrafast" = least compression / lowest encoder CPU and largest
        // stream; "veryslow" = best compression at the highest CPU cost.
        recorder.setVideoOption("preset", "ultrafast");
        // Constant rate factor: lower means higher quality and bigger output.
        recorder.setVideoOption("crf", "25");
        // 2000 kb/s is a reasonable bitrate for 720p video.
        recorder.setVideoBitrate(2000000);
        // H.264 video codec.
        recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);
        // MP4 container.
        recorder.setFormat("mp4");
        // Frame rate: keep at least 25 fps to avoid visible flicker.
        recorder.setFrameRate(FRAME_RATE);
        // Keyframe (GOP) interval, conventionally 1-2x the frame rate.
        recorder.setGopSize(FRAME_RATE * 2);
        // Constant-quality audio at the highest quality setting.
        recorder.setAudioOption("crf", "0");
        recorder.setAudioQuality(0);
        // Audio bitrate.
        recorder.setAudioBitrate(192000);
        // Audio sample rate, matching getAudioFormat().
        recorder.setSampleRate(44100);
        // Stereo.
        recorder.setAudioChannels(2);
        // AAC audio codec.
        recorder.setAudioCodec(avcodec.AV_CODEC_ID_AAC);
        return recorder;
    }

    /**
     * Lazily creates the shared grabber for the default camera (device 0).
     *
     * @return the cached grabber (never started here)
     */
    public static OpenCVFrameGrabber getOpenCVFrameGrabber() {
        if (grabber == null) {
            grabber = new OpenCVFrameGrabber(0);
        }
        return grabber;
    }

    /**
     * Lazily creates the shared camera grabber. On Windows the default device
     * (index 0) is used; elsewhere the device {@code /dev/<videoName>} is
     * opened, defaulting to {@code /dev/video0} when videoName is null.
     *
     * @param videoName Linux video device name (e.g. "video1"); may be null
     * @return the cached grabber (never started here)
     */
    public static OpenCVFrameGrabber getOpenCVFrameGrabber(String videoName) {
        if (grabber == null) {
            // Locale.ROOT keeps the OS-name comparison locale-independent.
            String os = System.getProperty("os.name").toLowerCase(Locale.ROOT);
            if (os.contains("win")) {
                grabber = new OpenCVFrameGrabber(0);
            } else {
                grabber = new OpenCVFrameGrabber("/dev/" + (videoName == null ? "video0" : videoName));
            }
        }
        return grabber;
    }

    /**
     * Encodes the Mat with the codec chosen by fileExtension and decodes the
     * result into a BufferedImage.
     *
     * @param mat           source image
     * @param fileExtension encoder selector, e.g. ".jpg" or ".png"
     * @return the decoded image, or null on failure
     */
    public static BufferedImage MatToBufImg(Mat mat, String fileExtension) {
        byte[] byteArray = MatToBytes(mat, fileExtension);
        try (InputStream in = new ByteArrayInputStream(byteArray)) {
            return ImageIO.read(in);
        } catch (IOException e) {
            logger.error("MatToBufImg decode failed: " + e);
            return null;
        }
    }

    /**
     * Wraps the BufferedImage's backing byte raster in a Mat.
     *
     * <p>NOTE(review): assumes a byte-backed raster (e.g. TYPE_3BYTE_BGR);
     * other image types back their raster with int/short buffers and the cast
     * below throws ClassCastException — TODO confirm callers only pass
     * byte-rastered images.
     *
     * @param image source image with a DataBufferByte raster
     * @return a Mat wrapping the raster bytes
     */
    public static Mat bufImgToMat(BufferedImage image) {
        byte[] pixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
        return new Mat(pixels, true);
    }

    /**
     * Encodes the Mat and wraps the result in a stream.
     *
     * @param mat           source image
     * @param fileExtension encoder selector, e.g. ".jpg" or ".png"
     * @return a stream over the encoded bytes
     */
    public static ByteArrayInputStream MatToStream(Mat mat, String fileExtension) {
        return new ByteArrayInputStream(MatToBytes(mat, fileExtension));
    }

    /**
     * Encodes a Mat to a byte[] using the codec selected by fileExtension.
     *
     * <p>NOTE(review): the output buffer is sized from the RAW pixel buffer,
     * which assumes the encoded image never exceeds the uncompressed size —
     * true for typical content but not guaranteed; confirm against the
     * imencode overload in use.
     *
     * @param mat           source image
     * @param fileExtension encoder selector, e.g. ".jpg" or ".png"
     * @return the encoded bytes
     */
    public static byte[] MatToBytes(Mat mat, String fileExtension) {
        ByteBuffer bf = ByteBuffer.allocate(mat.createBuffer().limit());
        imencode(fileExtension, mat, bf);
        return ByteBufferToBytes(bf);
    }

    /** Copies the buffer's remaining bytes into a new array. */
    private static byte[] ByteBufferToBytes(ByteBuffer bf) {
        byte[] result = new byte[bf.remaining()];
        bf.get(result);
        return result;
    }

    /**
     * Streams the Mat's RAW pixel buffer (no encoding). Use
     * {@link #MatToStream(Mat, String)} for an encoded stream.
     *
     * @param mat source image
     * @return a stream over the raw pixel bytes
     */
    public static ByteArrayInputStream MatToStream(Mat mat) {
        ByteBuffer bf = mat.createBuffer();
        return new ByteArrayInputStream(ByteBufferToBytes(bf));
    }

    /**
     * Wraps raw image bytes in a Mat, mirroring {@link #bufImgToMat}.
     *
     * <p>Fix: the previous revision ignored its argument and always returned
     * an empty Mat. NOTE(review): still marked untested by the original author
     * — this wraps raw bytes, it does NOT decode an encoded (jpg/png) payload;
     * use imdecode for that.
     *
     * @param pixels raw pixel bytes
     * @return a Mat wrapping the given bytes
     */
    public static Mat bytesToMat(byte[] pixels) {
        return new Mat(pixels, true);
    }

    /**
     * Encodes a BufferedImage as PNG bytes.
     *
     * @param img source image
     * @return PNG-encoded bytes; empty on failure (logged)
     */
    public static byte[] bufferedImageToBytes(BufferedImage img) {
        ByteArrayOutputStream byteOutputStream = new ByteArrayOutputStream();
        try {
            ImageIO.write(img, "png", byteOutputStream);
        } catch (IOException e) {
            logger.error("bufferedImageToBytes encode failed: " + e);
        }
        return byteOutputStream.toByteArray();
    }

    /**
     * Decodes encoded image bytes (any ImageIO-supported format) into a
     * BufferedImage.
     *
     * @param bytes encoded image bytes
     * @return the decoded image, or null on failure (logged)
     */
    public static BufferedImage bytesToBufferedImage(byte[] bytes) {
        try {
            return ImageIO.read(new ByteArrayInputStream(bytes));
        } catch (IOException e) {
            logger.error("bytesToBufferedImage decode failed: " + e);
        }
        return null;
    }

}
