package com.lvm.app.file;

import org.bytedeco.ffmpeg.global.avcodec;
import org.bytedeco.ffmpeg.global.avutil;
import org.bytedeco.javacv.*;

import javax.imageio.ImageIO;
import javax.swing.*;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

public class VideoF {
    // Lazily-initialized singleton. volatile is REQUIRED for safe
    // double-checked locking (otherwise another thread can observe a
    // partially-constructed instance).
    private static volatile VideoF INSTANCE;

    private VideoF() {
        // Obtain instances via getInstance().
    }

    /**
     * Returns the shared {@code VideoF} instance, creating it on first use.
     *
     * <p>Fixed: previously synchronized on {@code FileUtil.class} (an
     * unrelated lock) and the field was not {@code volatile}, so the
     * double-checked locking was broken.
     *
     * @return the singleton instance, never {@code null}
     */
    public static VideoF getInstance() {
        if (INSTANCE == null) {
            synchronized (VideoF.class) {
                if (INSTANCE == null) {
                    INSTANCE = new VideoF();
                }
            }
        }
        return INSTANCE;
    }

    /**
     * Transcodes {@code input} to an H.264/AAC MP4 file at
     * {@code outputDir/outputFileName.mp4}, printing progress to stdout.
     *
     * <p>When either {@code outputDir} or {@code outputFileName} is
     * {@code null}, the video is only read (progress is still reported) and
     * nothing is written. Fixed: the original created the recorder on
     * {@code outputFileName != null} but recorded on
     * {@code outputDir != null}, so a null file name with a non-null
     * directory threw an NPE, and a null directory produced a literal
     * {@code "null/..."} output path. Resources are now always closed via
     * {@code finally}; previously an exception or an interrupt-triggered
     * early return leaked both the grabber and the recorder.
     *
     * @param input          source video file
     * @param outputDir      destination directory, or {@code null} to skip writing
     * @param outputFileName destination base name (".mp4" is appended), or {@code null}
     * @throws Exception if FFmpeg fails to open, decode, or encode
     */
    public void transcode(File input, String outputDir, String outputFileName) throws Exception {
        FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(input);
        grabber.setImageMode(FrameGrabber.ImageMode.COLOR);

        FFmpegFrameRecorder recorder = null;
        try {
            grabber.start();
            int imageWidth = grabber.getImageWidth();
            int imageHeight = grabber.getImageHeight();
            int audioChannels = grabber.getAudioChannels();
            int frameLength = grabber.getLengthInFrames();
            double frameRate = grabber.getVideoFrameRate();

            if (outputDir != null && outputFileName != null) {
                // The file name may also be an RTMP URL when publishing to a
                // streaming server; here we always build a local .mp4 path.
                recorder = new FFmpegFrameRecorder(
                        outputDir + File.separator + outputFileName + ".mp4",
                        imageWidth, imageHeight, audioChannels);
                // "ultrafast" trades compression ratio for minimum encoder
                // CPU; see https://trac.ffmpeg.org/wiki/Encode/H.264
                recorder.setVideoOption("preset", "ultrafast");
                // ~1 Mb/s; a reasonable bitrate for 720p content.
                recorder.setVideoBitrate(1000 * 1024);
                recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);
                recorder.setFormat("mp4");
                // Keep the source frame rate (below ~25 fps playback may flicker).
                recorder.setFrameRate(frameRate);
                // Highest audio quality setting.
                recorder.setAudioQuality(1);
                recorder.setAudioBitrate(192000);
                recorder.setSampleRate(44100);
                recorder.setAudioChannels(audioChannels);
                recorder.setAudioCodec(avcodec.AV_CODEC_ID_AAC);
                recorder.start();
            }

            System.out.println("开始转码");
            long curFrame = 0L;
            long curProcess = 0;
            Frame frame;
            while ((frame = grabber.grab()) != null) {
                if (Thread.currentThread().isInterrupted()) {
                    // Stop early; resources are still closed in finally
                    // (the original returned here and leaked both handles).
                    break;
                }
                // Report progress only when the rounded percentage advances.
                double process = (curFrame / (frameLength * 1.0)) * 100;
                if (Math.round(process) > curProcess) {
                    curProcess = Math.round(process);
                    System.out.println("当前进度" + curProcess + "%");
                }
                if (recorder != null) {
                    recorder.record(frame);
                }
                curFrame++;
            }
        } finally {
            if (recorder != null) {
                recorder.close();
            }
            grabber.close();
        }
    }

    /**
     * Picks up to {@code captureNumber} distinct random frame indices in
     * {@code [0, frameLength)}.
     *
     * <p>Fixed: {@code Math.round(Math.random() * frameLength)} could return
     * {@code frameLength} itself (one past the last valid frame index), and
     * duplicate picks silently reduced the number of captured images below
     * the requested count.
     *
     * @param frameLength   total number of frames in the video
     * @param captureNumber how many indices to pick
     * @return distinct indices; fewer than {@code captureNumber} only when
     *         {@code frameLength} is smaller than it
     */
    public List<Long> framRandom(long frameLength, int captureNumber) {
        List<Long> indices = new ArrayList<>();
        // Cannot pick more distinct frames than exist; also guards against
        // negative arguments so the loop below always terminates.
        long target = Math.min(Math.max(captureNumber, 0), Math.max(frameLength, 0));
        while (indices.size() < target) {
            // (long) floor of random * length is uniform over [0, frameLength).
            long candidate = (long) (Math.random() * frameLength);
            if (!indices.contains(candidate)) {
                indices.add(candidate);
            }
        }
        return indices;
    }

    /**
     * Grabs {@code captureNumber} randomly-chosen frames from an MP4 file and
     * writes each as {@code outDir/fileName<frameIndex>.png}.
     *
     * @param inputFile     source video; anything not ending in ".mp4" is skipped
     *                      (the original {@code indexOf("mp4") > 0} also matched
     *                      names like {@code x.mp4.avi})
     * @param outDir        output directory, created on demand
     * @param fileName      prefix for the PNG file names
     * @param captureNumber number of frames to capture; {@code <= 0} captures none
     * @param callBack      optional per-image progress callback, may be {@code null}
     * @param callBack2     optional callback receiving the video duration in
     *                      seconds, may be {@code null}
     * @return the PNG files successfully written (possibly empty)
     * @throws FrameGrabber.Exception if closing the grabber fails
     */
    public List<File> capturePicture(File inputFile, String outDir, String fileName,
                                     int captureNumber, TaskCallBack<String> callBack,
                                     TaskCallBack<Long> callBack2) throws FrameGrabber.Exception {
        List<File> res = new ArrayList<>();
        if (!inputFile.getName().toLowerCase().endsWith(".mp4")) {
            return res;
        }
        FFmpegFrameGrabber grabber = null;
        try {
            grabber = new FFmpegFrameGrabber(inputFile);
            grabber.setImageMode(FrameGrabber.ImageMode.COLOR);
            // NOTE(review): "h264_nvenc" is an NVIDIA *encoder* name; for
            // hardware-accelerated decoding "h264_cuvid" is the usual
            // choice — confirm this is intentional.
            grabber.setVideoCodecName("h264_nvenc");
            grabber.start();

            int frameLength = grabber.getLengthInFrames();
            List<Long> captureIndex = captureNumber > 0
                    ? framRandom(frameLength, captureNumber)
                    : new ArrayList<>();
            // Total length is a property of the file, not of any frame —
            // compute it once (the original re-read it on every iteration).
            long duration = grabber.getLengthInTime() / (1000 * 1000); // µs -> s
            // One converter reused for every capture (was one per frame).
            Java2DFrameConverter converter = new Java2DFrameConverter();

            long curFrame = 0L;
            int curImage = 1;
            Frame frame;
            while ((frame = grabber.grabImage()) != null) {
                if (captureIndex.contains(curFrame)) {
                    File oDir = new File(outDir);
                    if (!oDir.exists()) {
                        oDir.mkdirs();
                    }
                    BufferedImage image = converter.getBufferedImage(frame);
                    File out = new File(outDir + File.separator + fileName + curFrame + ".png");
                    // try-with-resources replaces the manual finally/close dance.
                    try (FileOutputStream outFile = new FileOutputStream(out)) {
                        ImageIO.write(image, "png", outFile);
                        if (callBack != null) {
                            callBack.callBack(inputFile.getName() + "截取图片第" + curImage + "张");
                        }
                        curImage++;
                        res.add(out);
                    } catch (IOException e) {
                        // Best-effort per image, but no longer swallowed silently.
                        e.printStackTrace();
                    }
                }
                curFrame++;
            }

            if (callBack2 != null) {
                callBack2.callBack(duration);
            }
        } catch (FrameGrabber.Exception e) {
            // Keep the original contract: grab failures yield whatever was
            // captured so far rather than propagating.
            return res;
        } finally {
            // close() already releases; the original called release() then
            // close() and would NPE here if construction had failed.
            if (grabber != null) {
                grabber.close();
            }
        }
        return res;
    }

    /**
     * Ad-hoc manual test entry point; the paths are machine-specific.
     */
    public static void main(String[] args) throws Exception {
        VideoF f = VideoF.getInstance();
        File inputFile = new File("C:\\Users\\admin\\Pictures\\4A-Chapter-5-Animated-text.mp4");
        // f.capturePicture(inputFile, "D:\\video", "videoFileName2", 20, null, null);
    }
}
