package vfile;

import com.arcsoft.face.FaceInfo;
import net.coobird.thumbnailator.Thumbnails;
import net.coobird.thumbnailator.geometry.Position;
import org.bytedeco.javacv.*;
import org.bytedeco.javacv.Frame;
import org.bytedeco.opencv.global.opencv_core;
import org.bytedeco.opencv.global.opencv_imgcodecs;
import org.bytedeco.opencv.opencv_core.IplImage;
import org.springframework.util.CollectionUtils;
import util.WorkId;

import java.awt.*;
import java.awt.image.BufferedImage;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.nio.Buffer;
import java.util.List;

import static org.bytedeco.ffmpeg.global.avcodec.AV_CODEC_ID_AAC;
import static org.bytedeco.ffmpeg.global.avcodec.AV_CODEC_ID_H264;
import static org.bytedeco.ffmpeg.global.avutil.AV_PIX_FMT_YUV420P;

public class CheckAllFaceFromVido {
    // Never read in the visible code — NOTE(review): possibly intended as a
    // shutdown flag for an external controller thread; confirm before removing.
    static boolean exit = false;
    // Root directory for input videos and extracted face crops.
    public static String savePath = "F:/face/";

    /**
     * Entry point: re-encodes a local MP4 into an FLV file while running face
     * detection on every video frame and saving each face as a 100x100 JPEG.
     */
    public static void main(String[] args) throws Exception {
        System.out.println("start...");
        String rtmpPath = savePath + "vido/2.mp4";
        String rtspPath = savePath + "vido/file1.flv";
        boolean saveVideo = false;
        push(rtmpPath, rtspPath, saveVideo);
        System.out.println("end...");
    }

    /**
     * Reads the video at {@code rtmpPath}, re-encodes it as H.264/AAC FLV to
     * {@code rtspPath}, and for each decoded video frame runs face detection,
     * writing every detected face as a 100x100 JPEG under {@code savePath}/2/.
     *
     * @param rtmpPath  source video path (despite the name, a local file here)
     * @param rtspPath  destination FLV file path
     * @param saveVideo currently unused — NOTE(review): looks like it was meant
     *                  to make the re-encode optional; confirm intent before use
     * @throws Exception if grabbing, encoding, detection or file I/O fails
     */
    public static void push(String rtmpPath, String rtspPath, boolean saveVideo) throws Exception {
        // For container/stream formats an FFmpegFrameGrabber is required;
        // the generic FrameGrabber cannot handle them.
        FFmpegFrameGrabber grabber = FFmpegFrameGrabber.createDefault(new File(rtmpPath));
        grabber.start();
        // 4th constructor arg = audio channel count (stereo).
        FFmpegFrameRecorder recorder = new FFmpegFrameRecorder(
                rtspPath, grabber.getImageWidth(), grabber.getImageHeight(), 2);
        recorder.setVideoCodec(AV_CODEC_ID_H264);
        // Audio encoder.
        recorder.setAudioCodec(AV_CODEC_ID_AAC);
        // Container format for the output file.
        recorder.setFormat("flv");
        recorder.setPixelFormat(AV_PIX_FMT_YUV420P);

        Java2DFrameConverter converter = new Java2DFrameConverter();
        try {
            recorder.start();
            System.out.println("all start!");
            System.out.println("总时长:" + grabber.getLengthInTime() / 1000 / 60);
            System.out.println("总视频长:" + grabber.getLengthInVideoFrames());
            System.out.println("总贞长:" + grabber.getLengthInFrames());
            int frameTotal = grabber.getLengthInVideoFrames();
            int width = grabber.getImageWidth();
            int height = grabber.getImageHeight();
            for (int i = 1; i < frameTotal; i++) {
                Frame frame = grabber.grab();
                if (frame == null) {
                    continue;
                }
                // Set the output timestamp BEFORE recording so this frame —
                // not the next one — carries it. (Original set it after
                // recordImage, lagging every frame's timestamp by one.)
                recorder.setTimestamp(frame.timestamp);
                if (frame.samples != null) {
                    recorder.recordSamples(frame.samples);
                }
                if (frame.image != null) {
                    BufferedImage img = converter.convert(frame);
                    // try-with-resources: the original leaked the stream when
                    // detection or JPEG encoding threw.
                    try (ByteArrayOutputStream jpgBytes = new ByteArrayOutputStream()) {
                        Thumbnails.of(img).scale(1).outputFormat("jpg").toOutputStream(jpgBytes);
                        List<FaceInfo> faces = FaceEngineUtils.checkFace(jpgBytes.toByteArray());
                        if (!CollectionUtils.isEmpty(faces)) {
                            for (FaceInfo f : faces) {
                                String fileName = savePath + "2/" + WorkId.sortUID();
                                // Crop the detected rectangle, then force-resize
                                // to 100x100 (aspect ratio intentionally dropped).
                                Thumbnails.of(img)
                                        .sourceRegion(new Rectangle(
                                                f.getRect().left,
                                                f.getRect().top,
                                                f.getRect().right - f.getRect().left,
                                                f.getRect().bottom - f.getRect().top))
                                        .size(100, 100)
                                        .keepAspectRatio(false)
                                        .outputFormat("jpg")
                                        .toFile(fileName);
                            }
                        }
                    }
                    recorder.recordImage(width, height, frame.imageDepth,
                            frame.imageChannels, frame.imageStride, -1, frame.image);
                }
            }
        } finally {
            // Release native resources even when a frame fails mid-stream;
            // the original leaked both grabber and recorder on any exception
            // and released the grabber without stopping it first.
            try {
                recorder.stop();
            } finally {
                recorder.release();
                grabber.stop();
                grabber.release();
            }
        }
    }
}
