package com.qianyanglong.luping.utils;

import java.awt.AWTException;
import java.awt.Dimension;
import java.awt.Graphics2D;
import java.awt.Rectangle;
import java.awt.RenderingHints;
import java.awt.Robot;
import java.awt.Toolkit;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.ShortBuffer;
import java.util.Map;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

import javax.imageio.ImageIO;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.TargetDataLine;

import org.bytedeco.ffmpeg.global.avcodec;
import org.bytedeco.ffmpeg.global.avutil;
import org.bytedeco.javacv.FFmpegFrameGrabber;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.Java2DFrameConverter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Desktop recording utilities: {@link #luping} starts a full-screen MP4
 * recording (Intel QSV H.264 video + AAC audio from the default capture
 * line), {@link #stop} finalizes it, and {@link #jieping} takes a one-off
 * JPEG screenshot.
 *
 * <p>State is held in static fields, so at most one recording can run at a
 * time; {@code luping} is {@code synchronized} to serialize starts.
 */
public class LuPing {

	private static final Logger logger = LoggerFactory.getLogger(LuPing.class);

	/** Capture/encode rate, frames per second, shared by the video and audio schedulers. */
	private static final double FRAME_RATE = 12;

	private static FFmpegFrameRecorder recorder;

	private static TargetDataLine line;

	private static ScheduledThreadPoolExecutor videoExec;
	private static ScheduledThreadPoolExecutor audioExec;

	/**
	 * Starts recording the entire primary screen plus system audio into an MP4 file.
	 *
	 * @param map  caller-supplied context; currently unused but kept for interface compatibility
	 * @param path output file path; an existing file at this path is deleted first
	 */
	public static synchronized void luping(Map<String, Object> map, String path) {
		try {
			logger.info("视频抓取器初始化完成");
			File file = new File(path);
			if (file.exists() && !file.delete()) {
				logger.warn("could not delete existing output file: {}", path);
			}
			Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize();
			final int width = (int) screenSize.getWidth();
			final int height = (int) screenSize.getHeight();

			recorder = new FFmpegFrameRecorder(path, width, height);
			recorder.setPixelFormat(avutil.AV_PIX_FMT_NV12); // Intel iGPU encoder supports only nv12 and p010le
			recorder.setVideoCodecName("h264_qsv"); // hardware-accelerated H.264
			recorder.setVideoBitrate(2000 * 1000); // 2000 kb/s
			recorder.setMaxBFrames(0); // no B-frames, lower latency
			recorder.setFormat("mp4"); // container format
			recorder.setGopSize((int) FRAME_RATE); // keyframe interval: one per second
			recorder.setFrameRate(FRAME_RATE);
			recorder.setInterleaved(true);
			recorder.setVideoOption("tune", "zerolatency"); // zero-latency tuning for real-time capture
			recorder.setVideoOption("preset", "veryfast"); // fastest preset supported by the GPU encoder
			recorder.setVideoOption("crf", "17"); // rate factor 0-51; 17-18 is visually lossless

			recorder.setAudioCodec(avcodec.AV_CODEC_ID_AAC);
			recorder.setAudioBitrate(320 * 1000);
			recorder.setAudioOption("vbr", "1"); // variable bitrate quality, 1 (lowest) .. 5 (highest)
			recorder.setAudioChannels(2);
			recorder.setSampleRate(48000);

			Robot robot = new Robot();
			Rectangle rectangle = new Rectangle(0, 0, width, height);
			videoExec = new ScheduledThreadPoolExecutor(1);
			audioExec = new ScheduledThreadPoolExecutor(1);
			final long startTime = System.currentTimeMillis();

			AudioFormat audioFormat = new AudioFormat(44100.0F, 16, 2, true, false);
			DataLine.Info dataLineInfo = new DataLine.Info(TargetDataLine.class, audioFormat);
			line = (TargetDataLine) AudioSystem.getLine(dataLineInfo);
			// Let LineUnavailableException propagate to the outer catch: the old code
			// swallowed it and then crashed on line.start() against an unopened line.
			line.open(audioFormat);
			line.start();

			final int sampleRate = (int) audioFormat.getSampleRate();
			final int numChannels = audioFormat.getChannels();
			// One second worth of 16-bit stereo audio.
			final byte[] audioBytes = new byte[sampleRate * numChannels];

			recorder.start();

			final long periodMs = (long) (1000 / FRAME_RATE);
			videoExec.scheduleAtFixedRate(new Runnable() {
				@Override
				public void run() {
					BufferedImage screenCapture = robot.createScreenCapture(rectangle); // grab the screen
					// Redraw the capture into a BGR image, the layout the converter expects.
					BufferedImage videoImg = new BufferedImage(width, height, BufferedImage.TYPE_3BYTE_BGR);
					Graphics2D videoGraphics = videoImg.createGraphics();
					videoGraphics.setRenderingHint(RenderingHints.KEY_DITHERING, RenderingHints.VALUE_DITHER_DISABLE);
					videoGraphics.setRenderingHint(RenderingHints.KEY_COLOR_RENDERING,
							RenderingHints.VALUE_COLOR_RENDER_SPEED);
					videoGraphics.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_SPEED);
					videoGraphics.drawImage(screenCapture, 0, 0, null);

					Java2DFrameConverter converter = new Java2DFrameConverter();
					try {
						Frame frame = converter.convert(videoImg);
						// Elapsed wall-clock time in microseconds since recording started.
						// (The previous formula subtracted (now - pauseTime) too, which made
						// the timestamp a constant near zero and never advanced the clock.)
						long videoTS = 1000L * (System.currentTimeMillis() - startTime);
						if (videoTS > recorder.getTimestamp()) {
							recorder.setTimestamp(videoTS);
						}
						recorder.record(frame); // encode the video frame
					} catch (Exception e) {
						logger.error("failed to record video frame", e);
					} finally {
						converter.close();
						videoGraphics.dispose();
						videoImg.flush();
						screenCapture.flush();
					}
				}
			}, 0, periodMs, TimeUnit.MILLISECONDS);

			audioExec.scheduleAtFixedRate(new Runnable() {
				@Override
				public void run() {
					try {
						// Clamp to the buffer size: line.available() can exceed
						// audioBytes.length, which would make read() throw.
						int nBytesRead = line.read(audioBytes, 0, Math.min(line.available(), audioBytes.length));
						int nSamplesRead = nBytesRead / 2; // 16-bit samples
						short[] samples = new short[nSamplesRead];

						// Wrap the raw bytes as little-endian shorts and hand them to the recorder.
						ByteBuffer.wrap(audioBytes).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().get(samples);
						ShortBuffer sBuff = ShortBuffer.wrap(samples, 0, nSamplesRead);
						recorder.recordSamples(sampleRate, numChannels, sBuff);
					} catch (org.bytedeco.javacv.FrameRecorder.Exception e) {
						logger.error("failed to record audio samples", e);
					}
				}
			}, 0, periodMs, TimeUnit.MILLISECONDS);
		} catch (Exception e) {
			logger.error("failed to start recording", e);
		}
	}

	/**
	 * Stops recording: cancels both schedulers, flushes and finalizes the MP4,
	 * and releases the audio line. Safe to call even if {@link #luping} was
	 * never started or failed part-way (all fields are null-checked).
	 */
	public static void stop() {
		if (videoExec != null) {
			videoExec.shutdownNow();
			videoExec = null;
		}
		if (audioExec != null) {
			audioExec.shutdownNow();
			audioExec = null;
		}
		if (recorder != null) {
			try {
				recorder.stop(); // flushes encoders and writes the MP4 trailer
				recorder.release(); // close() would only repeat stop()+release()
			} catch (org.bytedeco.javacv.FrameRecorder.Exception e) {
				logger.error("failed to stop recorder", e);
			} finally {
				recorder = null;
			}
		}
		if (line != null) {
			line.stop();
			line.close();
			line = null;
		}
		logger.info("停止");
	}

	/**
	 * Takes a full-screen screenshot and writes it to {@code path} as JPEG,
	 * creating parent directories as needed.
	 *
	 * @param path output file path for the JPEG image
	 */
	public static synchronized void jieping(String path) {
		try {
			Robot robot = new Robot();
			Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize();
			BufferedImage capture = robot.createScreenCapture(new Rectangle(screenSize));
			File output = new File(path);
			File parent = output.getParentFile();
			// getParentFile() is null for bare filenames; the old code NPE'd on those.
			if (parent != null && !parent.exists() && !parent.mkdirs()) {
				logger.warn("could not create directory: {}", parent);
			}
			ImageIO.write(capture, "jpeg", output);
		} catch (AWTException | IOException e) {
			logger.error("screenshot failed", e);
		}
	}
}
