package java_option.player.javacv.core;

import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;

import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.SourceDataLine;
import javax.swing.JOptionPane;

import org.bytedeco.ffmpeg.avutil.AVFrame;
import org.bytedeco.ffmpeg.global.avutil;
import org.bytedeco.javacv.FFmpegFrameGrabber;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;


/**
 * 用于播放音频 (audio playback thread)
 * 
 * @author java红了 <br/>
 *         2020-9-28 19:32:50
 */
public class AudioPlayThread extends Thread {

	public static final Logger logger = LoggerFactory.getLogger(AudioPlayThread.class);

	// Lazily-created singleton; discarded again by setRunning(null).
	private static AudioPlayThread instance;

	// Tri-state playback flag shared between controller threads and run():
	// TRUE = playing, FALSE = paused (run() parks in waitPlay()), null = stop.
	// volatile so writes from setRunning() are visible to the playback loop.
	private volatile Boolean isRunning = false;

	private AudioPlayThread() {

	}

	/**
	 * Returns the singleton playback thread, creating it on first use.
	 * The instance is also read inside the lock so it is safely published.
	 */
	public static AudioPlayThread getInstance() {
		synchronized (AudioPlayThread.class) {
			if (instance == null) {
				instance = new AudioPlayThread();
			}
			return instance;
		}
	}

	private AudioFormat af = null;
	private SourceDataLine sourceDataLine;
	private DataLine.Info dataLineInfo;

	private Buffer[] buf;
	private FloatBuffer leftData, rightData;
	private ShortBuffer ILData, IRData;
	private ByteBuffer TLData, TRData;
	private float vol = 1;// linear gain, 0.0 .. 1.0
	private int sampleFormat;
	private byte[] tl, tr;
	private byte[] combine;

	/**
	 * Reads the audio parameters from the grabber, opens a matching audio
	 * output line and starts this thread.
	 * <p>
	 * The grabber must already have been start()ed; otherwise
	 * getSampleFormat() returns an invalid value and initialization fails.
	 *
	 * @param frameGrabber started grabber describing the audio stream
	 */
	public void startPlay(FFmpegFrameGrabber frameGrabber) {
		// NOTE: seeking pure-audio files via setTimestamp() was broken before
		// JavaCV 1.4.1; it works from that release on.
		sampleFormat = frameGrabber.getSampleFormat();
		logger.debug("音频采样率	" + frameGrabber.getSampleRate());
		logger.debug("音频通道数	" + frameGrabber.getAudioChannels());

		initSourceDataLine(frameGrabber);

		isRunning = true;
		super.start();
	}

	/**
	 * Playback loop: pulls decoded audio frames from the FIFO and writes them
	 * to the sound card. Pauses while isRunning is FALSE, exits when null.
	 */
	@Override
	public void run() {
		FrameWrapper frameWrapper;
		while (true) {
			Boolean state = isRunning; // single read of the shared flag
			if (state == null) {
				break; // stop requested
			}
			if (!state.booleanValue()) {
				waitPlay(); // paused until notifyPlay()
			}

			frameWrapper = FrameFIFO.getInstance().readNextAudio();
			if (frameWrapper == null) {
				// FIFO empty: back off briefly instead of busy-spinning.
				try {
					Thread.sleep(50);
				} catch (InterruptedException e) {
					Thread.currentThread().interrupt(); // preserve interrupt status
					break; // treat interruption as a stop request
				}
				continue;
			} else {
				logger.debug("" + frameWrapper.index);
				processAudio(frameWrapper.frame.samples);
				// Block until the line has played the buffered data so frames
				// are consumed at roughly playback speed (crude pacing).
				sourceDataLine.drain();
			}
		}
	}

	/**
	 * Converts one frame's sample buffers to 16-bit PCM bytes and writes them
	 * to the audio line. Only the formats configured by initSourceDataLine()
	 * are handled.
	 *
	 * @param samples per-channel buffers (planar formats) or a single
	 *                interleaved buffer (packed formats)
	 */
	public void processAudio(Buffer[] samples) {
		buf = samples;
		switch (sampleFormat) {
		case avutil.AV_SAMPLE_FMT_FLTP:// planar float: one buffer per channel
			// NOTE(review): assumes exactly two channels; mono input would
			// fail on buf[1] — confirm against the decoder configuration.
			leftData = (FloatBuffer) buf[0];
			TLData = floatToByteValue(leftData, vol);
			rightData = (FloatBuffer) buf[1];
			TRData = floatToByteValue(rightData, vol);
			tl = TLData.array();
			tr = TRData.array();
			combine = interleave(tl, tr);
			sourceDataLine.write(combine, 0, combine.length);
			break;
		case avutil.AV_SAMPLE_FMT_S16:// packed: channels interleaved in one buffer
			if (buf[0] instanceof ShortBuffer) {
				ILData = (ShortBuffer) buf[0];
				TLData = shortToByteValue(ILData, vol);
				tl = TLData.array();
				sourceDataLine.write(tl, 0, tl.length);
			} else if (buf[0] instanceof FloatBuffer) {
				// Some streams deliver float buffers even for this tag.
				leftData = (FloatBuffer) buf[0];
				TLData = floatToByteValue(leftData, vol);
				tl = TLData.array();
				sourceDataLine.write(tl, 0, tl.length);
			} else {
				logger.debug("音频播放出错 类型：" + buf[0].getClass());
			}
			break;
		case avutil.AV_SAMPLE_FMT_FLT:// packed float
			leftData = (FloatBuffer) buf[0];
			TLData = floatToByteValue(leftData, vol);
			tl = TLData.array();
			sourceDataLine.write(tl, 0, tl.length);
			break;
		case avutil.AV_SAMPLE_FMT_S16P:// planar signed 16-bit: one buffer per channel
			ILData = (ShortBuffer) buf[0];
			IRData = (ShortBuffer) buf[1];
			TLData = shortToByteValue(ILData, vol);
			TRData = shortToByteValue(IRData, vol);
			tl = TLData.array();
			tr = TRData.array();
			combine = interleave(tl, tr);
			sourceDataLine.write(combine, 0, combine.length);
			break;
		default:
			JOptionPane.showMessageDialog(null, "unsupport audio format", "unsupport audio format", JOptionPane.ERROR_MESSAGE);
			break;
		}
	}

	/**
	 * Interleaves two mono big-endian 16-bit PCM byte arrays into one stereo
	 * stream: L0 L0 R0 R0 L1 L1 R1 R1 ... Both arrays must have equal length.
	 * (Shared by the FLTP and S16P branches, which previously duplicated it.)
	 */
	private static byte[] interleave(byte[] left, byte[] right) {
		byte[] out = new byte[left.length + right.length];
		int k = 0;
		for (int i = 0; i < left.length; i = i + 2) {
			for (int j = 0; j < 2; j++) {
				out[j + 4 * k] = left[i + j];
				out[j + 2 + 4 * k] = right[i + j];
			}
			k++;
		}
		return out;
	}

	/**
	 * Opens and starts a SourceDataLine matching the grabber's sample format.
	 * <p>
	 * Packed vs planar layout (two channels c1/c2):
	 * <p>packed: c1c2 c1c2 c1c2 ... (one interleaved buffer; 5.1 would be
	 * c1c2c3c4c5c6 repeated)
	 * <p>planar: c1c1c1... c2c2c2... (one buffer per channel)
	 * <p>
	 * All supported formats except S32P are opened as big-endian signed
	 * 16-bit PCM because processAudio() converts every sample to that
	 * representation. For unsupported formats the line is left unopened.
	 *
	 * @param fg started grabber describing the stream's audio parameters
	 */
	private void initSourceDataLine(FFmpegFrameGrabber fg) {
		switch (fg.getSampleFormat()) {
		case avutil.AV_SAMPLE_FMT_S16: // packed signed 16-bit
		case avutil.AV_SAMPLE_FMT_S16P:// planar signed 16-bit
		case avutil.AV_SAMPLE_FMT_FLTP:// planar float, rendered as signed 16-bit
		case avutil.AV_SAMPLE_FMT_FLT: // packed float, rendered as signed 16-bit
			// FLT previously used PCM_FLOAT with 16-bit samples, but the data
			// written by processAudio() is signed 16-bit PCM (and PCM_FLOAT
			// lines expect 32/64-bit samples), so PCM_SIGNED is correct here.
			af = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, fg.getSampleRate(), 16, fg.getAudioChannels(), fg.getAudioChannels() * 2, fg.getSampleRate(), true);
			break;
		case avutil.AV_SAMPLE_FMT_S32P:// signed 32-bit planar; many sound cards cannot open this
			// Frame size for 32-bit samples is channels * 4 bytes (was * 2).
			af = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, fg.getSampleRate(), 32, fg.getAudioChannels(), fg.getAudioChannels() * 4, fg.getSampleRate(), true);
			break;
		default:
			// AV_SAMPLE_FMT_U8/S32/DBL/U8P/DBLP/S64/S64P and unknown formats.
			logger.debug("不支持的音乐格式");
		}
		if (af == null) {
			// Unsupported format: leave the line unopened rather than handing
			// a null AudioFormat to AudioSystem.
			return;
		}
		dataLineInfo = new DataLine.Info(SourceDataLine.class, af, AudioSystem.NOT_SPECIFIED);
		try {
			sourceDataLine = (SourceDataLine) AudioSystem.getLine(dataLineInfo);
			sourceDataLine.open(af);
			sourceDataLine.start();
		} catch (LineUnavailableException e) {
			logger.error("failed to open audio output line", e);
		}
	}

	/**
	 * Scales 16-bit samples by vol and serializes them as big-endian bytes.
	 *
	 * @param arr source samples (read by absolute index; position untouched)
	 * @param vol linear gain, 0.0 .. 1.0
	 * @return heap buffer of capacity() * 2 bytes, big-endian (ByteBuffer default)
	 */
	public static ByteBuffer shortToByteValue(ShortBuffer arr, float vol) {
		int len = arr.capacity();
		ByteBuffer bb = ByteBuffer.allocate(len * 2);
		for (int i = 0; i < len; i++) {
			bb.putShort(i * 2, (short) ((float) arr.get(i) * vol));
		}
		return bb; // ByteBuffer defaults to big-endian order
	}

	/**
	 * Converts normalized float samples (nominally -1.0 .. 1.0) to
	 * volume-scaled big-endian 16-bit PCM bytes, saturating out-of-range
	 * values instead of letting the (short) cast wrap around.
	 * Ref：https://stackoverflow.com/questions/15087668/how-to-convert-pcm-samples-in-byte-array-as-floating-point-numbers-in-the-range
	 *
	 * @param arr source samples (read by absolute index; position untouched)
	 * @param vol linear gain, 0.0 .. 1.0
	 * @return heap buffer of capacity() * 2 bytes, big-endian
	 */
	public static ByteBuffer floatToByteValue(FloatBuffer arr, float vol) {
		int len = arr.capacity();
		float f;
		ByteBuffer res = ByteBuffer.allocate(len * 2);
		float scale = 32768.0f * vol;
		// The upper clamp must stay representable as a short: +32768 would
		// wrap to -32768 on the cast below.
		float max = Math.min(scale, 32767.0f);
		for (int i = 0; i < len; i++) {
			f = arr.get(i) * scale;
			if (f > max)
				f = max;
			if (f < -scale)
				f = -scale; // fix: negative overshoot was clamped to +scale
			res.putShort(i * 2, (short) f);// i * 2: two bytes per sample
		}
		return res;
	}

	/**
	 * Sets the playback volume.
	 *
	 * @param volume linear gain between 0.0 (mute) and 1.0 (original level);
	 *               applied directly as a multiplier to each sample
	 */
	public void setSoundVolume(float volume) {
		this.vol = volume;
	}

	/** @return current linear gain, 0.0 .. 1.0 */
	public float getSoundVolume() {
		return this.vol;
	}

	/** Parks the playback loop until notifyPlay() is called. */
	public void waitPlay() {
		// Lock on this (== instance) so a cleared singleton reference cannot
		// cause an NPE here after setRunning(null).
		synchronized (this) {
			try {
				this.wait();
			} catch (InterruptedException e) {
				Thread.currentThread().interrupt(); // preserve interrupt status
			}
		}
	}

	/** Wakes a loop parked in waitPlay(). */
	public void notifyPlay() {
		synchronized (this) {
			this.notifyAll();
		}
	}

	/**
	 * Controls playback: TRUE resumes, FALSE pauses, null stops the loop and
	 * discards the singleton so a new thread can be created later.
	 */
	public void setRunning(Boolean isRunning) {
		this.isRunning = isRunning;
		if (Boolean.TRUE.equals(isRunning)) {
			notifyPlay();
		}
		if (isRunning == null) {
			synchronized (AudioPlayThread.class) {
				instance = null;
			}
			// Also wake a paused loop so it can observe the stop request;
			// otherwise setRunning(false) followed by setRunning(null) would
			// leave the thread parked in waitPlay() forever.
			notifyPlay();
		}
	}
}
