package com.player;

import org.bytedeco.ffmpeg.global.avutil;
import org.bytedeco.javacv.Frame;

import javax.sound.sampled.*;
import javax.swing.*;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
import java.util.concurrent.TimeUnit;


/**
 * Playback thread that drains decoded audio {@link Frame}s from {@link Data#AUDIO_QUEUE}
 * and writes their samples to a {@link SourceDataLine}. Planar sample formats are
 * converted to interleaved, big-endian 16-bit signed PCM before writing.
 *
 * <p>NOTE(review): the planar branches ({@code FLTP}, {@code S16P}) read exactly
 * {@code samples[0]} and {@code samples[1]}, i.e. they assume stereo input —
 * confirm against the decoder before playing mono or multichannel sources.</p>
 */
public class AudioThread extends Thread {
    private static final float MAX_SHORT_VALUE = 32767.0f;
    private static final float MIN_SHORT_VALUE = -32768.0f;
    private static final float vol = 1; // volume multiplier (1 = unchanged)

    private AudioFormat af = null;
    private SourceDataLine sourceDataLine;
    private final int sampleFormat;   // FFmpeg AV_SAMPLE_FMT_* constant
    private final int sampleRate;     // sample rate in Hz
    private final int audioChannels;  // channel count reported by the decoder

    public SourceDataLine getSourceDataLine() {
        return sourceDataLine;
    }

    /**
     * Opens and starts the data line immediately; the caller must still call
     * {@code start()} on this thread to begin draining the queue.
     *
     * @param sampleFormat  FFmpeg sample format ({@code avutil.AV_SAMPLE_FMT_*})
     * @param sampleRate    sample rate in Hz
     * @param audioChannels number of audio channels
     */
    public AudioThread(int sampleFormat, int sampleRate, int audioChannels) {
        this.sampleFormat = sampleFormat;
        this.sampleRate = sampleRate;
        this.audioChannels = audioChannels;
        initSourceDataLine();
    }

    @Override
    public void run() {
        try {
            while (true) {
                Frame frame;
                try {
                    frame = Data.AUDIO_QUEUE.poll(1, TimeUnit.SECONDS);
                } catch (InterruptedException e) {
                    // Fix: restore the interrupt flag and exit cleanly instead of
                    // killing the thread with an unchecked RuntimeException.
                    Thread.currentThread().interrupt();
                    break;
                }
                if (frame == null) {
                    break; // queue idle for 1s -> assume playback has ended
                }
                if (Data.closed.get()) {
                    frame.close(); // fix: don't leak the last polled frame on shutdown
                    break;
                }
                try {
                    processAudio(frame.samples);
                } finally {
                    frame.close(); // fix: release native frame memory even if write fails
                }
            }
        } finally {
            if (sourceDataLine != null) { // open may have failed in the constructor
                try {
                    sourceDataLine.drain();
                    sourceDataLine.close();
                } catch (Exception e) {
                    System.err.println("SourceDataLine closing failed: " + e.getMessage());
                }
            }
        }
    }

    /**
     * Converts one frame's sample buffers to 16-bit PCM bytes and blocks until
     * the data line has accepted all of them.
     *
     * @param samples one buffer per channel for planar formats, or a single
     *                interleaved buffer for packed formats
     */
    private void processAudio(Buffer[] samples) {
        switch (sampleFormat) {
            case avutil.AV_SAMPLE_FMT_FLTP: // planar float: one buffer per channel
            {
                ByteBuffer leftBytes = floatToByteValue((FloatBuffer) samples[0]);
                ByteBuffer rightBytes = floatToByteValue((FloatBuffer) samples[1]);
                byte[] combined = interleaveChannels(leftBytes.array(), rightBytes.array());
                sourceDataLine.write(combined, 0, combined.length);
                break;
            }
            case avutil.AV_SAMPLE_FMT_S16: // packed signed 16-bit: channels interleaved in one buffer
            {
                byte[] pcm = shortToByteValue((ShortBuffer) samples[0]).array();
                sourceDataLine.write(pcm, 0, pcm.length);
                break;
            }
            case avutil.AV_SAMPLE_FMT_FLT: // packed float: channels interleaved in one buffer
            {
                byte[] pcm = floatToByteValue((FloatBuffer) samples[0]).array();
                sourceDataLine.write(pcm, 0, pcm.length);
                break;
            }
            case avutil.AV_SAMPLE_FMT_S16P: // planar signed 16-bit: one buffer per channel
            {
                ByteBuffer leftBytes = shortToByteValue((ShortBuffer) samples[0]);
                ByteBuffer rightBytes = shortToByteValue((ShortBuffer) samples[1]);
                byte[] combined = interleaveChannels(leftBytes.array(), rightBytes.array());
                sourceDataLine.write(combined, 0, combined.length);
                break;
            }
            // NOTE(review): S32P opens a 32-bit line in initSourceDataLine() but has no
            // branch here, so S32P audio reaches the default arm and aborts — confirm intent.
            default:
            {
                JOptionPane.showMessageDialog(null, "unsupport audio format", "unsupport audio format", JOptionPane.ERROR_MESSAGE);
                System.exit(1); // fix: an error exit must not report success (was exit 0)
                break;
            }
        }
    }

    /**
     * Builds the {@link AudioFormat} matching {@link #sampleFormat}, then gets,
     * opens, and starts the {@link SourceDataLine}. Terminates the JVM when the
     * sample format is unsupported.
     */
    private void initSourceDataLine() {
        switch (sampleFormat) {
            // All 16-bit and float formats are played back as signed 16-bit PCM,
            // big-endian, frame size = channels * 2 bytes.
            case avutil.AV_SAMPLE_FMT_S16:  // packed signed 16-bit
            case avutil.AV_SAMPLE_FMT_FLT:  // packed float (converted to 16-bit)
            case avutil.AV_SAMPLE_FMT_S16P: // planar signed 16-bit (interleaved before writing)
            case avutil.AV_SAMPLE_FMT_FLTP: // planar float (converted to 16-bit)
                af = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, sampleRate, 16, audioChannels, audioChannels * 2, sampleRate, true);
                break;
            case avutil.AV_SAMPLE_FMT_S32P: // planar signed 32-bit; some sound cards cannot open this
                // Fix: a 32-bit sample is 4 bytes per channel, so the frame size is
                // audioChannels * 4 (was audioChannels * 2).
                af = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, sampleRate, 32, audioChannels, audioChannels * 4, sampleRate, true);
                break;
            default:
                // Fix: U8/S32/DBL/U8P/DBLP/S64/S64P used to fall through with af == null
                // and crash later with a NullPointerException; fail fast instead.
                System.out.println("不支持的音乐格式");
                System.exit(1); // fix: nonzero status on error (was exit 0)
        }
        DataLine.Info dataLineInfo = new DataLine.Info(SourceDataLine.class,
                af, AudioSystem.NOT_SPECIFIED);
        try {
            sourceDataLine = (SourceDataLine) AudioSystem.getLine(dataLineInfo);
            sourceDataLine.open(af);
            sourceDataLine.start();
        } catch (LineUnavailableException e) {
            System.err.println("SourceDataLine opening failed: " + e.getMessage());
        }
    }

    /**
     * Interleaves two mono 16-bit PCM byte arrays (2 bytes per sample) into one
     * stereo L,R,L,R... array. Both inputs must have the same even length.
     */
    private byte[] interleaveChannels(byte[] left, byte[] right) {
        byte[] output = new byte[left.length + right.length];
        for (int srcPos = 0, destPos = 0; srcPos < left.length; srcPos += 2, destPos += 4) {
            // left channel sample (2 bytes, big-endian order preserved)
            output[destPos] = left[srcPos];
            output[destPos + 1] = left[srcPos + 1];
            // right channel sample
            output[destPos + 2] = right[srcPos];
            output[destPos + 3] = right[srcPos + 1];
        }
        return output;
    }

    /**
     * Scales each 16-bit sample by {@link #vol} and serializes it into a
     * big-endian byte buffer (2 bytes per sample), matching the AudioFormat
     * opened with {@code bigEndian = true}.
     *
     * <p>NOTE(review): iterates {@code capacity()} elements — if JavaCV ever
     * hands over a buffer whose {@code limit()} is below capacity this would
     * emit stale samples; verify against the frame grabber.</p>
     */
    private ByteBuffer shortToByteValue(ShortBuffer arr) {
        int len = arr.capacity();
        ByteBuffer bb = ByteBuffer.allocate(len << 1);
        for (int i = 0; i < len; i++) {
            bb.putShort(i << 1, (short) ((float) arr.get(i) * AudioThread.vol));
        }
        return bb; // ByteBuffer defaults to big-endian byte order
    }

    /**
     * Converts normalized float samples (expected range [-1, 1]) to 16-bit
     * signed PCM, applying {@link #vol} and clamping to the short range, and
     * serializes them big-endian (2 bytes per sample).
     *
     * <p>NOTE(review): same {@code capacity()} vs {@code limit()} caveat as
     * {@link #shortToByteValue(ShortBuffer)}.</p>
     */
    private ByteBuffer floatToByteValue(FloatBuffer arr) {
        int len = arr.capacity();
        ByteBuffer res = ByteBuffer.allocate(len << 1);
        float scale = MAX_SHORT_VALUE * AudioThread.vol;

        for (int i = 0; i < len; i++) {
            float sample = arr.get(i) * scale;
            // Clamp before the narrowing cast to avoid wrap-around distortion.
            sample = Math.max(MIN_SHORT_VALUE, Math.min(MAX_SHORT_VALUE, sample));
            res.putShort(i << 1, (short) sample);
        }
        return res;
    }

}
