package com.pi.shisan.service.audio;

import de.quippy.javamod.mixer.dsp.AudioProcessor;

import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.SourceDataLine;

/**
 * Audio output stream that writes sample data to the system mixer via a
 * {@link SourceDataLine}, optionally routing the samples through an
 * {@link AudioProcessor} DSP stage first.
 *
 * <p>The line is opened and started in the constructor; callers push PCM bytes
 * with {@link #writeSampleData(byte[], int, int)} and release the device with
 * {@link #close()}. Not thread-safe: writes and close must be externally
 * coordinated.
 */
public class AudioOutputStream {

    /** Format of the PCM data; may be null, in which case no line is opened. */
    private final AudioFormat audioFormat;
    /** Mixer output line; null when audioFormat was null (open() skipped). */
    private SourceDataLine sourceLine;
    /** Optional DSP chain applied before output; may be null for pass-through. */
    private final AudioProcessor audioProcessor;

    /**
     * Creates the stream and immediately opens/starts the output line.
     *
     * @param audioFormat    format of the samples to be written; if null, the
     *                       stream is created but no audio device is opened
     * @param audioProcessor optional DSP processor, or null for raw output
     * @throws RuntimeException if the format is unsupported or the device
     *                          cannot be opened
     */
    public AudioOutputStream(AudioFormat audioFormat, AudioProcessor audioProcessor) {
        this.audioFormat = audioFormat;
        this.audioProcessor = audioProcessor;
        open();
    }

    /**
     * Acquires, opens, and starts the source data line for {@link #audioFormat},
     * then hooks up the processor (if any) to the live line.
     */
    private void open() {
        if (audioFormat == null) {
            return; // nothing to open; sourceLine stays null
        }
        try {
            DataLine.Info sourceLineInfo = new DataLine.Info(SourceDataLine.class, audioFormat);
            if (!AudioSystem.isLineSupported(sourceLineInfo)) {
                throw new RuntimeException("Audioformat is not supported");
            }
            sourceLine = (SourceDataLine) AudioSystem.getLine(sourceLineInfo);
            sourceLine.open();
            sourceLine.start();
        } catch (Exception ex) {
            // Leave the field null so later calls can detect the failed open.
            sourceLine = null;
            throw new RuntimeException("Error occurred when opening audio device", ex);
        }
        // The processor needs the live line (e.g. for timing/position queries).
        if (audioProcessor != null) {
            audioProcessor.initializeProcessor(sourceLine);
            audioProcessor.setUseInternalCounter(false);
        }
    }

    /**
     * Writes the whole sample buffer to the output line.
     *
     * @param samples PCM sample bytes in {@link #audioFormat}
     */
    public void writeSampleData(byte[] samples) {
        writeSampleData(samples, 0, samples.length);
    }

    /**
     * Writes a region of the sample buffer to the output line, running it
     * through the DSP processor first when one is configured.
     *
     * @param samples PCM sample bytes in {@link #audioFormat}
     * @param start   offset of the first byte to write
     * @param length  number of bytes to write
     */
    public void writeSampleData(byte[] samples, int start, int length) {
        if (sourceLine == null) {
            return; // no device was opened (null audioFormat) — drop silently
        }
        if (audioProcessor != null) {
            int anzSamples;
            try {
                anzSamples = audioProcessor.writeSampleData(samples, start, length);
            } catch (Exception e) {
                e.printStackTrace();
                // Best-effort fallback: assume the processor copied the input
                // unchanged and flush `length` bytes of its result buffer.
                anzSamples = length;
            }
            sourceLine.write(audioProcessor.getResultSampleBuffer(), 0, anzSamples);
        } else {
            sourceLine.write(samples, start, length);
        }
    }

    /** Resumes the DSP processor after a pause; no-op without a processor. */
    public void resume() {
        if (audioProcessor != null) {
            audioProcessor.resume();
        }
    }

    /** Pauses the DSP processor; no-op without a processor. */
    public void interrupt() {
        if (audioProcessor != null) {
            audioProcessor.pause();
        }
    }

    /**
     * Stops the processor, wakes any threads waiting on this instance, and
     * releases the audio device (drain, then stop, then close — the original
     * closed before stopping, which made the stop() call a dead no-op).
     */
    public void close() {
        if (audioProcessor != null) {
            try {
                audioProcessor.stop();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
        // Wake up any external threads blocked in wait() on this stream.
        synchronized (this) {
            this.notifyAll();
        }
        if (sourceLine == null) {
            return; // nothing was ever opened
        }
        try {
            // Let queued samples play out before tearing the line down.
            if (sourceLine.isRunning()) {
                sourceLine.drain();
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        try {
            sourceLine.stop();
        } catch (Exception e) {
            e.printStackTrace();
        }
        try {
            sourceLine.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
