package ldh.im.fxbase.service;

import ldh.im.fxbase.util.AudioUtil;

import javax.sound.sampled.*;
import java.io.*;

public class AudioService extends AudioBaseService {

    /** Encoding for captured audio: 16-bit signed PCM (signedness is carried by the encoding). */
    private static final AudioFormat.Encoding AUDIO_ENCODE = AudioFormat.Encoding.PCM_SIGNED;

    /**
     * Creates a recorder that saves files under {@code saveDir}, using the
     * base class's default maximum recording time.
     */
    public AudioService(String saveDir) {
        super(saveDir);
    }

    /**
     * Creates a recorder that saves files under {@code saveDir} and stops
     * after {@code recordTotalTime} seconds.
     */
    public AudioService(String saveDir, int recordTotalTime) {
        super(saveDir, recordTotalTime);
    }

    /**
     * Returns the format used both for capture and for the written WAVE file:
     * 8 kHz, 16-bit signed PCM, mono, little-endian. Frame size is 2 bytes
     * (one 16-bit mono sample) and the frame rate equals the sample rate.
     */
    @Override
    public AudioFormat getAudioFormat() {
        float sampleRate = 8000;      // supported rates: 8000, 11025, 16000, 22050, 44100
        int sampleSizeInBits = 16;    // 8 or 16
        int channels = 1;             // mono
        int frameSize = 2;            // 16-bit mono => 2 bytes per frame
        boolean bigEndian = false;    // WAVE PCM data is little-endian
        return new AudioFormat(AUDIO_ENCODE, sampleRate, sampleSizeInBits, channels, frameSize, sampleRate, bigEndian);
    }

    /**
     * Builds the recording task. The task reads from {@code targetDataLine}
     * until {@code flag} is cleared or {@code recordTotalTime} seconds have
     * elapsed, then writes the captured PCM bytes to a WAVE file under
     * {@code saveDir}. The target path is also published via the inherited
     * {@code audioFile} field before the task starts.
     */
    public Runnable getAudioRecordThread() {
        AudioFileFormat.Type fileType = AudioFileFormat.Type.WAVE;
        audioFile = saveDir + File.separator + getFileName() + "." + getFileType(fileType);
        // Capture the path in a local so the lambda is unaffected if the
        // audioFile field is reassigned by a later call.
        String path = audioFile;
        return () -> {
            File outFile = new File(path);
            ByteArrayOutputStream captured = new ByteArrayOutputStream();
            long startTime = System.currentTimeMillis();
            long elapsed = 0;
            try {
                targetDataLine.open(audioFormat);
                targetDataLine.start();
                byte[] fragment = new byte[1024];
                // Capture until recording is stopped externally (flag) or the
                // time budget is exhausted. 1000L avoids int overflow for
                // large recordTotalTime values.
                while (flag && elapsed <= recordTotalTime * 1000L) {
                    int length = targetDataLine.read(fragment, 0, fragment.length);
                    if (length > 0) {       // guard against zero/negative read lengths
                        captured.write(fragment, 0, length);
                    }
                    elapsed = System.currentTimeMillis() - startTime;
                    changeTime(elapsed);
                }

                // Wrap the captured bytes in an audio stream and persist as WAVE.
                // try-with-resources guarantees the stream is closed even if
                // AudioSystem.write fails (the old close() helper NPE'd when an
                // exception fired before the stream was created).
                audioFormat = getAudioFormat();
                byte[] audioData = captured.toByteArray();
                try (AudioInputStream ais = new AudioInputStream(
                        new ByteArrayInputStream(audioData),
                        audioFormat,
                        audioData.length / audioFormat.getFrameSize())) {
                    AudioSystem.write(ais, fileType, outFile);
                }
                stopRecord();
                // elapsed is in milliseconds; divide by 1000 for seconds
                // (the previous /100 was neither ms nor s).
                System.out.println("record time:" + elapsed / 1000);
            } catch (Exception e) {
                e.printStackTrace();
            }
        };
    }

    /**
     * Maps an {@link AudioFileFormat.Type} to the extension used when naming
     * recordings. Only WAVE is produced by this class, and any type maps to
     * {@code "wave"}.
     * NOTE(review): the conventional extension is "wav"
     * ({@code fileType.getExtension()}); "wave" is kept so existing file
     * names and downstream consumers are unaffected — confirm before changing.
     */
    private String getFileType(AudioFileFormat.Type fileType) {
        return "wave";
    }
}

