package com.example.recorder;

import android.media.AudioAttributes;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioRecord;
import android.media.AudioTrack;
import android.os.Build;
import android.support.annotation.RequiresApi;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;

/**
 * @author hdz
 * @date 2019/06/24
 */
/**
 * Plays a raw PCM file through {@link AudioTrack} on a background worker thread.
 * <p>
 * Usage: {@link #initPlayer(int, int, int)} to configure format and derive the file path,
 * then {@link #start()} / {@link #stop()}. {@link IAudioPlayCallback#onPlayFinish()} is
 * invoked exactly once per {@code start()}, on the worker thread, when playback ends
 * (normally, on error, or when the file is missing).
 */
public class AudioPlayer {
    private static final String TAG = "AudioPlayer";

    /** Sample rate in Hz, as passed to {@link #initPlayer}. */
    private int iSampleRate;
    /** Output channel mask ({@code AudioFormat.CHANNEL_OUT_*}), derived from the raw count. */
    private int iChannel;
    /** PCM encoding: {@code ENCODING_PCM_16BIT} or {@code ENCODING_PCM_8BIT}. */
    private int iBit;
    private AudioTrack mAudioTrack;
    /** Path of the PCM file to play, derived in {@link #initPlayer}. */
    private String mPcmPath;
    /** Chunk size in bytes read from file and written to the track per loop iteration. */
    private int iPrimePlaySize;
    private WorkThread mWorkThread;
    // volatile: written by the caller's thread in stop() and polled by the worker's
    // playback loop; without it the loop might never observe the stop request.
    private volatile boolean bPlaying = false;

    private String mSdcardPath;
    private IAudioPlayCallback mAudioPlayCallback;
    /** Raw channel count (1, 2, or >2 for multichannel PCM played via a channel-index mask). */
    private int channelCount = 0;

    public AudioPlayer(String sdcardPath, IAudioPlayCallback audioPlayCallback) {
        mSdcardPath = sdcardPath;
        mAudioPlayCallback = audioPlayCallback;
    }

    /**
     * Configures the playback format and derives the PCM file path
     * ({@code <sdcardPath>/<rate>_<channels>_<bit>.pcm}).
     *
     * @param iSampleRate sample rate in Hz
     * @param iChannel    channel count: 1 = mono, 2 = stereo; >2 is handled via a
     *                    channel-index mask on API M+ (see {@link #createAudioFormat()})
     * @param iBit        bits per sample; 16 selects PCM 16-bit, anything else PCM 8-bit
     */
    public void initPlayer(int iSampleRate, int iChannel, int iBit) {
        Logger.d(TAG, "initPlayer: iChannel=" + iChannel);
        this.channelCount = iChannel;
        this.iSampleRate = iSampleRate;
        if (iChannel == 1) {
            this.iChannel = AudioFormat.CHANNEL_OUT_MONO;
        } else if (iChannel == 2) {
            this.iChannel = AudioFormat.CHANNEL_OUT_STEREO;
        }
        // NOTE(review): for iChannel > 2 this.iChannel stays 0; playback then relies
        // solely on the channel-index-mask path in createAudioFormat() (API M+ only).

        this.iBit = (iBit == 16) ? AudioFormat.ENCODING_PCM_16BIT : AudioFormat.ENCODING_PCM_8BIT;
        this.mPcmPath = mSdcardPath + "/" + iSampleRate + "_" + iChannel + "_" + iBit + ".pcm";
    }

    /** @return the current track's audio session id, or -1 if no track exists yet. */
    public int getAudioSessionId() {
        if (mAudioTrack != null) {
            return mAudioTrack.getAudioSessionId();
        }
        return -1;
    }

    /** Starts asynchronous playback of the configured PCM file on a new worker thread. */
    public void start() {
        bPlaying = true;
        mWorkThread = new WorkThread();
        mWorkThread.start();
    }

    /** Requests the worker to stop, waits for it to exit, then releases the track. */
    public void stop() {
        bPlaying = false;
        if (mWorkThread != null) {
            try {
                mWorkThread.join();
            } catch (InterruptedException e) {
                // Restore the interrupt status instead of swallowing it, so callers
                // higher up the stack can still observe the interruption.
                Thread.currentThread().interrupt();
            }
            mWorkThread = null;
        }
        releaseAudioTrack();
    }


    /** @return the PCM file if it exists, otherwise null (after logging the miss). */
    private File checkFile() {
        File file = new File(mPcmPath);
        if (!file.exists()) {
            Logger.e(TAG + ",run: " + mPcmPath + " does not exist!");
            return null;
        }
        return file;
    }

    /** Maps an {@link AudioTrack} initialization state constant to a readable name. */
    private String state2Str(int state) {
        switch (state) {
            case AudioTrack.STATE_UNINITIALIZED:
                return "AudioTrack.STATE_UNINITIALIZED";
            case AudioTrack.STATE_INITIALIZED:
                return "AudioTrack.STATE_INITIALIZED";
            case AudioTrack.STATE_NO_STATIC_DATA:
                return "AudioTrack.STATE_NO_STATIC_DATA";
            default:
                return "Unknown State: " + state;
        }
    }

    /** Maps an {@link AudioTrack} playback state constant to a readable name. */
    private String playState2Str(int state) {
        switch (state) {
            case AudioTrack.PLAYSTATE_STOPPED:
                return "AudioTrack.PLAYSTATE_STOPPED";
            case AudioTrack.PLAYSTATE_PAUSED:
                return "AudioTrack.PLAYSTATE_PAUSED";
            case AudioTrack.PLAYSTATE_PLAYING:
                return "AudioTrack.PLAYSTATE_PLAYING";
            default:
                return "Unknown State: " + state;
        }
    }

    /**
     * Creates the {@link AudioTrack} in streaming mode, sized from the minimum buffer
     * required for the configured format.
     */
    private void createAudioTrack() {
        // Minimum buffer size AudioTrack requires for this format; the working chunk
        // size is doubled so each write keeps the track's buffer primed.
        int minBufSize = AudioTrack.getMinBufferSize(iSampleRate, iChannel, iBit);
        if (channelCount > 2) {
            // AudioTrack.getMinBufferSize only understands position masks, so for
            // multichannel PCM approximate via a mono buffer scaled by channel count.
            minBufSize = AudioRecord.getMinBufferSize(iSampleRate, AudioFormat.CHANNEL_IN_MONO, iBit) * channelCount;
        }
        if (minBufSize <= 0) {
            // getMinBufferSize returns ERROR/ERROR_BAD_VALUE for unsupported parameters.
            Logger.e(TAG + ",createAudioTrack: bad min buffer size " + minBufSize);
        }
        iPrimePlaySize = minBufSize * 2;

        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
            AudioAttributes attributes = new AudioAttributes.Builder()
                    .setUsage(AudioAttributes.USAGE_MEDIA)
                    .setContentType(AudioAttributes.CONTENT_TYPE_MUSIC)
                    .build();
            mAudioTrack = new AudioTrack(attributes, createAudioFormat(), minBufSize, AudioTrack.MODE_STREAM, AudioManager.AUDIO_SESSION_ID_GENERATE);
        } else {
            // Legacy constructor (pre-M). STREAM_MUSIC routes through the media volume.
            // MODE_STREAM: the app repeatedly write()s PCM into the track (like a socket);
            // slightly more JNI overhead than MODE_STATIC, but suitable for long audio.
            // MODE_STATIC would preload the whole clip into one buffer — appropriate for
            // short, low-latency sounds such as ringtones, not for streaming a file.
            mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, iSampleRate, iChannel, iBit, minBufSize, AudioTrack.MODE_STREAM);
        }
    }

    /**
     * Builds the {@link AudioFormat}: a positional channel mask for mono/stereo, or a
     * channel-index mask selecting the first {@code channelCount} channels otherwise.
     */
    @RequiresApi(api = Build.VERSION_CODES.M)
    private AudioFormat createAudioFormat() {
        AudioFormat format;
        if (channelCount > 2) {
            // Bits 0..channelCount-1 set (4 -> 0xF, 8 -> 0xFF), generalizing the
            // original hard-coded 4/8-channel cases to any count up to 32. The old
            // default of 0 for other counts produced an invalid (empty) mask.
            int channelIndexMask = (1 << channelCount) - 1;
            format = new AudioFormat.Builder()
                    .setEncoding(iBit)
                    .setSampleRate(iSampleRate)
                    .setChannelIndexMask(channelIndexMask)
                    .build();
        } else {
            format = new AudioFormat.Builder()
                    .setEncoding(iBit)
                    .setSampleRate(iSampleRate)
                    .setChannelMask(iChannel)
                    .build();
        }
        return format;
    }

    /** Stops and releases the track, if any; safe to call repeatedly. */
    private void releaseAudioTrack() {
        if (mAudioTrack != null) {
            mAudioTrack.stop();
            mAudioTrack.release();
            mAudioTrack = null;
        }
    }

    /** Notified (on the worker thread) when playback finishes for any reason. */
    public interface IAudioPlayCallback {
        void onPlayFinish();
    }

    /** Streams the PCM file into the AudioTrack until EOF or {@code bPlaying} clears. */
    private class WorkThread extends Thread {
        @Override
        public void run() {
            super.run();

            File file = checkFile();
            if (file == null) {
                if (mAudioPlayCallback != null) {
                    mAudioPlayCallback.onPlayFinish();
                }
                return;
            }

            FileInputStream fileInputStream = null;
            try {
                // Track creation and buffer allocation are inside the try so that a
                // failure here still reaches the finally-block callback below.
                createAudioTrack();
                byte[] data = new byte[iPrimePlaySize];
                fileInputStream = new FileInputStream(file);
                // available() on a FileInputStream over a regular file reports the
                // remaining byte count, i.e. the total PCM size here.
                int total = fileInputStream.available();
                int offset = 0;
                mAudioTrack.play();
                while (bPlaying) {
                    int len = fileInputStream.read(data, 0, iPrimePlaySize);
                    if (len <= 0) {
                        break;
                    }
                    // Write only the bytes actually read: the final chunk is usually
                    // short, and writing the full buffer would play stale garbage.
                    mAudioTrack.write(data, 0, len);
                    offset += len;
                    if (offset >= total) {
                        break;
                    }
                }
                mAudioTrack.stop();
            } catch (Exception e) {
                Logger.e(TAG + ",run: playback failed: " + e);
            } finally {
                if (fileInputStream != null) {
                    try {
                        fileInputStream.close();
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
                // Always notify, including on error paths — the original skipped the
                // callback whenever an exception interrupted playback.
                if (mAudioPlayCallback != null) {
                    mAudioPlayCallback.onPlayFinish();
                }
            }
        }
    }
}
