package com.cherry.ndkdemo;

import android.media.AudioAttributes;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.os.Build;

import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;

import static android.media.AudioManager.AUDIO_SESSION_ID_GENERATE;

/**
 * Helper for playing FFmpeg-decoded PCM audio through an {@link AudioTrack}.
 *
 * @author DongMS
 * @since 2020/4/27
 */
public class AudioUtils {

    // Utility class — static methods only, never instantiated.
    private AudioUtils() {
    }

    /**
     * Creates a streaming-mode {@link AudioTrack} configured for 16-bit PCM playback.
     *
     * @param sampleRateInHz sample rate of the decoded audio, in Hz
     * @param nb_channels    channel count: 1 selects mono, any other value selects stereo
     * @return a configured, un-started AudioTrack ready for {@code write()}/{@code play()}
     * @throws IllegalArgumentException if the device rejects the sample-rate/channel/format
     *                                  combination (minimum buffer size query fails)
     */
    @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
    public static AudioTrack createAudioTrack(int sampleRateInHz, int nb_channels) {
        // Decoder output is fixed to 16-bit PCM samples.
        int audioFormat = AudioFormat.ENCODING_PCM_16BIT;

        // Map the channel count onto an output channel mask.
        int channelConfig;
        if (nb_channels == 1) {
            channelConfig = AudioFormat.CHANNEL_OUT_MONO;   // single (left) channel
        } else {
            channelConfig = AudioFormat.CHANNEL_OUT_STEREO; // stereo
        }

        int bufferSizeInBytes = AudioTrack.getMinBufferSize(sampleRateInHz, channelConfig, audioFormat);
        // getMinBufferSize returns ERROR or ERROR_BAD_VALUE (both negative) when the
        // parameters are unsupported; passing a negative size to the AudioTrack
        // constructor would fail obscurely later, so fail fast here instead.
        if (bufferSizeInBytes <= 0) {
            throw new IllegalArgumentException(
                    "Unsupported AudioTrack configuration: sampleRate=" + sampleRateInHz
                            + ", channels=" + nb_channels);
        }

        // Route playback through the music stream (legacy stream-type mapping).
        AudioAttributes attributes = new AudioAttributes.Builder()
                .setLegacyStreamType(AudioManager.STREAM_MUSIC)
                .build();

        AudioFormat format = new AudioFormat.Builder()
                .setChannelMask(channelConfig)
                .setEncoding(audioFormat)
                .setSampleRate(sampleRateInHz)
                .build();

        // MODE_STREAM: audio data is written incrementally while playing.
        return new AudioTrack(attributes, format, bufferSizeInBytes,
                AudioTrack.MODE_STREAM, AUDIO_SESSION_ID_GENERATE);
    }

}
