package com.zqq.demo.camera;

import android.annotation.SuppressLint;
import android.content.Context;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaRecorder;
import android.media.audiofx.AcousticEchoCanceler;
import android.media.audiofx.AutomaticGainControl;

import com.zqq.base.utils.LogUtils;

import java.io.IOException;
import java.nio.ByteBuffer;

public class AudioRecordManager {
    Context context;
    private static volatile AudioRecordManager instance;
    private Thread audioThread;
    private MediaCodec mediaCodec;
    private MediaFormat mediaFormat;
    private boolean isAudioRecording;

    /**
     * Returns the process-wide singleton, creating it lazily with
     * double-checked locking on the volatile {@code instance} field.
     *
     * @param context any context; only used on the first call to build the instance
     */
    public static AudioRecordManager getInstance(Context context){
        if(instance == null){
            synchronized (AudioRecordManager.class){
                if(instance == null){
                    instance = new AudioRecordManager(context);
                }
            }
        }
        return instance;
    }

    // NOTE(review): a public constructor defeats the singleton above; it is
    // kept public only so existing callers keep compiling.
    public AudioRecordManager(Context context) {
        // Hold the application context so the static singleton cannot pin an Activity.
        this.context = (context != null) ? context.getApplicationContext() : null;
    }

    private volatile boolean isStart = false;
    AudioRecord audioRecord;
    private AcousticEchoCanceler aec;   // echo canceller bound to the record session
    private AutomaticGainControl agc;   // gain control bound to the record session

    private static final int SAMPLE_RATE = 8000;
    private static final int CHANNEL_CONFIG = AudioFormat.CHANNEL_IN_MONO;
    private static final int AUDIO_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
    // Minimum mono buffer size; chooseAudioRecord() sizes its own buffer separately.
    int bufferSize = AudioRecord.getMinBufferSize(
            SAMPLE_RATE, CHANNEL_CONFIG, AUDIO_FORMAT);

    /**
     * Starts capture: builds the recorder on demand, enables AEC/AGC when the
     * device supports them, and spins up a thread that reads interleaved
     * stereo PCM, keeps the right channel, duplicates it back to stereo, and
     * feeds it to the AAC encoder. No-op when already started or when no
     * recorder can be created.
     */
    public void start(){
        LogUtils.i("bl_audio","start  isStart>>"+ isStart);
        if(isStart){
            return;
        }

        if(audioRecord == null){
            audioRecord = chooseAudioRecord();
        }
        if(audioRecord == null){
            return;
        }

        enableEffects();

        LogUtils.i("bl_audio","audioRecord>>"+ audioRecord);
        if(audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
            audioRecord.startRecording();
        }
        isStart = true;
        isAudioRecording = true;

        audioThread = new Thread(new Runnable() {
            @Override
            public void run() {
                android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_AUDIO);

                if(audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING){
                    stopAudio();
                    return;
                }
                LogUtils.i("bl_audio","Thread isAudioRecording>>"+ isAudioRecording);
                byte[] pcmBuffer = new byte[640];
                while (isAudioRecording){
                    int size = audioRecord.read(pcmBuffer, 0, pcmBuffer.length);
                    // FIX: read() reports failure with 0 OR a negative error code
                    // (ERROR_INVALID_OPERATION, ERROR_BAD_VALUE, ...); the old
                    // check only caught 0.
                    if(size <= 0){
                        stopAudio();
                        break;
                    }
                    // FIX: honor the actual byte count instead of assuming a full
                    // 640-byte read; a short read previously reprocessed stale data.
                    byte[] rightData = extractRightChannel(pcmBuffer, size);
                    processAudio(byteMerger(rightData), rightData.length * 2);
                }
            }
        });
        audioThread.start();
    }

    /**
     * Attaches echo cancellation and automatic gain control to the recorder's
     * audio session when the platform reports them available.
     */
    private void enableEffects() {
        if(AcousticEchoCanceler.isAvailable()) {
            aec = AcousticEchoCanceler.create(audioRecord.getAudioSessionId());
            if (aec != null) {
                aec.setEnabled(true);
            }
        }
        if(AutomaticGainControl.isAvailable()) {
            agc = AutomaticGainControl.create(audioRecord.getAudioSessionId());
            if (agc != null) {
                agc.setEnabled(true);
            }
        }
    }

    /**
     * Copies the right-channel 16-bit samples out of interleaved stereo PCM.
     * Frame layout is [L-lo, L-hi, R-lo, R-hi]; only the first {@code size}
     * bytes of {@code stereo} are valid. Incomplete trailing frames are dropped.
     */
    private static byte[] extractRightChannel(byte[] stereo, int size) {
        int frames = size / 4;               // complete stereo frames in the valid region
        byte[] right = new byte[frames * 2]; // 2 bytes per right-channel sample
        for (int f = 0; f < frames; f++) {
            right[f * 2]     = stereo[f * 4 + 2];
            right[f * 2 + 1] = stereo[f * 4 + 3];
        }
        return right;
    }

    /**
     * Duplicates little-endian 16-bit mono PCM into interleaved stereo
     * (left == right == source sample).
     *
     * @param byte_1 mono PCM; length is expected to be even
     * @return a buffer twice as long holding each sample on both channels
     */
    public byte[] byteMerger(byte[] byte_1){
        byte[] stereo = new byte[byte_1.length * 2];
        // Step over 16-bit samples; the i+1 bound also guards an odd-length
        // tail, which previously read one byte past the end of the array.
        for (int i = 0; i + 1 < byte_1.length; i += 2) {
            stereo[2 * i]     = byte_1[i];
            stereo[2 * i + 1] = byte_1[i + 1];
            stereo[2 * i + 2] = byte_1[i];
            stereo[2 * i + 3] = byte_1[i + 1];
        }
        return stereo;
    }

    private long mPresentTimeUs; // encoder epoch (us); PTS values are relative to this

    /**
     * Pushes one stereo PCM chunk into the AAC encoder and drains every
     * finished output frame to {@link #onEncodedAacFrame}. No-op until
     * {@link #initAudioCodec()} has created the encoder.
     *
     * @param rightData stereo PCM produced by {@link #byteMerger}
     * @param size      number of valid bytes in {@code rightData}
     */
    private void processAudio(byte[] rightData, int size) {
        if(mediaCodec == null){
            return;
        }

        ByteBuffer[] inBuffers = mediaCodec.getInputBuffers();
        int inBufferIndex = mediaCodec.dequeueInputBuffer(-1); // blocks until a buffer frees up
        if (inBufferIndex >= 0) {
            ByteBuffer bb = inBuffers[inBufferIndex];
            bb.clear();
            bb.put(rightData, 0, size);
            long pts = System.nanoTime() / 1000 - mPresentTimeUs;
            mediaCodec.queueInputBuffer(inBufferIndex, 0, size, pts, 0);
        }

        // Drain all pending output. The buffer array is re-fetched after each
        // successful dequeue so an INFO_OUTPUT_BUFFERS_CHANGED cannot leave a
        // stale reference; any negative index ends the drain, matching the
        // previous behavior.
        while (true) {
            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
            int outBufferIndex = mediaCodec.dequeueOutputBuffer(info, 0);
            if (outBufferIndex < 0) {
                break;
            }
            ByteBuffer bb = mediaCodec.getOutputBuffers()[outBufferIndex];
            onEncodedAacFrame(bb, info);
            mediaCodec.releaseOutputBuffer(outBufferIndex, false);
        }
    }

    /** Hands one encoded AAC frame to the muxer, when one is attached. */
    private void onEncodedAacFrame(ByteBuffer byteBuffer, MediaCodec.BufferInfo bufferInfo) {
        if(mediaMuxerHelper != null){
            mediaMuxerHelper.writeAudioData(byteBuffer,bufferInfo);
        }
    }

    MediaMuxerHelper mediaMuxerHelper; // sink for encoded frames; may be null

    public void setMediaMuxerHelper(MediaMuxerHelper mediaMuxerHelper) {
        this.mediaMuxerHelper = mediaMuxerHelper;
    }

    public static final int ABITRATE = 64 * 1024;          // AAC target bitrate (bps)
    public static final String ACODEC = "audio/mp4a-latm"; // AAC MIME type

    /**
     * Creates and starts the AAC-LC encoder (8 kHz; stereo when
     * {@link #audioChannel} is stereo). Must be called before any audio can
     * be encoded; {@link #processAudio} is a no-op until then.
     */
    public void initAudioCodec() {
        try {
            mPresentTimeUs = System.nanoTime() / 1000;
            mediaCodec = MediaCodec.createEncoderByType(ACODEC);
            int ach = audioChannel == AudioFormat.CHANNEL_IN_STEREO ? 2 : 1;
            mediaFormat = MediaFormat.createAudioFormat(ACODEC, audioRate, ach);
            mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, ABITRATE);
            mediaFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
            mediaFormat.setInteger(MediaFormat.KEY_SAMPLE_RATE, audioRate);
            // AudioSpecificConfig 0x15 0x90: AAC-LC, 8000 Hz sample rate, 2 channels.
            byte[] data = new byte[]{(byte) 0x15, (byte) 0x90};
            mediaFormat.setByteBuffer("csd-0", ByteBuffer.wrap(data));
            mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            mediaCodec.start();
        } catch (IOException e) {
            // FIX: surface the failure in the app log instead of printStackTrace().
            LogUtils.i("bl_audio", "initAudioCodec failed>>" + e.getMessage());
        }
    }

    public MediaFormat getMediaFormat() {
        return mediaFormat;
    }

    /**
     * Stops capture and releases the recorder, the audio effects, and the
     * encoder. Safe to call repeatedly; {@link #start()} may be used again
     * afterwards to begin a new session.
     */
    public void stopAudio() {
        isAudioRecording = false;
        // FIX: isStart was never reset, which permanently disabled start()
        // after the first stop.
        isStart = false;

        try{
            if(audioThread != null && audioThread.isAlive()){
                audioThread.interrupt();
            }
        }catch (Exception e){
            LogUtils.i("bl_audio","stopAudio thread>>"+ e.getMessage());
        }
        audioThread = null;

        // FIX: the AEC/AGC effect objects were previously leaked.
        try{
            if (aec != null) {
                aec.release();
            }
            if (agc != null) {
                agc.release();
            }
        }catch (Exception e){
            LogUtils.i("bl_audio","stopAudio effects>>"+ e.getMessage());
        }
        aec = null;
        agc = null;

        try{
            if (audioRecord != null && audioRecord.getState() == AudioRecord.STATE_INITIALIZED) {
                audioRecord.stop();
                audioRecord.release();
            }
        }catch (Exception e){
            LogUtils.i("bl_audio","stopAudio record>>"+ e.getMessage());
        }finally {
            // FIX: a released recorder must never be reused by start().
            audioRecord = null;
        }

        try{
            if(mediaCodec != null){
                mediaCodec.stop();
                mediaCodec.release();
            }
        }catch (Exception e){
            LogUtils.i("bl_audio","stopAudio codec>>"+ e.getMessage());
        }finally {
            mediaCodec = null;
        }
    }

    private static final int audioSource = MediaRecorder.AudioSource.MIC; // capture source
    public static final int audioRate = 8000;                             // sample rate (Hz)
    public static final int audioChannel = AudioFormat.CHANNEL_IN_STEREO; // preferred channel mask
    public static final int audioFormat = AudioFormat.ENCODING_PCM_16BIT; // sample depth
    public static final int SAMPLES_PER_FRAME = 1024;                     // AAC samples/frame/channel
    public int aChannelConfig = 0; // channel mask the recorder was actually opened with

    /**
     * Builds an initialized {@link AudioRecord}, preferring MIC + stereo and
     * falling back to DEFAULT. Returns {@code null} when neither source
     * initializes; {@link #aChannelConfig} records the configuration used.
     */
    @SuppressLint("MissingPermission")
    public AudioRecord chooseAudioRecord() {
        AudioRecord mic = null;
        try {
            mic = new AudioRecord(audioSource, audioRate, audioChannel, audioFormat, getPcmBufferSize());
            if (mic.getState() != AudioRecord.STATE_INITIALIZED) {
                mic.release();
                LogUtils.i("xxxxxx","the AudioRecord is not uninitialized,audioSource="+audioSource);
                mic = new AudioRecord(MediaRecorder.AudioSource.DEFAULT, 8000, AudioFormat.CHANNEL_IN_STEREO, AudioFormat.ENCODING_PCM_16BIT, getPcmBufferSize());
                if (mic.getState() != AudioRecord.STATE_INITIALIZED) {
                    LogUtils.i("xxxxxx","the AudioRecord is not uninitialized,audioSource="+MediaRecorder.AudioSource.DEFAULT);
                    mic.release();
                    mic = null;
                } else {
                    // NOTE(review): preserved from the original — CHANNEL_IN_MONO is
                    // recorded even though the fallback recorder was opened in stereo;
                    // verify this against the consumers of aChannelConfig.
                    aChannelConfig = AudioFormat.CHANNEL_IN_MONO;
                }
            } else {
                aChannelConfig = audioChannel;
            }
            if (mic != null) {
                // FIX: the old code dereferenced mic here even when both attempts
                // failed, throwing an NPE that the catch below silently swallowed.
                LogUtils.i("xxxxxx","aChannelConfig="+aChannelConfig+",recordingState="+mic.getRecordingState());
            }
            return mic;
        } catch (Exception e){
            LogUtils.i("xxxxxx","chooseAudioRecord(), Exception "+e.getMessage());
            if(mic != null){
                mic.release();
            }
            return null;
        }
    }

    /**
     * Computes the recorder buffer size: at least the platform minimum for
     * 8 kHz stereo 16-bit PCM, rounded up to whole AAC frames, never below
     * 1280 bytes.
     */
    private int getPcmBufferSize() {
        int minBufferSize = AudioRecord.getMinBufferSize(audioRate, AudioFormat.CHANNEL_IN_STEREO, AudioFormat.ENCODING_PCM_16BIT);
        int pcmBufSize = 1280;
        if (pcmBufSize < minBufferSize) {
            pcmBufSize = (minBufferSize / SAMPLES_PER_FRAME + 1) * SAMPLES_PER_FRAME * 2;
        }
        return pcmBufSize;
    }

    // NOTE(review): this listener is settable but never invoked anywhere in this
    // class; presumably the capture loop was meant to deliver PCM through
    // onGetPcmFrame — confirm intent before wiring it up.
    private OnSpeakingListener onSpeakingListener;

    public void setOnSpeakingListener(OnSpeakingListener onSpeakingListener) {
        this.onSpeakingListener = onSpeakingListener;
    }

    /** Callback delivering raw PCM frames. */
    public interface OnSpeakingListener{
        void onGetPcmFrame(byte[] datas,int len);
    }
}
