package com.thunder.ktv.scoreengine2test.audio;

import android.media.AudioAttributes;
import android.media.AudioFormat;
import android.media.AudioTrack;
import android.util.Log;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Arrays;

/**
 * Streams PCM samples from a 48 kHz / 16-bit WAV file (mono or stereo) into an
 * internal circular buffer and hands them out as <em>mono</em> samples via
 * {@link #read(short[])}. For stereo sources only the left channel is delivered.
 *
 * <p>Threading model: {@link #start()} spawns a producer thread
 * ({@code playbackLoop}) that decodes the file into {@code circularBuffer};
 * the caller of {@link #read(short[])} is the consumer. All buffer state
 * ({@code writePosition}, {@code readPosition}, {@code dataCount}) is guarded
 * by {@code bufferLock}; pause/resume hand-off uses {@code pauseLock}.
 *
 * <p>NOTE(review): an AudioTrack is created and {@code play()}ed, but no samples
 * are ever written to it — audible playback appears intentionally disabled and
 * the provider only feeds {@code read()}. Confirm whether the track is still needed.
 */
public class WavAudioProvider implements IAudioProvider {
    private static final String TAG = "WavAudioProvider";

    /** Size in bytes of the canonical 44-byte RIFF/WAVE header this provider expects. */
    private static final int WAV_HEADER_SIZE = 44;

    // --- Required source-audio parameters ---
    private static final int TARGET_SAMPLE_RATE = 48000;
    private static final int AUDIO_FORMAT = AudioFormat.ENCODING_PCM_16BIT;

    private final String wavFilePath;

    // --- Playback machinery ---
    private AudioTrack audioTrack;
    private Thread playbackThread;
    // Channel count of the source file (1 or 2); set by validateWavHeader() before the thread starts.
    private int sourceChannels;

    // --- State flags ---
    private volatile boolean isRunning = false;
    private volatile boolean isPaused = false;
    private volatile boolean isEndOfFile = false;

    // --- Thread-safe circular buffer (producer: playbackLoop, consumer: read()) ---
    private final short[] circularBuffer;
    private int writePosition = 0;   // guarded by bufferLock
    private int readPosition = 0;    // guarded by bufferLock
    private int dataCount = 0;       // number of valid shorts buffered; guarded by bufferLock
    private final Object bufferLock = new Object();
    private final Object pauseLock = new Object();

    /**
     * Creates a provider for the given WAV file.
     *
     * <p>A missing file is only logged here; {@link #start()} will fail cleanly
     * later when header validation cannot read the file.
     *
     * @param wavFilePath absolute path to a 48 kHz / 16-bit PCM WAV file
     */
    public WavAudioProvider(String wavFilePath) {
        this.wavFilePath = wavFilePath;
        File wavFile = new File(wavFilePath);
        if (!wavFile.exists() || !wavFile.isFile()) {
            Log.e(TAG, "WavAudioProvider: File not exists or is not a file -> " + wavFilePath);
        }

        // Size the buffer for 2 seconds of stereo audio; that is also ample for mono.
        int maxChannels = 2;
        int bufferSizeInShorts = TARGET_SAMPLE_RATE * maxChannels * 2; // 2 seconds
        this.circularBuffer = new short[bufferSizeInShorts];
    }

    /**
     * Validates the file, builds an AudioTrack matching the source channel
     * layout, and starts the producer thread. No-op (with a warning) if already
     * running; logs and returns without side effects on validation/creation failure.
     */
    @Override
    public void start() {
        if (isRunning) {
            Log.w(TAG, "Provider is already started.");
            return;
        }

        // 1. Validate the WAV header and capture the channel count.
        if (!validateWavHeader()) {
            Log.e(TAG, "WAV file validation failed. Required: 48000Hz, 16-bit, Mono or Stereo.");
            return;
        }
        Log.d(TAG, "WAV file validation successful. Detected channels: " + sourceChannels);

        // 2. Build the AudioTrack with a channel mask matching the source.
        try {
            int channelConfig = (sourceChannels == 1) ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO;
            int minBufferSize = AudioTrack.getMinBufferSize(TARGET_SAMPLE_RATE, channelConfig, AUDIO_FORMAT);
            audioTrack = new AudioTrack.Builder()
                    .setAudioAttributes(new AudioAttributes.Builder()
                            .setUsage(AudioAttributes.USAGE_MEDIA)
                            .setContentType(AudioAttributes.CONTENT_TYPE_MUSIC)
                            .build())
                    .setAudioFormat(new AudioFormat.Builder()
                            .setEncoding(AUDIO_FORMAT)
                            .setSampleRate(TARGET_SAMPLE_RATE)
                            .setChannelMask(channelConfig) // configured per detected channel count
                            .build())
                    // circularBuffer.length * 2 converts shorts to bytes.
                    .setBufferSizeInBytes(Math.max(minBufferSize, circularBuffer.length * 2))
                    .setTransferMode(AudioTrack.MODE_STREAM)
                    .build();
        } catch (Exception e) {
            Log.e(TAG, "Failed to create AudioTrack", e);
            return;
        }

        // 3. Reset state and launch the producer thread.
        resetBuffer();
        isPaused = false;
        isEndOfFile = false;
        isRunning = true;

        playbackThread = new Thread(this::playbackLoop, "WavPlaybackThread");
        playbackThread.start();
        Log.d(TAG, "Audio provider started.");
    }

    /**
     * Producer loop: skips the WAV header, then pumps little-endian 16-bit
     * samples from the file into the circular buffer until EOF, stop, or error.
     * Always marks {@code isEndOfFile} and wakes the consumer on exit.
     */
    private void playbackLoop() {
        Log.d(TAG, "Playback thread started.");
        try (FileInputStream fis = new FileInputStream(wavFilePath)) {
            // skip() may skip fewer bytes than requested; loop until the whole
            // header is consumed so sample alignment is guaranteed.
            long remaining = WAV_HEADER_SIZE;
            while (remaining > 0) {
                long skipped = fis.skip(remaining);
                if (skipped <= 0) {
                    throw new IOException("Unable to skip WAV header of " + wavFilePath);
                }
                remaining -= skipped;
            }
            audioTrack.play();
            short[] readBuffer = new short[1024 * sourceChannels]; // sized per channel count
            byte[] byteBuffer = new byte[readBuffer.length * 2];

            while (isRunning) {
                // Park here while paused; resume()/stop() notify pauseLock.
                synchronized (pauseLock) {
                    while (isPaused) {
                        pauseLock.wait();
                    }
                }

                int bytesRead = fis.read(byteBuffer);
                if (bytesRead == -1) break;

                // WAV PCM data is little-endian; decode bytes into shorts.
                int shortsRead = bytesRead / 2;
                ByteBuffer.wrap(byteBuffer, 0, bytesRead)
                        .order(ByteOrder.LITTLE_ENDIAN)
                        .asShortBuffer()
                        .get(readBuffer, 0, shortsRead);

                writeToCircularBuffer(readBuffer, shortsRead);
            }

        } catch (Exception e) {
            Log.e(TAG, "Exception in playbackLoop", e);
            if (e instanceof InterruptedException) {
                Thread.currentThread().interrupt(); // preserve interrupt status
            }
        } finally {
            Log.d(TAG, "Playback loop finished.");
            // Wake any consumer blocked in read() so it can observe end-of-stream.
            synchronized (bufferLock) {
                isEndOfFile = true;
                bufferLock.notifyAll();
            }
        }
    }

    /**
     * Reads buffered data as mono samples, blocking until data is available or
     * the stream ends. For stereo sources the left channel is extracted and the
     * right channel discarded.
     *
     * @param buffer destination for mono samples
     * @return number of mono samples written into {@code buffer}, or -1 on end of stream
     */
    @Override
    public int read(short[] buffer) {
        int monoSamplesRead = 0;
        synchronized (bufferLock) {
            try {
                // All buffer state (including dataCount) is inspected only under
                // bufferLock — no unsynchronized fast path.
                while (dataCount == 0 && !isEndOfFile) {
                    if (!isRunning) return -1;
                    bufferLock.wait();
                }

                if (dataCount == 0 && isEndOfFile) {
                    return -1;
                }

                // --- Channel handling ---
                if (sourceChannels == 1) {
                    // Mono source: straight copy.
                    int countToRead = Math.min(buffer.length, dataCount);
                    for (int i = 0; i < countToRead; i++) {
                        buffer[i] = circularBuffer[readPosition];
                        readPosition = (readPosition + 1) % circularBuffer.length;
                    }
                    dataCount -= countToRead;
                    monoSamplesRead = countToRead;
                } else { // sourceChannels == 2
                    // Stereo source: keep the left sample of each frame.
                    int countToRead = Math.min(buffer.length, dataCount / 2);
                    for (int i = 0; i < countToRead; i++) {
                        buffer[i] = circularBuffer[readPosition]; // left channel
                        readPosition = (readPosition + 2) % circularBuffer.length; // skip right channel
                    }
                    dataCount -= countToRead * 2;
                    monoSamplesRead = countToRead;
                }

                // Wake the producer, which may be waiting for buffer space.
                bufferLock.notifyAll();

            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                Log.w(TAG, "read() interrupted while waiting for data.");
                return -1;
            }
        }
        return monoSamplesRead;
    }

    /**
     * Validates the 44-byte WAV header: RIFF/WAVE/fmt identifiers, PCM format,
     * 1 or 2 channels, 48 kHz, 16-bit. On success records the channel count in
     * {@link #sourceChannels}.
     *
     * <p>NOTE(review): assumes the canonical layout with the data chunk at byte
     * 44; files with extra chunks (e.g. LIST) would be misparsed — confirm inputs.
     *
     * @return true if the file format is acceptable, false otherwise
     */
    private boolean validateWavHeader() {
        try (FileInputStream fis = new FileInputStream(wavFilePath)) {
            byte[] header = new byte[WAV_HEADER_SIZE];
            // read() may return fewer bytes than requested; loop until full.
            int offset = 0;
            while (offset < header.length) {
                int n = fis.read(header, offset, header.length - offset);
                if (n == -1) return false; // file shorter than a WAV header
                offset += n;
            }

            ByteBuffer bb = ByteBuffer.wrap(header).order(ByteOrder.LITTLE_ENDIAN);

            // ASCII tags read as little-endian ints: "RIFF", "WAVE", "fmt ".
            final int RIFF_ID = 0x46464952;
            final int WAVE_ID = 0x45564157;
            final int FMT_ID = 0x20746d66;

            if (bb.getInt(0) != RIFF_ID || bb.getInt(8) != WAVE_ID || bb.getInt(12) != FMT_ID) {
                Log.e(TAG, "Invalid WAV file identifiers (RIFF/WAVE/fmt)");
                return false;
            }

            short audioFormat = bb.getShort(20);
            short numChannels = bb.getShort(22);
            int sampleRate = bb.getInt(24);
            short bitsPerSample = bb.getShort(34);

            Log.d(TAG, String.format("WAV Header Info: Format=%d, Channels=%d, SampleRate=%d, BitsPerSample=%d",
                    audioFormat, numChannels, sampleRate, bitsPerSample));

            boolean isValid = audioFormat == 1 && // 1 == uncompressed PCM
                    (numChannels == 1 || numChannels == 2) &&
                    sampleRate == TARGET_SAMPLE_RATE &&
                    bitsPerSample == 16;

            if (isValid) {
                this.sourceChannels = numChannels;
            } else {
                Log.e(TAG, "WAV format validation failed. Mismatched audio parameters.");
            }
            return isValid;

        } catch (IOException e) {
            Log.e(TAG, "Error reading WAV header", e);
            return false;
        }
    }

    /**
     * Appends {@code count} shorts to the circular buffer, blocking while the
     * buffer is full. Returns early (dropping remaining samples) on shutdown.
     *
     * @throws InterruptedException if interrupted while waiting for space
     */
    private void writeToCircularBuffer(short[] data, int count) throws InterruptedException {
        synchronized (bufferLock) {
            for (int i = 0; i < count; i++) {
                while (dataCount == circularBuffer.length) {
                    if (!isRunning) return; // shutting down — stop producing
                    bufferLock.wait();
                }
                circularBuffer[writePosition] = data[i];
                writePosition = (writePosition + 1) % circularBuffer.length;
                dataCount++;
            }
            // Wake a consumer waiting for data.
            bufferLock.notifyAll();
        }
    }

    /** Pauses the producer thread and the AudioTrack. No-op if not running or already paused. */
    @Override
    public void pause() {
        if (!isRunning || isPaused) return;
        isPaused = true;
        if (audioTrack != null && audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) {
            audioTrack.pause();
        }
        Log.d(TAG, "Audio provider paused.");
    }

    /** Resumes a paused provider, waking the producer thread. No-op if not running or not paused. */
    @Override
    public void resume() {
        if (!isRunning || !isPaused) return;
        isPaused = false;
        if (audioTrack != null && audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PAUSED) {
            audioTrack.play();
        }
        synchronized (pauseLock) {
            pauseLock.notifyAll();
        }
        Log.d(TAG, "Audio provider resumed.");
    }

    /**
     * Stops the provider: wakes a paused producer, interrupts and joins the
     * thread (bounded wait), and stops the AudioTrack. Idempotent.
     */
    @Override
    public void stop() {
        if (!isRunning) return;
        isRunning = false;
        // Wake the producer if it is parked in the pause loop. resume() cannot
        // be used here: it early-returns once isRunning is false.
        synchronized (pauseLock) {
            isPaused = false;
            pauseLock.notifyAll();
        }
        if (playbackThread != null) {
            playbackThread.interrupt();
            try {
                playbackThread.join(500); // bounded wait so stop() cannot hang
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
            playbackThread = null;
        }
        if (audioTrack != null && audioTrack.getPlayState() != AudioTrack.PLAYSTATE_STOPPED) {
            audioTrack.stop();
        }
        Log.d(TAG, "Audio provider stopped.");
    }

    /** Stops the provider and releases the AudioTrack's native resources. */
    @Override
    public void release() {
        stop();
        if (audioTrack != null) {
            audioTrack.release();
            audioTrack = null;
        }
        Log.d(TAG, "Audio provider released.");
    }

    /** Clears all circular-buffer state under the lock. */
    private void resetBuffer() {
        synchronized (bufferLock) {
            writePosition = 0;
            readPosition = 0;
            dataCount = 0;
            Arrays.fill(circularBuffer, (short) 0);
        }
    }
}
