package com.thunder.record_test.views;

import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Path;
import android.util.AttributeSet;
import android.view.View;

import java.util.ArrayList;
import java.util.List;

/**
 * Stereo waveform view that renders the left and right audio channels as two
 * vertically offset traces, or a single trace in mono mode.
 *
 * <p>Feed it 16-bit little-endian PCM via {@link #addAudioData(byte[], int)},
 * or individual samples via {@link #addAmplitude(short, short)} /
 * {@link #addMonoAmplitude(short)}. Only the most recent {@code maxDataSize}
 * samples per channel are drawn (scrolling window).
 *
 * <p>Not thread-safe: call all mutators from the UI thread (they invalidate
 * the view directly).
 */
public class StereoWaveformView extends View {
    private static final int MAX_AMPLITUDE = 32767; // max magnitude of a signed 16-bit PCM sample
    private static final int LINE_WIDTH = 2; // stroke width in pixels
    private static final int LEFT_CHANNEL_COLOR = Color.parseColor("#4285F4"); // left channel (blue)
    private static final int RIGHT_CHANNEL_COLOR = Color.parseColor("#EA4335"); // right channel (red)
    private static final int MONO_CHANNEL_COLOR = Color.parseColor("#6200EE"); // mono trace (purple)

    // Fixed vertical offset (pixels) used to keep the two stereo traces from
    // overlapping at the center line: left is nudged up, right is nudged down.
    private static final int CHANNEL_OFFSET_PIXELS = 2;

    private Paint leftChannelPaint;
    private Paint rightChannelPaint;
    private Paint monoChannelPaint;
    // Paths are reused across frames so onDraw() allocates nothing.
    private Path leftWavePath;
    private Path rightWavePath;
    private Path monoWavePath;
    private List<Short> leftChannelData = new ArrayList<>();
    private List<Short> rightChannelData = new ArrayList<>();
    private List<Short> monoChannelData = new ArrayList<>();
    private int maxDataSize = 100; // number of data points visible at once

    // Gain control. 0.0 leaves the waveform untouched; positive values scale
    // it up (to 5x at MAX_GAIN), negative values scale it down (to 0.1x at MIN_GAIN).
    private float amplitudeGain = 0.0f;
    private static final float MIN_GAIN = -5.0f;
    private static final float MAX_GAIN = 5.0f;

    // Channel mode: true = stereo (two traces), false = mono (one trace).
    private boolean stereoMode = true;

    public StereoWaveformView(Context context) {
        super(context);
        init();
    }

    public StereoWaveformView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init();
    }

    public StereoWaveformView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        init();
    }

    /** Creates the three stroke paints and the reusable paths. */
    private void init() {
        leftChannelPaint = createStrokePaint(LEFT_CHANNEL_COLOR);
        rightChannelPaint = createStrokePaint(RIGHT_CHANNEL_COLOR);
        monoChannelPaint = createStrokePaint(MONO_CHANNEL_COLOR);

        leftWavePath = new Path();
        rightWavePath = new Path();
        monoWavePath = new Path();
    }

    /** Builds one anti-aliased stroke paint in the given color. */
    private Paint createStrokePaint(int color) {
        Paint paint = new Paint();
        paint.setColor(color);
        paint.setStrokeWidth(LINE_WIDTH);
        paint.setAntiAlias(true);
        paint.setStyle(Paint.Style.STROKE);
        return paint;
    }

    /**
     * Sets the channel mode. Switching modes clears all buffered data, since
     * stereo and mono samples are stored in separate buffers.
     *
     * @param stereo true for stereo mode, false for mono mode
     */
    public void setStereoMode(boolean stereo) {
        this.stereoMode = stereo;
        clear();
    }

    /**
     * Returns the current channel mode.
     *
     * @return true if in stereo mode, false if in mono mode
     */
    public boolean isStereoMode() {
        return stereoMode;
    }

    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);

        if (stereoMode) {
            drawStereoWaveform(canvas);
        } else {
            drawMonoWaveform(canvas);
        }
    }

    /**
     * Draws the two stereo traces: left channel above the center line,
     * right channel below it, each using half of the available amplitude range.
     *
     * @param canvas target canvas
     */
    private void drawStereoWaveform(Canvas canvas) {
        if (leftChannelData.isEmpty() && rightChannelData.isEmpty()) {
            return;
        }

        if (!leftChannelData.isEmpty()) {
            // direction -1: positive samples move the trace up from center.
            drawChannel(canvas, leftChannelData, leftWavePath, leftChannelPaint,
                    -1, 0.5f, CHANNEL_OFFSET_PIXELS);
        }

        if (!rightChannelData.isEmpty()) {
            // direction +1: positive samples move the trace down from center.
            drawChannel(canvas, rightChannelData, rightWavePath, rightChannelPaint,
                    +1, 0.5f, CHANNEL_OFFSET_PIXELS);
        }
    }

    /**
     * Draws the single mono trace centered on the view, using the full
     * amplitude range.
     *
     * @param canvas target canvas
     */
    private void drawMonoWaveform(Canvas canvas) {
        if (monoChannelData.isEmpty()) {
            return;
        }
        drawChannel(canvas, monoChannelData, monoWavePath, monoChannelPaint, -1, 1.0f, 0);
    }

    /**
     * Builds and strokes one waveform trace from the newest {@code maxDataSize}
     * samples of {@code data}.
     *
     * @param canvas        target canvas
     * @param data          sample buffer to render
     * @param path          reusable path (reset before use)
     * @param paint         stroke paint for this trace
     * @param direction     -1 to plot above the center line, +1 below
     * @param scaleFraction fraction of the half-height available to the trace
     *                      (0.5 per stereo channel, 1.0 for mono)
     * @param pixelOffset   fixed pixel offset away from the center line
     */
    private void drawChannel(Canvas canvas, List<Short> data, Path path, Paint paint,
                             int direction, float scaleFraction, float pixelOffset) {
        float height = getHeight();
        float centerY = height / 2;
        // Horizontal spacing so maxDataSize points span the full width.
        float xInterval = getWidth() / (float) (maxDataSize - 1);

        path.reset();
        float x = 0;
        boolean firstPoint = true;

        for (int i = Math.max(0, data.size() - maxDataSize); i < data.size(); i++) {
            // Normalize to [-1, 1], then apply the user gain.
            float scaledAmplitude = applyGain(data.get(i) / (float) MAX_AMPLITUDE);

            float y = centerY + direction * (scaledAmplitude * centerY * scaleFraction + pixelOffset);

            // Clamp so the trace never leaves the view.
            y = Math.max(0, Math.min(height, y));

            if (firstPoint) {
                path.moveTo(x, y);
                firstPoint = false;
            } else {
                path.lineTo(x, y);
            }

            x += xInterval;
        }

        canvas.drawPath(path, paint);
    }

    /**
     * Applies the configured gain to a normalized amplitude.
     *
     * @param amplitude original amplitude, normalized to [-1, 1]
     * @return amplitude after gain, clamped back to [-1, 1]
     */
    private float applyGain(float amplitude) {
        // Gain 0.0 means "unity": return the sample unchanged.
        if (amplitudeGain == 0.0f) {
            return amplitude;
        }

        float gainFactor;

        if (amplitudeGain > 0) {
            // Positive gain: scale linearly up to 5x at MAX_GAIN.
            gainFactor = 1.0f + (amplitudeGain / MAX_GAIN) * 4.0f;
        } else {
            // Negative gain: scale linearly down to 0.1x at MIN_GAIN.
            gainFactor = 1.0f - (Math.abs(amplitudeGain) / Math.abs(MIN_GAIN)) * 0.9f;
        }

        // Clamp to [-1, 1] so boosted samples cannot overshoot the view.
        return Math.max(-1.0f, Math.min(1.0f, amplitude * gainFactor));
    }

    /**
     * Sets the waveform gain and redraws.
     *
     * @param gain gain value, clamped to [MIN_GAIN, MAX_GAIN]
     */
    public void setAmplitudeGain(float gain) {
        this.amplitudeGain = Math.max(MIN_GAIN, Math.min(MAX_GAIN, gain));
        invalidate();
    }

    /**
     * Returns the current waveform gain.
     *
     * @return current gain value
     */
    public float getAmplitudeGain() {
        return amplitudeGain;
    }

    /**
     * Adds one stereo sample pair. In mono mode the two channels are averaged
     * into a single mono sample instead.
     *
     * @param leftAmplitude  left channel sample
     * @param rightAmplitude right channel sample
     */
    public void addAmplitude(short leftAmplitude, short rightAmplitude) {
        if (stereoMode) {
            leftChannelData.add(leftAmplitude);
            rightChannelData.add(rightAmplitude);

            // Keep the buffers bounded (channels grow in lockstep).
            leftChannelData = trimToWindow(leftChannelData);
            rightChannelData = trimToWindow(rightChannelData);
        } else {
            short monoAmplitude = (short) ((leftAmplitude + rightAmplitude) / 2);
            addMonoAmplitude(monoAmplitude);
        }

        invalidate();
    }

    /**
     * Adds one mono sample.
     *
     * @param amplitude sample value
     */
    public void addMonoAmplitude(short amplitude) {
        monoChannelData.add(amplitude);
        monoChannelData = trimToWindow(monoChannelData);
        invalidate();
    }

    /**
     * Returns {@code data} trimmed to the newest {@code maxDataSize} samples
     * once it exceeds twice that size, as an independent copy.
     *
     * <p>BUG FIX: the previous code kept the {@code subList(...)} view itself.
     * A sublist view retains its entire backing list, and appends through the
     * view grow the backing list, so the full sample history was never
     * released — an unbounded memory leak during long recordings. Copying into
     * a fresh {@link ArrayList} drops the old backing storage.
     */
    private List<Short> trimToWindow(List<Short> data) {
        if (data.size() <= maxDataSize * 2) {
            return data;
        }
        return new ArrayList<>(data.subList(data.size() - maxDataSize, data.size()));
    }

    /**
     * Adds a batch of raw audio data, downsampled for display.
     *
     * <p>Assumes 16-bit little-endian PCM. In stereo mode frames are 4 bytes
     * (L then R, interleaved) and the 32-byte stride keeps every 8th frame;
     * in mono mode frames are 2 bytes and the 16-byte stride keeps every 8th
     * sample.
     *
     * @param buffer     audio byte buffer
     * @param bufferSize number of valid bytes in {@code buffer}
     */
    public void addAudioData(byte[] buffer, int bufferSize) {
        if (stereoMode) {
            for (int i = 0; i < bufferSize; i += 32) {
                if (i + 3 < bufferSize) {
                    // Assemble little-endian shorts: low byte first.
                    short leftAmplitude = (short) ((buffer[i] & 0xFF) | ((buffer[i + 1] & 0xFF) << 8));
                    short rightAmplitude = (short) ((buffer[i + 2] & 0xFF) | ((buffer[i + 3] & 0xFF) << 8));

                    addAmplitude(leftAmplitude, rightAmplitude);
                }
            }
        } else {
            for (int i = 0; i < bufferSize; i += 16) {
                if (i + 1 < bufferSize) {
                    short amplitude = (short) ((buffer[i] & 0xFF) | ((buffer[i + 1] & 0xFF) << 8));
                    addMonoAmplitude(amplitude);
                }
            }
        }
    }

    /**
     * Clears all buffered samples for all channels and redraws.
     */
    public void clear() {
        leftChannelData.clear();
        rightChannelData.clear();
        monoChannelData.clear();
        invalidate();
    }
}