package com.thunder.ktv.scoreengine2test.ui;

import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.PorterDuff;
import android.util.AttributeSet;
import android.view.SurfaceView;

import com.thunder.ktv.scoreengine2test.R;

/**
 * WaveformView 类
 * WaveformView Class
 *
 * 该类设计用于实时显示音频波形。它支持分别用绿色和红色显示左右声道的波形。此外，还设有黄色的中心线和峰值线，帮助可视化音频信号的振幅。
 * This class is designed to display audio waveforms in real-time. It supports drawing waveforms for both left and right audio channels,
 * represented in green and red colors respectively. Additionally, it features a center line and peak lines in yellow to help visualize
 * the amplitude of the audio signals.
 *
 * 创建时间：2024年4月12日
 * Created on: April 12, 2024
 *
 * 作者：来自OpenAI的ChatGPT by 成凯
 * Author: ChatGPT from OpenAI by 成凯
 *
 * 使用方法：
 * Usage:
 * - 通过调用 updateAudioData(short[] leftData, short[] rightData) 更新左右声道的音频数据。
 * - Update the audio data by calling updateAudioData(short[] leftData, short[] rightData) with audio samples for left and right channels.
 * - 视图将在提供新数据时自动重绘更新的波形。
 * - The view will automatically redraw itself with the updated waveforms whenever new data is provided.
 * - 确保以实时方式捕捉并提供音频数据，以动态显示波形。
 * - Ensure audio data is captured and provided in real-time to display the waveform dynamically.
 */
public class WaveformView extends SurfaceView {
    private final Paint paintLeft = new Paint();
    private final Paint paintRight = new Paint();
    private final Paint paintCenterLine = new Paint();
    private final Path wavePathLeft = new Path();
    private final Path wavePathRight = new Path();
    // Latest PCM frames per channel; swapped atomically under `this` lock in updateAudioData().
    private short[] audioDataLeft;
    private short[] audioDataRight;

    // Semi-transparent grey backdrop (ARGB, alpha 0xCC).
    private static final int BACKGROUND_COLOR = 0xCCCCCCCC;

    // Full-scale samples reach 90% of the half-height so peaks stay off the view edges;
    // matches the peak guide lines drawn in drawCenterAndPeakLines().
    private static final float AMPLITUDE_SCALE = 0.9f;

    /** Programmatic-creation constructor. */
    public WaveformView(Context context) {
        this(context, null);
    }

    /** XML-inflation constructor. */
    public WaveformView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init();
    }

    private void init() {
        // SurfaceView disables the normal draw path in its constructor
        // (setWillNotDraw(true)); without re-enabling it here, onDraw() is
        // never called and the view stays blank.
        setWillNotDraw(false);

        // Left channel: green stroke.
        paintLeft.setColor(Color.GREEN);
        paintLeft.setStrokeWidth(2);
        paintLeft.setStyle(Paint.Style.STROKE);

        // Right channel: red stroke.
        paintRight.setColor(Color.RED);
        paintRight.setStrokeWidth(2);
        paintRight.setStyle(Paint.Style.STROKE);

        // Center and peak guide lines: yellow.
        paintCenterLine.setColor(getResources().getColor(R.color.common_yellow));
        paintCenterLine.setStrokeWidth(2);
        paintCenterLine.setStyle(Paint.Style.STROKE);
    }

    /**
     * Supplies fresh PCM samples for both channels and schedules a redraw.
     * May be called from any thread (e.g. an audio-capture thread):
     * postInvalidate() is thread-safe, unlike invalidate().
     *
     * @param leftData  16-bit PCM samples for the left channel (may be null)
     * @param rightData 16-bit PCM samples for the right channel (may be null)
     */
    public synchronized void updateAudioData(short[] leftData, short[] rightData) {
        this.audioDataLeft = leftData;
        this.audioDataRight = rightData;
        postInvalidate(); // trigger a redraw on the UI thread
    }

    @Override
    protected synchronized void onDraw(Canvas canvas) {
        super.onDraw(canvas);
        // Clear any previous frame to transparent, then lay down the
        // semi-transparent grey backdrop.
        canvas.drawColor(BACKGROUND_COLOR, PorterDuff.Mode.CLEAR);
        canvas.drawColor(BACKGROUND_COLOR);
        drawCenterAndPeakLines(canvas);
        // Nudge the channels 1px apart horizontally so they remain
        // distinguishable when the signals overlap.
        drawWaveform(canvas, audioDataLeft, wavePathLeft, paintLeft, -1);
        drawWaveform(canvas, audioDataRight, wavePathRight, paintRight, 1);
    }

    /**
     * Draws one channel's waveform as a polyline across the full view width.
     * Samples are scaled against the fixed 16-bit full-scale value
     * (Short.MAX_VALUE) so the vertical scale is stable between frames.
     *
     * @param canvas    target canvas
     * @param audioData PCM samples; nothing is drawn when null or empty
     * @param wavePath  reusable path object (avoids per-frame allocation)
     * @param paint     stroke paint for this channel
     * @param xOffset   horizontal shift in pixels applied to every point
     */
    private void drawWaveform(Canvas canvas, short[] audioData, Path wavePath, Paint paint, float xOffset) {
        if (audioData == null || audioData.length == 0) {
            return;
        }
        wavePath.reset();
        float width = getWidth();
        float centerY = getHeight() / 2f;
        // Fixed normalization for 16-bit PCM. (A previous revision computed
        // the frame's own max amplitude here and then discarded it — that
        // per-frame O(n) scan has been removed.)
        float maxAmplitude = Short.MAX_VALUE;

        for (int i = 0; i < audioData.length; i++) {
            float x = xOffset + i * (width / (float) audioData.length);
            float y = centerY - (audioData[i] / maxAmplitude) * (centerY * AMPLITUDE_SCALE);
            if (i == 0) {
                wavePath.moveTo(x, y);
            } else {
                wavePath.lineTo(x, y);
            }
        }

        canvas.drawPath(wavePath, paint);
    }

    /**
     * Draws the horizontal center line plus the upper/lower peak guide lines
     * at +/-90% of the half-height, matching AMPLITUDE_SCALE.
     */
    private void drawCenterAndPeakLines(Canvas canvas) {
        float width = getWidth();
        float centerY = getHeight() / 2f;

        // Center line (zero amplitude).
        canvas.drawLine(0, centerY, width, centerY, paintCenterLine);

        // Peak guide lines at the full-scale rendering bound.
        float peakOffset = centerY * AMPLITUDE_SCALE;
        canvas.drawLine(0, centerY - peakOffset, width, centerY - peakOffset, paintCenterLine);
        canvas.drawLine(0, centerY + peakOffset, width, centerY + peakOffset, paintCenterLine);
    }
}



