package com.wutong.resonance.model.plugin.drawer;

import com.wutong.resonance.model.audio.tool.AudioFile;
import javafx.animation.Animation;
import javafx.animation.KeyFrame;
import javafx.animation.Timeline;
import javafx.application.Platform;
import javafx.scene.layout.Pane;
import javafx.util.Duration;
import lombok.Getter;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;

import javax.sound.sampled.*;
import java.util.Arrays;

/**
 * Sweep-style waveform drawing plugin.
 * <p>
 * Draws the waveform of an audio file onto the canvas with a sweeping animation.
 * Multi-channel audio files are supported; the waveform color and background
 * color are configurable.
 *
 * @author wutong
 * @since 1.0.0
 */
@Slf4j
@Getter
@Setter
public class WaveformDrawer extends DrawerPlugin {

    /**
     * Whether a waveform drawing pass is currently in progress.
     * Volatile: written by {@link #cancel()} (FX thread) and read by the
     * worker thread and the animation handler.
     */
    private volatile boolean isDrawing = false;

    /**
     * Timeline that drives the sweeping waveform animation.
     */
    private Timeline timeline;

    /**
     * Per-channel maximum sample value for each pixel column.
     */
    private short[][] maxValues;

    /**
     * Per-channel minimum sample value for each pixel column.
     */
    private short[][] minValues;

    /**
     * Number of pixel columns on the canvas; determines drawing resolution.
     */
    private int numPixels;

    /**
     * Index of the pixel column currently being drawn.
     */
    private int currentPixel = 0;

    /**
     * Creates a new {@code WaveformDrawer} instance.
     *
     * @param parent parent container that hosts the plugin's canvas
     */
    public WaveformDrawer(Pane parent) {
        super("波形绘制插件", "扫描式波形绘制", parent);
    }

    /**
     * Reads a WAV file (16-bit signed PCM only) and draws its waveform.
     * <p>
     * Decoding and min/max peak extraction run on a worker thread; the actual
     * drawing is animated on the FX thread, one pixel column per timeline frame.
     *
     * @param audioFile the audio file to draw
     */
    @Override
    public void draw(AudioFile audioFile) {
        cancel();
        isDrawing = true;

        THREAD_POOL.execute(() -> {
            // try-with-resources: the stream is closed even when decoding fails
            // (the original code leaked it on the exception path).
            try (AudioInputStream audioInputStream = AudioSystem.getAudioInputStream(audioFile)) {
                AudioFormat format = audioInputStream.getFormat();

                if (format.getEncoding() != AudioFormat.Encoding.PCM_SIGNED || format.getSampleSizeInBits() != 16) {
                    throw new UnsupportedOperationException("仅支持16位有符号PCM格式的WAV文件");
                }

                byte[] bytes = audioInputStream.readAllBytes();

                int channelCount = format.getChannels();
                boolean bigEndian = format.isBigEndian();
                short[] samples = new short[bytes.length / 2];
                for (int i = 0; i < samples.length; i++) {
                    // Assemble 16-bit samples honoring the stream's byte order.
                    // WAV is normally little-endian, but the AudioFormat decides.
                    int b0 = bytes[i * 2] & 0xFF;
                    int b1 = bytes[i * 2 + 1] & 0xFF;
                    samples[i] = bigEndian ? (short) ((b0 << 8) | b1) : (short) ((b1 << 8) | b0);
                }

                numPixels = (int) canvas.getWidth();
                if (numPixels <= 0 || samples.length == 0) {
                    // Nothing to draw on a zero-width canvas or for an empty clip.
                    log.warn("Canvas width is zero or audio clip is empty; skipping waveform drawing");
                    isDrawing = false;
                    return;
                }
                int numSamplePoints = samples.length / channelCount;
                // Clamp to at least 1 to avoid division by zero when the clip has
                // fewer sample points than the canvas has pixel columns.
                int samplesPerPixel = Math.max(1, numSamplePoints / numPixels);

                maxValues = new short[channelCount][numPixels];
                minValues = new short[channelCount][numPixels];

                // Sentinel fill: any column left untouched keeps max < min and is
                // skipped when drawing.
                for (int c = 0; c < channelCount; c++) {
                    Arrays.fill(maxValues[c], Short.MIN_VALUE);
                    Arrays.fill(minValues[c], Short.MAX_VALUE);
                }

                // Interleaved PCM: sample i belongs to channel (i % channelCount).
                for (int i = 0; i < samples.length; i++) {
                    int channel = i % channelCount;
                    int samplePointIndex = i / channelCount;
                    int pixelIndex = Math.min(samplePointIndex / samplesPerPixel, numPixels - 1);

                    maxValues[channel][pixelIndex] = (short) Math.max(
                            maxValues[channel][pixelIndex], samples[i]);
                    minValues[channel][pixelIndex] = (short) Math.min(
                            minValues[channel][pixelIndex], samples[i]);
                }

                Platform.runLater(() -> {
                    gc.setFill(backgroundColor);
                    gc.fillRect(0, 0, canvas.getWidth(), canvas.getHeight());

                    currentPixel = 0;
                    timeline = new Timeline(
                            new KeyFrame(Duration.millis(1), event -> {
                                if (!isDrawing || currentPixel >= numPixels) {
                                    // Reset the flag so isDrawing reflects reality
                                    // once the sweep finishes or is cancelled.
                                    isDrawing = false;
                                    timeline.stop();
                                    return;
                                }

                                gc.setStroke(waveformColor);
                                int pixelToDraw = currentPixel++;

                                // One vertical min/max line per channel for this column.
                                for (int c = 0; c < channelCount; c++) {
                                    short maxSample = maxValues[c][pixelToDraw];
                                    short minSample = minValues[c][pixelToDraw];
                                    if (maxSample < minSample) {
                                        // Column received no samples (clip shorter
                                        // than the canvas width) — leave it blank.
                                        continue;
                                    }
                                    double maxY = normalize(maxSample, c, channelCount);
                                    double minY = normalize(minSample, c, channelCount);
                                    gc.strokeLine(pixelToDraw, maxY, pixelToDraw, minY);
                                }
                            })
                    );
                    timeline.setCycleCount(Animation.INDEFINITE);
                    timeline.play();
                });

            } catch (Exception e) {
                // Reset the flag on failure so a later draw() starts clean.
                isDrawing = false;
                log.error("波形绘制失败", e);
            }
        });
    }

    /**
     * Multi-channel normalization: maps a 16-bit sample to a canvas Y coordinate
     * within the horizontal band assigned to the given channel.
     *
     * @param sample        the 16-bit signed sample value
     * @param channel       channel index (0-based, top band first)
     * @param totalChannels total number of channels
     *
     * @return the normalized Y coordinate on the canvas
     */
    private double normalize(short sample, int channel, int totalChannels) {
        double channelHeight = canvas.getHeight() / totalChannels;
        double centerY = channel * channelHeight + channelHeight / 2;
        // 32768 = -Short.MIN_VALUE, so the full 16-bit range maps into the band.
        return (sample / 32768.0) * (channelHeight / 2) + centerY;
    }

    /**
     * Cancels waveform drawing: stops the animation and resets drawing state.
     */
    @Override
    public void cancel() {
        isDrawing = false;
        if (timeline != null) {
            timeline.stop();
        }
        currentPixel = 0;
    }


    /**
     * Initializes the plugin: binds the canvas size and fills the background.
     */
    @Override
    public void init() {
        super.init();
        gc.setFill(backgroundColor);
        gc.fillRect(0, 0, canvas.getWidth(), canvas.getHeight());
    }

    /**
     * Shuts the plugin down: cancels any in-progress drawing and releases resources.
     */
    @Override
    public void close() {
        cancel();
        super.close();
    }
}