<!DOCTYPE html>
<html lang="en">

<head>
    <meta charset="utf-8">
    <meta name="viewport" content="width=device-width, initial-scale=1">
    <title>Circular Waveform Looper</title>
    <script src="https://cdnjs.cloudflare.com/ajax/libs/wavesurfer.js/7.7.3/wavesurfer.min.js"></script>
    <style>
        .loopy-container {
            padding: 20px;
        }

        .controls {
            margin-bottom: 20px;
        }

        .transport-controls {
            display: flex;
            align-items: center;
            gap: 10px;
        }

        .bpm {
            font-size: 14px;
        }

        .tracks-grid {
            display: grid;
            grid-template-columns: repeat(auto-fill, minmax(200px, 1fr));
            gap: 20px;
        }

        .track {
            text-align: center;
        }

        .track-info {
            margin-bottom: 10px;
            display: flex;
            justify-content: center;
            align-items: center;
            gap: 10px;
        }

        .inner-circle {
            position: absolute;
            top: 50%;
            left: 50%;
            transform: translate(-50%, -50%);
            width: 30%;
            height: 30%;
            border-radius: 50%;
            background: white;
            z-index: 2;
        }

        .waveform-container {
            position: absolute;
            top: 0;
            left: 0;
            width: 100%;
            height: 100%;
            pointer-events: none;
            z-index: 1;
        }

        .circular-canvas {
            position: absolute;
            top: 0;
            left: 0;
            width: 100%;
            height: 100%;
            transform: rotate(-90deg);
        }

        .progress-indicator {
            position: absolute;
            width: 15px;
            height: 6px;
            background: #ff4081;
            top: -12px;
            left: 50%;
            transform-origin: 70% 112px;
            z-index: 3;
            margin-left: -10px;
            pointer-events: none;
            box-shadow: 0 0 4px rgba(0, 0, 0, 0.3);
            transition: transform 0.05s ease;
        }

        .progress-track {
            position: absolute;
            top: -10px;
            left: -10px;
            right: -10px;
            bottom: -10px;
            border: 2px solid #e0e0e0;
            border-radius: 50%;
            pointer-events: none;
            transition: transform 0.05s ease;
        }

        .track-circle {
            position: relative;
            width: 200px;
            height: 200px;
            border-radius: 50%;
            cursor: pointer;
            margin: 0 auto;
            background: #f0f0f0;
            transition: transform 0.05s ease;
        }

        .track-circle.active {
            border: 2px solid #007bff;
        }

        .track-circle,
        .progress-track,
        .progress-indicator {
            transition: transform 0.1s linear;
        }

        .file-input {
            display: none;
        }

        .load-button {
            padding: 8px 16px;
            background: #007bff;
            color: white;
            border: none;
            border-radius: 4px;
            cursor: pointer;
        }

        .track-play-button {
            padding: 4px 8px;
            background: #28a745;
            color: white;
            border: none;
            border-radius: 4px;
            cursor: pointer;
        }
    </style>
</head>

<body>
    <div id="app">
        <div class="loopy-container">
            <div class="controls">
                <div class="transport-controls">
                    <button @click="togglePlayAll">{{ isPlayingAll ? '⏸ All' : '▶ All' }}</button>
                    <div class="bpm">{{ bpm }} BPM</div>
                    <input type="file" class="file-input" ref="fileInput" @change="handleFileUpload" accept="audio/*">
                    <button class="load-button" @click="triggerFileInput">Load Audio</button>
                </div>
            </div>

            <div class="tracks-grid">
                <div class="track" v-for="track in tracks" :key="track.id">
                    <div class="track-info">
                        <span>Track {{ track.id + 1 }}</span>
                        <button class="track-play-button" @click.stop="toggleTrack(track.id)">{{ track.isPlaying ? '⏸' :
                            '▶' }}</button>
                    </div>
                    <div class="track-circle" :class="{ active: track.id === activeTrackId }"
                        @click="selectTrack(track.id)" :style="getTrackStyle(track)">
                        <div class="progress-track" :style="getTrackStyle(track)"></div>
                        <div class="inner-circle"></div>
                        <div :id="'waveform-' + track.id" class="waveform-container">
                            <canvas :id="'canvas-' + track.id" class="circular-canvas"></canvas>
                        </div>
                        <div class="progress-indicator" :style="getProgressIndicatorStyle(track)"></div>
                    </div>
                </div>
            </div>
        </div>
    </div>

    <script src="https://unpkg.com/vue@3/dist/vue.global.js"></script>
    <script>
        const app = Vue.createApp({
            data() {
                return {
                    isPlayingAll: false,
                    bpm: 120,
                    activeTrackId: null,
                    tracks: [
                        { id: 0, color: '#1e88e5', wavesurfer: null, audioBuffer: null, peaks: null, isPlaying: false, progress: 0, amplitude: 1, animationFrame: null, isAnalyserConnected: false },
                        { id: 1, color: '#43a047', wavesurfer: null, audioBuffer: null, peaks: null, isPlaying: false, progress: 0, amplitude: 1, animationFrame: null, isAnalyserConnected: false },
                        { id: 2, color: '#e53935', wavesurfer: null, audioBuffer: null, peaks: null, isPlaying: false, progress: 0, amplitude: 1, animationFrame: null, isAnalyserConnected: false },
                    ],
                    audioContext: null,
                    audioInitialized: false,
                }
            },

            async mounted() {
                try {
                    this.audioContext = new (window.AudioContext || window.webkitAudioContext)();
                    this.audioContext.suspend();
                    window.addEventListener('resize', this.handleResize);

                    await this.loadDefaultAudio();
                    document.addEventListener('click', this.initializeAudio, { once: true });
                } catch (error) {
                    console.error('Error in mounted:', error);
                }
            },

            beforeUnmount() {
                // Remove the window listener registered in mounted() so the
                // handler does not leak after the component is torn down.
                window.removeEventListener('resize', this.handleResize);
            },

            methods: {
                handleResize() {
                    this.tracks.forEach(track => {
                        if (track.peaks) {
                            this.drawCircularWaveform(track.id, track.peaks);
                        }
                    });
                },

                async initializeAudio() {
                    if (!this.audioInitialized) {
                        await this.audioContext.resume();
                        this.audioInitialized = true;
                        console.log('Audio context initialized');

                        // 重新加载所有轨道的 WaveSurfer
                        for (let track of this.tracks) {
                            if (track.audioBuffer) {
                                await this.initWaveSurfer(track.id);
                            }
                        }
                    }
                },

                async toggleTrack(trackId) {
                    const track = this.tracks[trackId];
                    if (!track.wavesurfer || !track.audioBuffer) return;

                    if (!this.audioInitialized) {
                        await this.initializeAudio();
                    }

                    track.isPlaying = !track.isPlaying;

                    if (track.isPlaying) {
                        track.wavesurfer.play();
                        // 只在第一次或没有完整波形数据时进行动画
                        if (!track.hasPlayedFirstTime || !track.finalPeaks) {
                            this.drawCircularWaveform(trackId, track.peaks, true);
                        } else {
                            // 后续播放时直接显示完整波形
                            this.drawCircularWaveform(trackId, track.finalPeaks, false);
                        }
                    } else {
                        track.wavesurfer.pause();
                        if (track.waveformAnimationFrame) {
                            cancelAnimationFrame(track.waveformAnimationFrame);
                        }
                    }

                    this.updateGlobalPlayState();
                },

                togglePlayAll() {
                    this.isPlayingAll = !this.isPlayingAll;
                    this.tracks.forEach(track => {
                        if (track.wavesurfer && track.audioBuffer) {
                            track.isPlaying = this.isPlayingAll;
                            if (this.isPlayingAll) {
                                track.wavesurfer.play();
                            } else {
                                track.wavesurfer.pause();
                            }
                        }
                    });
                },

                updateGlobalPlayState() {
                    this.isPlayingAll = this.tracks.every(track => track.isPlaying);
                },



                async loadDefaultAudio() {
                    try {
                        console.log('Loading default audio file...');
                        const response = await fetch('./audio/1-4.mp3');
                        const arrayBuffer = await response.arrayBuffer();
                        console.log('Audio file loaded, size:', arrayBuffer.byteLength);

                        for (let track of this.tracks) {
                            console.log(`Processing track ${track.id}`);
                            await this.loadAudioForTrack(track.id, arrayBuffer.slice(0));
                        }
                    } catch (error) {
                        console.error('Error loading default audio:', error);
                    }
                },

                async initWaveSurfer(trackId) {
                    // Build (or rebuild) the hidden WaveSurfer player for one
                    // track and wire an AnalyserNode that drives the amplitude
                    // pulse effect. The visible waveform is drawn on our own
                    // canvas; WaveSurfer's container is kept hidden.
                    // 1. Setup
                    const track = this.tracks[trackId];

                    // Destroy any existing WaveSurfer instance to prevent leaks.
                    if (track.wavesurfer) {
                        track.wavesurfer.destroy();
                    }

                    // Create (or reuse) a hidden container element for WaveSurfer.
                    let container = document.getElementById(`hidden-wavesurfer-${trackId}`);
                    if (!container) {
                        container = document.createElement('div');
                        container.id = `hidden-wavesurfer-${trackId}`;
                        container.style.display = 'none';
                        document.body.appendChild(container);
                    }

                    // 2. Create the WaveSurfer instance
                    track.wavesurfer = WaveSurfer.create({
                        container: container,
                        height: 1,
                        width: 1,
                        backend: 'MediaElement', // MediaElement backend gives better control over the audio source
                        waveColor: track.color,
                        interact: false,
                        cursorWidth: 0,
                        fillParent: true,
                        minPxPerSec: 1,
                        mediaControls: false,
                        normalize: true
                    });

                    // 3. Set up the audio analyser.
                    // NOTE(review): this creates a brand-new AudioContext on every
                    // call (per track, per re-init) rather than reusing
                    // this.audioContext — browsers cap the number of live
                    // contexts, so consider sharing one; left as-is here.
                    let audioContext = new (window.AudioContext || window.webkitAudioContext)();
                    let analyser = audioContext.createAnalyser();
                    analyser.fftSize = 256; // FFT size sets the frequency-data resolution
                    const bufferLength = analyser.frequencyBinCount; // number of frequency bins
                    const dataArray = new Uint8Array(bufferLength); // reusable buffer for frequency data
                    let source = null;

                    // 4. When the audio is ready, route the media element through
                    // the analyser. createMediaElementSource may only be called
                    // once per element, hence the `source` guard.
                    track.wavesurfer.on('ready', () => {
                        // Grab the underlying <audio> element from WaveSurfer.
                        const audio = track.wavesurfer.media;

                        // Connect the audio source only on the first 'ready'.
                        if (!source) {
                            source = audioContext.createMediaElementSource(audio);
                            source.connect(analyser); // into the analyser
                            analyser.connect(audioContext.destination); // and on to the output device
                        }
                    });

                    // 5. Amplitude analysis loop (runs on rAF while playing).
                    let lastUpdate = performance.now(); // throttles amplitude updates

                    const analyzeAudio = () => {
                        // Reset the pulse and stop analysing when not playing.
                        if (!track.isPlaying) {
                            track.amplitude = 1;
                            if (track.animationFrame) {
                                cancelAnimationFrame(track.animationFrame);
                                track.animationFrame = null;
                            }
                            return;
                        }

                        // Sample the current frequency spectrum.
                        analyser.getByteFrequencyData(dataArray);
                        const now = performance.now();

                        // Throttle updates to roughly 60fps.
                        if (now - lastUpdate > 16) {
                            // Mean of all frequency bins (each 0..255).
                            let sum = 0;
                            for (let i = 0; i < bufferLength; i++) {
                                sum += dataArray[i];
                            }
                            const average = sum / bufferLength;

                            // Map loudness to a scale factor with smoothing
                            // (70% previous value, 30% new target).
                            const targetAmplitude = 1 + (average / 256) * 1.5;
                            track.amplitude = track.amplitude * 0.7 + targetAmplitude * 0.3;

                            lastUpdate = now;
                        }

                        // Schedule the next analysis frame.
                        track.animationFrame = requestAnimationFrame(analyzeAudio);
                    };

                    // 6. Player event wiring
                    // Playback started
                    track.wavesurfer.on('play', () => {
                        if (audioContext.state === 'suspended') {
                            audioContext.resume();
                        }
                        analyzeAudio(); // start the amplitude loop
                    });

                    // Playback paused
                    track.wavesurfer.on('pause', () => {
                        if (track.animationFrame) {
                            cancelAnimationFrame(track.animationFrame);
                            track.animationFrame = null;
                        }
                        track.amplitude = 1; // reset the pulse
                    });

                    // Playback progress updates (drives the progress indicator)
                    track.wavesurfer.on('audioprocess', () => {
                        if (track.isPlaying) {
                            track.progress = track.wavesurfer.getCurrentTime() / track.wavesurfer.getDuration();
                        }
                    });

                    // Playback finished: reset state and stop all animations.
                    track.wavesurfer.on('finish', () => {
                        track.isPlaying = false;
                        track.progress = 0;
                        track.amplitude = 1;

                        // Stop the waveform reveal animation, if any.
                        if (track.waveformAnimationFrame) {
                            cancelAnimationFrame(track.waveformAnimationFrame);
                            track.waveformAnimation.isAnimating = false;
                        }

                        if (track.animationFrame) {
                            cancelAnimationFrame(track.animationFrame);
                            track.animationFrame = null;
                        }
                        this.updateGlobalPlayState();
                    });

                    // 7. Load the decoded audio into the player as a WAV blob.
                    if (track.audioBuffer) {
                        const blob = new Blob([await this.audioBufferToWav(track.audioBuffer)], { type: 'audio/wav' });
                        await track.wavesurfer.loadBlob(blob);
                    }
                },

                getTrackStyle(track) {
                    // 增大缩放效果
                    const scale = 1 + (track.amplitude - 1) * 0.3;
                    return {
                        transform: `scale(${scale})`
                    };
                },

                getProgressIndicatorStyle(track) {
                    const angle = track.progress * 360;
                    const scale = 1 + (track.amplitude - 1) * 0.3;
                    return {
                        transform: `rotate(${angle}deg) scale(${scale})`
                    };
                },

                async loadAudioForTrack(trackId, arrayBuffer) {
                    const track = this.tracks[trackId];
                    try {
                        console.log(`Decoding audio for track ${trackId}...`);
                        const audioBuffer = await this.audioContext.decodeAudioData(arrayBuffer);

                        track.audioBuffer = audioBuffer;
                        track.peaks = await this.getPeaks(audioBuffer);

                        // 只在音频上下文已初始化时创建 WaveSurfer
                        if (this.audioInitialized) {
                            await this.initWaveSurfer(trackId);
                        }

                        console.log(`Drawing waveform for track ${trackId}...`);
                        this.drawCircularWaveform(trackId, track.peaks);
                    } catch (error) {
                        console.error('Error loading audio for track:', trackId, error);
                    }
                },

                audioBufferToWav(audioBuffer) {
                    const numOfChan = audioBuffer.numberOfChannels,
                        length = audioBuffer.length * numOfChan * 2,
                        buffer = new ArrayBuffer(44 + length),
                        view = new DataView(buffer),
                        channels = [],
                        sampleRate = audioBuffer.sampleRate;

                    // 写入 WAV 头部信息
                    const writeString = (view, offset, string) => {
                        for (let i = 0; i < string.length; i++) {
                            view.setUint8(offset + i, string.charCodeAt(i));
                        }
                    };

                    writeString(view, 0, 'RIFF');
                    view.setUint32(4, 36 + length, true);
                    writeString(view, 8, 'WAVE');
                    writeString(view, 12, 'fmt ');
                    view.setUint32(16, 16, true);
                    view.setUint16(20, 1, true);
                    view.setUint16(22, numOfChan, true);
                    view.setUint32(24, sampleRate, true);
                    view.setUint32(28, sampleRate * 2 * numOfChan, true);
                    view.setUint16(32, numOfChan * 2, true);
                    view.setUint16(34, 16, true);
                    writeString(view, 36, 'data');
                    view.setUint32(40, length, true);

                    // 写入采样数据
                    const offset = 44;
                    for (let i = 0; i < audioBuffer.numberOfChannels; i++) {
                        channels.push(audioBuffer.getChannelData(i));
                    }

                    let pos = 44;
                    for (let i = 0; i < audioBuffer.length; i++) {
                        for (let j = 0; j < numOfChan; j++) {
                            const sample = Math.max(-1, Math.min(1, channels[j][i]));
                            view.setInt16(pos, sample < 0 ? sample * 0x8000 : sample * 0x7FFF, true);
                            pos += 2;
                        }
                    }

                    return buffer;
                },


                drawCircularWaveform(trackId, peaks, isFirstTime = false) {
                    const canvas = document.getElementById('canvas-' + trackId);
                    if (!canvas) return;

                    const track = this.tracks[trackId];
                    const ctx = canvas.getContext('2d');

                    canvas.width = canvas.offsetWidth;
                    canvas.height = canvas.offsetHeight;

                    const centerX = canvas.width / 2;
                    const centerY = canvas.height / 2;
                    const radius = Math.min(centerX, centerY) * 0.7;

                    // 如果不是第一次播放且已有完整波形数据，直接绘制完整波形
                    if (!isFirstTime && track.finalPeaks) {
                        const drawFullWaveform = () => {
                            ctx.clearRect(0, 0, canvas.width, canvas.height);
                            ctx.strokeStyle = track.color;
                            ctx.lineWidth = 2;

                            const angleStep = (Math.PI * 2) / track.finalPeaks.length;

                            track.finalPeaks.forEach((peak, i) => {
                                const angle = i * angleStep;

                                const radiusVariation = radius * 0.5 * peak;
                                const innerRadius = radius - radiusVariation;
                                const outerRadius = radius + radiusVariation;

                                const startX = centerX + innerRadius * Math.cos(angle);
                                const startY = centerY + innerRadius * Math.sin(angle);
                                const endX = centerX + outerRadius * Math.cos(angle);
                                const endY = centerY + outerRadius * Math.sin(angle);

                                ctx.beginPath();
                                ctx.moveTo(startX, startY);
                                ctx.lineTo(endX, endY);
                                ctx.stroke();
                            });
                        };

                        drawFullWaveform();
                        return;
                    }

                    // 第一次播放时的动画逻辑
                    if (!track.waveformAnimation) {
                        track.waveformAnimation = {
                            isAnimating: false,
                            startTime: null,
                            duration: track.wavesurfer.getDuration() * 1000
                        };
                    }

                    const drawWaveform = (currentTime) => {
                        if (!track.waveformAnimation.startTime) {
                            track.waveformAnimation.startTime = currentTime;
                        }

                        const audioProgress = track.wavesurfer.getCurrentTime() / track.wavesurfer.getDuration();
                        const totalPeaks = peaks.length;
                        const currentPeaks = Math.ceil(totalPeaks * audioProgress);

                        ctx.clearRect(0, 0, canvas.width, canvas.height);
                        ctx.strokeStyle = track.color;
                        ctx.lineWidth = 2;

                        const totalAngle = Math.PI * 2;
                        const angleStep = totalAngle / currentPeaks;

                        for (let i = 0; i < currentPeaks; i++) {
                            const peak = peaks[i];
                            const angle = i * angleStep;

                            const radiusVariation = radius * 0.5 * peak;
                            const innerRadius = radius - radiusVariation;
                            const outerRadius = radius + radiusVariation;

                            const startX = centerX + innerRadius * Math.cos(angle);
                            const startY = centerY + innerRadius * Math.sin(angle);
                            const endX = centerX + outerRadius * Math.cos(angle);
                            const endY = centerY + outerRadius * Math.sin(angle);

                            ctx.beginPath();
                            ctx.moveTo(startX, startY);
                            ctx.lineTo(endX, endY);
                            ctx.stroke();
                        }

                        if (track.isPlaying) {
                            track.waveformAnimationFrame = requestAnimationFrame(drawWaveform);

                            // 当动画完成时保存完整波形数据
                            if (currentPeaks >= totalPeaks && !track.hasPlayedFirstTime) {
                                track.hasPlayedFirstTime = true;
                                track.finalPeaks = peaks;
                            }
                        } else {
                            cancelAnimationFrame(track.waveformAnimationFrame);
                        }
                    };

                    if (track.isPlaying) {
                        track.waveformAnimation.startTime = null;
                        track.waveformAnimationFrame = requestAnimationFrame(drawWaveform);
                    }
                },

                async getPeaks(audioBuffer, length = 720) {
                    const sampleSize = Math.floor(audioBuffer.length / length);
                    const sampleStep = ~~(sampleSize / 10) || 1;
                    const peaks = new Float32Array(length);

                    const channel = audioBuffer.getChannelData(0);
                    let maxPeak = 0;

                    for (let i = 0; i < length; i++) {
                        const start = sampleSize * i;
                        let max = 0;

                        for (let j = 0; j < sampleSize; j += sampleStep) {
                            const value = Math.abs(channel[start + j]);
                            if (value > max) {
                                max = value;
                            }
                        }

                        if (max > maxPeak) {
                            maxPeak = max;
                        }

                        peaks[i] = max;
                    }

                    if (maxPeak > 0) {
                        for (let i = 0; i < length; i++) {
                            peaks[i] = peaks[i] / maxPeak;
                        }
                    }

                    return peaks;
                },

                async handleFileUpload(event) {
                    if (this.activeTrackId === null || !event.target.files.length) return;

                    const file = event.target.files[0];
                    const arrayBuffer = await file.arrayBuffer();
                    await this.loadAudioForTrack(this.activeTrackId, arrayBuffer);
                },

                triggerFileInput() {
                    if (this.activeTrackId !== null) {
                        this.$refs.fileInput.click();
                    } else {
                        alert('Please select a track first');
                    }
                },

                selectTrack(trackId) {
                    // Mark the track as the upload target, then toggle its
                    // playback (circle clicks both select and play/pause).
                    this.activeTrackId = trackId;
                    this.toggleTrack(trackId);
                },

                togglePlay() {
                    this.isPlaying = !this.isPlaying;
                    this.tracks.forEach(async track => {
                        if (track.wavesurfer && track.audioBuffer) {
                            if (!this.audioInitialized) {
                                await this.initializeAudio();
                            }
                            if (this.isPlaying) {
                                track.wavesurfer.play();
                            } else {
                                track.wavesurfer.pause();
                            }
                        }
                    });
                }
            }
        });

        app.mount('#app');
    </script>
</body>

</html>