class StreamingAudioAnalyzer {
    /**
     * Streams audio through the Web Audio API while extracting realtime
     * frequency spectra, optionally offloading high-resolution FFT work to a
     * Web Worker. Can also generate and load standalone spectrum files
     * (JSON or a custom binary format) for playback-synchronized
     * visualization without live analysis.
     */
    constructor() {
        this.audioContext = null;
        this.analyser = null;
        this.source = null;        // MediaElementAudioSourceNode for the stream
        this.streamAudio = null;   // <audio> element backing the stream (for cleanup)
        this.isStreaming = false;
        this.streamBuffer = [];
        this.externalSpectrumData = null;
        this.spectrumSync = null;
        this.spectrumWorker = null;
        this.workerBlobUrl = null; // kept so cleanup() can revoke the object URL

        // Streaming buffer configuration
        this.bufferSize = 4096;
        this.analysisInterval = 23; // ms between analysis ticks (~43fps, 1000/23)
        this.streamStartTime = 0;

        // Performance tuning
        this.useWebWorker = true;
        this.compressionLevel = 'medium'; // low, medium, high

        this.initializeStreamingSupport();
    }

    /**
     * Sets up the offline-analysis Web Worker when the environment supports
     * it. Worker creation failures (CSP blocking blob workers, non-browser
     * environments) degrade gracefully to realtime-only analysis.
     */
    initializeStreamingSupport() {
        if (this.useWebWorker && typeof Worker !== 'undefined') {
            try {
                this.createSpectrumWorker();
            } catch (error) {
                console.warn('Spectrum worker unavailable, realtime-only mode:', error);
                this.spectrumWorker = null;
            }
        }
    }

    /**
     * Builds a dedicated Web Worker (from an inline Blob) that performs a
     * Hann-windowed 2048-point FFT on time-domain chunks posted to it and
     * replies with a 1024-bin, 0-255 dB-scaled spectrum.
     *
     * Fix vs. the previous version: the FFT result cache keyed on the first
     * 16 input samples was removed — distinct signals sharing a 16-sample
     * prefix would receive a wrong cached spectrum, and the cache grew
     * without bound.
     */
    createSpectrumWorker() {
        const workerCode = `
            // Recursive radix-2 Cooley-Tukey FFT over {real, imag} samples.
            // Input length must be a power of two (analyzeChunk guarantees 2048).
            class FFTWorker {
                fft(signal) {
                    const N = signal.length;
                    if (N <= 1) return signal;

                    const even = [];
                    const odd = [];
                    for (let i = 0; i < N; i++) {
                        if (i % 2 === 0) {
                            even.push(signal[i]);
                        } else {
                            odd.push(signal[i]);
                        }
                    }

                    const evenFFT = this.fft(even);
                    const oddFFT = this.fft(odd);

                    const result = new Array(N);
                    for (let k = 0; k < N / 2; k++) {
                        const angle = -2 * Math.PI * k / N;
                        const cos = Math.cos(angle);
                        const sin = Math.sin(angle);

                        // Twiddle factor applied to the odd half.
                        const t = {
                            real: cos * oddFFT[k].real - sin * oddFFT[k].imag,
                            imag: sin * oddFFT[k].real + cos * oddFFT[k].imag
                        };

                        result[k] = {
                            real: evenFFT[k].real + t.real,
                            imag: evenFFT[k].imag + t.imag
                        };

                        result[k + N / 2] = {
                            real: evenFFT[k].real - t.real,
                            imag: evenFFT[k].imag - t.imag
                        };
                    }

                    return result;
                }

                // Windows, zero-pads and transforms one chunk, mapping the
                // first 1024 bins into 0-255 bytes over a -60..0 dB range.
                analyzeChunk(timeData, timestamp) {
                    const fftSize = 2048;
                    const input = new Array(fftSize);

                    // Zero-pad and apply the Hann window in a single pass.
                    for (let i = 0; i < fftSize; i++) {
                        const sample = i < timeData.length ? timeData[i] : 0;
                        const windowValue = 0.5 * (1 - Math.cos(2 * Math.PI * i / (fftSize - 1)));
                        input[i] = { real: sample * windowValue, imag: 0 };
                    }

                    const fftResult = this.fft(input);
                    const spectrum = new Array(1024);

                    for (let i = 0; i < 1024; i++) {
                        const real = fftResult[i].real;
                        const imag = fftResult[i].imag;
                        const magnitude = Math.sqrt(real * real + imag * imag);
                        const db = 20 * Math.log10(magnitude + 1e-10); // epsilon avoids log(0)
                        spectrum[i] = Math.max(0, Math.min(255, (db + 60) * 255 / 60));
                    }

                    return { spectrum, timestamp };
                }
            }

            const fftWorker = new FFTWorker();

            self.onmessage = function(e) {
                const { timeData, timestamp, command } = e.data;

                if (command === 'analyze') {
                    const result = fftWorker.analyzeChunk(timeData, timestamp);
                    self.postMessage(result);
                }
            };
        `;

        const blob = new Blob([workerCode], { type: 'application/javascript' });
        // Keep the object URL so cleanup() can revoke it — previously it was
        // leaked on every analyzer instance.
        this.workerBlobUrl = URL.createObjectURL(blob);
        this.spectrumWorker = new Worker(this.workerBlobUrl);

        this.spectrumWorker.onmessage = (e) => {
            const { spectrum, timestamp } = e.data;
            this.onSpectrumReady(spectrum, timestamp);
        };
        this.spectrumWorker.onerror = (event) => {
            // Surface worker-side failures instead of silently losing frames.
            console.error('Spectrum worker error:', event.message || event);
        };
    }

    /**
     * Plays a (CORS-enabled) network audio stream and starts realtime
     * spectrum analysis on it.
     * @param {string} streamUrl - URL of the audio stream.
     * @returns {Promise<{success: boolean, audio?: HTMLAudioElement, error?: Error}>}
     */
    async handleAudioStream(streamUrl) {
        try {
            this.isStreaming = true;
            this.streamStartTime = Date.now();

            if (!this.audioContext) {
                this.audioContext = new (window.AudioContext || window.webkitAudioContext)();
            }
            // Autoplay policies can leave a fresh context suspended; resume
            // it so the graph actually produces data.
            if (this.audioContext.state === 'suspended') {
                await this.audioContext.resume();
            }

            // Audio element used for stream playback; retained so cleanup()
            // can stop playback (previously the element was dropped and kept
            // playing after cleanup).
            const audio = new Audio();
            audio.crossOrigin = 'anonymous';
            audio.src = streamUrl;
            this.streamAudio = audio;

            // Wire the element into the Web Audio graph.
            this.source = this.audioContext.createMediaElementSource(audio);
            this.analyser = this.audioContext.createAnalyser();

            this.analyser.fftSize = 2048;
            this.analyser.smoothingTimeConstant = 0.8;

            this.source.connect(this.analyser);
            this.analyser.connect(this.audioContext.destination);

            // Begin the periodic analysis loop.
            this.startStreamAnalysis();

            await audio.play();

            return { success: true, audio };

        } catch (error) {
            // Don't leave the loop flag set when startup failed.
            this.isStreaming = false;
            console.error('流媒体处理失败:', error);
            return { success: false, error };
        }
    }

    /**
     * Periodic analysis loop: every `analysisInterval` ms, pulls byte
     * frequency data and float time-domain data from the AnalyserNode,
     * forwards the time data to the worker (when available) and fires the
     * realtime callback. Stops as soon as `isStreaming` becomes false.
     */
    startStreamAnalysis() {
        // NOTE(review): these buffers are sized for the current fftSize; a
        // mid-stream optimizePerformance() call that changes fftSize will
        // not resize them — confirm whether that is ever done.
        const frequencyData = new Uint8Array(this.analyser.frequencyBinCount);
        const timeData = new Float32Array(this.analyser.fftSize);

        const analyze = () => {
            if (!this.isStreaming) return;

            this.analyser.getByteFrequencyData(frequencyData);
            this.analyser.getFloatTimeDomainData(timeData);

            const currentTime = (Date.now() - this.streamStartTime) / 1000;

            // Offline/high-resolution analysis in the worker.
            if (this.spectrumWorker) {
                this.spectrumWorker.postMessage({
                    command: 'analyze',
                    timeData: Array.from(timeData),
                    timestamp: currentTime
                });
            }

            // Realtime visualization uses the built-in analyser data directly.
            this.onRealtimeSpectrum(frequencyData, currentTime);

            setTimeout(analyze, this.analysisInterval);
        };

        analyze();
    }

    /**
     * Loads a precomputed spectrum file and prepares a synchronizer that
     * maps playback time to frames.
     * @param {string} spectrumUrl - URL of the spectrum file (JSON or binary).
     * @param {string} audioUrl - URL of the matching audio.
     * @returns {Promise<{success: boolean, frames?: number, error?: Error}>}
     */
    async loadExternalSpectrum(spectrumUrl, audioUrl) {
        try {
            console.log('加载外部频谱文件...');

            const response = await fetch(spectrumUrl);
            if (!response.ok) {
                throw new Error('频谱文件加载失败');
            }

            const spectrumData = await this.parseSpectrumFile(response);
            this.externalSpectrumData = spectrumData;

            this.spectrumSync = new SpectrumSynchronizer(spectrumData, audioUrl);

            console.log(`外部频谱加载完成: ${spectrumData.frames.length} 帧`);
            return { success: true, frames: spectrumData.frames.length };

        } catch (error) {
            console.error('外部频谱加载失败:', error);
            return { success: false, error };
        }
    }

    /**
     * Dispatches spectrum-file parsing based on the response content type.
     * @throws {Error} when the content type is missing or unsupported.
     */
    async parseSpectrumFile(response) {
        // Guard: servers may omit the content-type header entirely, in which
        // case headers.get() returns null.
        const contentType = response.headers.get('content-type') || '';

        if (contentType.includes('application/json')) {
            // JSON spectrum file
            return await response.json();
        } else if (contentType.includes('application/octet-stream')) {
            // Binary spectrum file
            return await this.parseBinarySpectrum(response);
        } else {
            throw new Error('不支持的频谱文件格式');
        }
    }

    /**
     * Parses the custom binary spectrum format:
     *   32-byte header — uint32 version, uint32 frameCount, uint32 frameSize,
     *   float32 sampleRate, uint8 compressionType (0 = raw, 1 = RLE); the
     *   remaining header bytes are reserved. Frames follow back-to-back.
     *
     * Fixes vs. the previous version: RLE frames no longer reassign a
     * `const` binding (which threw a TypeError), and the read offset now
     * advances by the actual compressed byte count instead of the
     * uncompressed `frameSize`, so variable-length RLE frames parse
     * correctly. Unknown compression types now raise instead of silently
     * yielding all-zero frames.
     *
     * @returns {Promise<object>} parsed spectrum data with frames as plain arrays.
     */
    async parseBinarySpectrum(response) {
        const buffer = await response.arrayBuffer();
        const view = new DataView(buffer);

        // File header (little-endian).
        const version = view.getUint32(0, true);
        const frameCount = view.getUint32(4, true);
        const frameSize = view.getUint32(8, true);
        const sampleRate = view.getFloat32(12, true);
        const compressionType = view.getUint8(16);

        const frames = [];
        let offset = 32; // header size

        for (let i = 0; i < frameCount; i++) {
            if (compressionType === 0) {
                // Uncompressed: fixed-size frame.
                const frameData = new Uint8Array(frameSize);
                for (let j = 0; j < frameSize; j++) {
                    frameData[j] = view.getUint8(offset + j);
                }
                frames.push(Array.from(frameData));
                offset += frameSize;
            } else if (compressionType === 1) {
                // RLE: variable-length frame; advance by bytes consumed.
                const { data, bytesRead } = this.decompressRLEFrame(view, offset, frameSize);
                frames.push(Array.from(data));
                offset += bytesRead;
            } else {
                throw new Error('Unknown spectrum compression type: ' + compressionType);
            }
        }

        return {
            version,
            frameCount,
            frameSize,
            sampleRate,
            frames,
            metadata: {
                // NOTE(review): assumes the generator's default hop size of
                // 512 samples; the binary header does not record hopSize.
                duration: frameCount / (sampleRate / 512),
                compressionType
            }
        };
    }

    /**
     * Decodes one RLE-compressed frame.
     * @returns {Uint8Array} the decompressed bytes.
     */
    decompressRLE(view, offset, expectedSize) {
        // Kept for backward compatibility; delegates to the variant that also
        // reports consumed bytes.
        return this.decompressRLEFrame(view, offset, expectedSize).data;
    }

    /**
     * Decodes one RLE frame ((count, value) byte pairs) and reports how many
     * compressed bytes were consumed so callers can advance past
     * variable-length frames.
     * @returns {{data: Uint8Array, bytesRead: number}}
     * @throws {Error} on a zero-length run — malformed data that would
     *         otherwise make the decoder scan past the end of the frame.
     */
    decompressRLEFrame(view, offset, expectedSize) {
        const result = new Uint8Array(expectedSize);
        let resultIndex = 0;
        let currentOffset = offset;

        while (resultIndex < expectedSize) {
            const count = view.getUint8(currentOffset++);
            const value = view.getUint8(currentOffset++);

            if (count === 0) {
                throw new Error('Invalid RLE data: zero run length');
            }

            for (let i = 0; i < count && resultIndex < expectedSize; i++) {
                result[resultIndex++] = value;
            }
        }

        return { data: result, bytesRead: currentOffset - offset };
    }

    /**
     * Precomputes a standalone spectrum file from a decoded AudioBuffer
     * (channel 0 only), processing frames in chunks so progress can be
     * reported.
     * @param {AudioBuffer} audioBuffer
     * @param {'binary'|'json'} format
     * @param {string} compression - 'rle' for binary, 'deflate' for JSON.
     * @returns {Promise<Blob>} the serialized spectrum file.
     */
    async generateSpectrumFile(audioBuffer, format = 'binary', compression = 'rle') {
        console.log('生成独立频谱文件...');

        const channelData = audioBuffer.getChannelData(0);
        const sampleRate = audioBuffer.sampleRate;
        const fftSize = 2048;
        const hopSize = 512;
        const frameCount = Math.floor((channelData.length - fftSize) / hopSize);

        const frames = [];

        const chunkSize = 100;
        for (let chunk = 0; chunk < frameCount; chunk += chunkSize) {
            const chunkEnd = Math.min(chunk + chunkSize, frameCount);
            const chunkPromises = [];

            for (let frame = chunk; frame < chunkEnd; frame++) {
                chunkPromises.push(this.analyzeFrame(channelData, frame, hopSize, fftSize));
            }

            const chunkResults = await Promise.all(chunkPromises);
            frames.push(...chunkResults);

            // Use chunkEnd (not chunk + chunkSize) so the final report never
            // exceeds 100%.
            const progress = (chunkEnd / frameCount * 100).toFixed(1);
            console.log(`频谱生成进度: ${progress}%`);
        }

        const spectrumData = {
            version: 1,
            frameCount: frames.length,
            frameSize: 1024,
            sampleRate,
            frames,
            metadata: {
                duration: audioBuffer.duration,
                hopSize,
                fftSize,
                generatedAt: new Date().toISOString()
            }
        };

        if (format === 'binary') {
            return this.createBinarySpectrumFile(spectrumData, compression);
        } else {
            return this.createJSONSpectrumFile(spectrumData, compression);
        }
    }

    /**
     * Computes one 1024-bin spectrum frame (0-255 dB-scaled bytes) for the
     * window starting at frameIndex * hopSize.
     *
     * NOTE(review): relies on `this.getSpectrum`, which is not defined in
     * this class — it appears to be supplied by a subclass or another part
     * of the project; confirm before calling generateSpectrumFile on a bare
     * StreamingAudioAnalyzer instance.
     */
    async analyzeFrame(channelData, frameIndex, hopSize, fftSize) {
        const startSample = frameIndex * hopSize;
        const timeSlice = channelData.slice(startSample, startSample + fftSize);

        const spectrum = this.getSpectrum(timeSlice);
        const frequencyData = new Uint8Array(1024);

        for (let i = 0; i < frequencyData.length; i++) {
            const magnitude = spectrum[i] || 0;
            const db = 20 * Math.log10(magnitude + 1e-10);
            frequencyData[i] = Math.max(0, Math.min(255, (db + 60) * 255 / 60));
        }

        return Array.from(frequencyData);
    }

    /**
     * Serializes spectrum data into the binary format documented on
     * parseBinarySpectrum (32-byte header + back-to-back frames).
     * @param {object} spectrumData - output of generateSpectrumFile.
     * @param {string} compression - 'rle' enables run-length encoding.
     * @returns {Blob} application/octet-stream blob.
     */
    createBinarySpectrumFile(spectrumData, compression) {
        const headerSize = 32;
        const frameSize = spectrumData.frameSize;
        let totalSize = headerSize;

        // Compress frame payloads up front so the total size is known.
        const compressedFrames = [];
        let compressionType = 0;

        if (compression === 'rle') {
            compressionType = 1;
            for (const frame of spectrumData.frames) {
                const compressed = this.compressRLE(frame);
                compressedFrames.push(compressed);
                totalSize += compressed.length;
            }
        } else {
            // Uncompressed: every frame occupies exactly frameSize bytes.
            compressedFrames.push(...spectrumData.frames);
            totalSize += spectrumData.frameCount * frameSize;
        }

        const buffer = new ArrayBuffer(totalSize);
        const view = new DataView(buffer);

        // Header (little-endian); bytes 17-31 are reserved padding.
        view.setUint32(0, spectrumData.version, true);
        view.setUint32(4, spectrumData.frameCount, true);
        view.setUint32(8, frameSize, true);
        view.setFloat32(12, spectrumData.sampleRate, true);
        view.setUint8(16, compressionType);

        // Frame payloads.
        let offset = headerSize;
        for (const frame of compressedFrames) {
            for (let i = 0; i < frame.length; i++) {
                view.setUint8(offset + i, frame[i]);
            }
            offset += frame.length;
        }

        return new Blob([buffer], { type: 'application/octet-stream' });
    }

    /**
     * Run-length encodes a byte sequence into (count, value) pairs, with
     * runs capped at 255 to fit the count byte.
     * @param {ArrayLike<number>} data - bytes (0-255) to compress.
     * @returns {Uint8Array} the encoded pairs.
     */
    compressRLE(data) {
        const compressed = [];
        let i = 0;

        while (i < data.length) {
            const value = data[i];
            let count = 1;

            // Extend the run while values repeat (count byte maxes at 255).
            while (i + count < data.length && data[i + count] === value && count < 255) {
                count++;
            }

            compressed.push(count, value);
            i += count;
        }

        return new Uint8Array(compressed);
    }

    /**
     * Serializes spectrum data as pretty-printed JSON, optionally
     * deflate-compressing the frames when the pako library is present.
     * @returns {Blob} application/json blob.
     */
    createJSONSpectrumFile(spectrumData, compression) {
        let outputData = spectrumData;

        if (compression === 'deflate' && typeof pako !== 'undefined') {
            // pako is an optional page-level global; skip compression when absent.
            const jsonString = JSON.stringify(spectrumData.frames);
            const compressed = pako.deflate(jsonString);

            outputData = {
                ...spectrumData,
                frames: Array.from(compressed),
                compressed: true,
                compressionType: 'deflate'
            };
        }

        const jsonString = JSON.stringify(outputData, null, 2);
        return new Blob([jsonString], { type: 'application/json' });
    }

    /**
     * Applies one of the preset performance profiles.
     * @param {'low'|'medium'|'high'} level - unknown values fall back to
     *        'medium' instead of crashing on an undefined config.
     */
    optimizePerformance(level = 'medium') {
        const configs = {
            low: {
                analysisInterval: 50, // 20fps
                fftSize: 1024,
                smoothing: 0.9,
                bufferSize: 2048
            },
            medium: {
                analysisInterval: 23, // 43fps
                fftSize: 2048,
                smoothing: 0.8,
                bufferSize: 4096
            },
            high: {
                analysisInterval: 16, // 60fps
                fftSize: 4096,
                smoothing: 0.7,
                bufferSize: 8192
            }
        };

        const config = configs[level] || configs.medium;
        this.analysisInterval = config.analysisInterval;
        this.bufferSize = config.bufferSize;

        if (this.analyser) {
            this.analyser.fftSize = config.fftSize;
            this.analyser.smoothingTimeConstant = config.smoothing;
        }
    }

    /**
     * Realtime spectrum callback (fired each analysis tick with the
     * AnalyserNode's byte frequency data). Intended to be overridden by a
     * subclass.
     */
    onRealtimeSpectrum(frequencyData, timestamp) {
        // Implemented by subclasses.
    }

    /**
     * Callback for spectra computed by the Web Worker. Intended to be
     * overridden by a subclass.
     */
    onSpectrumReady(spectrum, timestamp) {
        // Implemented by subclasses.
    }

    /**
     * Stops the analysis loop and releases worker, playback and audio-graph
     * resources. References are nulled so a terminated worker or
     * disconnected node cannot be reused accidentally. The AudioContext is
     * intentionally left open so the analyzer can be restarted; close it
     * externally when discarding the instance for good.
     */
    cleanup() {
        this.isStreaming = false;

        if (this.spectrumWorker) {
            this.spectrumWorker.terminate();
            this.spectrumWorker = null;
        }
        if (this.workerBlobUrl) {
            // Previously leaked: the blob URL was never revoked.
            URL.revokeObjectURL(this.workerBlobUrl);
            this.workerBlobUrl = null;
        }

        if (this.streamAudio) {
            // Previously the stream kept playing after cleanup.
            this.streamAudio.pause();
            this.streamAudio = null;
        }

        if (this.source) {
            this.source.disconnect();
            this.source = null;
        }

        if (this.analyser) {
            this.analyser.disconnect();
            this.analyser = null;
        }
    }
}

// 频谱同步器类
class SpectrumSynchronizer {
    /**
     * Maps audio playback time onto precomputed spectrum frames.
     *
     * @param {{sampleRate: number, frames: Array, metadata?: {hopSize?: number}}} spectrumData
     *        parsed spectrum data; when metadata.hopSize is present (written
     *        by the generator) it is used, otherwise the historical default
     *        of 512 samples per frame is assumed.
     * @param {string} audioUrl - URL of the matching audio.
     */
    constructor(spectrumData, audioUrl) {
        this.spectrumData = spectrumData;
        this.audioUrl = audioUrl;
        // Prefer the hop size recorded at generation time; previously this
        // was hard-coded to 512 and desynced files generated with other hops.
        const hopSize = (spectrumData.metadata && spectrumData.metadata.hopSize) || 512;
        this.frameRate = spectrumData.sampleRate / hopSize; // frames per second
        this.preloadBuffer = [];
        this.preloadSize = 60; // number of frames to keep preloaded
    }

    /**
     * Returns the spectrum frame for the given playback time, or null when
     * the time is outside the recorded range.
     * @param {number} currentTime - playback position in seconds.
     */
    getSpectrumAtTime(currentTime) {
        const frameIndex = Math.floor(currentTime * this.frameRate);

        if (frameIndex >= 0 && frameIndex < this.spectrumData.frames.length) {
            return this.spectrumData.frames[frameIndex];
        }

        return null;
    }

    /**
     * Copies the frames covering [startTime, startTime + duration) into the
     * preload buffer.
     * @returns {number} number of frames actually preloaded (may be fewer
     *          near the end of the recording).
     */
    preloadSpectrum(startTime, duration) {
        // Clamp so a slightly negative start time cannot produce a negative
        // slice index (which would slice from the end of the array).
        const startFrame = Math.max(0, Math.floor(startTime * this.frameRate));
        const frameCount = Math.ceil(duration * this.frameRate);

        this.preloadBuffer = this.spectrumData.frames.slice(startFrame, startFrame + frameCount);
        return this.preloadBuffer.length;
    }
}

// Expose both classes on the global object for non-module (script-tag) consumers.
Object.assign(window, { StreamingAudioAnalyzer, SpectrumSynchronizer });
