
importScripts('spark-md5.min.js');

/**
 * Bounded-concurrency task pool with per-task retry.
 *
 * Fixes over the previous version:
 * - `add` now resolves with the task's return value (it was discarded).
 * - Retry actually fires: the old `task.retries < 3` compared `undefined < 3`
 *   (false), so failures were reported on the first attempt.
 * - A failed task is re-enqueued through the managing wrapper, so the
 *   `active` count and queue draining stay consistent.
 * - After the final attempt the promise REJECTS (it previously never
 *   settled, hanging any `Promise.all` that awaited it). The legacy
 *   `{ type: 'error' }` message is still posted for backward compatibility.
 */
class UploadPool {
  /**
   * @param {number} maxConcurrent - maximum tasks running at once
   */
  constructor(maxConcurrent) {
    this.max = maxConcurrent;
    this.active = 0;
    this.queue = [];
  }

  /**
   * Queue an async task for execution.
   * @param {() => Promise<*>} task - the work to run
   * @param {number} [maxAttempts=3] - total attempts before giving up
   * @returns {Promise<*>} resolves with the task's return value,
   *   rejects with the last error once attempts are exhausted
   */
  add(task, maxAttempts = 3) {
    return new Promise((resolve, reject) => {
      let attempts = 0;

      const run = async () => {
        this.active++;
        try {
          resolve(await task());
        } catch (err) {
          attempts++;
          if (attempts < maxAttempts) {
            // Re-enqueue the wrapper (not the bare task) at the front so
            // bookkeeping and settlement still happen on the retry.
            this.queue.unshift(run);
          } else {
            // Keep the legacy error broadcast, but also reject so callers
            // awaiting this promise are not left hanging.
            self.postMessage({ type: 'error', message: err.message });
            reject(err);
          }
        } finally {
          this.active--;
          this.next();
        }
      };

      this.queue.push(run);
      this.next();
    });
  }

  /** Launch queued tasks while a concurrency slot is free. */
  next() {
    while (this.active < this.max && this.queue.length) {
      this.queue.shift()();
    }
  }
}

// Upload tuning knobs.
const config = {
  CHUNK_SIZE: 2 * 1024 * 1024,  // 2 MB slices balance memory use against network efficiency
  MAX_CONCURRENT: 4,            // concurrency sized per HTTP/2 guidance
};

// Worker-wide shared state.
const pool = new UploadPool(config.MAX_CONCURRENT);
let fileMeta = null;  // populated when the host page posts a file

/**
 * Entry point: the host page posts `{ file, uploadUrl }` to start an upload.
 * Errors are reported back as `{ type: 'error' }` instead of becoming
 * unhandled promise rejections inside the worker (the previous version
 * awaited `processChunks()` with no catch).
 */
self.onmessage = async (e) => {
  const { file, uploadUrl } = e.data;
  if (!file) return;

  fileMeta = {
    file,
    // NOTE(review): `uploadChunks` needs a target URL but none was ever
    // defined; assume the host page supplies it in the message — confirm.
    uploadUrl,
    totalChunks: Math.ceil(file.size / config.CHUNK_SIZE),
  };

  try {
    await processChunks();
  } catch (err) {
    self.postMessage({ type: 'error', message: err.message });
  }
};

/**
 * Slice the file, hash each chunk concurrently (phase 1, 0–50% progress),
 * then hand the hashed chunks to `uploadChunks` (phase 2).
 *
 * Fixes:
 * - Results are written into a local `chunks` array instead of relying on
 *   the pool's resolution value, which the original pool discarded — every
 *   entry of `chunks` used to be `undefined`, crashing `uploadChunks`.
 * - 'slice' progress is now reported when a hash COMPLETES; previously it
 *   was emitted at enqueue time, so the bar jumped straight to 50%.
 */
async function processChunks() {
  const chunks = new Array(fileMeta.totalChunks);
  const hashJobs = [];
  let hashed = 0;

  for (let i = 0; i < fileMeta.totalChunks; i++) {
    const start = i * config.CHUNK_SIZE;
    const chunk = fileMeta.file.slice(
      start,
      Math.min(start + config.CHUNK_SIZE, fileMeta.file.size)
    );

    hashJobs.push(
      pool.add(async () => {
        const hash = await calculateChunkHash(chunk);
        chunks[i] = { chunk, index: i, hash };
        hashed++;
        updateProgress('slice', hashed / fileMeta.totalChunks * 50);
      })
    );
  }

  await Promise.all(hashJobs);
  await uploadChunks(chunks);
}

/**
 * Upload hashed chunks through the pool (phase 2, 50–100% progress),
 * then signal completion.
 *
 * Fixes:
 * - The original referenced an undeclared `uploadUrl`, a guaranteed
 *   ReferenceError on the first upload. The URL now comes from `fileMeta`
 *   (set from the start message); '/upload' is a fallback —
 *   TODO confirm the host page supplies the real endpoint.
 * - Progress is driven by a completion counter; computing it from
 *   `item.index` made the bar jump backwards under concurrency.
 *
 * @param {Array<{chunk: Blob, index: number, hash: string}>} chunks
 */
async function uploadChunks(chunks) {
  let uploaded = 0;

  await Promise.all(
    chunks.map((item) =>
      pool.add(async () => {
        const formData = new FormData();
        formData.append('chunk', item.chunk);
        formData.append('hash', item.hash);
        formData.append('index', item.index);

        const res = await fetch(fileMeta.uploadUrl ?? '/upload', {
          method: 'POST',
          body: formData,
        });
        if (!res.ok) throw new Error(`Chunk ${item.index} upload failed`);

        uploaded++;
        updateProgress('upload', 50 + uploaded / chunks.length * 50);
      })
    )
  );

  self.postMessage({ type: 'complete' });
}

/**
 * Post a progress message to the host page.
 *
 * The original fed the STRING result of `toFixed(2)` into `Math.min`,
 * relying on implicit coercion; the value is now clamped to [0, 100]
 * first and converted back to a number explicitly.
 *
 * @param {string} phase - 'slice' or 'upload'
 * @param {number} percent - raw percentage, clamped to [0, 100]
 */
function updateProgress(phase, percent) {
  const clamped = Math.min(100, Math.max(0, percent));
  self.postMessage({
    type: 'progress',
    phase,
    percent: Number(clamped.toFixed(2)),
  });
}

/**
 * MD5-hash a single Blob chunk with SparkMD5.
 *
 * Fix: the original attached no `onerror` handler, so a failed read left
 * the promise pending forever — permanently occupying a pool slot and
 * hanging the whole upload. It now rejects on read failure.
 *
 * @param {Blob} chunk - slice of the source file
 * @returns {Promise<string>} hex MD5 digest of the chunk
 */
function calculateChunkHash(chunk) {
  return new Promise((resolve, reject) => {
    const reader = new FileReader();

    reader.onload = (e) => {
      const spark = new SparkMD5.ArrayBuffer();
      spark.append(e.target.result);
      resolve(spark.end());
    };

    reader.onerror = () => {
      reject(reader.error ?? new Error('chunk read failed'));
    };

    reader.readAsArrayBuffer(chunk);
  });
}
