
importScripts('spark-md5.min.js'); // load the SparkMD5 library used for chunk hashing

const CHUNK_SIZE = 5 * 1024 * 1024; // 5 MB per slice
const MAX_CONCURRENT = 3; // maximum simultaneous chunk uploads
let activeNum = 0 // number of uploads currently in flight
const chunks = []; // pending { chunk, index, hash } entries awaiting upload
const uploadUrl = '' // TODO: set the real upload endpoint; '' posts to the worker's own URL
/**
 * Entry point: receives a File from the main thread, slices it into
 * CHUNK_SIZE pieces, hashes each piece, then kicks off the uploads.
 * Slicing/hashing is reported as the first 50% of overall progress;
 * uploading accounts for the remaining 50%.
 */
self.onmessage = async (e) => {
  const { file } = e.data;
  const totalChunks = Math.ceil(file.size / CHUNK_SIZE);
  // Publish the total on the worker scope: `file` is local to this
  // handler, so uploadChunk() needs this to compute upload progress.
  self.totalChunks = totalChunks;
  for (let i = 0; i < totalChunks; i++) {
    const chunk = file.slice(i * CHUNK_SIZE, (i + 1) * CHUNK_SIZE);
    const hash = await calculateChunkHash(chunk);
    chunks.push({ chunk, index: i, hash });
    // i + 1 so the final slice reports exactly 50%, not (n-1)/n * 50.
    self.postMessage({ type: 'slice_progress', percent: ((i + 1) / totalChunks) * 50 });
  }
  await handleUpload();
};

/**
 * Drains the `chunks` queue, keeping at most MAX_CONCURRENT uploads
 * in flight at any time.
 *
 * The loop also stays alive while `activeNum > 0`: a failed chunk is
 * pushed back onto the queue by uploadChunk() *after* the fetch settles,
 * so returning as soon as the queue is empty would orphan those retries.
 */
async function handleUpload() {
  while (chunks.length > 0 || activeNum > 0) {
    if (activeNum < MAX_CONCURRENT && chunks.length > 0) {
      // Fire-and-forget: uploadChunk() updates `activeNum` and shifts
      // the queue synchronously before its first await, so the loop
      // condition observes the new state on the next iteration.
      uploadChunk();
    } else {
      // At capacity (or waiting on in-flight retries): poll again shortly.
      await new Promise((r) => setTimeout(r, 100));
    }
  }
}

/**
 * Uploads one chunk from the queue via multipart POST.
 * On HTTP or network failure the chunk is re-queued at the front so
 * handleUpload() retries it.
 *
 * Bug fixed: the original referenced `file` here, which is local to the
 * onmessage handler — every successful upload threw a ReferenceError
 * that the bare `catch` swallowed, re-queueing the chunk forever.
 */
async function uploadChunk() {
  if (activeNum >= MAX_CONCURRENT || chunks.length === 0) return;
  const entry = chunks.shift();
  const { chunk, hash, index } = entry;
  activeNum += 1;

  const formData = new FormData();
  formData.append('chunk', chunk);
  formData.append('hash', hash);
  formData.append('index', index);

  try {
    const response = await fetch(uploadUrl, {
      method: 'POST',
      body: formData
    });
    if (response.ok) {
      // Count completed chunks instead of using `index`: uploads finish
      // out of order, so `index` would make the percentage jump around.
      self.uploadedCount = (self.uploadedCount ?? 0) + 1;
      // `self.totalChunks` is set by the onmessage handler; guard so a
      // progress report can never throw mid-upload.
      if (self.totalChunks) {
        self.postMessage({
          type: 'slice_progress',
          percent: 50 + (self.uploadedCount / self.totalChunks) * 50
        });
      }
    } else {
      chunks.unshift(entry); // HTTP error: re-queue for retry
    }
  } catch {
    chunks.unshift(entry); // network error: re-queue for retry
  } finally {
    activeNum--;
  }
}

/**
 * Computes the MD5 hash of a single Blob chunk.
 *
 * @param {Blob} chunk - file slice to hash
 * @returns {Promise<string>} hex-encoded MD5 digest of the chunk
 * @throws {DOMException|Error} if reading the chunk fails
 */
async function calculateChunkHash(chunk) {
  return new Promise((resolve, reject) => {
    const spark = new SparkMD5.ArrayBuffer();
    const reader = new FileReader();

    reader.onload = (e) => {
      spark.append(e.target.result);
      resolve(spark.end()); // hex MD5 of the chunk
    };
    // Bug fixed: without an error handler the promise never settles on a
    // read failure, hanging the slicing loop in onmessage forever.
    reader.onerror = () => reject(reader.error ?? new Error('chunk read failed'));

    reader.readAsArrayBuffer(chunk);
  });
}
  
