importScripts("./spark-md5.min.js");
let spark = null;

let config = {};

// Main entry point: receives either the literal string "finish" (echoed back)
// or a job descriptor { file, chunkSize, ... } to split and hash.
self.addEventListener("message", async (event) => {
  if (event.data === "finish") {
    self.postMessage("finish");
    return;
  }

  const {
    file,
    chunkSize,
    isGenerateChunkHash,
    numSamples,
    retryNum,
    isGenerateFileHash,
    isAutoSample,
  } = event.data;
  config = {
    chunkSize: chunkSize || 1024 * 1024, // default: 1 MiB chunks
    isGenerateChunkHash,
    // NOTE(review): isGenerateFileHash is stored but never consulted below —
    // the file hash is always computed; confirm whether that is intended.
    isGenerateFileHash,
    numSamples: numSamples || 0,
    retryNum,
    isAutoSample,
  };
  // Lazily create the shared hasher used for per-chunk hashes.
  if (!spark) {
    spark = new self.SparkMD5.ArrayBuffer();
  }

  try {
    const chunks = await processFile(file);
    console.time("fileHash");
    const fileHash = await calculateFileHash(file);
    console.timeEnd("fileHash");
    self.postMessage({
      chunks,
      fileHash,
    });
  } catch (error) {
    // BUG FIX: previously a failed read/hash became an unhandled rejection and
    // the main thread waited forever; surface the failure instead.
    self.postMessage({ error: String(error) });
  }
});

// Dynamically choose the sample count from the file size:
// 10 samples under 50 MiB, plus 2 extra for every further 50 MiB increment.
function calculateSamplingSize(fileSize) {
  const BASE_SAMPLES = 10;
  const EXTRA_PER_STEP = 2;
  const STEP = 50 * 1024 * 1024; // 50 MiB per increment

  if (fileSize < STEP) {
    return BASE_SAMPLES;
  }
  // floor((fileSize - STEP) / STEP) + 1 simplifies to floor(fileSize / STEP).
  return BASE_SAMPLES + Math.floor(fileSize / STEP) * EXTRA_PER_STEP;
}

// Compute a sampled file hash: take `numSamples` evenly spaced probes across
// the file, hash the first tenth of each sample window with MD5.
function sampleFileHash(file, numSamples) {
  console.info("numSamples:", numSamples);
  return new Promise((resolve, reject) => {
    const fileSize = file.size;
    const bytesPerSample = Math.ceil(fileSize / numSamples); // window size
    const bytesPerSubSample = Math.ceil(bytesPerSample / 10); // bytes actually read per window
    const reader = new FileReader();
    const md5 = new SparkMD5.ArrayBuffer();

    let samplesProcessed = 0;

    reader.onload = function (e) {
      md5.append(e.target.result);
      samplesProcessed++;

      if (samplesProcessed < numSamples) {
        processNextSample();
      } else {
        resolve(md5.end());
      }
    };

    reader.onerror = function (err) {
      reject(err);
    };

    function processNextSample() {
      // BUG FIX: the original advanced currentPosition by BOTH the slice end
      // and bytesPerSample, drifting each probe forward by bytesPerSubSample
      // so late probes read empty slices past EOF. Derive each probe's start
      // directly from its index instead.
      const sliceStart = samplesProcessed * bytesPerSample;
      const sliceEnd = Math.min(sliceStart + bytesPerSubSample, fileSize);
      reader.readAsArrayBuffer(file.slice(sliceStart, sliceEnd));
    }

    processNextSample();
  });
}

// Produce the whole-file hash. Depending on config, either sample the file
// (auto-sized or fixed sample count) or MD5 the entire contents.
async function calculateFileHash(file) {
  if (config.isAutoSample) {
    return sampleFileHash(file, calculateSamplingSize(file.size));
  }
  if (config.numSamples) {
    return sampleFileHash(file, config.numSamples);
  }

  // Full read: hash every byte of the file.
  return new Promise((resolve, reject) => {
    const reader = new FileReader();

    reader.onload = (e) => {
      const md5 = new SparkMD5.ArrayBuffer();
      md5.append(e.target.result);
      resolve(md5.end());
    };

    reader.onerror = reject;

    reader.readAsArrayBuffer(file);
  });
}

// Hash one chunk's ArrayBuffer with the shared spark instance, or return null
// when chunk hashing is disabled or no buffer was supplied.
function generateChunkHash(arrayBuffer) {
  if (!config.isGenerateChunkHash || !arrayBuffer) {
    return null;
  }
  return spark.append(arrayBuffer).end();
}

// Split `file` into config.chunkSize slices, reading them sequentially with a
// single FileReader, and resolve with the array of chunk descriptors.
function processFile(file) {
  return new Promise((resolve, reject) => {
    const reader = new FileReader();
    const chunks = [];
    let index = 0;

    reader.onload = () => {
      const arrayBuffer = reader.result;
      // NOTE(review): when chunk hashing is disabled this yields "null-<index>"
      // (string-concatenated null); kept as-is because consumers may rely on
      // the "<hash>-<index>" shape — confirm intent.
      const chunkHash = generateChunkHash(arrayBuffer) + `-${index}`;
      const progress = 0;

      chunks.push({
        chunk: new Blob([arrayBuffer], { type: file.type }),
        size: arrayBuffer.byteLength,
        index,
        chunkHash,
        progress,
        retryNumed: 0,
        retryNum: config.retryNum,
      });

      // `index` was already incremented by readNextChunk, so this tests
      // whether the NEXT chunk would start before EOF.
      if (index * config.chunkSize < file.size) {
        readNextChunk();
      } else {
        resolve(chunks);
      }
    };

    // BUG FIX: no onerror handler existed, so a failed read left this promise
    // pending forever and the worker never answered the main thread.
    reader.onerror = (err) => {
      reject(err);
    };

    function readNextChunk() {
      const start = index * config.chunkSize;
      const end = Math.min(start + config.chunkSize, file.size);
      const chunk = file.slice(start, end);
      reader.readAsArrayBuffer(chunk);
      index++;
    }

    readNextChunk();
  });
}
