import SparkMD5 from 'spark-md5';
// 获取地址组api  合并文件api
import {
  getShardingUrl,
  mergeFile,
  getMultipartUrl,
  mergeBigFile
} from '@/apis/file';
import { useFileStore } from '@/stores';

/**
 * PUT a single chunk to its presigned URL.
 * @param {string} url - presigned upload URL for this part
 * @param {BodyInit} filePart - the chunk bytes to send
 * @returns {Promise<boolean>} true when the server accepted the part
 */
const uploadPart = async (url, filePart) => {
  let response;
  try {
    response = await fetch(url, { method: 'PUT', body: filePart });
  } catch (error) {
    // Network-level failure: log and report false so the caller can retry.
    console.error('上传失败', error);
    return false;
  }
  // Status 0 is also treated as success (kept from the original behavior;
  // presumably for opaque/zero-status responses — TODO confirm).
  return response.status === 200 || response.status === 0;
};

/**
 * Shared chunk-upload loop: PUTs each presigned part, refreshes the URL list
 * when a part fails, then merges the parts and refreshes the file list.
 *
 * @param {File} file - source file (used for size and for retry requests)
 * @param {number} chunkSize - byte size of each part
 * @param {Array<{url: string, partNum: number}>} presignedUrls - parts to upload
 * @param {ArrayBuffer} arrayBuffer - the file's bytes (already fully read)
 * @param {(md5: string) => Promise<any>} mergeFn - server-side merge call
 * @param {string} md5 - MD5 of the whole file, used as the upload id
 * @param {string|null} webkitRelativePath - non-null for folder uploads;
 *        suppresses per-file progress (the folder loop reports its own)
 * @param {(() => Promise<any>)|null} refreshUrls - re-fetches the presigned URL
 *        list after a failed part. Defaults to the small-file getShardingUrl
 *        call; big-file callers should pass a closure over getMultipartUrl so
 *        the retry hits the same API (and same full path) as the first request.
 */
const handleUploadChunks = async (
  file,
  chunkSize,
  presignedUrls,
  arrayBuffer,
  mergeFn,
  md5,
  webkitRelativePath = null,
  refreshUrls = null
) => {
  const fileStore = useFileStore();
  // BUG FIX: the retry previously always used getShardingUrl with file.name,
  // which is the wrong API and wrong path for big-file (getMultipartUrl)
  // uploads. Callers may now inject the correct refresh call; the default
  // preserves the old small-file behavior.
  const refetch =
    refreshUrls ??
    (() => getShardingUrl({ filePath: file.name, fileSize: file.size, md5 }));

  for (let i = 0; i < presignedUrls.length; i++) {
    if (!webkitRelativePath) {
      // Progress as a percentage string with two decimals.
      fileStore.setTotal((((i + 1) / presignedUrls.length) * 100).toFixed(2));
    }
    const { url, partNum } = presignedUrls[i];
    const start = (partNum - 1) * chunkSize;
    const end = Math.min(file.size, partNum * chunkSize);
    const success = await uploadPart(url, arrayBuffer.slice(start, end));

    if (!success) {
      // A part failed: ask the server which parts are still missing and
      // restart the loop over the refreshed (remaining) URL list.
      const data = await refetch();
      const { finishedNum, totalNum } = data.data;
      if (finishedNum === totalNum) {
        // Everything is already on the server — finish up the same way the
        // normal completion path does (progress reset + list refresh).
        if (!webkitRelativePath) fileStore.setTotal(0);
        const merged = await mergeFn(md5);
        fileStore.updateFileList();
        return merged;
      }
      presignedUrls = data.data.presignedUrls;
      i = -1; // restart from the beginning of the refreshed list
    }
  }

  if (!webkitRelativePath) fileStore.setTotal(0);
  await mergeFn(md5);
  fileStore.updateFileList();
};

/**
 * Upload a small file in presigned-URL parts, with instant-upload support
 * (skip straight to merge when every part is already on the server).
 * @param {File} file - the file to upload
 */
export const uploadFile = async (file) => {
  const fileStore = useFileStore();
  const arrayBuffer = await file.arrayBuffer();
  const md5 = SparkMD5.ArrayBuffer.hash(arrayBuffer);

  const { data } = await getShardingUrl({
    filePath: file.name,
    fileSize: file.size,
    md5
  });

  if (data.finishedNum === data.totalNum) {
    // Instant upload: every part already exists server-side — just merge.
    const merged = await mergeFile(md5);
    // BUG FIX: refresh the file list on this fast path too; previously only
    // the slow path (inside handleUploadChunks) refreshed it, so an instant
    // upload never appeared in the UI list.
    fileStore.updateFileList();
    return merged;
  }

  // Reuse the shared chunk-upload loop.
  await handleUploadChunks(
    file,
    data.chunkSize,
    data.presignedUrls || [],
    arrayBuffer,
    mergeFile,
    md5
  );

  fileStore.setTotal(0);
};

/**
 * Upload a big file via multipart presigned URLs, with instant-upload support.
 * @param {File} file - the file to upload
 * @param {string} [webkitRelativePath] - relative path when part of a folder upload
 * @param {string} [currentPaths] - destination directory; falls back to the store's path
 */
export const uploadBigFile = async (file, webkitRelativePath, currentPaths) => {
  const fileStore = useFileStore();
  // Truthiness fallback kept on purpose: an empty-string path should also
  // fall back to the store's current path.
  const currentPath = currentPaths ? currentPaths : fileStore.getPath();
  const arrayBuffer = await file.arrayBuffer();
  const md5 = SparkMD5.ArrayBuffer.hash(arrayBuffer);

  // Compute the server-side path once (was a hard-to-read nested ternary):
  // use the relative path for folder uploads, the bare name otherwise, and
  // prefix the current directory unless we are at the root '/'.
  const baseName = webkitRelativePath ? webkitRelativePath : file.name;
  const filePath = currentPath === '/' ? baseName : currentPath + baseName;

  const requestUrls = () =>
    getMultipartUrl({ filePath, fileSize: file.size, md5 });

  const { data } = await requestUrls();

  if (data.finishedNum === data.totalNum) {
    // Instant upload: all parts already on the server — just merge.
    const merged = await mergeBigFile(md5);
    // BUG FIX: refresh the file list on this fast path too, matching the
    // slow path inside handleUploadChunks.
    fileStore.updateFileList();
    return merged;
  }

  // Reuse the shared chunk-upload loop.
  await handleUploadChunks(
    file,
    data.chunkSize,
    data.presignedUrls || [],
    arrayBuffer,
    mergeBigFile,
    md5,
    webkitRelativePath,
    // BUG FIX: retry failed parts through the same multipart API and full
    // path, not the small-file getShardingUrl with just file.name.
    // (Harmlessly ignored if handleUploadChunks lacks the parameter.)
    requestUrls
  );
};

/**
 * Upload every file of a selected folder as a big file, reporting overall
 * percent progress through the file store.
 * @param {FileList|File[]} files - folder entries carrying webkitRelativePath
 */
export const uploadBigFolder = async (files) => {
  const fileStore = useFileStore();
  // Capture the destination once so every file lands under the same directory.
  const currentPath = fileStore.getPath();
  // NOTE(review): this count is overwritten by the first percentage below —
  // presumably an initial "busy" signal; confirm against the store's contract.
  fileStore.setTotal(files.length + 1);

  let uploaded = 0;
  for (const entry of files) {
    uploaded += 1;
    // Overall progress as a percentage string with two decimals.
    fileStore.setTotal(((uploaded / files.length) * 100).toFixed(2));
    await uploadBigFile(entry, entry.webkitRelativePath, currentPath);
  }

  fileStore.setTotal(0);
};
