<!--
 * @Descripttion: 文件上传
 * @Author: jhw
 * @Date: 2023-07-18 17:24:47
 * @LastEditors: jhw
 * @LastEditTime: 2023-07-21 14:40:55
-->
<template>
  <div>
    <h1>大文件上传</h1>
    <el-upload class="upload-demo" drag :http-request="uploadFile" :limit="1">
      <el-icon class="el-icon--upload"><upload-filled /></el-icon>
      <div class="el-upload__text">文件拖到这 <em>点击上传</em></div>
      <template #tip>
        <div class="el-upload__tip">这里写一些小提示</div>
      </template>
    </el-upload>
    <el-progress v-show="percentage !== 0 && percentage !== 100" :percentage="percentage" :show-text="false" />
  </div>
</template>

<script setup lang="ts">
import { Message, UploadFilled } from '@element-plus/icons-vue';
import { ElMessage, UploadRequestOptions } from 'element-plus';
import sparkmd5 from 'spark-md5';
import { commonApi } from '@/api';
import { ref } from 'vue';

// Chunk-upload API surface: upload a single chunk, merge all chunks on the
// server, and query which chunks the server already has (resume support).
const { uploadChunk, mergeChunk, uploadedChunks } = commonApi;

// Overall upload progress, 0-100; drives the <el-progress> bar in the template.
const percentage = ref(0);

// 文件md5值计算- 大文件使用增量算法
// Incrementally compute the file's MD5 from its chunks, so large files never
// need to be loaded into a single contiguous ArrayBuffer.
// Chunks are read strictly one at a time, in order, via FileReader.
// Resolves with the hex digest string; rejects if any chunk fails to read.
const changeBuffer = (chunks: Array<{ file: Blob }>): Promise<string> => {
  return new Promise((resolve, reject) => {
    const spark = new sparkmd5.ArrayBuffer();
    // Read chunk i, append it to the running hash, then move to i + 1.
    function _read(i: number) {
      if (i >= chunks.length) {
        // All chunks consumed: finalize and emit the digest.
        resolve(spark.end());
        return;
      }
      const reader = new FileReader();
      reader.onload = (e) => {
        // readAsArrayBuffer guarantees an ArrayBuffer result on a load event.
        spark.append(e.target?.result as ArrayBuffer);
        _read(i + 1);
      };
      reader.onerror = (err) => {
        reject(err);
      };
      reader.readAsArrayBuffer(chunks[i].file);
    }
    _read(0);
  });
};
// const changeBuffer = (file: File) => {
//   return new Promise((resolve, reject) => {
//     const fileReader = new FileReader();
//     const spark = new sparkmd5.ArrayBuffer();
//     fileReader.readAsArrayBuffer(file);
//     fileReader.onload = (e) => {
//       spark.append(e.target?.result as ArrayBuffer);
//       resolve(spark.end());
//     };
//     fileReader.onerror = (err) => {
//       reject(err);
//     };
//   });
// };

// 文件切片处理 file: 文件 Hash: 文件md5值
/**
 * Split a file into upload chunks.
 * Default chunk size is 1 MB, capped at 100 chunks total: for files larger
 * than 100 MB the chunk size grows so that exactly 100 chunks cover the file.
 * @param file the file selected by the user
 * @returns list of { file: Blob slice, filename: "<index>.<ext>" }, 1-based index
 */
const fileChunk = (file: File) => {
  let fileSize = 1024 * 1024; // chunk size: 1 MB by default
  let chunks = Math.ceil(file.size / fileSize); // total chunk count
  // Cap the chunk count; grow the chunk size instead for very large files.
  if (chunks > 100) {
    chunks = 100;
    fileSize = Math.ceil(file.size / chunks);
  }
  // FIX: a file without an extension previously yielded names like "1.undefined"
  // because the regex miss left `suffix` undefined inside the template string.
  const suffix = /\.([a-zA-Z0-9]+)$/.exec(file.name)?.[1];
  const ext = suffix ? `.${suffix}` : '';
  const fileChunkList = [];
  for (let cur = 0; cur < chunks; cur++) {
    fileChunkList.push({
      file: file.slice(cur * fileSize, (cur + 1) * fileSize),
      filename: `${cur + 1}${ext}`,
    });
  }
  return fileChunkList;
};

// 文件合并
/**
 * Ask the server to merge the uploaded chunks into the final file.
 * Fire-and-forget: the outcome is only logged, never surfaced to the caller.
 * @param HASH file fingerprint identifying the chunk group on the server
 * @param count total number of chunks the server is expected to merge
 */
const mergeFileChunk = (HASH: any, count: number) => {
  const onMergeFailed = () => console.log('合并失败');
  mergeChunk({ HASH, count }).then(console.log).catch(onMergeFailed);
};

// 文件上传
const uploadFile = async (file: UploadRequestOptions) => {
  const chunkLists = fileChunk(file.file);
  const Hash = await changeBuffer(chunkLists);
  const maxRequest = 2; // 最大并发数
  const len = chunkLists.length; // 切片总数
  let index = 0; // 当前上传切片数
  const taskList: Array<Promise<any>> = []; // 任务执行池

  // 获取已经上传的切片
  const result = await uploadedChunks({ Hash });
  if (result.fileList.length > 0) {
    // 更新当前上传进度
    percentage.value = Math.floor((result.fileList.length / len) * 100);
    index = result.fileList.length;
  }

  // 通过map生成若干个form对象上传用
  const fmReqs = chunkLists
    .filter((item) => !result.fileList.includes(item.filename))
    .map((chunk) => {
      const fm = new FormData();
      fm.append('file', chunk.file);
      fm.append('filename', chunk.filename);
      fm.append('HASH', Hash as string);
      return { fm };
    });

  while (index < len) {
    const task = uploadChunk(fmReqs[index].fm);
    task
      .then(() => {
        // 计算当前上传进度
        percentage.value = Math.floor(((index + 1) / len) * 100) > 100 ? 100 : Math.floor(((index + 1) / len) * 100);

        // 请求成功后，任务池中移除当前任务
        taskList.splice(taskList.indexOf(task), 1);
      })
      .catch(() => {
        Message.error('当前分片上传失败！');
      });
    taskList.push(task);

    // 当前任务数达到最大并发数时，需要等待最少一个任务执行完毕
    if (taskList.length >= maxRequest) {
      await Promise.race(taskList);
    }

    index++;
  }

  // 所有分片都上传完毕后，请求合并接口
  await Promise.all(taskList);
  mergeFileChunk(Hash, len);
};
</script>

<style scoped></style>
