<template>
    <!-- Chunked-upload widget built on vue-simple-uploader.
         autoStart is false: onFileAdded pauses each file until its MD5
         has been computed and assigned as the unique identifier. -->
    <uploader
    ref="uploaderRef"
    :options="options"
    :autoStart="false"
    :file-status-text="fileStatusText"
    class="uploader-example"
    @file-added="onFileAdded"
    @file-success="onFileSuccess"
    @file-progress="onFileProgress"
    @file-error="onFileError"
    >
    <!-- Fallback message for browsers without the required File API support -->
    <uploader-unsupport></uploader-unsupport>
    <!-- Drop zone; also hosts the file-picker button -->
    <uploader-drop>
        <div>
            <uploader-btn id="global-uploader-btn" ref="uploadBtn" :attrs="attrs"
                >选择文件<i class="el-icon-upload el-icon--right"></i
            ></uploader-btn>
        </div>
    </uploader-drop>
    <!-- List of queued/uploading files with per-file status text -->
    <uploader-list></uploader-list>
</uploader>
</template>

<script setup name="FileModal">
import { ACCEPT_CONFIG } from '@/utils/accept.config';
import SparkMD5 from 'spark-md5';
import { useUserStore } from '@/stores/user'
import { mergeFile } from '@/api/file';
const userStore = useUserStore();
const token = userStore.token;
const uploaderRef = ref();
// Set when the server reports the whole file already exists ("instant upload");
// onFileSuccess reads it to skip the merge request, then resets it.
const skip = ref(false);

// vue-simple-uploader configuration.
const options = reactive({
    // Upload endpoint: GET for the per-file chunk pre-check, POST for each chunk.
    target: '/api/uploader/chunk',
    // Ask the server which chunks already exist before uploading (GET on `target`).
    testChunks: true,
    chunkSize: 5 * 1024 * 1024, // chunk size in bytes (5 MiB)
    fileParameterName: 'file',  // multipart field name carrying the chunk body
    // Query params for the GET pre-check request.
    query: (file) => ({
        identifier: file.uniqueIdentifier,
        filename: file.name
    }),
    // Extra params sent with each chunk POST.
    params: (file, chunk) => ({
        identifier: file.uniqueIdentifier,
        chunkNumber: chunk.offset + 1 // chunk numbers are 1-based on the server
    }),
    headers: {
        // Auth token from the user store; adjust per project conventions.
        Authorization: `Bearer ${token}`
    },
    maxChunkRetries: 3, // automatic retries per failed chunk
    // Localize the estimated remaining-time string.
    parseTimeRemaining: function (timeRemaining, parsedTimeRemaining) {
        return parsedTimeRemaining
            .replace(/\syears?/, '年')
            // BUG FIX: was /\days?/ — "\d" is the digit class, so " days" was
            // never matched and the English unit leaked through untranslated.
            .replace(/\sdays?/, '天')
            .replace(/\shours?/, '小时')
            .replace(/\sminutes?/, '分钟')
            .replace(/\sseconds?/, '秒');
    },
    // Decide per chunk whether it is already on the server (skipping the POST).
    checkChunkUploadedByResponse: function (chunk, response) {
        const result = JSON.parse(response);
        console.log(result)
        if (result.data.skipUpload) {
            console.log("已经上传了")
            // Whole file already uploaded: flag it so no merge request is issued.
            skip.value = true
            return true
        }
        chunk.file.objectName = result.data.objectName;
        chunk.file.uploadId = result.data.uploadId;
        // Consistency fix: the other response fields live under result.data, so
        // prefer result.data.uploadedChunks; keep the old top-level read as a
        // backward-compatible fallback.
        const uploadedChunks = result.data.uploadedChunks || result.uploadedChunks || [];
        return uploadedChunks.indexOf(chunk.offset + 1) >= 0;
    },
    // Merge the per-file object name / upload id into every chunk request.
    processParams: (params, file) => {
        return {
            ...params,
            objectName: file.objectName,
            uploadId: file.uploadId
        };
    }
});
// Attributes forwarded to the hidden <input type="file"> inside uploader-btn.
const attrs = reactive({ accept: ACCEPT_CONFIG.getAll() });

// Status labels shown in the uploader-list for each file state.
const fileStatusText = reactive({
    success: '上传成功',
    error: '上传出错了',
    uploading: '上传中...',
    paused: '暂停中...',
    waiting: '等待中...',
    cmd5: '计算文件MD5中...'
});


// Empty mount hook — no mount-time setup is currently needed; kept as a placeholder.
onMounted(() => {
});

/**
 * Progress hook from vue-simple-uploader: logs which byte range of the
 * file (converted to MiB) is currently being uploaded.
 */
function onFileProgress(rootFile, file, chunk) {
    const fromMiB = chunk.startByte / 1024 / 1024;
    const toMiB = chunk.endByte / 1024 / 1024;
    console.log(`上传中 ${file.name}，chunk：${fromMiB} ~ ${toMiB}`);
}
/**
 * Queue hook: each newly added file is handed to computeMD5, which pauses
 * it, hashes it, sets its unique identifier, then resumes the upload.
 */
function onFileAdded(addedFile) {
    computeMD5(addedFile);
}

/**
 * Success hook: once every chunk of `file` is uploaded, ask the server to
 * merge them. Skipped entirely when the pre-check reported an instant
 * upload (skip.value === true). Resets the skip flag afterwards.
 */
function onFileSuccess(rootFile, file, response, chunk) {
    console.log("文件上传成功rootFile", rootFile)
    console.log("文件上传成功file", file)
    console.log("文件上传成功response", response)
    console.log("文件上传成功chunk", chunk)
    const result = JSON.parse(response)
    // Loose == kept on purpose: the server may return code as "200" or 200.
    if (result.code == 200 && !skip.value) {
        mergeFile({
            // Must match the identifier sent with the chunks (the file's MD5).
            identifier: file.uniqueIdentifier
        })
            .then(responseData => {
                // BUG FIX: logged the outer `response` before; log the merge result.
                console.log("合并的结果", responseData)
                if (!responseData.success) {
                    // TODO: surface the failure to the user instead of only logging.
                    console.log('合并操作未成功，结果码：' + responseData.data.code);
                }
            })
            .catch(function (error) {
                console.log('合并后捕获的未知异常：' + error);
            });
    } else {
        console.log('上传成功，不需要合并')
    }
    // Reset the instant-upload flag for the next file.
    if (skip.value) {
        skip.value = false
    }
}
/**
 * Error hook from vue-simple-uploader: fires when a file fails to upload.
 * Currently only logs the server response for debugging.
 */
function onFileError(rootFile, file, response, chunk) {
    const message = '上传完成后异常信息：' + response;
    console.log(message);
}
/**
 * Compute the file's MD5 incrementally (chunk by chunk, so large files do
 * not exhaust memory), assign the digest as the file's unique identifier,
 * and resume the upload. The file is paused and flagged `cmd5` while
 * hashing so the list shows the "computing MD5" status.
 */
function computeMD5(file) {
    const fileReader = new FileReader()
    const startedAt = new Date().getTime()
    // Vendor-prefixed fallbacks for older browsers.
    const blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice
    const chunkSize = options.chunkSize
    const chunks = Math.ceil(file.size / chunkSize)
    let currentChunk = 0
    // Incremental MD5 over ArrayBuffer chunks.
    const spark = new SparkMD5.ArrayBuffer()

    file.cmd5 = true // show "computing MD5..." status
    file.pause()
    loadNext()

    fileReader.onload = function (e) {
        spark.append(e.target.result)
        // BUG FIX: increment BEFORE comparing. The original compared first,
        // which always scheduled one extra zero-byte read past end-of-file
        // and logged a bogus "start chunk N+1 / N" progress message.
        currentChunk++
        if (currentChunk < chunks) {
            loadNext()
            // Progress: chunk `currentChunk` done, chunk `currentChunk + 1` starting.
            console.log(
                `第${currentChunk}分片解析完成, 开始第${
                  currentChunk + 1
                } / ${chunks}分片解析`
            )
        } else {
            // All chunks hashed.
            const md5 = spark.end()
            console.log(
                `MD5计算完毕：${file.name} \nMD5：${md5} \n分片：${chunks} 大小:${
                  file.size
                } 用时：${new Date().getTime() - startedAt} ms`
            )
            spark.destroy() // release SparkMD5 internal buffers
            file.uniqueIdentifier = md5 // used by query/params as the chunk identifier
            file.cmd5 = false
            file.resume() // start the actual chunk upload
        }
    }

    // Reading failed: cancel the file so it does not upload without an identifier.
    fileReader.onerror = function () {
        // TODO: surface a user-facing error message here
        file.cancel()
    }

    // Read the byte range [start, end) of the underlying native File.
    function loadNext() {
        const start = currentChunk * chunkSize
        let end = start + chunkSize
        if (end > file.size) {
            end = file.size
        }
        fileReader.readAsArrayBuffer(blobSlice.call(file.file, start, end))
    }
}



</script>

<style scoped>
/* NOTE(review): .ant-form does not appear in this component's template —
   presumably it targets a form inside a child library component. Under
   `scoped` that match may need :deep(); verify this rule still applies. */
.ant-form {
margin-top: 36px;
}
</style>