<template>
    <!-- Resumable chunked uploader UI (simple-uploader / vue-uploader).
         Files are paused on add until an MD5 identifier is computed in the
         script below, enabling resume and server-side chunk skipping. -->
    <uploader
        ref="uploaderRef"
        :options="options"
        :autoStart="false"
        :file-status-text="fileStatusText"
        class="uploader-ui"
        @file-added="onFileAdded"
        @file-success="onFileSuccess"
        @file-progress="onFileProgress"
        @file-error="onFileError"
        >
        <!-- Rendered only when the browser does not support the uploader. -->
        <uploader-unsupport></uploader-unsupport>
        <uploader-drop>
            <div>
                <uploader-btn id="global-uploader-btn" ref="uploadBtn" :attrs="attrs"
                    >选择文件<i class="el-icon-upload el-icon--right"></i
                ></uploader-btn>
            </div>
        </uploader-drop>
        <!-- Per-file upload progress/status list. -->
        <uploader-list></uploader-list>
    </uploader>

  </template>

  <script setup name="FileModal">
  import { ACCEPT_CONFIG } from '@/utils/accept.config';
  import SparkMD5 from 'spark-md5';
  import { useUserStore } from '@/stores/user'
  import { mergeFile } from '@/api/upload';
  const userStore = useUserStore();
  const token = userStore.token;

  // simple-uploader configuration for chunked, resumable uploads.
  const options = reactive({
      // Chunk upload endpoint (POST) and chunk-check endpoint (GET, see testChunks).
      // NOTE(review): hard-coded http://localhost URL — move to env config before deploying.
      target: 'http://localhost:8097/uploader/chunk',
      // Chunk size in bytes. BUG FIX: was the string '2048000'; simple-uploader
      // expects a number for its size arithmetic.
      chunkSize: 2048000,
      // Multipart field name carrying the chunk payload (server-side default: "file").
      fileParameterName: 'file',
      // Optional GET query params for the pre-upload chunk check:
      // query: (file) => ({
      //     identifier: file.uniqueIdentifier,
      //     filename: file.name
      // }),
      // Extra POST params sent with every chunk upload.
      params: (file, chunk) => ({
          identifier: file.uniqueIdentifier,
          // chunk.offset is 0-based; the server expects 1-based chunk numbers.
          chunkNumber: chunk.offset + 1
      }),
      headers: {
          // Attach the auth token; adapt to the project's auth scheme as needed.
          Authorization: `Bearer ${token}`
      },
      maxChunkRetries: 3,   // max automatic retries per failed chunk
      testChunks: true,     // GET the target first so already-uploaded chunks are skipped
      // Decide from the chunk-check response whether this chunk can be skipped.
      checkChunkUploadedByResponse: function (chunk, response_msg) {
          let objMessage = {};
          try {
              objMessage = JSON.parse(response_msg);
          } catch (e) {
              // Malformed/empty response: treat the chunk as not uploaded yet.
              return false;
          }
          if (objMessage.skipUpload) {
              // Whole file already exists on the server ("instant upload").
              return true;
          }
          return (objMessage.uploadedChunks || []).indexOf(chunk.offset + 1) >= 0;
      },
  });

  // Restrict the file picker to the project's accepted types.
  const attrs = reactive({
      accept: ACCEPT_CONFIG.getAll(),
  });

  // Human-readable status labels shown by <uploader-list>.
  const fileStatusText = reactive({
      success: '上传成功',
      error: '上传失败',
      uploading: '上传中',
      paused: '暂停',
      waiting: '等待上传',
  });
  


  // Dialog title text.
  const title = ref('上传')
  // Dialog visibility flag.
  // BUG FIX: `open` was assigned below but never declared, so `open.value = true`
  // silently wrote a property onto the global window.open function instead of
  // toggling any reactive state.
  const open = ref(false)
  // Open the upload dialog; exposed to the parent via defineExpose.
  const openModal = async () => {
    open.value = true;
  }


  onMounted(() => {
      //console.log(uploaderRef.value, 'uploaderRef.value');
  });
  
  // Uploader "file-progress" hook: log per-chunk progress, offsets in MiB.
  function onFileProgress(rootFile, file, chunk) {
      const startMB = chunk.startByte / 1024 / 1024;
      const endMB = chunk.endByte / 1024 / 1024;
      console.log(`上传中 ${file.name}，chunk：${startMB} ~ ${endMB}`);
  }
  // Uploader "file-added" hook: compute the file's MD5 before uploading so it
  // gets a stable uniqueIdentifier (computeMD5 pauses the file and resumes it
  // once the hash is ready).
  function onFileAdded(file) {
      computeMD5(file);
  }
   
  /**
   * Uploader "file-success" hook: every chunk reached the server, so ask the
   * backend to merge them into the final file.
   * @param rootFile top-level file/folder entry
   * @param file the uploaded file wrapper (carries uniqueIdentifier, name, size)
   * @param response raw server response of the last chunk upload
   * @param chunk the last uploaded chunk (offset is 0-based)
   */
  function onFileSuccess(rootFile, file, response, chunk) {

      console.log("文件上传成功rootFile", rootFile)
      console.log("文件上传成功file", file)

      console.log("文件上传成功response", response)

      console.log("文件上传成功chunk", chunk)
      if (chunk.offset === 0) {
          // Single-chunk upload: nothing to merge on the server.
          return;
      }
      // refProjectId is reserved to associate the attachment with its owner
      // (archive, project, ...) if needed later.
      mergeFile({
          identifier: file.uniqueIdentifier, // must match the identifier sent per chunk
          filename: file.name,               // original file name, extension included
          totalSize: file.size,
          // NOTE(review): chunk.offset is the 0-based index of the LAST chunk,
          // so the true chunk count is offset + 1 — confirm what the server
          // expects before changing this value.
          totalChunks: chunk.offset

      })
          .then((responseData) => {
              // BUG FIX: previously logged the stale `response` (last chunk's
              // upload response) instead of the merge result.
              console.log("合并的结果", responseData)
              if (responseData.data.code === 415) {
                  console.log('合并操作未成功，结果码：' + responseData.data.code);
              }
          //	ElMessage.success(responseData.data);
          })
          .catch(function (error) {
              console.log('合并后捕获的未知异常：' + error);
          });
  }
  // Uploader "file-error" hook: surface the server's error payload in the console.
  function onFileError(rootFile, file, response, chunk) {
      const detail = '上传完成后异常信息：' + response;
      console.log(detail);
  }
   
  /**
   * Compute the file's MD5 so it can serve as uniqueIdentifier for resumable
   * and "instant" uploads. Only the FIRST chunk is hashed — hashing whole
   * large files is too slow (deliberate trade-off noted in the original code).
   * Pauses the file while hashing and resumes it once the identifier is set.
   * @param file uploader file wrapper; file.file is the underlying native File
   */
  function computeMD5(file) {
      // Hold the upload until uniqueIdentifier is assigned.
      file.pause();

      // Per-file size limit: 2 GiB.
      let fileSizeLimit = 2 * 1024 * 1024 * 1024;
      console.log('文件大小：' + file.size);
      console.log('限制大小：' + fileSizeLimit);
      if (file.size > fileSizeLimit) {
          file.cancel();
          // BUG FIX: must stop here — previously the function fell through,
          // kept reading the cancelled file, and later called file.resume()
          // on it, restarting an upload that should have been rejected.
          return;
      }

      let fileReader = new FileReader();
      let time = new Date().getTime();
      // Cross-browser Blob.slice (older Firefox/WebKit prefixes).
      let blobSlice =
          File.prototype.slice ||
          File.prototype.mozSlice ||
          File.prototype.webkitSlice;
      let currentChunk = 0;
      const chunkSize = 10 * 1024 * 1000;
      let chunks = Math.ceil(file.size / chunkSize);
      let spark = new SparkMD5.ArrayBuffer();
      // Hash only the first chunk of the file (see function doc).
      let chunkNumberMD5 = 1;

      loadNext();

      fileReader.onload = (e) => {
          spark.append(e.target.result);

          if (currentChunk < chunkNumberMD5) {
              loadNext();
          } else {
              let md5 = spark.end();
              file.uniqueIdentifier = md5;
              // Identifier is ready — release the paused upload.
              file.resume();
              console.log(
                  `MD5计算完毕：${file.name} \nMD5：${md5} \n分片：${chunks} 大小:${
                      file.size
                  } 用时：${new Date().getTime() - time} ms`
              );
          }
      };

      fileReader.onerror = function () {
          error(`文件${file.name}读取出错，请检查该文件`);
          file.cancel();
      };

      // Read the next chunkSize-byte slice of the native File into the hasher.
      function loadNext() {
          let start = currentChunk * chunkSize;
          let end = start + chunkSize >= file.size ? file.size : start + chunkSize;

          fileReader.readAsArrayBuffer(blobSlice.call(file.file, start, end));
          currentChunk++;
          console.log('计算第' + currentChunk + '块');
      }
  }
  // Template ref handle to the <uploader> component instance.
  const uploaderRef = ref();
  // Cancel all in-flight uploads (intended for dialog close/teardown).
  function close() {
      uploaderRef.value.cancel();
  }
  // Minimal error reporter — console-only for now.
  function error(msg) {
      console.log(msg, 'msg');
  }
  
  // Close the dialog by clearing the visibility flag.
  // NOTE(review): relies on an `open` ref being declared in this script —
  // verify such a declaration exists.
  const cancel = e => {
    open.value = false
  }
  // Confirm/submit handler — currently a no-op (TODO: implement).
  const handleOk = async e => {
    

  };

  // Change handler for a select control; logs the chosen value.
  const handleChange = value => {
    console.log(`selected ${value}`);
  };
  // Expose openModal so a parent component can open this uploader via a template ref.
  defineExpose({ openModal })
  
  </script>
  
  <style scoped>
  /* NOTE(review): no element in this component's template uses .ant-form —
     looks like a leftover from an earlier dialog layout; confirm before removing. */
  .ant-form {
    margin-top: 36px;
  }
  </style>