<template>
  <div class="fileUpload">
    <!-- vue-simple-uploader root. :autoStart is false so onFileAdded can MD5-hash
         the file first; computeMD5 calls file.resume() once the hash is ready. -->
    <uploader
      ref="uploader"
      :options="options"
      :file-status-text="fileStatusText"
      :autoStart="false"
      @file-added="onFileAdded"
      @file-progress="onFileProgress"
      @file-success="onFileSuccess"
      @file-error="onFileError"
      class="uploader">
      <uploader-unsupport></uploader-unsupport>
      <!-- Drag-and-drop zone with buttons for single files and whole directories -->
      <uploader-drop>
        <uploader-btn class="upfile"><i class="iconfont icon-upload"></i> 上传文件</uploader-btn>
        <uploader-btn class="updir" :directory="true"><i class="iconfont icon-dir"></i> 上传文件夹</uploader-btn>
      </uploader-drop>
      <!-- Built-in list showing each file's progress and status text -->
      <uploader-list></uploader-list>
    </uploader>
  </div>
</template>

<script>
import axios from 'axios';
import SparkMD5 from 'spark-md5';

export default {
  data() {
    return {
      options: {
        target: 'http://localhost:18989/fileUpload',
        chunkSize: 2097152,  //2MB
        simultaneousUploads: 1, //并发上传数
        maxChunkRetries: 2, //最大自动失败重试上传次数
        testChunks: true,   //开启服务端分片校验
        // 服务器分片校验函数
        checkChunkUploadedByResponse: (chunk, message) => {

          let obj = JSON.parse(message);
          if (obj.code == 200) {
           // this.statusTextMap.success = '秒传文件';
            //return true;
          }

          return (obj.uploaded || []).indexOf(chunk.offset + 1) >= 0
        },
      },

      statusTextMap: {
        success: '上传成功',
        error: '上传出错了',
        uploading: '上传中...',
        paused: '暂停',
        waiting: '等待中...',
        cmd5: '计算md5...'
      },

      fileStatusText: (status, response) => {
        return this.statusTextMap[status];
      },
    }
  },
  created() {
    // Lifecycle hook intentionally left empty (placeholder).
  },
  methods: {
    onFileAdded(file) {
      // 计算MD5
      this.computeMD5(file);
    },

    //计算MD5
    computeMD5(file) {
      let blobSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice,
        chunkSize = 2097152,
        chunks = Math.ceil(file.size / chunkSize),
        currentChunk = 0,
        spark = new SparkMD5.ArrayBuffer(),
        fileReader = new FileReader();

      let time = new Date().getTime();

      file.cmd5 = true;

      fileReader.onload = (e) => {
        this.statusTextMap.paused ="校验中...";

        spark.append(e.target.result);   // Append array buffer
        currentChunk++;

        if (currentChunk < chunks) {
          //console.log(`第${currentChunk}分片解析完成, 开始第${currentChunk +1} / ${chunks}分片解析`);
          let percent = Math.floor(currentChunk / chunks * 100);
          file.cmd5progress = percent;
          loadNext();
        } else {
          console.log('finished loading');
          let md5 = spark.end();
          console.log(`MD5计算完成：${file.name} \nMD5：${md5} \n分片：${chunks} 大小:${file.size} 用时：${new Date().getTime() - time} ms`);
          spark.destroy(); //释放缓存
          file.uniqueIdentifier = md5; //将文件md5赋值给文件唯一标识
          file.cmd5 = false; //取消计算md5状态
          this.statusTextMap.paused ="暂停...";

          file.resume(); //开始上传
        }
      };

      fileReader.onerror = () => {
        console.warn('oops, something went wrong.');
        file.cancel();
      };

      let loadNext = () => {
        let start = currentChunk * chunkSize,
          end = ((start + chunkSize) >= file.size) ? file.size : start + chunkSize;

        fileReader.readAsArrayBuffer(blobSlice.call(file.file, start, end));
      };

      loadNext();
    },
    // 文件进度的回调
    onFileProgress(rootFile, file, chunk) {
      console.log(`上传中 ${file.name}，chunk：${chunk.startByte / 1024 / 1024} ~ ${chunk.endByte / 1024 / 1024}`)
    },
    onFileSuccess(rootFile, file, response, chunk) {
      let resp = JSON.parse(response);
      console.log(resp)
    },

    // An upload failed (after maxChunkRetries); log the raw server response
    // for debugging.
    onFileError(rootFile, file, response, chunk) {
      console.log('Error:', response)
    },
  }
}
</script>
