import Util from '@/utils/test.js'
import Http from '@/utils/http.js'
import SparkMD5 from 'spark-md5'

// Chunked-upload helper. Flow: submit() converts the slice size to bytes and
// calls uploadChunk(), which hashes the file (getFileMD5), tries a server-side
// "fast" pass, and otherwise uploads slices one by one via beforeUploadChunk()
// until merageUploadChunks() asks the server to assemble them.
const upload = {
  // Enable chunked upload by default.
  chunked: true,
  chunkSize: 4, // Default slice size in MB (converted to bytes in submit()).
  /**
   * 上传入口
   * @param    {object}                       params    上传数据
   * @param    {boolean}                       chunked   是否切片
   * @param    {integer}                       chunkSize 切片大小
   */
  submit: function(params, chunked, chunkSize) {
    chunkSize = Util.isDef(chunkSize) ? chunkSize : this.chunkSize
    chunkSize = chunkSize * 1024 * 1024 // 单位转换为byte
    if (Util.isTrue(chunked)) {
      return this.uploadChunk(params, chunkSize)
    }
  },
  /**
   * 开始处理上传
   * @param    {[type]}                       params    [description]
   * @param    {[type]}                       chunkSize [description]
   */
  uploadChunk: function(params, chunkSize) {
    var _this = this
    var file = params.file
    // 总片数
    var chunks = this.getchunkCount(file, chunkSize)
    // 计算MP5值后上传
    _this.getFileMD5(file, function(md5) {
      // 使用MP5校验是否已经传过文件
      Http.get(
        '/upload/fast',
        {
          md5: md5,
          name: file.name
        },
        function(response) {
          // 上传过
          params.onSuccess(response, params.file)
        },
        function(response) {
          // 未上传过调用上传前置方法
          _this.beforeUploadChunk(params, {
            file: file,
            chunkSize: chunkSize,
            chunks: chunks,
            md5: md5
          })
        },
        false
      )
    })
  },
  /**
   * 上传前置方法
   * @param    {[type]}                       params [description]
   * @param    {[type]}                       info   [description]
   */
  beforeUploadChunk: function(params, info) {
    var _this = this
    var currentChunk = Util.isDef(info.chunk) ? info.chunk : 0
    if (currentChunk >= info.chunks) {
      return _this.merageUploadChunks(params, info)
    }
    var chunkFile = _this.getChunkFileData(info.file, currentChunk, info.chunkSize)
    var chunkFR = new FileReader()
    chunkFR.readAsBinaryString(chunkFile)
    // 文件加载完成后上传
    return chunkFR.addEventListener(
      'load',
      function(e) {
        // var chunkBolb = e.target.result;
        var ret = _this.uploadChunkCheck(params, {
          md5: info.md5,
          file: chunkFile,
          chunk: currentChunk,
          chunks: info.chunks,
          size: e.total
        })
        var _callback = function(params, info, currentChunk) {
          // 进度回调
          params.file.percent = Math.floor((currentChunk / info.chunks) * 100)
          params.onProgress(params.file)
          // 上传下一片
          info.chunk = currentChunk + 1
          // 回调上传
          return _this.beforeUploadChunk(params, info)
        }
        if (ret instanceof Promise) {
          return ret.then(function(res) {
            if (res === true) {
              return _callback(params, info, currentChunk)
            }
          })
        } else {
          return _callback(params, info, currentChunk)
        }
      },
      false
    )
  },
  /**
   * 验证是否上传分片
   * @param    {[type]}                       params      [description]
   * @param    {[type]}                       checkParams [description]
   * @return   {[type]}                                   [description]
   */
  uploadChunkCheck: function(params, checkParams) {
    var _this = this
    return Http.get(
      '/upload/chunk/check',
      {
        md5: checkParams.md5,
        chunk: checkParams.chunk,
        chunks: checkParams.chunks,
        size: checkParams.size
      },
      function(response) {
        // 存在
        return true
      },
      function(response) {
        // 不存在,上传文件
        return _this.putFile(checkParams)
      },
      false
    )
  },
  putFile: function(filedata) {
    var data = new FormData()
    data.append('name', filedata.md5 + '.part')
    data.append('md5', filedata.md5)
    data.append('file', filedata.file)
    data.append('chunk', filedata.chunk)
    data.append('chunks', filedata.chunks)
    data.append('size', filedata.size)
    return Http.post(
      '/upload/chunk',
      data,
      function(response) {
        // 上传成功
        return true
      },
      function(response) {
        return response
      },
      false,
      {
        headers: {
          'Content-Type': 'multipart/form-data;charset=UTF-8'
        }
      }
    )
  },
  merageUploadChunks: function(params, info) {
    return Http.post(
      '/upload/chunk/merage',
      {
        name: params.file.name,
        md5: info.md5,
        chunks: info.chunks,
        chunkSize: info.chunkSize
      },
      function(response) {
        params.onSuccess(response, params.file)
      },
      function(response) {
        return false
      },
      false
    )
  },
  /**
   * 获取切片总数
   * @param    {[type]}                       file      [description]
   * @param    {[type]}                       chunkSize [description]
   * @return   {[type]}                                 [description]
   */
  getchunkCount: function(file, chunkSize) {
    return Math.ceil(file.size / chunkSize)
  },
  /**
   * 获取切片文件
   * @param    {[type]}                       file         [description]
   * @param    {[type]}                       currentChunk [description]
   * @param    {[type]}                       chunkSize    [description]
   * @return   {[type]}                                    [description]
   */
  getChunkFileData: function(file, currentChunk, chunkSize) {
    var start = currentChunk * chunkSize
    var end = Math.min(file.size, start + chunkSize)
    var chunkFile = file.slice(start, end)
    return chunkFile
  },
  // 获得文件md5
  getFileMD5: function(file, callback) {
    // 声明必要的变量
    var fileReader = new FileReader()

    // 文件每块分割2M，计算分割详情
    var chunkSize = 2097152
    var chunks = Math.ceil(file.size / chunkSize)
    var currentChunk = 0

    // 创建md5对象（基于SparkMD5）
    var spark = new SparkMD5()

    // 每块文件读取完毕之后的处理
    fileReader.onload = function(e) {
      // 每块交由sparkMD5进行计算
      spark.appendBinary(e.target.result)
      currentChunk++

      // 如果文件处理完成计算MD5，如果还有分片继续处理
      if (currentChunk < chunks) {
        loadNext()
      } else {
        callback(spark.end())
      }
    }

    // 处理单片文件的上传
    function loadNext() {
      var start = currentChunk * chunkSize
      var end = start + chunkSize >= file.size ? file.size : start + chunkSize

      fileReader.readAsBinaryString(file.slice(start, end))
    }
    loadNext()
  }
}

export default upload
