import React from "react";
import { Upload } from "../../component";
import SparkMD5 from "spark-md5";
import axios from "axios";
import { message as antdMessage } from "antd";
const Home = () => {
  /**
   * Upload a file with chunked + "instant" (deduplicated) upload:
   * 1. Compute an MD5 content hash of the file (dedup key).
   * 2. Ask the server whether the file, or some of its chunks, already exist.
   * 3. Upload only the missing 2 MB chunks in parallel, then request a merge.
   */
  const upload = async (file: File) => {
    // 1. Content hash uniquely identifies the file regardless of its name.
    const hash = await createFileHash(file);
    const extName = file.name.slice(file.name.lastIndexOf("."));

    // 2. Pre-upload check: does the server already have this file / chunks?
    const beforeUploadResult = await axios.post("/api/checkFile", {
      hash,
      extName,
    });

    // `uploaded` is either `true` (whole file exists) or a list of
    // already-uploaded chunk names — assumption based on usage below;
    // TODO(review): confirm against the /api/checkFile contract.
    const { uploaded, message } = beforeUploadResult.data;
    if (uploaded === true) {
      // Instant upload: the server already has the complete file.
      antdMessage.success(message);
      return;
    }

    // Chunk names the server already stores; these are skipped below.
    // Array.isArray guards against non-array truthy values.
    const existChunks: string[] = Array.isArray(uploaded) ? uploaded : [];

    // Split the file into 2 MB chunks.
    const chunkSize = 2 * 1024 * 1024;
    const chunkCount = Math.ceil(file.size / chunkSize);

    const chunks: { chunk: Blob; name: string }[] = [];
    for (let i = 0; i < chunkCount; i++) {
      // Deterministic name (hash + index + extension) lets the server
      // identify and later merge the chunks.
      const name = `${hash}-${i}${extName}`;
      if (!existChunks.includes(name)) {
        chunks.push({
          chunk: file.slice(i * chunkSize, (i + 1) * chunkSize),
          name,
        });
      }
    }

    // 3. Upload all missing chunks in parallel and wait for completion.
    await Promise.all(
      chunks.map(({ chunk, name }) => {
        const formData = new FormData();
        formData.append("chunk", chunk, name);
        return axios.post("/api/upload", formData);
      })
    );

    // 4. Ask the server to assemble the chunks into the final file.
    await axios.post("/api/mergeFile", { hash, extName });
  };

  /**
   * Compute the MD5 hash of a File by reading it in 2 MB slices and
   * feeding each slice into an incremental SparkMD5 hasher.
   *
   * @returns a promise resolving to the hex MD5 digest; rejects if any
   *          read fails (the original implementation only logged a warning,
   *          which left the promise pending forever).
   */
  const createFileHash = (file: File): Promise<string> => {
    return new Promise((resolve, reject) => {
      const chunkSize = 2 * 1024 * 1024; // read in chunks of 2 MB
      const chunkCount = Math.ceil(file.size / chunkSize);
      const spark = new SparkMD5.ArrayBuffer();
      const fileReader = new FileReader();
      let currentChunk = 0;

      fileReader.onload = () => {
        // FileReader.result is an ArrayBuffer after readAsArrayBuffer.
        spark.append(fileReader.result as ArrayBuffer);
        currentChunk++;

        if (currentChunk < chunkCount) {
          loadNext();
        } else {
          resolve(spark.end()); // hex digest of the whole file
        }
      };

      fileReader.onerror = () => {
        // Reject so the caller's `await` surfaces the failure instead of hanging.
        reject(new Error("Failed to read file while computing hash."));
      };

      function loadNext() {
        const start = currentChunk * chunkSize;
        const end = Math.min(start + chunkSize, file.size);
        fileReader.readAsArrayBuffer(file.slice(start, end));
      }

      loadNext();
    });
  };

  return (
    <div>
      <Upload upload={upload}>
        <button>上传文件</button>
      </Upload>
    </div>
  );
};

export default Home;
