import { FC } from "react";
import { Upload } from "../../component";
import SparkMD5 from "spark-md5";
import axios from "axios";
import { message as antdMessage } from "antd";
// Single source of truth for the chunk size: both the hashing loop and the
// upload slicing MUST agree on it, or resumed uploads will mis-align chunks.
// (Previously duplicated as `2 * 1024 * 1024` and the magic number 2097152.)
const CHUNK_SIZE = 2 * 1024 * 1024; // 2 MiB

const Home: FC = () => {
  /**
   * Upload `file` with chunked, resumable semantics:
   * 1. hash the whole file so the server can identify it,
   * 2. ask `/api/checkFile` what (if anything) is already uploaded,
   * 3. POST only the missing chunks, in parallel,
   * 4. ask `/api/mergeFile` to assemble them.
   */
  const upload = async (file: File) => {
    const hash = await createFileHash(file);
    const extName = file.name.slice(file.name.lastIndexOf("."));
    const beforeUploadResult = await axios.post("/api/checkFile", {
      hash,
      extName,
    });
    // NOTE(review): `uploaded` appears to be `true` when the file is fully
    // uploaded, or an array of already-present chunk names for resume —
    // confirm against the server API.
    const { uploaded, message } = beforeUploadResult.data;
    if (uploaded === true) {
      // Fast path: the whole file already exists server-side.
      antdMessage.success(message);
      return;
    }
    // Guard with Array.isArray: a truthy non-array would previously have
    // crashed on `.includes` below.
    const existChunks: string[] = Array.isArray(uploaded) ? uploaded : [];
    const chunkCount = Math.ceil(file.size / CHUNK_SIZE);
    const chunks: { chunk: Blob; name: string }[] = [];
    for (let i = 0; i < chunkCount; i++) {
      // Chunk names embed the file hash and index so the server can both
      // dedupe and reassemble in order.
      const name = `${hash}-${i}${extName}`;
      if (!existChunks.includes(name)) {
        chunks.push({
          chunk: file.slice(i * CHUNK_SIZE, (i + 1) * CHUNK_SIZE),
          name,
        });
      }
    }
    // Upload missing chunks in parallel; merge only after every one lands.
    await Promise.all(
      chunks.map(({ chunk, name }) => {
        const formData = new FormData();
        formData.append("chunk", chunk, name);
        return axios.post("/api/upload", formData);
      })
    );
    await axios.post("/api/mergeFile", { hash, extName });
  };

  /**
   * Compute the MD5 of `file` incrementally (chunk by chunk) so large files
   * are never loaded into memory at once.
   *
   * @returns the hex MD5 digest of the file's contents.
   * @throws (rejects) if the underlying FileReader fails. Previously the
   *   promise never settled on a read error — `upload` would hang forever.
   */
  const createFileHash = (file: File): Promise<string> => {
    return new Promise<string>((resolve, reject) => {
      const chunkCount = Math.ceil(file.size / CHUNK_SIZE);
      const spark = new SparkMD5.ArrayBuffer();
      const fileReader = new FileReader();
      let currentChunk = 0;

      fileReader.onload = () => {
        // `readAsArrayBuffer` guarantees an ArrayBuffer result on load.
        spark.append(fileReader.result as ArrayBuffer);
        currentChunk++;
        if (currentChunk < chunkCount) loadNext();
        else resolve(spark.end());
      };
      fileReader.onerror = () => {
        // Fix: reject instead of only console.warn-ing, so callers don't hang.
        reject(new Error("Failed to read file while computing its hash."));
      };

      function loadNext() {
        const start = currentChunk * CHUNK_SIZE;
        const end = Math.min(start + CHUNK_SIZE, file.size);
        fileReader.readAsArrayBuffer(file.slice(start, end));
      }
      loadNext();
    });
  };

  return (
    <div>
      <Upload upload={upload}>
        <button>上传文件</button>
      </Upload>
    </div>
  );
};
export default Home;
