import React, { useState, useCallback } from "react";
import { Button } from "antd";
import SparkMD5 from "spark-md5";

/**
 * Compute the MD5 hex digest of a single blob slice with spark-md5.
 *
 * Uses the standard `Blob.prototype.arrayBuffer()` instead of a
 * hand-rolled Promise around FileReader — same result, far less code.
 * (The file already relies on other modern Web APIs like ReadableStream,
 * so `arrayBuffer()` is safely within the supported feature set.)
 *
 * @param chunk - the blob slice to hash
 * @returns the lowercase hex MD5 digest of the chunk's bytes
 * @throws rejects if the underlying read fails (DOMException from the
 *         Blob read, instead of the previous custom Error messages)
 */
const calculateHash = async (chunk: Blob): Promise<string> => {
  const spark = new SparkMD5.ArrayBuffer();
  spark.append(await chunk.arrayBuffer());
  return spark.end();
};

// Size of each file slice: 5 MiB.
const chunkSize = 5 * 1024 * 1024;

/**
 * Demo component: slice a large file into 5 MiB chunks and hash it with
 * spark-md5, either streaming per-chunk digests as they are computed or
 * reading the whole file once to also produce a combined digest.
 */
export const LargeFileSlice = () => {
  const [file, setFile] = useState<File | null>(null);
  // Per-chunk MD5 digests, in chunk order.
  const [hashs, setHashs] = useState<string[]>([]);
  // MD5 of the whole file (all chunks folded into one digest).
  const [totalHash, setTotalHash] = useState<string>("");
  const [isLoading, setIsLoading] = useState<boolean>(false);

  // Stream the file chunk by chunk, emitting each chunk's hash as soon as
  // it is computed.
  const readAndCalculateHash = useCallback(async () => {
    if (!file) return;
    // Reset previous results so a second click doesn't append duplicates.
    setHashs([]);
    const chunkCount = Math.ceil(file.size / chunkSize);
    const stream = new ReadableStream<string>({
      async start(controller) {
        for (let i = 0; i < chunkCount; i++) {
          const start = i * chunkSize;
          const chunk = file.slice(start, start + chunkSize);
          // 使用spark-md5计算分片hash
          controller.enqueue(await calculateHash(chunk));
        }
        controller.close();
      },
    });
    const reader = stream.getReader();
    try {
      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        setHashs((prev) => [...prev, value]);
      }
    } catch (error) {
      // A read failure inside start() surfaces as a rejected read();
      // without this catch it would be an unhandled promise rejection.
      console.error("计算哈希值时出错:", error);
    }
  }, [file]);

  // Read the file exactly once, folding every chunk into a whole-file
  // digest while also recording each chunk's individual hash.
  const oneTimeCalculateHash = useCallback(async () => {
    if (!file) return;
    setIsLoading(true);
    setTotalHash("");
    // Clear per-chunk results from any previous run.
    setHashs([]);

    try {
      const spark = new SparkMD5.ArrayBuffer();
      const chunkCount = Math.ceil(file.size / chunkSize);

      for (let i = 0; i < chunkCount; i++) {
        const start = i * chunkSize;
        const chunk = file.slice(start, start + chunkSize);

        // 使用 Promise 包装 FileReader — each slice is read from disk
        // only once; the buffer is reused for both digests below.
        const arrayBuffer = await new Promise<ArrayBuffer>(
          (resolve, reject) => {
            const reader = new FileReader();
            reader.onload = (e) => {
              if (e.target?.result) {
                resolve(e.target.result as ArrayBuffer);
              } else {
                reject(new Error("Failed to read chunk"));
              }
            };
            reader.onerror = () => reject(new Error("FileReader error"));
            reader.readAsArrayBuffer(chunk);
          }
        );

        // 增加一个数组储存每片的hash — hash the buffer we already have
        // instead of calling calculateHash(chunk), which re-read the
        // same slice from disk a second time.
        const chunkSpark = new SparkMD5.ArrayBuffer();
        chunkSpark.append(arrayBuffer);
        setHashs((prev) => [...prev, chunkSpark.end()]);

        // Fold the same buffer into the running whole-file digest.
        spark.append(arrayBuffer);
      }

      setTotalHash(spark.end());
    } catch (error) {
      console.error("计算哈希值时出错:", error);
    } finally {
      setIsLoading(false);
    }
  }, [file]);

  return (
    <div className="flex flex-col gap-4">
      <input
        type="file"
        accept="*"
        onChange={(e) => setFile(e.target.files?.[0] || null)}
      />
      <div className="flex gap-4">
        <div>
          <Button onClick={readAndCalculateHash}>边读边计算分片hash</Button>
        </div>
        <div>
          <Button onClick={oneTimeCalculateHash}>一次性计算总hash</Button>
        </div>
      </div>
      <div>
        <p>计算完成</p>
        <p>文件信息</p>
        <p>文件名: {file?.name}</p>
        <p>文件类型: {file?.type}</p>
        <p>文件大小: {file?.size}b</p>
        <p>文件分片数: {hashs.length}</p>
        <p>文件分片hash数组</p>
        <div className="w-[1000px] word-break-all h-[200px] overflow-y-auto">
          {hashs.map((hash, index) => (
            <p key={index}>
              第{index + 1}片 {hash}
            </p>
          ))}
        </div>
        <div>
          <p>总hash</p>
          <p>{isLoading ? "计算中..." : totalHash}</p>
        </div>
      </div>
    </div>
  );
};
