import service from '../utils/request.js';
import SparkMD5 from 'spark-md5'
import pLimit from 'p-limit'

// Delete a single file by id.
export const deleteFileById = (id) =>
    service.post(`/api/common/file/delete/${id}`);

// Delete multiple files at once; `ids` is sent as the request body.
export const fileBatchDelete = (ids) =>
    service.post('/api/common/file/batchDelete', ids);

// Fetch the public (external) URL for a stored file.
export const getFileUrl = (id) =>
    service.get(`/api/common/file/getFileUrl/${id}`);

// Upload a single file as multipart/form-data.
export const fileUpload = (file) => {
    const body = new FormData();
    body.append('file', file);
    const config = {
        headers: { 'Content-Type': 'multipart/form-data' },
    };
    return service.post('/api/common/file/upload', body, config);
};

// Upload an image supplied as a Base64-encoded string.
export const imageUpload = (imageBase64) => {
    const payload = { imageBase64 };
    return service.post('/api/common/file/upload/image', payload);
};

// Stream-based upload; file name and type are passed as query parameters.
export const streamUpload = (fileName, fileType) =>
    service.post('/api/common/file/upload/stream', null, {
        params: { fileName, fileType },
    });

/**
 * Ask the server which chunks of a file it has already received.
 * @param {string} fileMd5 - MD5 digest identifying the file
 * @param {number} totalChunks - total number of chunks the file splits into
 * @returns {Promise} resolves with a payload whose `data.uploadedChunks`
 *          lists the indices of chunks already stored server-side
 */
export const checkChunkStatus = (fileMd5, totalChunks) =>
    service.get('/api/common/file/chunk/status', {
        params: { fileMd5, totalChunks },
    });

/**
 * Upload a file in chunks with resume support, retries and bounded parallelism.
 *
 * Flow: hash the file -> ask the server which chunks it already has ->
 * upload only the missing chunks (at most `parallel` in flight, each retried
 * up to `maxRetries` times) -> return the URL the server reports.
 *
 * @param {File|Blob} file - file to upload (its `name`/`type` go into the chunk metadata)
 * @param {Object} [options]
 * @param {number} [options.chunkSize=10485760] - bytes per chunk (default 10 MiB)
 * @param {number} [options.maxRetries=3] - retry attempts per failed chunk
 * @param {number} [options.parallel=3] - max chunks uploaded concurrently
 * @param {(percent: number) => void} [onProgress] - 0-100 progress callback;
 *        the transfer phase maps to 0-90, completion jumps to 100
 * @returns {Promise<string|undefined>} the file's external URL (returned by the
 *          server on the final chunk); undefined if the server never reported one
 * @throws {Error} when a chunk still fails after exhausting all retries
 */
export const chunkUpload = async (file, options = {}, onProgress) => {
    const {
        chunkSize = 10 * 1024 * 1024,
        maxRetries = 3,
        parallel = 3
    } = options;

    // Identify the file by content so uploads can resume across sessions.
    const fileMd5 = await computeFileMd5(file);
    // Guard: an empty file still produces one (empty) chunk. With the old
    // `Math.ceil(0 / chunkSize)` nothing was uploaded and no URL came back.
    const totalChunks = Math.max(1, Math.ceil(file.size / chunkSize));

    // Find out which chunks the server already holds.
    const { data: status } = await checkChunkStatus(fileMd5, totalChunks);
    const uploadedChunks = status.uploadedChunks || [];

    // Build the list of chunks that still need uploading.
    const chunks = Array.from({ length: totalChunks }, (_, i) => ({
        index: i,
        blob: file.slice(i * chunkSize, (i + 1) * chunkSize),
        isUploaded: uploadedChunks.includes(i)
    })).filter(chunk => !chunk.isUploaded);

    // Aggregate progress across parallel chunks. The previous implementation
    // derived the percentage from each chunk's own `e.loaded` alone, so with
    // several chunks in flight the reported progress jumped back and forth.
    const loadedByChunk = new Map(); // chunk index -> bytes sent so far
    const baseLoaded = uploadedChunks.length * chunkSize; // already on server
    const reportProgress = () => {
        if (file.size === 0) return; // avoid 0/0 -> NaN
        let inFlight = 0;
        for (const bytes of loadedByChunk.values()) inFlight += bytes;
        const percent = Math.min(100, Math.round((baseLoaded + inFlight) / file.size * 100));
        onProgress?.(Math.round(percent * 0.9)); // transfer phase covers 0-90%
    };

    // Upload one chunk, retrying transparently on failure.
    const uploadTask = async (chunk, retries = 0) => {
        try {
            const formData = new FormData();
            formData.append('chunk', chunk.blob, file.name);
            formData.append('params', new Blob([JSON.stringify({
                chunkIndex: chunk.index,
                fileMd5,
                fileName: file.name,
                fileType: file.type,
                totalChunks
            })], { type: 'application/json' }));

            const response = await service.post('/api/common/file/smartUpload', formData, {
                headers: { 'Content-Type': 'multipart/form-data' },
                onUploadProgress: e => {
                    loadedByChunk.set(chunk.index, e.loaded);
                    reportProgress();
                }
            });
            // The final chunk's response carries the complete external URL;
            // intermediate chunks just echo their index.
            return response.code === 200 ? response.data : chunk.index;
        } catch (err) {
            if (retries < maxRetries) return uploadTask(chunk, retries + 1);
            throw new Error(`分片${chunk.index}上传失败: ${err.message}`);
        }
    };

    // Bound concurrency: never more than `parallel` chunks in flight at once.
    const limit = pLimit(parallel);
    const results = await Promise.all(chunks.map(chunk => limit(() => uploadTask(chunk))));

    // The merged file's URL is whichever result is a string. If every chunk
    // was already uploaded in a previous session, `results` is empty; fall
    // back to a URL the status endpoint may have reported (still undefined
    // otherwise, matching the old behavior). TODO confirm the status payload
    // exposes `fileUrl` for fully-uploaded files.
    const fileUrl = results.find(r => typeof r === 'string') ?? status.fileUrl;

    // Signal completion of the merge phase.
    onProgress?.(100);
    return fileUrl;
};

/**
 * Compute the hex MD5 digest of a File/Blob.
 *
 * Fixes over the previous version:
 *  - reads the file slice-by-slice and feeds SparkMD5 incrementally, so a
 *    multi-GB file is never loaded into memory as one giant ArrayBuffer;
 *  - rejects on read errors (the old promise hung forever if the read failed).
 *
 * @param {File|Blob} file - data to hash
 * @param {number} [sliceSize=2097152] - bytes read per FileReader pass (2 MiB)
 * @returns {Promise<string>} hex MD5 of the file's full contents
 */
const computeFileMd5 = (file, sliceSize = 2 * 1024 * 1024) => {
    return new Promise((resolve, reject) => {
        const spark = new SparkMD5.ArrayBuffer();
        const reader = new FileReader();
        const totalSlices = Math.ceil(file.size / sliceSize);
        let currentSlice = 0;

        const readNext = () => {
            const start = currentSlice * sliceSize;
            reader.readAsArrayBuffer(file.slice(start, start + sliceSize));
        };

        reader.onload = (e) => {
            spark.append(e.target.result);
            currentSlice += 1;
            if (currentSlice < totalSlices) {
                readNext();
            } else {
                resolve(spark.end());
            }
        };

        reader.onerror = () => {
            reject(reader.error ?? new Error('Failed to read file for MD5 hashing'));
        };

        if (totalSlices === 0) {
            // Empty file: digest of zero bytes, same result the old code gave.
            resolve(spark.end());
        } else {
            readNext();
        }
    });
};
