// src/services/file.service.js

/**
 * File upload service: handles chunked uploads — creating file records,
 * saving and recording individual chunks, merging them into the final file,
 * and cleaning up chunk artifacts afterwards.
 */

import path from "path";
import fs from "fs";
import crypto from "crypto";
import { Op } from "sequelize";
import { db } from "../config/sequelize.js";
import { BadRequestError, ConflictError } from "./../utils/errors.js";

export class FileService {
  constructor(db) {
    if (!db || !db.models) {
      throw new Error("必须提供有效的 Sequelize 实例");
    }
    this.db = db;
    this.File = db.models.File;
    this.FileChunk = db.models.FileChunk;
    this.uploadDir = path.resolve(process.cwd(), "uploads");
    this.Op = db.sequelize.Op;

    // 确保上传目录存在
    this.ensureUploadDirs();
  }

  // 确保上传目录存在
  ensureUploadDirs() {
    const dirs = [
      this.uploadDir,
      path.join(this.uploadDir, "chunks"),
      path.join(this.uploadDir, "merged"),
    ];

    dirs.forEach((dir) => {
      if (!fs.existsSync(dir)) {
        fs.mkdirSync(dir, { recursive: true, mode: 0o755 }); // 确保可写权限
      } else {
        // 检查现有目录权限
        try {
          fs.accessSync(dir, fs.constants.W_OK);
          console.log(`目录可写: ${dir}`);
        } catch (err) {
          console.error(`目录不可写: ${dir}`, err);
          throw new Error(`目录不可写: ${dir}`);
        }
      }
    });
  }
  async getFileList() {
    // 实现文件列表获取逻辑
    return this.File.findAll();
  }
  async saveFile({ file, userId, type }) {
    // 实现文件保存逻辑
    return {
      url: `/uploads/${file.filename}`,
      size: file.size,
      type: file.mimetype,
    };
  }
  async saveThumbnail({ file, userId, type }) {
    // 实现缩略图保存逻辑
    return {
      url: `/uploads/thumbnails/${file.filename}`,
      size: file.size,
      type: file.mimetype,
    };
  }

  // 添加 calculateChunkHash 方法
  async calculateChunkHash(buffer) {
    return new Promise((resolve, reject) => {
      const hash = crypto.createHash("md5");
      hash.update(buffer);
      resolve(hash.digest("hex"));
    });
  }
  /**
   * 计算文件流的分块哈希值
   * @param {string} filePath - 需要计算哈希的文件路径
   * @returns {Promise<string>} 返回一个Promise，解析为文件的MD5哈希值（十六进制字符串）
   */
  async calculateChunkHashStream(filePath) {
    return new Promise((resolve, reject) => {
      // 创建MD5哈希对象
      const hash = crypto.createHash("md5");
      // 创建可读流，用于读取文件内容
      const stream = fs.createReadStream(filePath);

      // 当有数据块被读取时，更新哈希值
      stream.on("data", (data) => hash.update(data));
      // 当文件读取完成时，返回最终的哈希值
      stream.on("end", () => resolve(hash.digest("hex")));
      // 当发生错误时，拒绝Promise
      stream.on("error", reject);
    });
  }

  // 或者使用同步版本
  calculateChunkHashSync(buffer) {
    const hash = crypto.createHash("md5");
    hash.update(buffer);
    return hash.digest("hex");
  }

  /**
   * 初始化分片上传
   * @param {Object} options - 包含文件信息和其他相关参数的对象
   * @param {string} options.fileHash - 文件的MD5哈希值
   * @param {string} options.fileName - 文件名称
   * @param {number} options.fileSize - 文件大小
   * @param {number} options.totalChunks - 总分片数
   * @param {Object} options.file - 文件对象
   * @param {number} options.userId - 用户ID
   */
  async createFileRecord({
    fileHash,
    fileName,
    fileSize,
    fileType,
    totalChunks,
    userId,
  }) {
    const transaction = await this.db.sequelize.transaction();

    try {
      // 验证必要参数
      if (!fileHash || !fileName || !fileSize) {
        throw new BadRequestError("缺少必要参数");
      }

      // 检查是否已存在
      const existing = await this.File.findOne({
        where: { file_hash: fileHash },
        include: [
          {
            model: this.FileChunk,
            as: "chunks",
            attributes: ["chunk_index"],
          },
        ],
      });

      if (existing) {
        if (existing.status === "completed") {
          throw new ConflictError("文件已存在且已完成上传");
        }
        // 返回已上传的分片索引
        const uploadedChunks = existing.chunks
          ? existing.chunks
              .map((chunk) => chunk.index)
              .filter((index) => index !== undefined)
          : [];

        await transaction.commit();
        return {
          file: existing,
          uploadedChunks,
          shouldUpload: uploadedChunks.length < totalChunks,
        };
      }
      console.log("文件类型：", fileType);

      // 创建新记录
      const fileRecord = this.File.create({
        file_hash: fileHash,
        file_name: fileName,
        file_size: fileSize,
        file_type: fileType,
        total_chunks: totalChunks,
        user_id: userId,
        status: "uploading",
        created_at: new Date(),
      });
      // 创建分片目录
      const chunkDir = path.join(this.uploadDir, "chunks", fileHash);
      fs.mkdirSync(chunkDir, { recursive: true });

      await transaction.commit();

      return {
        file: fileRecord,
        uploadedChunks: [],
        shouldUpload: true,
      };
    } catch (error) {
      console.error("保存文件记录失败:", error);
      await transaction.rollback();
      throw error;
    }
  }
  /**
   * 保存文件分片
   * @param {*} param0
   * @returns
   */
  async saveChunk({ fileHash, chunkIndex, chunkHash, chunkFile }) {
    console.log("保存分片:", { fileHash, chunkIndex });
    console.log("保存分片目录:", this.uploadDir);
    const transaction = await this.db.sequelize.transaction(); // 新增事务

    // 1. 验证输入数据
    if (!chunkFile || (!chunkFile.buffer && !chunkFile.path)) {
      throw new Error("无效的分片文件数据");
    }

    // 2. 保存物理分片文件
    const chunkDir = path.join(this.uploadDir, "chunks", fileHash);
    const chunkFilePath = path.join(chunkDir, `${chunkIndex}.chunk`);
    const tempPath = chunkFile.path ? path.normalize(chunkFile.path) : null;
    console.log("分片文件路径:", chunkFilePath);
    console.log("标准化路径:", {
      chunkDir,
      chunkFilePath,
      tempPath,
    });
    try {
      // 1. 确保文件记录存在
      const fileRecord = await this.File.findOne({
        where: { file_hash: fileHash },
        transaction,
      });
      console.log("文件记录:", fileRecord);
      if (!fileRecord) throw new Error("关联的文件记录不存在");

      fs.mkdirSync(chunkDir, { recursive: true });

      console.log(`保存分片到: ${chunkFilePath}`);
      console.log("保存分片文件:", chunkFile, chunkFile.buffer);
      // 3. 保存分片文件
      if (chunkFile.buffer) {
        // 内存存储
        fs.writeFileSync(chunkFilePath, chunkFile.buffer);
        console.log(`从Buffer保存分片 ${chunkIndex} 完成`);
      } else if (chunkFile.path) {
        // 磁盘存储 - 移动文件
        if (!fs.existsSync(chunkFile.path)) {
          throw new Error(`临时文件不存在: ${chunkFile.path}`);
        }
        // fs.renameSync(chunkFile.path, chunkFilePath);

        // 先复制再删除，更可靠
        fs.copyFileSync(tempPath, chunkFilePath);
        fs.unlinkSync(tempPath);
        console.log("移动后验证:", {
          sourceExists: fs.existsSync(tempPath),
          targetExists: fs.existsSync(chunkFilePath),
          targetSize: fs.statSync(chunkFilePath).size,
        });
      } else {
        throw new Error("无法获取分片文件数据");
      }
      // 4. 验证文件是否真的保存
      if (!fs.existsSync(chunkFilePath)) {
        throw new Error("文件保存后验证失败");
      }
      // 4. 获取文件大小
      const chunkSize = fs.statSync(chunkFilePath).size;
      console.log(`分片保存验证: ${chunkFilePath} (${chunkSize} bytes)`);

      // 5. 计算分片哈希（使用流式处理避免内存问题）
      // const chunkHash = await this.calculateChunkHashStream(chunkFilePath);
      console.log("分片哈希:", chunkHash);
      // 6. 记录分片信息到数据库
      await this.recordChunkInfo(
        {
          fileHash,
          fileId: fileRecord.id,
          chunkIndex,
          chunkPath: chunkFilePath,
          chunkSize,
          chunkHash,
        },
        { transaction }
      );

      console.log("分片保存成功，已记录到数据库");
      await transaction.commit(); // 显式提交
      return {
        success: true,
        chunkPath: chunkFilePath,
        chunkSize,
        chunkHash,
      };
    } catch (error) {
      console.error("保存分片失败，并清理分片文件:", {
        error: error.message,
        chunkIndex,
        fileHash,
        chunkPath: chunkFilePath,
        tempPath: chunkFile.path,
        bufferSize: chunkFile.buffer?.length,
      });

      await transaction.rollback();
      // 清理失败的文件
      if (chunkFilePath && fs.existsSync(chunkFilePath)) {
        fs.unlinkSync(chunkFilePath);
      }
      throw error;
    }
  }
  /**
   * 合并分片文件
   * @param {*} {  fileHash, chunkIndex, chunkPath, chunkSize, chunkHash }
   * @returns
   */
  async mergeChunks(fileHash) {
    console.log("开始合并分片，fileHash:", fileHash);

    const transaction = await this.db.sequelize.transaction(); // 新增事务
    try {
      // 1. 获取所有分片信息
      const chunks = await this.FileChunk.findAll({
        where: { file_hash: fileHash },
        order: [["chunk_index", "ASC"]],
        transaction, // 加入事务
      });
      console.log("分片信息:", chunks);
      if (!chunks || chunks.length === 0) {
        throw new Error("未找到分片文件");
      }

      console.log(`找到 ${chunks.length} 个分片`);

      // 2. 获取文件基本信息
      const fileRecord = await this.File.findOne({
        where: { file_hash: fileHash },
        transaction,
      });

      if (!fileRecord) {
        throw new Error("关联的文件记录不存在");
      }

      // 3. 创建最终文件路径
      const finalDir = path.join(this.uploadDir, "merged");
      fs.mkdirSync(finalDir, { recursive: true });

      const fileExtension = path.extname(fileRecord.file_name) || ".bin";
      const finalFileName = `${fileHash}${fileExtension}`;
      const finalFilePath = path.join(finalDir, finalFileName);
      const finalFileUrl = `/uploads/merged/${finalFileName}`;

      // 4. 合并分片
      const writeStream = fs.createWriteStream(finalFilePath);

      for (const chunk of chunks) {
        console.log(`正在合并分片 ${chunk.chunk_index}`);

        if (!fs.existsSync(chunk.chunk_path)) {
          throw new Error(`分片文件不存在: ${chunk.chunk_path}`);
        }

        const chunkData = fs.readFileSync(chunk.chunk_path);
        writeStream.write(chunkData);
      }

      // 5. 关闭流并等待完成
      writeStream.end();
      await new Promise((resolve, reject) => {
        writeStream.on("finish", resolve);
        writeStream.on("error", reject);
      });

      // 6. 验证合并后的文件
      const finalStats = fs.statSync(finalFilePath);
      console.log("合并完成，文件大小:", finalStats.size);

      // 7. 验证文件完整性（可选）
      const expectedSize = chunks.reduce(
        (sum, chunk) => sum + chunk.chunk_size,
        0
      );
      if (finalStats.size !== expectedSize) {
        throw new Error(
          `文件大小不匹配: 期望 ${expectedSize}, 实际 ${finalStats.size}`
        );
      }
      console.log("文件保存成功,更新文件记录");
      // 8. 更新文件记录
      await this.File.update(
        {
          file_path: finalFilePath,
          file_url: finalFileUrl,
          status: "completed",
          completed_at: new Date(),
        },
        {
          where: { id: fileRecord.id },
          transaction, // 加入事务
        }
      );

      // 9. 【重要】可以选择性地清理分片文件（建议先保留一段时间）
      await this.cleanChunks(fileHash);

      console.log("文件处理完成", fileRecord);

      await transaction.commit(); // 提交事务
      return {
        id: fileRecord.id,
        url: finalFileUrl,
        path: finalFilePath,
        size: finalStats.size,
        type: fileRecord.file_type,
        name: fileRecord.file_name,
        message: "文件合并成功",
      };
    } catch (error) {
      console.error("合并分片失败:", error);

      await transaction.rollback(); // 回滚数据库操作
      // 更新状态为失败
      await this.File.update(
        {
          status: "failed",
          error_message: error.message,
        },
        {
          where: { file_hash: fileHash },
        }
      );

      throw error;
    }
  }
  /**
   * 清理分片文件
   * @param {string} fileHash - 文件哈希值
   * @param {boolean} [force=false] - 是否强制清理（跳过安全检查）
   */
  async cleanChunks(fileHash, force = false) {
    const chunkDir = path.join(this.uploadDir, "chunks", fileHash);

    try {
      // 安全验证：确保存在对应的已完成文件记录（除非强制清理）
      if (!force) {
        const fileRecord = await this.File.findOne({
          where: {
            file_hash: fileHash,
            status: "completed",
          },
        });

        if (!fileRecord) {
          throw new Error("未找到已完成的上传记录，拒绝清理");
        }

        // 额外验证：确保合并后的文件存在
        if (!fs.existsSync(fileRecord.file_path)) {
          throw new Error("合并后的文件不存在，拒绝清理分片");
        }
      }

      // 物理清理
      if (fs.existsSync(chunkDir)) {
        fs.rmSync(chunkDir, {
          recursive: true,
          force: true, // 强制删除只读文件
        });
        console.log(`分片清理完成: ${fileHash}`);
      }

      // 清理数据库分片记录
      await this.FileChunk.destroy({
        where: { file_hash: fileHash },
      });
    } catch (error) {
      console.error(`清理分片失败 [${fileHash}]:`, error);

      // 失败重试机制（仅当非强制模式时）
      if (!force) {
        setTimeout(() => this.cleanChunks(fileHash, true), 5000); // 5秒后重试并强制清理
      } else {
        throw error; // 强制模式下直接抛出错误
      }
    }
  }
  async getUploadProgress(fileHash) {
    // 获取上传进度
    return {
      uploadedChunks: 5,
      totalChunks: 10,
      progress: 50,
    };
  }
  /**
   * 检测文件是否已存在（支持多种检查方式）
   * @param {string} fileHash - 文件的完整哈希值
   * @param {string} [filename] - 可选的文件名
   * @returns {Promise<boolean>} 文件是否存在
   */
  async checkFileExists(fileHash) {
    try {
      // 1. 检查数据库记录
      const file = await this.File.findOne({
        where: { file_hash: fileHash },
        include: [
          {
            model: this.FileChunk,
            as: "chunks",
            attributes: ["chunk_index", "chunk_size", "chunk_hash"],
            required: false,
          },
        ],
        attributes: ["id", "status", "file_size", "total_chunks"],
      });

      if (!file) {
        return {
          exists: false,
          isCompleted: false,
          uploadedChunks: [],
        };
      }

      // 2. 检查物理分片文件
      const chunkDir = path.join(this.uploadDir, "chunks", fileHash);
      let actualChunks = [];

      if (fs.existsSync(chunkDir)) {
        actualChunks = fs
          .readdirSync(chunkDir)
          .filter((f) => f.endsWith(".chunk"))
          .map((f) => parseInt(f.split(".")[0]))
          .sort((a, b) => a - b);
      }

      // 3. 验证分片完整性
      const isCompleted = file.status === "completed";
      const dbChunks = file.chunks?.map((c) => c.chunk_index) || [];

      // 关键修复：只有同时满足以下条件才认为文件存在
      const realExists =
        isCompleted ||
        (actualChunks.length > 0 && actualChunks.length === file.total_chunks);
      console.log(
        "文件检查结果:",
        realExists,
        isCompleted,
        actualChunks,
        dbChunks
      );
      return {
        exists: realExists,
        isCompleted,
        uploadedChunks: isCompleted ? [] : actualChunks,
        // 添加详细诊断信息
        _diagnostics: {
          dbRecord: !!file,
          dbStatus: file.status,
          dbChunkCount: dbChunks.length,
          actualChunkCount: actualChunks.length,
          totalChunks: file.total_chunks,
        },
      };
    } catch (error) {
      console.error("文件检查异常:", error);
      return {
        exists: false,
        isCompleted: false,
        uploadedChunks: [],
        error: error.message,
      };
    }
  }
  /**
   * 在初始化前检查并清理无效记录
   * @param {string} fileHash - 文件的哈希值
   * @returns {Promise<void>}
   */
  async cleanupOrphanedRecords(fileHash) {
    const file = await this.File.findOne({ where: { file_hash: fileHash } });
    if (!file) return;

    // 检查分片目录是否存在且非空
    const chunkDir = path.join(this.uploadDir, "chunks", fileHash);
    const hasActualChunks =
      fs.existsSync(chunkDir) && fs.readdirSync(chunkDir).length > 0;

    if (!hasActualChunks) {
      await this.File.destroy({ where: { id: file.id } });
      await this.FileChunk.destroy({ where: { file_id: file.id } });
      console.log("清理残留记录:", fileHash);
    }
  }
  /**
   * 更新文件记录（上传完成后）
   */
  async updateFileRecord(fileId, updates) {
    return this.File.update(updates, {
      where: { id: fileId },
    });
  }

  // 确保 recordChunkInfo 方法正确实现
  async recordChunkInfo({
    fileHash,
    fileId,
    chunkIndex,
    chunkPath,
    chunkSize,
    chunkHash,
  }) {
    console.log("保存分片信息：", fileId);
    const transaction = await this.db.sequelize.transaction(); // 新增事务
    try {
      if (!this.FileChunk) {
        throw new Error("FileChunk 模型未注册");
      }
      const file = await this.File.findOne({
        where: { file_hash: fileHash },
        attributes: ["id"],
        transaction,
      });
      if (!file) {
        throw new Error("未找到对应的文件记录");
      }

      // 使用 where 条件来更新，而不是依赖主键
      const [chunk, created] = await this.FileChunk.findOrCreate({
        where: {
          file_hash: fileHash,
          chunk_index: chunkIndex,
        },
        defaults: {
          upload_id: fileHash,
          file_hash: fileHash,
          file_id: fileId,
          chunk_index: chunkIndex,
          chunk_path: chunkPath,
          chunk_size: chunkSize,
          chunk_hash: chunkHash,
          status: "uploaded",
          created_at: new Date(),
          updated_at: new Date(),
        },
        transaction, // 传入事务
      });

      if (!created) {
        // 如果记录已存在，则更新
        await chunk.update(
          {
            chunk_path: chunkPath,
            chunk_size: chunkSize,
            chunk_hash: chunkHash,
            status: "uploaded",
            updated_at: new Date(),
          },
          { transaction }
        );
      }
      // 4. 检查是否所有分片已上传完成
      const uploadedChunksCount = await this.FileChunk.count({
        where: { file_id: file.id, status: "uploaded" },
        transaction,
      });

      if (uploadedChunksCount === file.total_chunks) {
        await this.File.update(
          { status: "completed", completed_at: new Date() },
          { where: { id: file.id }, transaction }
        );
      }
      console.log("分片信息已保存到数据库");
      await transaction.commit();
    } catch (error) {
      console.error("记录分片信息失败:", error);
      await transaction.rollback();
      throw error; // 重新抛出错误，让上层处理
    }
  }

  async getChunksInfo(fileHash) {
    return this.FileChunk.findAll({
      where: { upload_id: fileHash },
      order: [["chunk_index", "ASC"]],
      attributes: [
        "id",
        "chunk_index",
        "chunk_path",
        "chunk_size",
        "chunk_hash",
      ],
    });
  }

  async verifyMergedFile(finalFilePath, expectedMd5) {
    if (!fs.existsSync(finalFilePath)) {
      throw new Error("合并后的文件不存在");
    }

    // 计算合并文件的MD5
    const actualMd5 = await this.calculateChunkHashStream(finalFilePath);

    if (actualMd5 !== expectedMd5) {
      throw new Error(
        `文件完整性验证失败: 期望 ${expectedMd5}, 实际 ${actualMd5}`
      );
    }

    return true;
  }
}
