import fs from 'node:fs';
import path from 'node:path';
import { Injectable, NotFoundException, Req, Res } from '@nestjs/common';
import dayjs from 'dayjs';
import { isNil } from 'lodash';
import { BusinessException } from '~/common/exceptions/business.exception';
import { ErrorEnum } from '~/constants/error-code.constant';
import { MongodbService } from '../../../shared/mongodb/mongodb.service';
import {
  fileRename,
  getExtname,
  getFilePath,
  getFileType,
  getSize,
  saveLocalFile,
  saveLocalChunkFile,
} from '~/utils/file.util';

@Injectable()
export class UploadService {
  constructor(private readonly mongodbService: MongodbService) {}

  /**
   * Persist an uploaded file to local storage and record its metadata
   * in MongoDB.
   *
   * @param file multer file object; `buffer` must hold the file content
   * @returns the public path of the stored file
   * @throws NotFoundException when no file was provided
   */
  async saveFile(file: Express.Multer.File): Promise<string> {
    if (isNil(file)) throw new NotFoundException('请先上传文件!');

    const fileName = file.originalname;
    const extName = getExtname(fileName);
    const type = getFileType(extName);
    const name = fileRename(fileName);
    const currentDate = dayjs().format('YYYY-MM-DD');
    const path = getFilePath(name, currentDate, type);

    // `file.buffer` is a plain Buffer, not a Promise — no `await` needed.
    saveLocalFile(file.buffer, name, currentDate, type);

    const fileData = {
      name,
      fileName,
      extName,
      path,
      type,
      size: file.size.toString(),
    };
    const record = new this.mongodbService.StorageModel(fileData);
    await record.save();

    return path;
  }

  /**
   * Store a single chunk of a chunked upload.
   *
   * NOTE(review): `@Req()` parameter decorators belong on controller
   * handlers, not service methods — removed here; the controller passes
   * the request object explicitly, so the call signature is unchanged.
   *
   * @param file multer chunk object; `buffer` must hold the chunk content
   * @param req  request whose body carries `fileHash` and `chunkHash`
   * @throws NotFoundException when no chunk was provided
   */
  async saveChunk(
    file: Express.Multer.File,
    req: { body: { fileHash: string; chunkHash: string } },
  ): Promise<string> {
    if (isNil(file)) throw new NotFoundException('请先上传文件!');

    // assumes saveLocalChunkFile derives the on-disk path from the hashes —
    // TODO confirm it rejects path separators in fileHash/chunkHash.
    saveLocalChunkFile(file.buffer, req.body.fileHash, req.body.chunkHash);
    return '分片上传成功';
  }

  /**
   * Merge previously uploaded chunks into the final file, save a storage
   * record, and delete the chunk directory.
   *
   * Fixes over the original implementation:
   * - the merge is now awaited: the original piped fire-and-forget streams
   *   and returned (and saved the DB record) before the merged file was
   *   fully written — a race;
   * - each write stream was opened with the default 'w' flag, so every
   *   chunk's stream truncated the target file; sequential awaited appends
   *   avoid that entirely;
   * - chunk ordering parses the LAST '-' segment, matching checkChunks,
   *   so a '-' inside fileHash cannot break the sort.
   */
  async uploadChunkMerge(req: {
    body: { fileHash: string; fileName: string; fileSize: number | string };
  }) {
    const { fileHash, fileName, fileSize } = req.body;
    const chunkDir = 'public/chunk/' + fileHash;
    const mergedPath = 'public/chunk/' + fileName;

    // Chunk files are named `<fileHash>-<index>`; order by the numeric index.
    const chunkIndex = (f: string): number => {
      const parts = f.split('-');
      return Number(parts[parts.length - 1]);
    };
    const files = (await fs.promises.readdir(chunkDir)).sort(
      (a, b) => chunkIndex(a) - chunkIndex(b),
    );

    // Rebuild the target file by appending each chunk in order, then
    // remove the chunk directory only after the merge is complete.
    await fs.promises.rm(mergedPath, { force: true });
    for (const chunk of files) {
      const data = await fs.promises.readFile(chunkDir + '/' + chunk);
      await fs.promises.appendFile(mergedPath, data);
    }
    await fs.promises.rm(chunkDir, { recursive: true, force: true });

    const extName = getExtname(fileName);
    const type = getFileType(extName);
    const name = fileRename(fileName);
    const path = '/chunk/' + fileName;

    const fileData = {
      name,
      fileName,
      extName,
      path,
      type,
      size: fileSize,
    };
    const record = new this.mongodbService.StorageModel(fileData);
    await record.save();

    return {
      fileSize,
      fileName,
      filePath: path,
    };
  }

  /**
   * Report the progress of a chunked upload.
   *
   * Result states:
   * - 'uploaded'  — the merged file already exists (instant upload);
   * - 'uploading' — some chunks exist; `chunkSignsArr[i]` is 1 when chunk i
   *   has been received (resumable upload);
   * - 'empty'     — nothing has been uploaded yet.
   */
  async checkChunks(req: {
    query: { fileHash: string; fileName: string; chunkTotal: string };
  }) {
    const { fileHash, fileName, chunkTotal } = req.query;

    // Explicit type: an untyped `[]` literal would infer `never[]` in
    // strict mode and reject the later number[] assignment.
    const result: { uploadStatus: string; chunkSignsArr: number[] } = {
      uploadStatus: 'empty',
      chunkSignsArr: [],
    };

    // Merged file already present → nothing left to upload.
    const fileDir = 'public/chunk/' + fileName;
    if (fs.existsSync(fileDir)) {
      result.uploadStatus = 'uploaded';
      return result;
    }

    const chunkDir = 'public/chunk/' + fileHash;
    if (fs.existsSync(chunkDir)) {
      const received = fs.readdirSync(chunkDir);
      const chunkSignsArr = new Array<number>(+chunkTotal).fill(0);
      if (received.length > 0) {
        // Chunk files end in `-<index>`; mark each received index.
        for (const file of received) {
          const parts = file.split('-');
          chunkSignsArr[+parts[parts.length - 1]] = 1;
        }
        result.uploadStatus = 'uploading';
      }
      result.chunkSignsArr = chunkSignsArr;
    }

    return result;
  }

  /**
   * Stream a byte range of a stored file to the client (chunked download).
   *
   * `filePath`, `start`, `end` come from the query string; the resolved
   * path is confined to the `public/` directory to block path traversal.
   */
  async downChunks(req, res) {
    const { filePath, start, end } = req.query;

    // Untrusted input: reject any path that escapes ./public (e.g. '../').
    const publicRoot = path.resolve('public');
    const allFilePath = path.resolve('public' + filePath);
    if (!allFilePath.startsWith(publicRoot + path.sep)) {
      throw new BusinessException(ErrorEnum.INVALID_FILE);
    }

    // Ensure the file exists and is readable.
    try {
      await fs.promises.access(allFilePath, fs.constants.R_OK);
    } catch {
      throw new BusinessException(ErrorEnum.INVALID_FILE);
    }

    // Clamp the requested range to the actual file size.
    const stats = await fs.promises.stat(allFilePath);
    const fileSize = stats.size;
    const startNum = Math.max(0, Number(start) || 0);
    const endNum = Math.min(Number(end) || fileSize - 1, fileSize - 1);

    // Partial-content response headers for the requested byte range.
    res.setHeader('Accept-Ranges', 'bytes');
    res.setHeader('Content-Type', 'application/octet-stream');
    res.setHeader('Content-Length', endNum - startNum + 1);
    res.setHeader('Content-Range', `bytes ${startNum}-${endNum}/${fileSize}`);

    const fileStream = fs.createReadStream(allFilePath, {
      start: startNum,
      end: endNum,
    });
    fileStream.pipe(res);

    fileStream.on('error', () => {
      // Throwing inside a stream callback bypasses Nest's exception layer;
      // terminate the response instead of throwing.
      if (!res.headersSent) res.statusCode = 500;
      res.end();
    });
  }
}
