import { Injectable } from '@nestjs/common';
import crypto from 'crypto';
import { NextFunction, Request, Response } from 'express';
import fs from 'fs-extra';
import path from 'path';
import { CHUNK_SIZE } from './const';

// Directory the finished (merged) files are written to and served from.
const PUBLIC_DIR = path.resolve(__dirname, '../', 'public');
// Staging area for uploaded chunks: one sub-directory per file, removed after merge.
const TEMP_DIR = path.resolve(__dirname, '../', 'temp');

/**
 * Merges all uploaded chunks of `filename` from its temp directory into a
 * single file under PUBLIC_DIR, deletes the chunk directory, then verifies
 * the merged file's integrity via checkFileOk.
 *
 * Rejects (throws) on any failure. The original caught errors and RETURNED
 * them — and because checkFileOk could reject with `undefined`, a failed
 * integrity check was silently reported as success by the caller.
 *
 * @param filename name of the target file; also the chunk directory name.
 */
async function mergeChunks(filename: string): Promise<void> {
  const mergedFilePath = path.resolve(PUBLIC_DIR, filename);
  const chunkDir = path.resolve(TEMP_DIR, filename);
  const chunkFiles = await fs.readdir(chunkDir);

  // Sort chunks ascending by the numeric index after the '-' in the name
  // (assumes chunk names look like "<something>-<index>" — TODO confirm
  // against the uploading client).
  chunkFiles.sort((a, b) => Number(a.split('-')[1]) - Number(b.split('-')[1]));

  // Write all chunks concurrently; each stream writes at its own fixed
  // offset (index * CHUNK_SIZE), so the writes cannot overlap.
  await Promise.all(
    chunkFiles.map((chunkFile, index) =>
      pipeStream(
        fs.createReadStream(path.resolve(chunkDir, chunkFile), {
          autoClose: true,
        }),
        fs.createWriteStream(mergedFilePath, { start: index * CHUNK_SIZE }),
      ),
    ),
  );

  // Remove the now-merged chunks. fs.rmdir with { recursive } is
  // deprecated; fs.rm is the supported replacement.
  await fs.rm(chunkDir, { recursive: true, force: true });

  await checkFileOk(mergedFilePath, filename);
}

/**
 * Verifies the merged file's integrity: streams the file through SHA-256
 * and compares the digest with the hash embedded in the file name (the
 * part before the first '.' — presumably set by the uploader to the
 * content hash; TODO confirm against the client).
 *
 * Resolves when the digest matches; rejects with an Error on mismatch or
 * read failure.
 *
 * BUG FIX: the original had the branches inverted — it resolved with the
 * string '文件不完整' ("file incomplete") on a MATCH and rejected with
 * `undefined` on a mismatch.
 */
function checkFileOk(mergedFilePath: string, filename: string): Promise<void> {
  return new Promise((resolve, reject) => {
    const hash = crypto.createHash('sha256');
    const stream = fs.createReadStream(mergedFilePath);

    stream.on('data', (data) => {
      hash.update(data);
    });
    // Without this, a read error would leave the promise pending forever.
    stream.on('error', reject);
    stream.on('end', () => {
      const hashValue = hash.digest('hex');
      const fileHash = filename.split('.')[0];
      if (fileHash === hashValue) {
        resolve();
      } else {
        reject(new Error('文件不完整'));
      }
    });
  });
}

/**
 * Pipes a readable stream into a writable stream; resolves once the
 * writable side emits 'finish', rejects on error from either side.
 *
 * BUG FIX: `pipe()` does NOT forward 'error' events from the source, so
 * the original only observed destination errors — a read failure left the
 * promise pending forever. The source now gets its own error listener.
 * (The redundant `async` was dropped; the function still returns a Promise.)
 */
function pipeStream(req: NodeJS.ReadableStream, ws: NodeJS.WritableStream): Promise<void> {
  return new Promise((resolve, reject) => {
    req.on('error', reject);
    req.pipe(ws).on('finish', resolve).on('error', reject);
  });
}

@Injectable()
export class AppService {
  /**
   * Receives one chunk of a file upload and streams it into the per-file
   * temp directory. The `start` query parameter is the byte offset to
   * resume writing from (resumable uploads); defaults to 0 when absent
   * or not numeric.
   */
  async handleUpload(req: Request, res: Response, next: NextFunction) {
    const { filename } = req.params;
    const { chunkFileName, start } = req.query;

    // filename / chunkFileName are client-controlled and used to build
    // filesystem paths — reject anything that could escape TEMP_DIR.
    if (
      typeof chunkFileName !== 'string' ||
      !this.isSafeName(filename) ||
      !this.isSafeName(chunkFileName)
    ) {
      next(new Error('invalid file or chunk name'));
      return;
    }

    // Byte offset to resume from, for 断点续传 (resumable upload).
    const offset = isNaN(+start) ? 0 : parseInt(start as string, 10);

    // Per-file staging directory for this upload's chunks.
    const chunkDir = path.resolve(TEMP_DIR, filename);
    // The chunk currently being uploaded.
    const chunkFilePath = path.resolve(chunkDir, chunkFileName);

    // Create the staging directory if it does not exist yet.
    await fs.ensureDir(chunkDir);
    const ws = fs.createWriteStream(chunkFilePath, { start: offset, flags: 'a' });
    // Release the file handle if the client disconnects mid-chunk.
    req.on('aborted', () => {
      ws.close();
    });
    try {
      await pipeStream(req, ws);
      res.json({ success: true });
    } catch (error) {
      next(error);
    }
  }

  /**
   * Merges all previously uploaded chunks of `filename` into one file
   * under the public directory and verifies its integrity.
   */
  async handleMergeChunks(req: Request, res: Response, next: NextFunction) {
    const { filename } = req.params;
    // Same path-traversal guard as handleUpload.
    if (!this.isSafeName(filename)) {
      next(new Error('invalid file name'));
      return;
    }
    try {
      // mergeChunks may report failure either by rejecting or by
      // resolving with an error value, so check the result as well.
      const error = await mergeChunks(filename);
      if (error) {
        next(error);
      } else {
        res.json({ success: true });
      }
    } catch (error) {
      // Consistent with handleUpload: delegate to the error middleware
      // (the original answered with res.json here, inconsistently).
      next(error);
    }
  }

  // NOTE(review): unimplemented placeholder — presumably meant to report
  // which chunks already exist so the client can resume; confirm intent.
  verify() {}

  // A name is safe when it contains no path separators and no
  // parent-directory segments.
  private isSafeName(name: string): boolean {
    return (
      !!name &&
      !name.includes('/') &&
      !name.includes('\\') &&
      !name.includes('..')
    );
  }
}
