// 1、引入express
const express = require('express')
// 2、创建app对象(项目对象)
const app = express()
const path = require('path')
const fse = require("fs-extra");
const bodyParser = require("body-parser");

// 解析以 application/json 和 application/x-www-form-urlencoded 提交的数据
var jsonParser = bodyParser.json();
var urlencodedParser = bodyParser.urlencoded({ extended: false });

let multiparty = require('multiparty');
const cors = require('cors');
app.use(cors());

// 3、处理请求
// Test route: responds with a JSON array of 100000 objects { id: 0..99999 }.
app.get('/', (req, res) => {
  const data = Array.from({ length: 100000 }, (_, i) => ({ id: i }))
  res.end(JSON.stringify(data))
})


// 文件切片上传
// Chunked file upload.
const UPLOAD_DIR = path.resolve(__dirname, ".", `upload`); // chunk storage directory
app.post('/upload', (req, res, next) => {
  const multipart = new multiparty.Form();
  multipart.parse(req, async (err, fields, files) => {
    if (err) {
      // FIX: previously the handler returned without responding, leaving the
      // client request hanging forever on a parse error.
      console.log('error', err)
      res.statusCode = 500;
      res.end(JSON.stringify({ code: 1, message: "切片上传失败" }));
      return;
    }
    try {
      const [file] = files.file;
      // SECURITY FIX: fileName/chunkName are untrusted form fields; strip any
      // directory components so "../../etc" cannot escape UPLOAD_DIR.
      const fileName = path.basename(fields.fileName[0]);
      const chunkName = path.basename(fields.chunkName[0]);
      // Folder that collects this file's chunks, e.g. 张远-嘉宾.flac-chunks
      const chunkDir = path.resolve(UPLOAD_DIR, `${fileName}-chunks`);
      // Create the chunk directory if it does not exist yet.
      if (!fse.existsSync(chunkDir)) {
        await fse.mkdirs(chunkDir);
      }
      // Move the uploaded temp file into the chunk folder.
      // FIX: overwrite on re-upload of the same chunk (retry) instead of
      // throwing EEXIST.
      await fse.move(file.path, `${chunkDir}/${chunkName}`, { overwrite: true });
      res.end(
        JSON.stringify({
          code: 0,
          message: "切片上传成功"
        }));
    } catch (e) {
      // FIX: without this, a failed mkdirs/move produced an unhandled
      // rejection and a hung request.
      console.log('error', e);
      res.statusCode = 500;
      res.end(JSON.stringify({ code: 1, message: "切片上传失败" }));
    }
  });
})

// 上传接收合并请求
// Merge request: stitches the uploaded chunks back into a single file.
app.post('/merge', jsonParser, async (req, res) => {

  // Pipe one chunk into the target write stream; delete the chunk when done.
  // FIX: also reject on stream errors so Promise.all doesn't hang forever.
  const pipeStream = (chunkPath, writeStream) => {
    console.log('path', chunkPath)
    return new Promise((resolve, reject) => {
      const readStream = fse.createReadStream(chunkPath);
      readStream.on("error", reject);
      writeStream.on("error", reject);
      readStream.on("end", () => {
        fse.unlinkSync(chunkPath);
        resolve();
      });
      readStream.pipe(writeStream);
    });
  }

  // Merge all chunks of `fileName` into `filePath`; `size` is the chunk size.
  const mergeFileChunk = async (filePath, fileName, size) => {
    const chunkDir = path.resolve(UPLOAD_DIR, `${fileName}-chunks`);
    // List every chunk in the chunk folder.
    const chunkPaths = await fse.readdir(chunkDir);
    // Sort by chunk index so the merge order is correct — readdir order is
    // not guaranteed.
    // FIX: chunk names are `${fileName}-${index}`, and fileName itself may
    // contain dashes (e.g. "张远-嘉宾.flac-3"), so split("-")[1] gave NaN.
    // Compare the LAST dash-delimited segment instead.
    const chunkIndex = (name) => Number(name.slice(name.lastIndexOf("-") + 1));
    chunkPaths.sort((a, b) => chunkIndex(a) - chunkIndex(b));
    // Write every chunk at its byte offset in parallel.
    // FIX: dropped the bogus `end` option — fs.createWriteStream only
    // supports `start` (the `end` option belongs to read streams).
    const writes = chunkPaths.map((chunkPath, index) =>
      pipeStream(
        path.resolve(chunkDir, chunkPath),
        fse.createWriteStream(filePath, {
          start: index * size
        })
      )
    );
    await Promise.all(writes);
    // Remove the (now empty) chunk directory after merging.
    await fse.remove(chunkDir);
  };

  try {
    const { fileName, size } = req.body;
    console.log(fileName, size);
    // SECURITY FIX: fileName is untrusted request input; strip directory
    // components so it cannot escape UPLOAD_DIR.
    const safeName = path.basename(fileName);
    const filePath = path.resolve(UPLOAD_DIR, safeName);
    await mergeFileChunk(filePath, safeName, size);
    res.end(
      JSON.stringify({
        code: 0,
        message: "文件合并成功"
      })
    );
  } catch (e) {
    // FIX: a missing chunk dir (or any stream error) used to crash with an
    // unhandled rejection and leave the request hanging.
    console.log('error', e);
    res.statusCode = 500;
    res.end(JSON.stringify({ code: 1, message: "文件合并失败" }));
  }
})

// Verify request: tells the client whether the file (or some of its chunks)
// already exists, enabling instant upload / resume.
app.post('/verify', jsonParser, async (req, res) => {
  // Return the list of already-uploaded chunk names (empty if none).
  const createUploadedList = async (dirName) => {
    const dir = path.resolve(UPLOAD_DIR, dirName);
    return fse.existsSync(dir) ? fse.readdir(dir) : [];
  };

  const { fileName } = req.body;
  // SECURITY FIX: fileName is untrusted request input; strip directory
  // components so it cannot escape UPLOAD_DIR.
  const safeName = path.basename(fileName);
  const filePath = path.resolve(UPLOAD_DIR, safeName);
  if (fse.existsSync(filePath)) {
    // The fully merged file already exists — nothing to upload.
    res.end(
      JSON.stringify({
        shouldUpload: false
      })
    );
  } else {
    // FIX: the list was previously built twice (once just to console.log it),
    // doing a redundant readdir even in the branch that never used it.
    const uploadedList = await createUploadedList(`${safeName}-chunks`);
    console.log(uploadedList);
    res.end(
      JSON.stringify({
        shouldUpload: true,
        uploadedList
      })
    );
  }
})

// 4、监听是否有请求
// 4. Start the HTTP server on port 3001.
const PORT = 3001;
app.listen(PORT, () => {
  console.log(`监听3001端口，服务启动！`);
});
