import { Controller } from 'egg';
import { mkdirsSync, del } from '../public/common';
import { streamMerge } from 'split-chunk-merge';

import path = require('path');
import fs = require('fs');

// Root directory for uploads: merged files live directly here, and each
// in-progress upload gets a "<hash>-<chunkSize>/" chunk folder under it.
// Resolved relative to this compiled file's directory.
const uploadPath = path.join(__dirname, '../../uploads');

export default class HomeController extends Controller {

  public async index() {
    const { ctx } = this;
    ctx.body = await ctx.service.test.sayHi('egg');
  }

  /**
   * Pre-upload check. Tells the client whether the file is already on the
   * server, fully chunked but unmerged, partially uploaded (resume), or
   * entirely new.
   *
   * Body: { total, chunkSize, hash, name } — form fields, so values arrive
   * as strings.
   * Response data.type: 0 = file exists, 1 = all chunks present (merge only),
   * 2 = resume (data.index lists chunk indices already received), 3 = new.
   */
  public async hashCheck() {
    const { ctx } = this;
    const { total, chunkSize, hash, name } = ctx.request.body;
    // Chunk folder for this upload: <uploadPath>/<hash>-<chunkSize>/
    const chunksPath = path.join(uploadPath, hash + '-' + chunkSize, '/');
    const filePath = path.join(uploadPath, name);

    if (fs.existsSync(filePath)) {
      // The merged file already exists — no upload needed.
      ctx.status = 200;
      ctx.body = {
        success: true,
        msg: '检查成功，文件在服务器上已存在，不需要重复上传',
        data: {
          type: 0, // type=0: file already uploaded
        },
      };
      return;
    }

    if (!fs.existsSync(chunksPath)) {
      // Neither the file nor any chunks exist — never uploaded.
      ctx.status = 200;
      ctx.body = {
        success: true,
        msg: '检查成功，为从未上传',
        data: {
          type: 3, // type=3: never uploaded
        },
      };
      return;
    }

    // Chunk folder exists: a previous upload did not finish.
    const chunks = fs.readdirSync(chunksPath);

    if (chunks.length === Number(total)) {
      // All chunks are present but were never merged.
      ctx.status = 200;
      ctx.body = {
        success: true,
        msg: '切片上传完毕，没有合并',
        data: {
          type: 1, // type=1: all chunks uploaded, merge pending
        },
      };
      return;
    }

    // Partial upload: report which chunk indices are already on disk so the
    // client can resume. Chunk files are named "<hash>-<index>"; take the
    // last '-' segment so a hash containing '-' cannot break parsing.
    const index: string[] = chunks.map(item => {
      const parts = item.split('-');
      return parts[parts.length - 1];
    });
    ctx.status = 200;
    ctx.body = {
      success: true,
      msg: '检查成功，需要断点续传',
      data: {
        type: 2, // type=2: resume from existing chunks
        index,
      },
    };
  }

  /**
   * Save one uploaded chunk as <uploadPath>/<hash>-<chunkSize>/<hash>-<index>.
   *
   * Body: { index, chunkSize, hash }; the chunk payload is the first
   * multipart file (ctx.request.files[0], egg multipart "file" mode).
   */
  public async chunksUpload() {
    const { ctx } = this;
    const { index, chunkSize, hash } = ctx.request.body;
    const file = ctx.request.files[0];

    const chunksPath = path.join(uploadPath, hash + '-' + chunkSize, '/');
    if (!fs.existsSync(chunksPath)) mkdirsSync(chunksPath);

    try {
      // BUG FIX: the original replied "success" while the pipe was still in
      // flight (and had no 'error' handlers, so a stream error crashed the
      // process). Wait for the write to finish before responding, otherwise
      // a merge request issued right after the last chunk's response can
      // race the unfinished write.
      await new Promise<void>((resolve, reject) => {
        const readStream = fs.createReadStream(file.filepath);
        const writeStream = fs.createWriteStream(chunksPath + hash + '-' + index);
        readStream.on('error', reject);
        writeStream.on('error', reject);
        writeStream.on('finish', resolve);
        readStream.pipe(writeStream);
      });
      ctx.status = 200;
      ctx.body = {
        success: true,
        msg: '上传成功',
        data: 200,
      };
    } catch {
      ctx.status = 200;
      ctx.body = {
        success: false,
        msg: '上传失败，请重试',
        data: '',
      };
    } finally {
      // Always remove the framework's temp file, even on failure.
      if (fs.existsSync(file.filepath)) fs.unlinkSync(file.filepath);
    }
  }

  /**
   * Merge all chunks of an upload into the final file, then delete the
   * chunk folder.
   *
   * Body: { chunkSize, name, total, hash } — form fields, so values arrive
   * as strings.
   */
  public async chunksMerge() {
    const { ctx } = this;
    const { chunkSize, name, total, hash } = ctx.request.body;
    // Locate the chunk folder by hash + chunk size.
    const chunksPath = path.join(uploadPath, hash + '-' + chunkSize, '/');
    const filePath = path.join(uploadPath, name);

    // Read all chunk file names and sort them by numeric chunk index — the
    // last '-' segment of "<hash>-<index>", matching how hashCheck parses it.
    // (The original sorted on split('-')[1], which breaks if the hash itself
    // contains '-'.)
    const chunkIndex = (nameStr: string): number => {
      const parts = nameStr.split('-');
      return Number(parts[parts.length - 1]);
    };
    const chunks = fs.readdirSync(chunksPath).sort((a, b) => chunkIndex(a) - chunkIndex(b));

    // BUG FIX: `total` is a string from the form body, so the original
    // strict comparison `chunks.length !== total` was always true; and the
    // original did not `return` after the failure response, so it attempted
    // the merge anyway.
    if (chunks.length === 0 || chunks.length !== Number(total)) {
      ctx.status = 200;
      ctx.body = {
        success: false,
        msg: '切片文件数量与请求不符合，无法合并',
        data: '',
      };
      return;
    }

    const chunksPathList = chunks.map(item => path.join(chunksPath, item));

    try {
      await streamMerge(chunksPathList, filePath, Number(chunkSize));
      // Recursively delete the now-redundant chunk folder.
      del(chunksPath);
      ctx.status = 200;
      ctx.body = {
        success: true,
        msg: '合并成功',
        data: '',
      };
    } catch {
      ctx.status = 200;
      ctx.body = {
        success: false,
        msg: '合并失败，请重试',
        data: '',
      };
    }
  }
}

