const path = require('path')
const fs = require('fs')
const archiver = require('archiver')
const crypto = require('crypto')
const { Op, literal, Transaction } = require('sequelize')
const Mysequelize = require('@/models')
const R = require('@/models/Response')
const {
  Source,
  BinSource,
  User,
  Share,
  ShareTo,
  Department,
  SourceAuth
} = require('@/models/model')
const WpFile = require('@/models/fileModel')
const UserOption = require('@/models/userOptionModel')
const { ERROR_SOURCE_CODE } = require('@/constants/errorCode')
const { SEPARATE_CHAR } = require('@/constants/index')
const { fileTypeList } = require('@/constants/fileTypes')
const { generatId } = require('@/utils/idUtil')
const { dumpCheck } = require('@/utils/dumpCheck')
const { formatFilename } = require('@/utils/format')
const { getFileTypeBySuffix } = require('@/utils/fileType')
const { LOG_TYPE } = require('@/constants/log')
const { getUploadPath } = require('@/utils/upload')
const { logRecord } = require('@/utils/logRecord')
const { SystemLog, StorageOrigin } = require('@/framework/mongoose')
const parser = require('ua-parser-js')
const { MD5, genSalt } = require('@/utils/md5')
const {
  getUserSourcesAuths,
  getUserSourceRole
} = require('@/middlewares/authMiddlewares/verifySourceAuth')

// Source.belongsTo(Source, { foreignKey: 'parentId', as: 'parent' })

// Computed attributes appended to every Source query:
//   hasFile   - count of non-deleted file children directly under the row
//   hasFolder - count of non-deleted directory children directly under the row
//   isLeaf    - 1 when the row has no non-deleted children at all
const computedSourceColumns = [
  [
    '(SELECT COUNT(*) FROM source AS sc WHERE sc.parent_id = source.id and sc.is_dir = 0 and sc.delete_flag = 0)',
    'hasFile'
  ],
  [
    '(SELECT COUNT(*) FROM source AS sc WHERE sc.parent_id = source.id and sc.is_dir = 1 and sc.delete_flag = 0)',
    'hasFolder'
  ],
  [
    `CASE WHEN EXISTS (SELECT 1 FROM source as sc WHERE sc.parent_id = source.id and sc.delete_flag=0) THEN 0 ELSE 1 END`,
    'isLeaf'
  ]
]

const commonSourceAttr = {
  include: computedSourceColumns.map(([sql, alias]) => [
    Mysequelize.literal(sql),
    alias
  ])
}
// Associations eagerly loaded with every Source row: owner profile, backing
// file record, shares (with their share-to targets) and per-source auth rows.
const commonSourceIncludes = [
  { model: User, as: 'user', attributes: ['id', 'username', 'avator'] },
  { model: WpFile, as: 'file', attributes: ['id', 'filename', 'size'] },
  { model: Share, include: [{ model: ShareTo }] },
  {
    model: SourceAuth,
    include: [{ model: User }, { model: Department }]
  }
]

module.exports.sourceAttr = commonSourceAttr
module.exports.sourceInclude = commonSourceIncludes

/**
 * List the entries directly under `parentId`, paged, and attach breadcrumb
 * path info to the parent and to every returned row.
 * Query params are validated upstream and arrive via ctx.newQuery.
 * Responds with { page, pageSize, items, total, curSource, pages }.
 * @param {*} ctx koa context
 */
module.exports.listSource = async (ctx) => {
  const { page, pageSize, filename, isDir, deleteFlag, parentId } = ctx.newQuery
  const start = (page - 1) * pageSize

  const whereCondition = {
    parentId: BigInt(parentId),
    deleteFlag: 0
  }

  if (filename !== undefined) {
    // Fuzzy match on filename. (Was `%$(unknown)%`, a broken interpolation
    // that matched the literal text instead of the query parameter.)
    whereCondition.filename = {
      [Op.like]: `%${filename}%`
    }
  }

  if (isDir !== undefined) {
    whereCondition.isDir = isDir
  }

  if (deleteFlag !== undefined) {
    // An explicit deleteFlag overrides the default of 0 set above.
    whereCondition.deleteFlag = deleteFlag
  }

  // Load the parent itself so the client can render the current folder.
  let curSource = await Source.findByPk(BigInt(parentId), {
    attributes: commonSourceAttr,
    include: commonSourceIncludes
  })

  if (!curSource) {
    // Unknown parent: answer with an empty page rather than an error.
    ctx.body = R.success({
      page,
      pageSize,
      items: [],
      total: 0,
      curSource,
      pages: 0
    })
    return
  }

  // Ancestor id chain; parentLevel is a SEPARATE_CHAR-joined list and '-1'
  // marks the virtual root.
  const parentNames = curSource.parentLevel
    .split(SEPARATE_CHAR)
    .filter((item) => item !== '-1')
    .concat([curSource.id])
    .map((item) => BigInt(item))

  // Resolve ancestor filenames, preserving chain order.
  const ppSources = await Source.findAll({
    where: {
      id: {
        [Op.in]: parentNames
      }
    },
    attributes: ['id', 'filename'], // was misspelled `attribute` and silently ignored
    order: [
      // NOTE(review): FIELD() needs a comma-separated list — assumes SEPARATE_CHAR is ','.
      Mysequelize.literal(`FIELD(id, ${parentNames.join(SEPARATE_CHAR)})`)
    ]
  })

  const curPathInfo = ppSources.map((item) => {
    return {
      id: item.id,
      filename: item.filename
    }
  })

  curSource = curSource.toJSON()
  curSource.pathInfo = curPathInfo

  let { rows, count } = await Source.findAndCountAll({
    where: whereCondition,
    attributes: commonSourceAttr,
    include: commonSourceIncludes,
    order: [['updateTime', 'DESC']],
    distinct: true, // hasMany includes (Share/SourceAuth) would otherwise inflate `count`
    limit: +pageSize,
    offset: +start
  })

  // Attach breadcrumb info to every row.
  rows = rows.map((item) => {
    const res = item.toJSON()
    return {
      ...res,
      pathInfo: curPathInfo.concat([
        {
          id: item.id,
          filename: item.filename
        }
      ])
    }
  })

  ctx.body = R.success({
    page,
    pageSize,
    items: rows,
    total: count,
    curSource,
    pages: Math.ceil(count / pageSize)
  })
}

/**
 * Fetch a single source by id, with its breadcrumb path and (for folders)
 * the total size of every non-deleted file in its subtree.
 * Responds with the source object or SOURCE_ID_ERROR when not found.
 * @param {*} ctx koa context; ctx.newQuery.sourceId is validated upstream
 */
module.exports.listSourceById = async (ctx) => {
  const { sourceId } = ctx.newQuery

  // Unlike the shared includes, ShareTo is expanded with its User/Department
  // here so the detail view can show who the item was shared to.
  const myCommonSourceIncludes = [
    {
      model: User,
      as: 'user',
      attributes: ['id', 'username', 'avator']
    },
    {
      model: WpFile,
      as: 'file',
      attributes: ['id', 'filename', 'size']
    },
    {
      model: Share,
      include: [
        {
          model: ShareTo,
          include: [{ model: User }, { model: Department }]
        }
      ]
    }
  ]

  let curSource = await Source.findByPk(BigInt(sourceId), {
    attributes: commonSourceAttr,
    include: myCommonSourceIncludes
  })

  if (!curSource) {
    ctx.body = R.error(ERROR_SOURCE_CODE.SOURCE_ID_ERROR)
    return
  }

  // Ancestor id chain ('-1' marks the virtual root).
  const parentNames = curSource.parentLevel
    .split(SEPARATE_CHAR)
    .filter((item) => item !== '-1')
    .concat([curSource.id])
    .map((item) => BigInt(item))

  // Resolve ancestor filenames in chain order.
  const ppSources = await Source.findAll({
    where: {
      id: {
        [Op.in]: parentNames
      }
    },
    attributes: ['id', 'filename'], // was misspelled `attribute` and silently ignored
    order: [
      // NOTE(review): FIELD() needs a comma-separated list — assumes SEPARATE_CHAR is ','.
      Mysequelize.literal(`FIELD(id, ${parentNames.join(SEPARATE_CHAR)})`)
    ]
  })

  const curPathInfo = ppSources.map((item) => {
    return {
      id: item.id,
      filename: item.filename
    }
  })

  curSource = curSource.toJSON()
  curSource.pathInfo = curPathInfo

  // For folders, sum the sizes of every non-deleted file in the subtree.
  if (curSource.isDir) {
    const subtreePrefix = curSource.parentLevel + SEPARATE_CHAR + curSource.id
    const childs = await Source.findAll({
      where: {
        isDir: 0,
        // Exact match = direct children; prefix + SEPARATE_CHAR = deeper
        // descendants. A bare `prefix%` LIKE (the previous form) would also
        // match sibling subtrees whose id shares a numeric prefix (12 vs 123).
        parentLevel: {
          [Op.or]: [
            subtreePrefix,
            { [Op.like]: subtreePrefix + SEPARATE_CHAR + '%' }
          ]
        },
        deleteFlag: 0
      },
      include: [
        {
          model: WpFile,
          as: 'file',
          attributes: ['id', 'filename', 'size']
        }
      ]
    })

    // BigInt accumulation: file sizes may exceed Number.MAX_SAFE_INTEGER.
    const size = childs.reduce((pre, next) => {
      if (next.file) {
        pre += BigInt(next.file.size)
      }
      return pre
    }, 0n)
    curSource.size = size.toString()
  }

  ctx.body = R.success(curSource)
}

/**
 * Page through the mongo SystemLog entries whose `sourceIds` mention the
 * given source id, enriching each entry with a parsed user-agent and the
 * acting user's profile.
 * Responds with { page, pageSize, total, pages, items }.
 * @param {*} ctx koa context
 */
module.exports.listSourceLogById = async (ctx) => {
  const { sourceId, page, pageSize } = ctx.newQuery

  // NOTE(review): this regex matches `sourceId` as a bare substring of the
  // stored `sourceIds` field, so id "12" would also match "123" — confirm ids
  // have a fixed length or add separator anchors.
  const whereCondition = {
    sourceIds: new RegExp(sourceId),
    type: /^source/ // only source-related log types
  }

  const total = await SystemLog.countDocuments(whereCondition)
  const pages = Math.ceil(total / pageSize)
  const rawLogs = await SystemLog.find(whereCondition)
    .skip((page - 1) * pageSize)
    .limit(pageSize)
    .sort({ createTime: -1 })
    .lean()

  // Load the profile of every user appearing in this page of logs.
  const userIds = rawLogs.map((log) => log.userId).filter((v) => !!v)
  const users = await User.findAll({
    attributes: ['id', 'username', 'avator'],
    where: {
      id: {
        [Op.in]: userIds
      }
    }
  })

  const items = rawLogs.map((log) => ({
    ...log,
    uaParse: parser(log.ua),
    id: log._id,
    user: users.find((u) => u.id === log.userId) || undefined
  }))

  ctx.body = R.success({
    page,
    pageSize,
    total,
    pages,
    items
  })
}

/**
 * List every file of a given logical type (document/image/video/…) in the
 * current user's personal space, paged, with breadcrumb path info per row.
 * Responds with { page, pageSize, items, total, pages }.
 * @param {*} ctx koa context; ctx.newQuery is validated upstream
 */
module.exports.listFileTypeSource = async (ctx) => {
  const { page, pageSize, fileType, filename } = ctx.newQuery
  const start = (page - 1) * pageSize

  // Map the logical fileType name onto its concrete type codes.
  // NOTE(review): assumes fileType was validated upstream — an unknown name
  // would leave `type` undefined and throw on `type.type` below.
  const type = fileTypeList.find((item) => item.name === fileType)

  const whereCondition = {
    type: {
      [Op.in]: type.type
    },
    deleteFlag: 0,
    isDir: 0,
    targetType: 1, // personal space only
    targetId: BigInt(ctx.user.id)
  }

  if (filename !== undefined) {
    // Fuzzy filename match. (Was `%$(unknown)%`, a broken interpolation that
    // compared against the literal text instead of the query parameter.)
    whereCondition.filename = {
      [Op.like]: `%${filename}%`
    }
  }

  let { rows, count } = await Source.findAndCountAll({
    where: whereCondition,
    attributes: commonSourceAttr,
    include: commonSourceIncludes,
    order: [['updateTime', 'DESC']],
    distinct: true,
    limit: +pageSize,
    offset: +start
  })

  // Attach breadcrumb info to each row.
  // NOTE(review): one ancestor query per row (N+1) — acceptable for a page,
  // could be batched if pages grow.
  let finaRows = []
  for (let i = 0; i < rows.length; i++) {
    let res = rows[i].toJSON()
    const psources = await Source.findAll({
      where: {
        id: {
          [Op.in]: rows[i].parentLevel.split(SEPARATE_CHAR)
        }
      },
      attributes: ['id', 'filename'], // was misspelled `attribute` and silently ignored
      order: [
        // Keep ancestors in chain order (assumes SEPARATE_CHAR is ',').
        Mysequelize.literal(`FIELD(id, ${rows[i].parentLevel})`)
      ]
    })
    res.pathInfo = psources
      .map((v) => ({
        id: v.id,
        filename: v.filename
      }))
      .concat({
        id: rows[i].id,
        filename: rows[i].filename
      })
    finaRows.push(res)
  }

  ctx.body = R.success({
    page,
    pageSize,
    items: finaRows,
    total: count,
    pages: Math.ceil(count / pageSize)
  })
}

/**
 * List the current user's recycle-bin entries (only items they deleted),
 * paged, each flattened to the underlying source plus breadcrumb path info.
 * Responds with { page, pageSize, items, total, pages }.
 * @param {*} ctx koa context
 */
module.exports.listBinSource = async (ctx) => {
  const { page, pageSize, filename } = ctx.newQuery
  const start = (page - 1) * pageSize

  // Optional filename filter on the joined source row; an `undefined` where
  // clause disables the filter entirely.
  let sourceWhere = {}
  if (filename) {
    // (Was `%$(unknown)%`, a broken interpolation that matched the literal
    // text instead of the query parameter.)
    sourceWhere.filename = {
      [Op.like]: `%${filename}%`
    }
  } else {
    sourceWhere = undefined
  }

  const commonIncludes = [
    {
      model: User,
      as: 'user',
      attributes: ['id', 'username', 'avator']
    },
    {
      model: Source,
      as: 'source',
      attributes: commonSourceAttr,
      include: commonSourceIncludes,
      where: sourceWhere,
      required: true // inner join: drop bin rows whose source is filtered out
    }
  ]

  const whereCondition = {
    createUser: BigInt(ctx.user.id)
  }

  let { rows, count } = await BinSource.findAndCountAll({
    where: whereCondition,
    attributes: ['id', 'createUser', 'parentLevel', 'updateTime'],
    include: commonIncludes,
    order: [['updateTime', 'DESC']],
    distinct: true,
    limit: +pageSize,
    offset: +start
  })

  // Flatten each bin row onto its source and attach breadcrumb info.
  let finaRows = []
  for (let i = 0; i < rows.length; i++) {
    let res = rows[i].toJSON()

    const psources = await Source.findAll({
      where: {
        id: {
          [Op.in]: rows[i].parentLevel.split(SEPARATE_CHAR)
        }
      },
      attributes: ['id', 'filename'], // was misspelled `attribute` and silently ignored
      order: [
        // Keep ancestors in chain order (assumes SEPARATE_CHAR is ',').
        Mysequelize.literal(`FIELD(id, ${rows[i].parentLevel})`)
      ]
    })
    res = {
      ...res.source,
      binSourceId: res.id,
      createUser: res.user
    }
    res.pathInfo = psources
      .map((v) => ({
        id: v.id,
        filename: v.filename
      }))
      .concat({
        id: rows[i].source.id,
        filename: rows[i].source.filename
      })
    finaRows.push(res)
  }

  ctx.body = R.success({
    page,
    pageSize,
    items: finaRows,
    total: count,
    pages: Math.ceil(count / pageSize)
  })
}

/**
 * Create a new folder under `parentId`. The folder inherits targetType and
 * targetId from its parent and extends the parent's parentLevel id chain.
 * Responds with success, or CREATE_DIR_ERROR after rolling back on failure.
 * @param {*} ctx koa context; body: { dirName, parentId } (validated upstream)
 */
module.exports.createDir = async (ctx) => {
  const { dirName, parentId } = ctx.request.body
  const t = await Mysequelize.transaction()
  try {
    const id = generatId()
    const pSource = await Source.findByPk(BigInt(parentId), {
      attributes: ['id', 'filename', 'targetType', 'targetId', 'parentLevel']
    })

    const newDir = {
      id,
      targetType: pSource.targetType,
      targetId: pSource.targetId,
      filename: dirName,
      parentId: parentId,
      isDir: 1,
      fileId: -1, // folders have no backing file record
      type: 0,
      sizeDesc: '--',
      deleteFlag: 0,
      parentLevel: pSource.parentLevel + SEPARATE_CHAR + parentId,
      createUser: ctx.user.id,
      updateUser: ctx.user.id
    }
    await Source.create(newDir, { transaction: t })
    await t.commit()

    // Audit log entry consumed by the logging middleware.
    ctx.logs = [
      {
        type: LOG_TYPE.SOURCE_MAKEIDR,
        curSource: { id, filename: dirName, isDir: 1, parentId },
        pSource: pSource.toJSON(),
        sourceIds: [id, pSource.id].join(SEPARATE_CHAR)
      }
    ]
    ctx.body = R.success()
  } catch (err) {
    await t.rollback()
    ctx.body = R.error(ERROR_SOURCE_CODE.CREATE_DIR_ERROR)
  }
}

/**
 * Create a new, empty file under `parentId`: writes a zero-byte file into the
 * default storage origin, then records a WpFile row and a Source row in one
 * transaction. Responds with success or CREATE_FILE_ERROR after rollback.
 * @param {*} ctx koa context; body: { filename, parentId } (validated upstream)
 */
module.exports.createFile = async (ctx) => {
  const { filename, parentId } = ctx.request.body
  const t = await Mysequelize.transaction()
  // Extension after the last dot; '' when the name has no dot.
  const suffix = filename.slice(((filename.lastIndexOf('.') - 1) >>> 0) + 2)
  try {
    const id = generatId()
    const pSource = await Source.findByPk(BigInt(parentId), {
      attributes: ['id', 'filename', 'targetType', 'targetId', 'parentLevel']
    })

    // The physical file lives under the default storage origin.
    const storageOrigin = await StorageOrigin.findOne({
      isDefault: 1
    })

    // Salted MD5 gives the on-disk name, avoiding collisions between
    // identically-named uploads.
    const salt = genSalt()
    const newFilename = await MD5(filename, salt)

    // mkdirSync with `recursive: true` is a no-op when the directory already
    // exists, so the previous hand-rolled existsSync/_createDir pair is
    // unnecessary.
    fs.mkdirSync(path.join(storageOrigin.path, './upload/file/empty'), {
      recursive: true
    })

    // Create the empty file on disk.
    const filePath = path.join(
      storageOrigin.path,
      './upload/file/empty',
      newFilename + '.' + suffix
    )
    fs.writeFileSync(filePath, '')
    // NOTE(review): the physical file is not removed if the DB writes below
    // fail — consider unlinking it in the catch block.

    // The content is always '' here, so hash it directly instead of streaming
    // the file back from disk (the old stream promise also never rejected on
    // read errors, which could leave the request hanging).
    const res = crypto.createHash('md5').update('').digest('hex')

    const fileNextId = generatId()
    // Physical-file record.
    await WpFile.create(
      {
        id: fileNextId,
        filename,
        size: 0,
        suffix,
        path: '/empty/' + newFilename + '.' + suffix,
        storageOriginId: storageOrigin._id.toString(),
        identifier: res,
        createUser: ctx.user.id,
        updateUser: ctx.user.id
      },
      {
        transaction: t
      }
    )

    // Logical source entry under the parent folder; inherits target info and
    // extends the parent's parentLevel chain.
    await Source.create(
      {
        id,
        targetType: pSource.targetType,
        targetId: pSource.targetId,
        filename: filename,
        parentId: parentId,
        isDir: 0,
        fileId: fileNextId,
        type: getFileTypeBySuffix(filename).type,
        sizeDesc: '0',
        deleteFlag: 0,
        parentLevel: pSource.parentLevel + SEPARATE_CHAR + parentId,
        createUser: ctx.user.id,
        updateUser: ctx.user.id
      },
      {
        transaction: t
      }
    )

    await t.commit()
    // Audit log entry consumed by the logging middleware.
    ctx.logs = [
      {
        type: LOG_TYPE.SOURCE_MAKEIDR,
        curSource: {
          id,
          filename,
          isDir: 0,
          parentId
        },
        pSource: pSource.toJSON(),
        sourceIds: [id, pSource.id].join(SEPARATE_CHAR)
      }
    ]
    ctx.body = R.success()
  } catch (err) {
    console.log(err)
    await t.rollback()
    ctx.body = R.error(ERROR_SOURCE_CODE.CREATE_FILE_ERROR)
  }
}

/**
 * Rename a source. For files (not folders) the stored type is refreshed from
 * the new filename's extension, since renaming can change the extension.
 * Responds with success, or RENAME_ERROR after rolling back on failure.
 * @param {*} ctx koa context; body: { filename, sourceId } (validated upstream)
 */
module.exports.rename = async (ctx) => {
  const { filename, sourceId } = ctx.request.body
  const t = await Mysequelize.transaction()
  try {
    // Lookup runs inside try/catch: previously a failed lookup (or a missing
    // row causing `source.isDir` to throw) happened before the catch and
    // leaked the open transaction.
    const source = await Source.findByPk(BigInt(sourceId))
    const updateObj = {
      filename
    }
    if (!source.isDir) {
      updateObj.type = getFileTypeBySuffix(filename).type
    }
    await Source.update(updateObj, {
      where: {
        id: BigInt(sourceId)
      },
      transaction: t
    })
    await t.commit()
    // Audit log entry consumed by the logging middleware.
    ctx.logs = [
      {
        type: LOG_TYPE.SOURCE_RENAME,
        curSource: {
          id: sourceId,
          filename,
          isDir: source.isDir,
          parentId: source.parentId
        },
        sourceIds: [sourceId].join(SEPARATE_CHAR)
      }
    ]
    ctx.body = R.success()
  } catch {
    await t.rollback()
    ctx.body = R.error(ERROR_SOURCE_CODE.RENAME_ERROR)
  }
}

/**
 * Handle a copy (isCreate=true) or move (isCreate=false) of `source` into
 * `pSource` when the destination may already contain an entry with the same
 * filename. The user's "repeatFile" option decides the conflict strategy:
 *   1 = overwrite, 2 = rename the incoming item (default), 0 = skip.
 * Recurses into itself for folder-vs-folder conflicts.
 * @param {*} source           row being copied/moved
 * @param {*} pSource          destination parent row
 * @param {*} pSourceChildrens existing non-deleted children of the destination
 * @param {*} isCreate         true → copy (insert new rows), false → move (update rows)
 * @param {*} userId           id of the acting user
 * @param {*} t                sequelize transaction the writes run under
 */
module.exports.handleDumpSource = async (
  source,
  pSource,
  pSourceChildrens,
  isCreate,
  userId,
  t
) => {
  // Is there a name collision in the destination?
  const dumpTarget = pSourceChildrens.find(
    (item) => item.filename === source.filename
  )

  // The user's configured duplicate-file strategy.
  const userSetting = await UserOption.findOne({
    where: {
      userId: BigInt(userId),
      key: 'repeatFile'
    }
  })

  // Default to 2 (rename) when the user has no setting.
  const type =
    userSetting && userSetting.value !== undefined ? +userSetting.value : 2

  // res === true → no collision.
  const res = !dumpTarget

  if (res) {
    // No collision: plain copy or move.
    if (isCreate) {
      await levelCreateSource(source, pSource, userId, t)
    } else {
      await levelUpdateSource(source, pSource, userId, t)
    }
  } else {
    if (type === 1) {
      // Overwrite on collision — the most involved case.
      if (source.isDir && dumpTarget.isDir) {
        // Folder vs folder: keep the existing folder and merge by recursing
        // into the children of both sides.
        const children1 = await Source.findAll({
          where: {
            parentId: BigInt(source.id),
            deleteFlag: 0
          }
        })

        const children2 = await Source.findAll({
          where: {
            parentId: BigInt(dumpTarget.id),
            deleteFlag: 0
          }
        })

        for (let i = 0; i < children1.length; i++) {
          await this.handleDumpSource(
            children1[i],
            dumpTarget,
            children2,
            isCreate,
            userId,
            t
          )
        }
      } else {
        // file-vs-file or file-vs-folder: replace the destination entry.
        // Delete the colliding destination row, then insert a copy of
        // `source` in its place.
        await Source.destroy({
          where: {
            id: BigInt(dumpTarget.id)
          },
          transaction: t
        })

        // Delete its descendants as well (prefix match on the id chain).
        await Source.destroy({
          where: {
            parentLevel: {
              [Op.like]:
                dumpTarget.parentLevel + SEPARATE_CHAR + dumpTarget.id + '%'
            }
          },
          transaction: t
        })

        // Insert the incoming subtree.
        await levelCreateSource(source, pSource, userId, t)
      }

      if (!isCreate) {
        // Move semantics: remove the original rows and their descendants.
        await Source.destroy({
          where: {
            id: BigInt(source.id)
          },
          transaction: t
        })

        // NOTE(review): unlike the overwrite branch above, this LIKE pattern
        // has no trailing '%', so it only matches rows whose parentLevel
        // equals the prefix exactly (direct children) — deeper descendants
        // are not deleted. Confirm whether '%' was intended.
        await Source.destroy({
          where: {
            parentLevel: {
              [Op.like]: source.parentLevel + SEPARATE_CHAR + source.id
            }
          },
          transaction: t
        })

        // Drop the recycle-bin record for the moved source.
        // NOTE(review): runs outside transaction `t` — it will not be rolled
        // back if the surrounding operation fails; confirm this is intended.
        await BinSource.destroy({
          where: {
            sourceId: BigInt(source.id)
          }
        })
      }

      // For moves, the old records have now been cleared.
    } else if (type === 2) {
      // Rename on collision: derive a fresh name per the user's
      // "renameFormat" option.
      const one = await UserOption.findOne({
        where: {
          key: 'renameFormat',
          userId: userId
        }
      })
      let newName = ''
      // Resolve the conflict by generating a new filename.
      if (!one) {
        newName = formatFilename(source.filename)
      } else {
        // NOTE(review): `formatFilename(...)(...)` — calling the return value
        // as a function — looks suspicious; confirm formatFilename returns a
        // function when its second argument is true. Also, when one.value is
        // set but not 1, newName stays '' and the source is renamed to an
        // empty string — verify this is intended.
        if (+one.value === 1) {
          newName = await formatFilename(source.filename, true)(
            pSource.id,
            source.filename
          )
        }
      }
      source.filename = newName
      // Create/move under the destination with the new name.
      if (isCreate) {
        await levelCreateSource(source, pSource, userId, t)
      } else {
        await levelUpdateSource(source, pSource, userId, t)
      }
    } else if (type === 0) {
      // Skip on collision: a copy is simply a no-op; a move deletes the
      // original rows.
      if (!isCreate) {
        await Source.destroy({
          where: {
            id: BigInt(source.id)
          },
          transaction: t
        })

        // NOTE(review): same missing trailing '%' as the move branch above —
        // only direct children are matched, not deeper descendants.
        await Source.destroy({
          where: {
            parentLevel: {
              [Op.like]: source.parentLevel + SEPARATE_CHAR + source.id
            }
          },
          transaction: t
        })

        // Drop the recycle-bin record (outside transaction `t`, see above).
        await BinSource.destroy({
          where: {
            sourceId: BigInt(source.id)
          }
        })
      }
    }
  }
}

/**
 * Move a subtree in place: repoint `rootSource` under `pSource` and rewrite
 * the parentLevel id-chain prefix of every descendant.
 * Example — moving B into G: B's chain A becomes A,C,G; a grandchild's chain
 * A,B,D becomes A,C,G,B,D.
 * @param {*} rootSource root of the subtree being moved
 * @param {*} pSource    destination parent
 * @param {*} userId     acting user id (stamped as updateUser)
 * @param {*} t          sequelize transaction the updates run under
 */
const levelUpdateSource = async (rootSource, pSource, userId, t) => {
  // 1. Repoint the root itself: new parent, new chain, inherited target info.
  await Source.update(
    {
      targetType: pSource.targetType,
      targetId: pSource.targetId,
      filename: rootSource.filename,
      parentId: pSource.id,
      parentLevel: pSource.parentLevel + SEPARATE_CHAR + pSource.id,
      updateUser: userId
    },
    {
      where: {
        id: BigInt(rootSource.id)
      },
      transaction: t
    }
  )

  // 2. Collect every descendant by chain prefix.
  // NOTE(review): a bare `prefix%` LIKE can also match sibling subtrees whose
  // id shares a numeric prefix (e.g. 12 vs 123) — confirm id lengths make
  // this impossible. The read is also outside transaction `t`.
  const children = await Source.findAll({
    where: {
      parentLevel: {
        [Op.like]: rootSource.parentLevel + SEPARATE_CHAR + rootSource.id + '%'
      }
    }
  })

  // Old chain prefix to strip, and the new prefix to graft on.
  const beforSplit = rootSource.parentLevel + SEPARATE_CHAR + rootSource.id
  const newPrefix =
    pSource.parentLevel +
    SEPARATE_CHAR +
    pSource.id +
    SEPARATE_CHAR +
    rootSource.id
  for (let i = 0; i < children.length; i++) {
    let item = children[i]

    // e.g. chain "A,B,D" split on "A,B" → ",D": the suffix that stays fixed.
    const staticPart = children[i].parentLevel.split(beforSplit)[1]

    // Graft the unchanged suffix onto the new prefix.
    const newParentLevel = newPrefix + staticPart
    await Source.update(
      {
        targetType: pSource.targetType,
        targetId: pSource.targetId,
        parentLevel: newParentLevel,
        updateUser: userId
      },
      {
        where: {
          id: BigInt(item.id)
        },
        transaction: t
      }
    )
  }
}

/**
 * Move one or more sources into `parentId`. All moves succeed or fail
 * together under transaction `t`. `t2` is a READ_UNCOMMITTED read-only
 * transaction so each iteration's duplicate-name check can see rows written
 * (but not yet committed) by `t` in earlier iterations.
 * Responds with success or MOVE_SOURCE_ERROR after rollback.
 * @param {*} ctx koa context; body: { sourceIds: [], parentId }
 */
module.exports.moveFile = async (ctx) => {
  const t = await Mysequelize.transaction()
  const t2 = await Mysequelize.transaction({
    isolationLevel: Transaction.ISOLATION_LEVELS.READ_UNCOMMITTED
  })
  const { sourceIds, parentId } = ctx.request.body
  ctx.logs = []
  try {
    // Inside try so a failed lookup still rolls back both transactions.
    const parentSource = await Source.findByPk(BigInt(parentId))
    for (let i = 0; i < sourceIds.length; i++) {
      // Re-read the destination's children each iteration (dirty read) so
      // duplicate detection sees earlier uncommitted moves.
      const pSourceChildrens = await Source.findAll({
        where: {
          parentId: BigInt(parentId),
          deleteFlag: 0
        },
        transaction: t2
      })
      const source = await Source.findByPk(BigInt(sourceIds[i]))
      const pSource = await Source.findByPk(BigInt(source.parentId))
      await this.handleDumpSource(
        source,
        parentSource,
        pSourceChildrens,
        false,
        ctx.user.id,
        t
      )
      ctx.logs.push({
        type: LOG_TYPE.SOURCE_MOVE,
        curSource: source.toJSON(),
        toSource: parentSource.toJSON(),
        pSource: pSource.toJSON(),
        sourceIds: [source.id, parentSource.id, pSource.id].join(SEPARATE_CHAR)
      })
    }
    await t.commit()
    ctx.body = R.success()
  } catch (error) {
    console.log(error)
    await t.rollback()
    ctx.body = R.error(ERROR_SOURCE_CODE.MOVE_SOURCE_ERROR)
  } finally {
    // `t2` was previously never committed or rolled back, leaking a DB
    // connection per request. It is read-only, so rollback is always safe.
    await t2.rollback()
  }
}

/**
 * Deep-copy `rootSource` and all of its non-deleted descendants under
 * `pSource`, assigning fresh ids and rewriting parentId/parentLevel chains.
 * Traversal is depth-first (Array#pop), which is fine because all rows are
 * inserted in a single bulkCreate at the end.
 * @param {*} rootSource sequelize instance to copy (may be falsy → no-op)
 * @param {*} pSource    destination parent
 * @param {*} userId     acting user id (stamped on the copied root)
 * @param {*} t          sequelize transaction for the bulk insert
 * @returns {Array} the plain create-objects that were inserted
 */
const levelCreateSource = async (rootSource, pSource, userId, t) => {
  if (!rootSource) {
    return []
  }

  rootSource = rootSource.toJSON()

  // Re-root the copy under pSource: new parent, new chain, inherited target.
  rootSource = {
    ...rootSource,
    targetType: pSource.targetType,
    targetId: pSource.targetId,
    parentId: pSource.id,
    parentLevel: pSource.parentLevel + SEPARATE_CHAR + pSource.id,
    createUser: userId,
    updateUser: userId
  }

  const result = []
  const queue = [rootSource]
  while (queue.length > 0) {
    const source = queue.pop()
    // Clone this node under a fresh id.
    const newId = generatId()
    const createObj = {
      id: newId,
      targetType: source.targetType,
      targetId: source.targetId,
      filename: source.filename,
      parentId: source.parentId,
      isDir: source.isDir,
      fileId: source.fileId,
      type: source.type,
      sizeDesc: source.sizeDesc,
      deleteFlag: source.deleteFlag,
      parentLevel: source.parentLevel,
      createUser: source.createUser,
      updateUser: source.updateUser
    }

    result.push(createObj)

    // Children still in the recycle bin are intentionally not copied.
    const children = await Source.findAll({
      where: {
        parentId: BigInt(source.id),
        deleteFlag: 0
      }
    })

    // Repoint each child at the freshly generated parent id.
    const newChildren = children.map((item) => {
      item = item.toJSON()
      return {
        ...item,
        parentId: newId,
        targetType: source.targetType,
        targetId: source.targetId,
        parentLevel: source.parentLevel + SEPARATE_CHAR + newId,
        createUser: source.createUser,
        updateUser: source.updateUser
      }
    })

    queue.push(...newChildren)
  }

  await Source.bulkCreate(result, { transaction: t })
  // Return the inserted rows so both exit paths yield an array
  // (previously this path returned undefined while the empty path returned []).
  return result
}

/**
 * Copy one or more sources into `parentId`. All copies succeed or fail
 * together under transaction `t`. Name collisions are resolved per the
 * user's "repeatFile" setting (skip / overwrite / rename) inside
 * handleDumpSource. `t2` is a READ_UNCOMMITTED read-only transaction so each
 * iteration's duplicate-name check can see rows written (but not yet
 * committed) by `t` in earlier iterations.
 * Responds with success or COPY_SOURCE_ERROR after rollback.
 * @param {*} ctx koa context; body: { sourceIds: [], parentId }
 */
module.exports.copyFile = async (ctx) => {
  const t = await Mysequelize.transaction()
  const t2 = await Mysequelize.transaction({
    isolationLevel: Transaction.ISOLATION_LEVELS.READ_UNCOMMITTED
  })
  const { sourceIds, parentId } = ctx.request.body
  ctx.logs = []
  try {
    // Inside try so a failed lookup still rolls back both transactions.
    const parentSource = await Source.findByPk(BigInt(parentId))
    for (let i = 0; i < sourceIds.length; i++) {
      // Re-read the destination's children each iteration (dirty read) so
      // duplicate detection sees earlier uncommitted copies.
      const pSourceChildrens = await Source.findAll({
        where: {
          parentId: BigInt(parentId),
          deleteFlag: 0
        },
        transaction: t2
      })

      const source = await Source.findByPk(BigInt(sourceIds[i]))
      const pSource = await Source.findByPk(BigInt(source.parentId))
      await this.handleDumpSource(
        source,
        parentSource,
        pSourceChildrens,
        true,
        ctx.user.id,
        t
      )
      ctx.logs.push({
        type: LOG_TYPE.SOURCE_COPY,
        curSource: source.toJSON(),
        toSource: parentSource.toJSON(),
        pSource: pSource.toJSON(),
        sourceIds: [source.id, parentSource.id, pSource.id].join(SEPARATE_CHAR)
      })
    }
    await t.commit()
    ctx.body = R.success()
  } catch (error) {
    console.log(error)
    await t.rollback()
    ctx.body = R.error(ERROR_SOURCE_CODE.COPY_SOURCE_ERROR)
  } finally {
    // `t2` was previously never committed or rolled back, leaking a DB
    // connection per request. It is read-only, so rollback is always safe.
    await t2.rollback()
  }
}

/**
 * Resolve which recycle-bin rows to operate on and reduce them to their
 * top-level roots (rows that are not descendants of any other selected row),
 * so restore/delete never processes the same subtree twice.
 * @param {*} userId       acting user (used when isAll selects everything)
 * @param {*} isAll        when +isAll === 1, select all of the user's bin rows
 * @param {*} binSourceIds explicit bin row ids (used when isAll is not 1)
 * @returns {{ binSources: Array, needRecycleArr: Array }}
 */
const getNeedRecOrDelArr = async (userId, isAll, binSourceIds) => {
  const ids = binSourceIds.map((v) => BigInt(v))
  const attrs = ['id', 'parentLevel', 'sourceId', 'targetId', 'targetType']

  // Either every bin row owned by the user, or just the requested ids.
  const where =
    +isAll === 1 ? { createUser: BigInt(userId) } : { id: { [Op.in]: ids } }

  const binSources = await BinSource.findAll({
    where,
    attributes: attrs
  })

  // A row is a descendant when some OTHER selected row's parentLevel is a
  // strict prefix of its own; roots are the rows that are nobody's descendant.
  const isDescendantOfAnother = (item) =>
    binSources.some(
      (other) =>
        item.parentLevel.startsWith(other.parentLevel) &&
        item.parentLevel !== other.parentLevel
    )

  const needRecycleArr = binSources.filter(
    (item) => !isDescendantOfAnother(item)
  )

  return {
    binSources,
    needRecycleArr
  }
}

/**
 * 用户彻底删除文件接口 (permanently delete recycle-bin entries).
 * For each selected "root" bin record: delete the source row and all of its
 * descendants, recompute the owner's (user or department) storage usage,
 * remove the bin records and any shares pointing at the deleted sources.
 * Each root is deleted in its own transaction; a failure on one root does
 * not stop the others.
 * @param {*} ctx Koa context; body: { binSourceIds: string[], isAll }
 */
module.exports.delFile = async (ctx) => {
  const { binSourceIds, isAll } = ctx.request.body
  // 物理file记录不在此处删除：其他用户可能仍关联同一文件，由定时任务清理。

  // 父子依赖分析：只对第一层（根）执行删除，后代随根一起级联删除，
  // 避免 a/b 先后入回收站时的重复删除。
  const { needRecycleArr, binSources } = await getNeedRecOrDelArr(
    ctx.user.id,
    isAll,
    binSourceIds
  )

  ctx.logs = []
  for (let i = 0; i < needRecycleArr.length; i++) {
    const item = needRecycleArr[i]
    // 获取source相关信息（用于审计日志）
    const aSource = await Source.findByPk(BigInt(item.sourceId))
    const pSource = await Source.findByPk(BigInt(aSource.parentId))
    ctx.logs.push({
      type: LOG_TYPE.SOURCE_DELETE,
      curSource: aSource.toJSON(),
      pSource: pSource.toJSON(),
      sourceIds: [aSource.id, pSource.id].join(SEPARATE_CHAR)
    })

    // BUG FIX: the transaction used to be created INSIDE the try block, so
    // the catch's t.rollback() referenced an out-of-scope binding
    // (ReferenceError) and the failed transaction leaked its connection.
    const t = await Mysequelize.transaction()
    // Descendants carry a parent_level that extends "<level>,<sourceId>".
    const descendantPrefix =
      item.parentLevel + SEPARATE_CHAR + item.sourceId
    try {
      // 删除source自身
      await Source.destroy({
        where: {
          id: item.sourceId
        },
        transaction: t
      })

      // 将其后代也全部删除
      await Source.destroy({
        where: {
          parentLevel: {
            [Op.like]: descendantPrefix + '%'
          }
        },
        transaction: t
      })

      // 重新计算用户/部门容量：统计 targetId 下仍存活的文件，
      // 排除当前正在删除的源及其后代
      const sourceUse = await Source.findAll({
        where: {
          targetId: item.targetId,
          id: {
            [Op.ne]: item.sourceId
          },
          parentLevel: {
            [Op.notLike]: descendantPrefix + '%'
          },
          fileId: {
            [Op.ne]: '-1'
          }
        },
        include: [{ model: WpFile, as: 'file', required: true }]
      })

      const sourceSizeUse = sourceUse.reduce((pre, next) => {
        pre += BigInt(next.file.size)
        return pre
      }, BigInt(0))

      // targetType 1 = 用户空间, 2 = 部门空间
      if (+item.targetType === 1) {
        await User.update(
          {
            sizeUse: sourceSizeUse.toString()
          },
          {
            where: {
              id: BigInt(item.targetId)
            },
            transaction: t
          }
        )
      } else if (+item.targetType === 2) {
        await Department.update(
          {
            sizeUse: sourceSizeUse.toString()
          },
          {
            where: {
              id: BigInt(item.targetId)
            },
            transaction: t
          }
        )
      }

      // 删除完后，bin-source记录需要清除
      await BinSource.destroy({
        where: {
          id: item.id
        },
        transaction: t
      })

      // 存在父子关系的bin-source也需要清除
      const binSourceChild = binSources.filter((v1) =>
        v1.parentLevel.startsWith(descendantPrefix)
      )

      const childIds = binSourceChild.map((v2) => BigInt(v2.id))

      await BinSource.destroy({
        where: {
          id: {
            [Op.in]: childIds
          }
        },
        transaction: t
      })

      // 指向这些source（根 + 后代）的分享及其shareTo记录也要删除
      const allSourceId = [
        BigInt(item.sourceId),
        ...binSourceChild.map((v) => BigInt(v.sourceId))
      ]

      const shares = await Share.findAll({
        where: {
          sourceId: {
            [Op.in]: allSourceId
          }
        }
      })

      const shareIds = shares.map((share) => BigInt(share.id))

      await Share.destroy({
        where: {
          id: {
            [Op.in]: shareIds
          }
        },
        transaction: t
      })

      await ShareTo.destroy({
        where: {
          shareId: {
            [Op.in]: shareIds
          }
        },
        transaction: t
      })

      await t.commit()
    } catch (error) {
      // 单个根失败不影响其他根；不再静默吞掉错误
      console.log(error)
      await t.rollback()
    }
  }
  ctx.body = R.success()
}

/**
 * 文件放入回收站接口 (move files to the recycle bin).
 * Marks each selected source and all of its descendants deleteFlag=1 and
 * writes one BinSource record per selected root, all in one transaction.
 * @param {*} ctx Koa context; body: { sourceIds: string[] }
 */
module.exports.binFile = async (ctx) => {
  // 到达这里，说明所有文件可以被放入回收站
  let { sourceIds } = ctx.request.body
  sourceIds = sourceIds.map((item) => BigInt(item))

  const sources = await Source.findAll({
    where: {
      id: {
        [Op.in]: sourceIds
      }
    },
    attributes: [
      'id',
      'filename',
      'parentId',
      'isDir',
      'targetId',
      'targetType',
      'parentLevel'
    ]
  })

  // BUG FIX: open the transaction only after the read above succeeds —
  // previously a failed findAll left the already-opened transaction
  // dangling (connection leak).
  const t = await Mysequelize.transaction()

  // 自身deleteFlag设置为1，其后代也要设置为1。
  // 例如当前source的parentLevel为 -1,1,2，其后代为 -1,1,2,x,...
  ctx.logs = []
  try {
    await Source.update(
      {
        deleteFlag: 1
      },
      {
        where: {
          id: {
            [Op.in]: sourceIds
          }
        },
        transaction: t
      }
    )

    for (let i = 0; i < sources.length; i++) {
      const pSource = await Source.findByPk(BigInt(sources[i].parentId))
      ctx.logs.push({
        type: LOG_TYPE.SOURCE_TOBIN,
        curSource: sources[i].toJSON(),
        pSource: pSource.toJSON(),
        sourceIds: [sources[i].id, pSource.id].join(SEPARATE_CHAR)
      })
      // 删除的本身加入到回收站记录
      const nextId = generatId()
      await BinSource.create(
        {
          id: nextId,
          targetId: sources[i].targetId,
          targetType: sources[i].targetType,
          sourceId: sources[i].id,
          parentLevel: sources[i].parentLevel,
          createUser: ctx.user.id,
          updateUser: ctx.user.id
        },
        {
          transaction: t
        }
      )

      // 对应的后代也标记为已删除
      await Source.update(
        {
          deleteFlag: 1
        },
        {
          where: {
            parentLevel: {
              [Op.like]:
                sources[i].parentLevel + SEPARATE_CHAR + sources[i].id + '%'
            }
          },
          transaction: t
        }
      )
    }

    await t.commit()
    ctx.body = R.success()
  } catch (error) {
    // 不再静默吞掉错误，便于排障
    console.log(error)
    await t.rollback()
    ctx.body = R.error(ERROR_SOURCE_CODE.BIN_FILE_ERROR)
  }
}

/**
 * 用户还原在回收站中的文件接口 (restore files from the recycle bin).
 * 流程：
 * 1. 取出选中（或全部）的bin记录，父子依赖分析后只还原第一层（根）；
 * 2. 若某根的祖先仍在回收站（可能被其他用户放入），跳过该根；
 * 3. 还原时若目标目录下重名，按用户配置生成新文件名；
 * 4. 根及其后代 deleteFlag 置0，对应的bin记录（含后代）删除。
 * 每个根使用独立事务，单个失败不影响其他根。
 * @param {*} ctx Koa context; body: { isAll, binSourceIds: string[] }
 */
module.exports.recFile = async (ctx) => {
  let { isAll, binSourceIds } = ctx.request.body

  binSourceIds = binSourceIds.map((v) => BigInt(v))
  let binSources = []
  if (isAll) {
    binSources = await BinSource.findAll({
      where: {
        createUser: BigInt(ctx.user.id)
      },
      attributes: ['id', 'parentLevel', 'sourceId']
    })
  } else {
    binSources = await BinSource.findAll({
      where: {
        id: {
          [Op.in]: binSourceIds
        }
      },
      attributes: ['id', 'parentLevel', 'sourceId']
    })
  }

  // 进行父子依赖关系分析，还原第一层：不存在其他记录是它的祖先。
  // BUG FIX: compare against prefix + separator — a bare
  // startsWith(v.parentLevel) mistook "-1,12" for a child of "-1,1".
  const needRecycleArr = binSources.filter((item) => {
    return !binSources.some((v) => {
      return (
        item.parentLevel !== v.parentLevel &&
        item.parentLevel.startsWith(v.parentLevel + SEPARATE_CHAR)
      )
    })
  })

  // t2: READ_UNCOMMITTED reads so the duplicate-name check sees names
  // restored by earlier iterations of this loop.
  const t2 = await Mysequelize.transaction({
    isolationLevel: Transaction.ISOLATION_LEVELS.READ_UNCOMMITTED
  })
  ctx.logs = []
  try {
    for (let i = 0; i < needRecycleArr.length; i++) {
      const item = needRecycleArr[i]

      // 查询是否存在父亲仍在回收站：parent_level 是当前 parentLevel 的前缀
      // （LOCATE 返回>0），且不是自己。存在则不能还原，跳过。
      const str = 'LOCATE(`parent_level`, ' + `'${item.parentLevel}'` + ') > 0'
      const res = await BinSource.findOne({
        where: {
          parentLevel: {
            [Op.notLike]: item.parentLevel
          },
          [Op.and]: literal(str)
        }
      })
      if (res) {
        // 说明存在祖先，不能还原，跳过
        continue
      }

      const t = await Mysequelize.transaction()
      try {
        // 可以进行还原了；还原这个根时，名称可能会与目标目录下现有文件重复
        const curSource = await Source.findByPk(BigInt(item.sourceId))
        const pSource = await Source.findByPk(BigInt(curSource.parentId))

        const one = await Source.findOne({
          where: {
            parentId: BigInt(curSource.parentId),
            filename: curSource.filename,
            deleteFlag: 0
          },
          transaction: t2
        })
        if (one) {
          // 重名：根据用户 renameFormat 配置决定如何生成新名称
          const one1 = await UserOption.findOne({
            where: {
              key: 'renameFormat',
              userId: ctx.user.id
            }
          })
          let newName = ''
          if (!one1) {
            newName = formatFilename(curSource.filename)
          } else {
            if (+one1.value === 1) {
              // NOTE(review): formatFilename(name, true) is invoked and its
              // return value is called again with (parentId, filename) —
              // presumably a curried variant; verify against utils/format.
              newName = await formatFilename(curSource.filename, true)(
                curSource.parentId,
                curSource.filename
              )
            }
          }
          curSource.filename = newName
        }
        await Source.update(
          {
            deleteFlag: 0,
            filename: curSource.filename
          },
          {
            where: {
              id: item.sourceId
            },
            transaction: t
          }
        )

        // 将其后代也全部设置为0
        await Source.update(
          {
            deleteFlag: 0
          },
          {
            where: {
              parentLevel: {
                [Op.like]:
                  item.parentLevel + SEPARATE_CHAR + item.sourceId + '%'
              }
            },
            transaction: t
          }
        )

        // 更新完后，bin-source记录需要清除
        await BinSource.destroy({
          where: {
            id: item.id
          },
          transaction: t
        })

        // 父亲还原，其后代的bin记录也应删除（即使用户没有单独选中它们）
        await BinSource.destroy({
          where: {
            parentLevel: {
              [Op.like]: item.parentLevel + SEPARATE_CHAR + item.sourceId + '%'
            }
          },
          transaction: t
        })
        await t.commit()
        ctx.logs.push({
          type: LOG_TYPE.SOURCE_RECYCLE,
          curSource: curSource.toJSON(),
          pSource: pSource.toJSON(),
          sourceIds: [curSource.id, pSource.id].join(SEPARATE_CHAR)
        })
      } catch (error) {
        console.log(error)
        await t.rollback()
      }
    }
  } finally {
    // BUG FIX: t2 was never committed or rolled back, leaking one pooled
    // connection per request. It is read-only, so rollback is always safe.
    await t2.rollback().catch(() => {})
  }

  ctx.body = R.success()
}

/**
 * 文件打包下载接口 (zip download).
 * Recursively collects the selected sources (folders expand to their live
 * children), zips them preserving hierarchy into a per-request temp file,
 * then returns the archive buffer as an attachment.
 * @param {*} ctx Koa context; body: { sourceIds: string[] }
 */
module.exports.zipDownload = async (ctx) => {
  // 到达这里，说明文件ID没有问题，可以进行下载
  let { sourceIds } = ctx.request.body
  sourceIds = sourceIds.map((item) => BigInt(item))

  // BUG FIX: unique per-request temp file. The previous hard-coded
  // 'archive.zip' was shared by concurrent requests (corrupted archives)
  // and was never deleted afterwards.
  const zipPath = `archive-${crypto.randomBytes(8).toString('hex')}.zip`

  try {
    // 汇总sourceId及其子文件对应的物理路径，递归生成父子关系结构
    // 形如 {path:'src/a', name:'a', isDir}, {path:'src/b', name:'a/b'}
    const fileInfoArr = []

    // 深度优先遍历：每个source产生一个条目；目录以 path === '-1' 标记，
    // 文件携带物理路径 path 与存储目录 fileDir。
    const deepRoot = async (root, pathArr = []) => {
      if (root.isDir) {
        // 查询出未删除的子文件
        const children = await Source.findAll({
          where: {
            parentId: BigInt(root.id),
            deleteFlag: 0
          },
          include: [
            {
              model: WpFile,
              as: 'file',
              attributes: ['path', 'storageOriginId']
            }
          ]
        })

        for (let i = 0; i < children.length; i++) {
          await deepRoot(children[i], pathArr.concat(children[i].filename))
        }
      }

      // 不管是不是文件夹，都需要将其push到fileInfoArr中
      let fileDir = '-1'
      if (root.file) {
        const { fileDir: res } = await getUploadPath(root.file.storageOriginId)
        fileDir = res
      }

      fileInfoArr.push({
        id: root.id,
        parentId: root.parentId,
        parentLevel: root.parentLevel,
        targetId: root.targetId,
        isDir: root.isDir,
        filename: root.filename,
        path: root.file ? root.file.path : '-1',
        name: pathArr.join('/'),
        fileDir
      })
    }

    for (let i = 0; i < sourceIds.length; i++) {
      const rootSource = await Source.findByPk(BigInt(sourceIds[i]), {
        include: [
          {
            model: WpFile,
            as: 'file',
            attributes: ['path', 'storageOriginId']
          }
        ]
      })

      await deepRoot(rootSource, [rootSource.filename])
    }

    // 对fileInfoArr进行压缩处理，保持层级关系
    await new Promise((resolve, reject) => {
      const output = fs.createWriteStream(zipPath)
      const archive = archiver('zip', {
        zlib: { level: 9 } // 设置压缩级别
      })

      output.on('close', resolve)
      // BUG FIX: stream/archiver failures previously left this promise
      // pending forever; surface them so the catch below answers the client.
      output.on('error', reject)
      archive.on('error', reject)

      archive.pipe(output)

      for (let i = 0; i < fileInfoArr.length; i++) {
        const info = fileInfoArr[i]
        if (info.path === '-1') {
          // 目录条目
          archive.append(null, { name: info.name, type: 'directory' })
        } else {
          // 文件条目：从存储目录读取物理文件
          archive.append(
            fs.createReadStream(path.join(info.fileDir, info.path)),
            {
              name: info.name
            }
          )
        }
      }

      archive.finalize()
    })

    ctx.set('Access-Control-Expose-Headers', 'content-disposition')
    ctx.set(
      'Content-disposition',
      'attachment;filename=' + encodeURIComponent('xxx.zip')
    )

    const file = fs.readFileSync(zipPath)
    ctx.logs = [
      {
        type: LOG_TYPE.SOURCE_DIR_DOWNLOAD,
        sources: fileInfoArr,
        sourceIds: fileInfoArr.map((item) => item.id).join(SEPARATE_CHAR)
      }
    ]
    for (let i = 0; i < ctx.logs.length; i++) {
      await logRecord(ctx, ctx.logs[i].type, ctx.logs[i])
    }
    ctx.body = file
  } catch (e) {
    console.log(e)
    ctx.body = R.error(ERROR_SOURCE_CODE.DOWNLOAD_SOURCE_ERROR)
  } finally {
    // best-effort清理临时压缩包（响应内容已读入内存，删除是安全的）
    fs.unlink(zipPath, () => {})
  }
}

/**
 * 单文件下载接口 (single-file download).
 * Resolves the source's physical path from its storage origin, reads the
 * file into a buffer, records a download log, and returns the buffer as an
 * attachment named after the source.
 * @param {*} ctx Koa context; body: { sourceId: string }
 */
module.exports.fileDownload = async (ctx) => {
  // 到达这里，说明文件ID没有问题，可以进行下载
  const { sourceId } = ctx.request.body

  try {
    // 找到这个文件对应的file的物理路径，进行文件读取
    const source = await Source.findByPk(BigInt(sourceId), {
      include: [
        {
          model: WpFile,
          as: 'file',
          attributes: ['id', 'filename', 'size', 'path', 'storageOriginId']
        }
      ]
    })

    const { fileDir } = await getUploadPath(source.file.storageOriginId)

    // 获取父亲（用于审计日志）
    const pSource = await Source.findByPk(BigInt(source.parentId))

    // 根据当前文件的存储源和path路径进行文件读取，返回buffer
    const file = fs.readFileSync(path.join(fileDir, source.file.path))

    const logs = [
      {
        type: LOG_TYPE.SOURCE_DOWNLOAD,
        curSource: source.toJSON(),
        pSource: pSource.toJSON(),
        // BUG FIX: join with SEPARATE_CHAR like every other log entry in
        // this file (previously stored a raw array)
        sourceIds: [source.id, pSource.id].join(SEPARATE_CHAR)
      }
    ]

    for (let i = 0; i < logs.length; i++) {
      await logRecord(ctx, logs[i].type, logs[i])
    }

    ctx.set('Access-Control-Expose-Headers', 'content-disposition')
    ctx.set(
      'Content-disposition',
      'attachment;filename=' + encodeURIComponent(source.filename)
    )

    ctx.body = file
  } catch (err) {
    console.log(err)
    ctx.body = R.error(ERROR_SOURCE_CODE.DOWNLOAD_SOURCE_ERROR)
  }
}

/**
 * @author: Hejh(3010733382@qq.com)
 * @description: 更新资源授权的对象 — 为每个授权对象生成新ID并批量写入
 * @param {*} authTo 授权对象数组
 * @param {*} sourceId 分享ID
 * @param {*} userId 用户ID
 * @param {*} t 事务对象
 */
const updateAuthTo = async (authTo, sourceId, userId, t) => {
  const rows = []
  for (const entry of authTo) {
    rows.push({
      ...entry,
      id: generatId(),
      sourceId,
      createUser: userId,
      updateUser: userId
    })
  }
  await SourceAuth.bulkCreate(rows, { transaction: t })
}

/**
 * 资源授权接口 — replace the full set of auth records for one source.
 * Deletes the existing SourceAuth rows for `sourceId` and re-inserts the
 * supplied `authTo` list inside one transaction.
 * @param {*} ctx Koa context; body: { sourceId: string, authTo: object[] }
 */
module.exports.authSource = async (ctx) => {
  const { sourceId, authTo } = ctx.request.body
  const t = await Mysequelize.transaction()
  try {
    // 更新分享授权对象，先删除对应的记录
    await SourceAuth.destroy({
      where: {
        sourceId: BigInt(sourceId)
      },
      transaction: t
    })
    // 重新添加
    await updateAuthTo(authTo, sourceId, ctx.user.id, t)
    // BUG FIX: commit BEFORE reporting success. Previously the success body
    // was set first; if commit then threw, the catch called rollback() on a
    // finished transaction, which itself throws and escaped the handler.
    await t.commit()
    ctx.body = R.success()
  } catch (error) {
    console.log(error)
    // guard: rollback on an already-finished transaction would throw again
    await t.rollback().catch(() => {})
    ctx.body = R.error(ERROR_SOURCE_CODE.SOURCE_AUTH_ERROR)
  }
}

/**
 * 用户获取对资源拥有的文档权限数组 — 返回权限的 ename 列表
 * @param {*} ctx Koa context; body: { sourceIds: string[] }
 */
module.exports.getSourceAuth = async (ctx) => {
  const { sourceIds } = ctx.request.body
  const auths = await getUserSourcesAuths(ctx.user.id, sourceIds)
  const enames = auths.map(({ ename }) => ename)
  ctx.body = R.success(enames)
}

/**
 * 用户获取对某个部门资源拥有的文档角色
 * @param {*} ctx Koa context; body: { sourceId: string }
 */
module.exports.getSourceRole = async (ctx) => {
  const { sourceId } = ctx.request.body
  ctx.body = R.success(await getUserSourceRole(ctx.user.id, sourceId))
}
