)
}
// Blob download helper: saves a Blob to disk under the given file name
// by synthesising a click on a temporary hidden anchor element.
export function downloadBlob({ blob, name }: { blob: Blob; name: string }) {
  // Object URL gives the anchor something the browser can download
  const objectUrl = window.URL.createObjectURL(blob)
  const anchor = document.createElement('a')
  anchor.style.display = 'none'
  anchor.href = objectUrl
  anchor.download = name
  document.body.appendChild(anchor)
  anchor.click()
  // Release the object URL and remove the helper element once clicked
  window.URL.revokeObjectURL(objectUrl)
  anchor.remove()
}
/**
* Download multiple files after compressing them into a zip
* @param toastId Toast ID to be used for toast notification
* @param files Files to be downloaded
* @param folder Optional folder name to hold files, otherwise flatten files in the zip
*/
export async function downloadMultipleFiles({
toastId,
router,
files,
folder,
}: {
toastId: string
router: NextRouter
files: { name: string; url: string }[]
folder?: string
}): Promise {
const zip = new JSZip()
const dir = folder ? zip.folder(folder)! : zip
// Add selected file blobs to zip
files.forEach(({ name, url }) => {
dir.file(
name,
fetch(url).then(r => {
return r.blob()
})
)
})
// Create zip file and download it
const b = await zip.generateAsync({ type: 'blob' }, metadata => {
toast.loading(, {
id: toastId,
})
})
downloadBlob({ blob: b, name: folder ? folder + '.zip' : 'download.zip' })
}
/**
* Download hierarchical tree-like files after compressing them into a zip
* @param toastId Toast ID to be used for toast notification
* @param files Files to be downloaded. Array of file and folder items excluding root folder.
* Folder items MUST be in front of its children items in the array.
* Use async generator because generation of the array may be slow.
* When waiting for its generation, we can meanwhile download bodies of already got items.
* Only folder items can have url undefined.
* @param basePath Root dir path of files to be downloaded
* @param folder Optional folder name to hold files, otherwise flatten files in the zip
*/
export async function downloadTreelikeMultipleFiles({
toastId,
router,
files,
basePath,
folder,
}: {
toastId: string
router: NextRouter
files: AsyncGenerator<{
name: string
url?: string
path: string
isFolder: boolean
}>
basePath: string
folder?: string
}): Promise {
const zip = new JSZip()
const root = folder ? zip.folder(folder)! : zip
const map = [{ path: basePath, dir: root }]
// Add selected file blobs to zip according to its path
for await (const { name, url, path, isFolder } of files) {
// Search parent dir in map
const i = map
.slice()
.reverse()
.findIndex(
({ path: parent }) =>
path.substring(0, parent.length) === parent && path.substring(parent.length + 1).indexOf('/') === -1
)
if (i === -1) {
throw new Error('File array does not satisfy requirement')
}
// Add file or folder to zip
const dir = map[map.length - 1 - i].dir
if (isFolder) {
map.push({ path, dir: dir.folder(name)! })
} else {
dir.file(
name,
fetch(url!).then(r => r.blob())
)
}
}
// Create zip file and download it
const b = await zip.generateAsync({ type: 'blob' }, metadata => {
toast.loading(, {
id: toastId,
})
})
downloadBlob({ blob: b, name: folder ? folder + '.zip' : 'download.zip' })
}
// A single file or folder item produced while traversing a folder tree.
interface TraverseItem {
  // Absolute path of the item; name segments are URL-encoded by the producer
  path: string
  // Raw metadata object as returned by the API for this item
  meta: any
  isFolder: boolean
  // Present only when a handleable (4xx) error occurred while fetching this folder
  error?: { status: number; message: string }
}
/**
 * One-shot concurrent top-down file traversing for the folder.
 * Due to react hook limit, we cannot reuse SWR utils for recursive actions.
 * We will directly fetch API and arrange responses instead.
 * In folder tree, we visit folders top-down as concurrently as possible.
 * Every time we visit a folder, we fetch and return meta of all its children.
 * If folders have pagination, partially retrieved items are not returned immediately,
 * but after all children of the folder have been successfully retrieved.
 * If an error occurred in paginated fetching, all children will be dropped.
 * @param path Folder to be traversed. The path should be cleaned in advance.
 * @returns Array of items representing folders and files of traversed folder top-down and excluding root folder.
 * Due to top-down, Folder items are ALWAYS in front of its children items.
 * Error key in the item will contain the error when there is a handleable error.
 */
export async function* traverseFolder(path: string): AsyncGenerator<TraverseItem> {
  const hashedToken = getStoredToken(path)
  // Generate the task passed to Promise.race to request one page of a folder.
  // i is the task's slot in the pool so the settled slot can be cleared.
  // On fetch failure we rethrow a structured { i, path, error } object so the
  // racing catch block below can recover which task failed. (The original
  // .catch RETURNED the error object instead of throwing, which resolved
  // `data` to the error shape and surfaced failures as a bogus
  // 'Path is not folder' throw.)
  const genTask = async (i: number, path: string, next?: string) => {
    return {
      i,
      path,
      data: await fetcher([
        // NOTE(review): paginated requests hit '/api/?path=' while the first
        // page hits '/api?path=' — confirm both routes resolve identically
        next ? `/api/?path=${path}&next=${next}` : `/api?path=${path}`,
        hashedToken ?? undefined,
      ]).catch(error => {
        throw { i, path, error }
      }),
    }
  }
  // Pool containing Promises of folder requests; slots are deleted (leaving
  // holes) when their task settles, so the array becomes sparse
  let pool = [genTask(0, path)]
  // Map as item buffer for folders with pagination, keyed by folder path
  const buf: { [k: string]: TraverseItem[] } = {}
  // filter(() => true) removes gaps in the array
  while (pool.filter(() => true).length > 0) {
    let info: { i: number; path: string; data: any }
    try {
      info = await Promise.race(pool.filter(() => true))
    } catch (error: any) {
      // A rejected task carries the structured object thrown in genTask
      const { i, path, error: innerError } = error
      // 4xx errors are identified as handleable errors
      if (Math.floor(innerError.status / 100) === 4) {
        delete pool[i]
        yield {
          path,
          meta: {},
          isFolder: true,
          // NOTE(review): assumes fetcher errors expose message.error — confirm
          error: { status: innerError.status, message: innerError.message.error },
        }
        continue
      } else {
        throw error
      }
    }
    const { i, path, data } = info
    if (!data || !data.folder) {
      throw new Error('Path is not folder')
    }
    delete pool[i]
    // Convert raw API children into TraverseItems; child paths are built from
    // the parent path with URL-encoded names ('/' parent contributes no prefix)
    const items = data.folder.value.map((c: any) => {
      const p = `${path === '/' ? '' : path}/${encodeURIComponent(c.name)}`
      return { path: p, meta: c, isFolder: Boolean(c.folder) }
    }) as TraverseItem[]
    if (data.next) {
      // More pages to fetch: buffer items and queue the next-page task
      buf[path] = (buf[path] ?? []).concat(items)
      // Append next page task to the pool at the end
      const i = pool.length
      pool[i] = genTask(i, path, data.next)
    } else {
      // Last (or only) page: flush buffered items for this folder
      const allItems = (buf[path] ?? []).concat(items)
      if (buf[path]) {
        delete buf[path]
      }
      allItems
        .filter(item => item.isFolder)
        .forEach(item => {
          // Append new folder tasks to the pool at the end
          const i = pool.length
          pool[i] = genTask(i, item.path)
        })
      yield* allItems
    }
  }
}