// require modules
const fs = require('node:fs')
const path = require('node:path')
const process = require('node:process')
const archiver = require('archiver')
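// The progress bar and its glob pre-scan are only used for local runs, so skip those deps on CI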
const IS_CI = !!(process.env.CIRCLECI || process.env.GITHUB_ACTIONS)
const ProgressBar = !IS_CI ? require('progress') : {}
const readDirGlob = !IS_CI ? require('readdir-glob') : {}
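// Files to include in each extension zip, relative to the package root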
const INCLUDE_GLOBS = [
  'build/**',
  'icons/**',
  'popups/**',
  'devtools.html',
  'devtools-background.html',
  'manifest.json',
  'package.json',
]
// SKIP_DIR_GLOBS prunes these directories so glob searches stay fast
const SKIP_DIR_GLOBS = ['node_modules', 'src']
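// Format a raw byte count as a human-readable size, e.g. "1.25 MB"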
function bytesToSize(bytes) {
  const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB']
  if (bytes === 0) {
    return '0 Byte'
  }
  // Pick the largest unit that keeps the value >= 1, then round to two decimals
  const i = Math.floor(Math.log(bytes) / Math.log(1024))
  return `${(bytes / 1024 ** i).toFixed(2)} ${sizes[i]}`
}
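// Build the Chrome and Firefox extension zips one after the other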
(async () => {
  await writeZip('devtools-chrome.zip', 'shell-chrome')
  await writeZip('devtools-firefox.zip', 'shell-firefox')
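  // Zip the matching files from packages/<packageDir> into dist/<fileName>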
  async function writeZip(fileName, packageDir) {
    // create a file to stream archive data to.
    const output = fs.createWriteStream(path.join(__dirname, 'dist', fileName))
    const archive = archiver('zip', {
      zlib: { level: 9 }, // Sets the compression level.
    })
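    // Progress reporting is only set up for local runs; CI logs cannot render the interactive bar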
    if (!IS_CI) {
      // Shared state rendered into the progress bar tokens
      const status = {
        total: 0,
        written: 0,
        cFile: '...',
        cSize: '0 Bytes',
        tBytes: 0,
        tSize: '0 Bytes',
      }
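      // Pre-count the files that will be archived so the progress bar knows its total up front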
      async function parseFileStats() {
        return new Promise((resolve, reject) => {
          const globber = readDirGlob(path.join('packages', packageDir), { pattern: INCLUDE_GLOBS, skip: SKIP_DIR_GLOBS, mark: true, stat: true })
          globber.on('match', (match) => {
            if (!match.stat.isDirectory()) {
              status.total++
            }
          })
          globber.on('error', (err) => {
            reject(err)
          })
          globber.on('end', () => {
            resolve()
          })
        })
      }
      await parseFileStats().catch((err) => {
        console.error(err)
        process.exit(1)
      })
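      // :tSize, :cFile and :cSize are custom tokens filled from the status object on each tick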
      const bar = new ProgressBar(`${fileName} @ :tSize [:bar] :current/:total :percent +:cFile@:cSize`, {
        width: 18,
        incomplete: ' ',
        total: status.total,
      })
      bar.tick(0, status)
      archive.on('entry', (entry) => {
        if (!entry.stats.isDirectory()) {
          const n = entry.name
          status.written++
          status.cFile = n.length > 14
            ? `...${n.slice(n.length - 11)}`
            : n
          status.cSize = bytesToSize(entry.stats.size)
          status.tBytes += entry.stats.size
          status.tSize = bytesToSize(status.tBytes)
          bar.tick(1, status)
        }
      })
    }
    const end = new Promise((resolve) => {
      // listen for all archive data to be written
      // 'close' event is fired only when a file descriptor is involved
      output.on('close', () => {
        if (archive.pointer() < 1000) {
          console.warn(`Zip file (${fileName}) is only ${archive.pointer()} bytes`)
        }
        resolve()
      })
    })
    // 'end' is fired when the data source is drained, regardless of what the data source was.
    // It is not part of this library but comes from the Node.js Stream API.
    // @see: https://nodejs.org/api/stream.html#stream_event_end
    output.on('end', () => {
      // nothing to do
    })
    // good practice to catch warnings (ie stat failures and other non-blocking errors)
    archive.on('warning', (err) => {
      if (err.code !== 'ENOENT') {
        // treat any other warning as fatal
        console.error(err)
        process.exit(1)
      }
    })
    // good practice to catch this error explicitly
    archive.on('error', (err) => {
      console.error(err)
      process.exit(1)
    })
    // pipe archive data to the file
    archive.pipe(output)
    INCLUDE_GLOBS.forEach((glob) => {
      // append files from a glob pattern
      archive.glob(glob, { cwd: path.join('packages', packageDir), skip: SKIP_DIR_GLOBS })
    })
    // finalize the archive (ie we are done appending files, but the streams still have to finish)
    // 'close', 'end' or 'finish' may be fired right after calling this method, so register for them beforehand
    archive.finalize()
    await end
  }
})()