/**
* Filesystem Cache
*
* Given a file and a transform function, cache the result into files
* or retrieve the previously cached files if the given file is already known.
*
* @see https://github.com/babel/babel-loader/issues/34
* @see https://github.com/babel/babel-loader/pull/41
*/
const os = require("os");
const path = require("path");
const zlib = require("zlib");
const crypto = require("crypto");
const { promisify } = require("util");
const { readFile, writeFile, mkdir } = require("fs/promises");
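// find-cache-dir ships as an ES module in recent versions, so it is loaded
// lazily via a dynamic import() and awaited only when actually needed.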
const findCacheDirP = import("find-cache-dir");
const transform = require("./transform");
// Lazily instantiated when needed
let defaultCacheDirectory = null;
let hashType = "sha256";
// Fall back to md5 hashing if sha256 is not available
try {
crypto.createHash(hashType);
} catch (err) {
hashType = "md5";
}
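// Promisified zlib helpers used when cache compression is enabled.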
const gunzip = promisify(zlib.gunzip);
const gzip = promisify(zlib.gzip);
/**
 * Read the contents of a cached file, decompressing it when needed.
 *
 * @async
 * @param {String} filename
 * @param {Boolean} compress
 * @return {Promise<Object>} The parsed cache entry
 */
const read = async function (filename, compress) {
const data = await readFile(filename + (compress ? ".gz" : ""));
const content = compress ? await gunzip(data) : data;
return JSON.parse(content.toString());
};
/**
 * Write contents into a file, compressing them when needed.
 *
 * @async
 * @param {String} filename
 * @param {Boolean} compress
 * @param {Object} result
 */
const write = async function (filename, compress, result) {
const content = JSON.stringify(result);
const data = compress ? await gzip(content) : content;
return await writeFile(filename + (compress ? ".gz" : ""), data);
};
/**
 * Build the filename for the cached file.
 *
 * @param {String} source File source code
 * @param {String} identifier Cache identifier used to bust the cache
 * @param {Object} options Options used by the transform
 *
 * @return {String}
 */
const filename = function (source, identifier, options) {
const hash = crypto.createHash(hashType);
const contents = JSON.stringify({
source,
options,
identifier
});
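  // Any change to the source, the transform options, or the cache identifier
  // produces a different hash, and therefore a different cache file.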
hash.update(contents);
return hash.digest("hex") + ".json";
};
/**
 * Handle the cache: return the cached result when one exists,
 * otherwise transform the source and store the result.
 *
 * @param {String} directory
 * @param {Object} params
 */
const handleCache = async function (directory, params) {
const {
source,
options = {},
cacheIdentifier,
cacheDirectory,
cacheCompression
} = params;
const file = path.join(directory, filename(source, cacheIdentifier, options));
try {
    // No error means that the file was previously cached;
    // we just need to return it.
return await read(file, cacheCompression);
} catch (err) {}
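  // Only fall back to os.tmpdir() when the user did not explicitly configure
  // cacheDirectory and we are not already writing there.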
const fallback = typeof cacheDirectory !== "string" && directory !== os.tmpdir();
// Make sure the directory exists.
  try {
    // `recursive: true` also succeeds if the directory already exists.
    await mkdir(directory, { recursive: true });
} catch (err) {
if (fallback) {
return handleCache(os.tmpdir(), params);
}
throw err;
}
  // Otherwise transform the file, return the result to the user
  // as soon as possible, and write it to the cache.
const result = await transform(source, options);
// Do not cache if there are external dependencies,
// since they might change and we cannot control it.
if (!result.externalDependencies.length) {
try {
await write(file, cacheCompression, result);
} catch (err) {
if (fallback) {
        // Fall back to tmpdir if the node_modules folder is not writable
return handleCache(os.tmpdir(), params);
}
throw err;
}
}
return result;
};
/**
* Retrieve file from cache, or create a new one for future reads
*
* @async
* @param {Object} params
* @param {String} params.cacheDirectory Directory to store cached files
* @param {String} params.cacheIdentifier Unique identifier to bust cache
 * @param {Boolean} params.cacheCompression Whether to compress cached files
* @param {String} params.source Original contents of the file to be cached
* @param {Object} params.options Options to be given to the transform fn
*
* @example
*
* const result = await cache({
* cacheDirectory: '.tmp/cache',
* cacheIdentifier: 'babel-loader-cachefile',
* cacheCompression: false,
* source: *source code from file*,
* options: {
* experimental: true,
* runtime: true
* },
* });
*/
module.exports = async function (params) {
let directory;
if (typeof params.cacheDirectory === "string") {
directory = params.cacheDirectory;
} else {
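    // Resolve the default cache directory only once and reuse it for
    // subsequent compilations.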
    if (defaultCacheDirectory === null) {
      const { default: findCacheDir } = await findCacheDirP;
      defaultCacheDirectory =
        findCacheDir({ name: "babel-loader" }) || os.tmpdir();
    }
directory = defaultCacheDirectory;
}
return await handleCache(directory, params);
};