|
'use strict'; |
|
|
|
const acorn = require('acorn'); |
|
const node_module = require('node:module'); |
|
const fs = require('node:fs'); |
|
const ufo = require('ufo'); |
|
const pathe = require('pathe'); |
|
const pkgTypes = require('pkg-types'); |
|
const node_url = require('node:url'); |
|
const assert = require('node:assert'); |
|
const process$1 = require('node:process'); |
|
const path = require('node:path'); |
|
const v8 = require('node:v8'); |
|
const node_util = require('node:util'); |
|
|
|
function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e.default : e; } |
|
|
|
const fs__default = _interopDefaultCompat(fs); |
|
const assert__default = _interopDefaultCompat(assert); |
|
const process__default = _interopDefaultCompat(process$1); |
|
const path__default = _interopDefaultCompat(path); |
|
const v8__default = _interopDefaultCompat(v8); |
|
|
|
const BUILTIN_MODULES = new Set(node_module.builtinModules); |
|
function normalizeSlash(path) { |
|
return path.replace(/\\/g, "/"); |
|
} |
|
function isObject(value) { |
|
return value !== null && typeof value === "object"; |
|
} |
|
function matchAll(regex, string, addition) { |
|
const matches = []; |
|
for (const match of string.matchAll(regex)) { |
|
matches.push({ |
|
...addition, |
|
...match.groups, |
|
code: match[0], |
|
start: match.index, |
|
end: (match.index || 0) + match[0].length |
|
}); |
|
} |
|
return matches; |
|
} |
|
function clearImports(imports) { |
|
return (imports || "").replace(/(\/\/[^\n]*\n|\/\*.*\*\/)/g, "").replace(/\s+/g, " "); |
|
} |
|
function getImportNames(cleanedImports) { |
|
const topLevelImports = cleanedImports.replace(/{([^}]*)}/, ""); |
|
const namespacedImport = topLevelImports.match(/\* as \s*(\S*)/)?.[1]; |
|
const defaultImport = topLevelImports.split(",").find((index) => !/[*{}]/.test(index))?.trim() || void 0; |
|
return { |
|
namespacedImport, |
|
defaultImport |
|
}; |
|
} |
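// Note: clearImports() strips comments and collapses whitespace before
// getImportNames() extracts the default and namespace bindings.
// Illustrative example (assumed inputs): for the cleaned clause
// "foo, { bar } " it returns { defaultImport: "foo", namespacedImport: undefined },
// and for "* as ns " it returns { namespacedImport: "ns", defaultImport: undefined }.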
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
const own$1 = {}.hasOwnProperty; |
|
|
|
const classRegExp = /^([A-Z][a-z\d]*)+$/; |
|
|
|
const kTypes = new Set([ |
|
'string', |
|
'function', |
|
'number', |
|
'object', |
|
|
|
'Function', |
|
'Object', |
|
'boolean', |
|
'bigint', |
|
'symbol' |
|
]); |
|
|
|
const codes = {}; |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
function formatList(array, type = 'and') { |
|
return array.length < 3 |
|
? array.join(` ${type} `) |
|
: `${array.slice(0, -1).join(', ')}, ${type} ${array[array.length - 1]}` |
|
} |
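// Illustrative example: formatList(['string', 'URL']) === 'string and URL',
// and formatList(['string', 'URL', 'Buffer'], 'or') === 'string, URL, or Buffer'.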
|
|
|
|
|
const messages = new Map(); |
|
const nodeInternalPrefix = '__node_internal_'; |
|
|
|
let userStackTraceLimit; |
|
|
|
codes.ERR_INVALID_ARG_TYPE = createError( |
|
'ERR_INVALID_ARG_TYPE', |
|
|
|
|
|
|
|
|
|
|
|
(name, expected, actual) => { |
|
assert__default(typeof name === 'string', "'name' must be a string"); |
|
if (!Array.isArray(expected)) { |
|
expected = [expected]; |
|
} |
|
|
|
let message = 'The '; |
|
if (name.endsWith(' argument')) { |
|
|
|
message += `${name} `; |
|
} else { |
|
const type = name.includes('.') ? 'property' : 'argument'; |
|
message += `"${name}" ${type} `; |
|
} |
|
|
|
message += 'must be '; |
|
|
|
|
|
const types = []; |
|
|
|
const instances = []; |
|
|
|
const other = []; |
|
|
|
for (const value of expected) { |
|
assert__default( |
|
typeof value === 'string', |
|
'All expected entries have to be of type string' |
|
); |
|
|
|
if (kTypes.has(value)) { |
|
types.push(value.toLowerCase()); |
|
} else if (classRegExp.exec(value) === null) { |
|
assert__default( |
|
value !== 'object', |
|
'The value "object" should be written as "Object"' |
|
); |
|
other.push(value); |
|
} else { |
|
instances.push(value); |
|
} |
|
} |
|
|
|
|
|
|
|
if (instances.length > 0) { |
|
const pos = types.indexOf('object'); |
|
if (pos !== -1) { |
|
types.splice(pos, 1);
|
instances.push('Object'); |
|
} |
|
} |
|
|
|
if (types.length > 0) { |
|
message += `${types.length > 1 ? 'one of type' : 'of type'} ${formatList( |
|
types, |
|
'or' |
|
)}`; |
|
if (instances.length > 0 || other.length > 0) message += ' or '; |
|
} |
|
|
|
if (instances.length > 0) { |
|
message += `an instance of ${formatList(instances, 'or')}`; |
|
if (other.length > 0) message += ' or '; |
|
} |
|
|
|
if (other.length > 0) { |
|
if (other.length > 1) { |
|
message += `one of ${formatList(other, 'or')}`; |
|
} else { |
|
if (other[0].toLowerCase() !== other[0]) message += 'an '; |
|
message += `${other[0]}`; |
|
} |
|
} |
|
|
|
message += `. Received ${determineSpecificType(actual)}`; |
|
|
|
return message |
|
}, |
|
TypeError |
|
); |
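// Each codes.* entry is a constructor produced by createError() (defined below):
// it formats the registered message and attaches the code as error.code.
// Illustrative example:
//   new codes.ERR_INVALID_ARG_TYPE('path', ['string', 'URL'], 42)
// yields a TypeError whose message reads
// 'The "path" argument must be of type string or an instance of URL. Received type number (42)'.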
|
|
|
codes.ERR_INVALID_MODULE_SPECIFIER = createError( |
|
'ERR_INVALID_MODULE_SPECIFIER', |
|
|
|
|
|
|
|
|
|
|
|
(request, reason, base = undefined) => { |
|
return `Invalid module "${request}" ${reason}${ |
|
base ? ` imported from ${base}` : '' |
|
}` |
|
}, |
|
TypeError |
|
); |
|
|
|
codes.ERR_INVALID_PACKAGE_CONFIG = createError( |
|
'ERR_INVALID_PACKAGE_CONFIG', |
|
|
|
|
|
|
|
|
|
|
|
(path, base, message) => { |
|
return `Invalid package config ${path}${ |
|
base ? ` while importing ${base}` : '' |
|
}${message ? `. ${message}` : ''}` |
|
}, |
|
Error |
|
); |
|
|
|
codes.ERR_INVALID_PACKAGE_TARGET = createError( |
|
'ERR_INVALID_PACKAGE_TARGET', |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
(pkgPath, key, target, isImport = false, base = undefined) => { |
|
const relError = |
|
typeof target === 'string' && |
|
!isImport && |
|
target.length > 0 && |
|
!target.startsWith('./'); |
|
if (key === '.') { |
|
assert__default(isImport === false); |
|
return ( |
|
`Invalid "exports" main target ${JSON.stringify(target)} defined ` + |
|
`in the package config ${pkgPath}package.json${ |
|
base ? ` imported from ${base}` : '' |
|
}${relError ? '; targets must start with "./"' : ''}` |
|
) |
|
} |
|
|
|
return `Invalid "${ |
|
isImport ? 'imports' : 'exports' |
|
}" target ${JSON.stringify( |
|
target |
|
)} defined for '${key}' in the package config ${pkgPath}package.json${ |
|
base ? ` imported from ${base}` : '' |
|
}${relError ? '; targets must start with "./"' : ''}` |
|
}, |
|
Error |
|
); |
|
|
|
codes.ERR_MODULE_NOT_FOUND = createError( |
|
'ERR_MODULE_NOT_FOUND', |
|
|
|
|
|
|
|
|
|
|
|
(path, base, exactUrl = false) => { |
|
return `Cannot find ${ |
|
exactUrl ? 'module' : 'package' |
|
} '${path}' imported from ${base}` |
|
}, |
|
Error |
|
); |
|
|
|
codes.ERR_NETWORK_IMPORT_DISALLOWED = createError( |
|
'ERR_NETWORK_IMPORT_DISALLOWED', |
|
"import of '%s' by %s is not supported: %s", |
|
Error |
|
); |
|
|
|
codes.ERR_PACKAGE_IMPORT_NOT_DEFINED = createError( |
|
'ERR_PACKAGE_IMPORT_NOT_DEFINED', |
|
|
|
|
|
|
|
|
|
|
|
(specifier, packagePath, base) => { |
|
return `Package import specifier "${specifier}" is not defined${ |
|
packagePath ? ` in package ${packagePath}package.json` : '' |
|
} imported from ${base}` |
|
}, |
|
TypeError |
|
); |
|
|
|
codes.ERR_PACKAGE_PATH_NOT_EXPORTED = createError( |
|
'ERR_PACKAGE_PATH_NOT_EXPORTED', |
|
|
|
|
|
|
|
|
|
|
|
(pkgPath, subpath, base = undefined) => { |
|
if (subpath === '.') |
|
return `No "exports" main defined in ${pkgPath}package.json${ |
|
base ? ` imported from ${base}` : '' |
|
}` |
|
return `Package subpath '${subpath}' is not defined by "exports" in ${pkgPath}package.json${ |
|
base ? ` imported from ${base}` : '' |
|
}` |
|
}, |
|
Error |
|
); |
|
|
|
codes.ERR_UNSUPPORTED_DIR_IMPORT = createError( |
|
'ERR_UNSUPPORTED_DIR_IMPORT', |
|
"Directory import '%s' is not supported " + |
|
'resolving ES modules imported from %s', |
|
Error |
|
); |
|
|
|
codes.ERR_UNKNOWN_FILE_EXTENSION = createError( |
|
'ERR_UNKNOWN_FILE_EXTENSION', |
|
|
|
|
|
|
|
|
|
(ext, path) => { |
|
return `Unknown file extension "${ext}" for ${path}` |
|
}, |
|
TypeError |
|
); |
|
|
|
codes.ERR_INVALID_ARG_VALUE = createError( |
|
'ERR_INVALID_ARG_VALUE', |
|
|
|
|
|
|
|
|
|
|
|
(name, value, reason = 'is invalid') => { |
|
let inspected = node_util.inspect(value); |
|
|
|
if (inspected.length > 128) { |
|
inspected = `${inspected.slice(0, 128)}...`; |
|
} |
|
|
|
const type = name.includes('.') ? 'property' : 'argument'; |
|
|
|
return `The ${type} '${name}' ${reason}. Received ${inspected}` |
|
}, |
|
TypeError |
|
|
|
|
|
); |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
function createError(sym, value, def) { |
|
|
|
|
|
messages.set(sym, value); |
|
|
|
return makeNodeErrorWithCode(def, sym) |
|
} |
|
|
|
|
|
|
|
|
|
|
|
|
|
function makeNodeErrorWithCode(Base, key) { |
|
|
|
return NodeError |
|
|
|
|
|
|
|
function NodeError(...args) { |
|
const limit = Error.stackTraceLimit; |
|
if (isErrorStackTraceLimitWritable()) Error.stackTraceLimit = 0; |
|
const error = new Base(); |
|
|
|
if (isErrorStackTraceLimitWritable()) Error.stackTraceLimit = limit; |
|
const message = getMessage(key, args, error); |
|
Object.defineProperties(error, { |
|
|
|
|
|
message: { |
|
value: message, |
|
enumerable: false, |
|
writable: true, |
|
configurable: true |
|
}, |
|
toString: { |
|
|
|
value() { |
|
return `${this.name} [${key}]: ${this.message}` |
|
}, |
|
enumerable: false, |
|
writable: true, |
|
configurable: true |
|
} |
|
}); |
|
|
|
captureLargerStackTrace(error); |
|
|
|
error.code = key; |
|
return error |
|
} |
|
} |
|
|
|
|
|
|
|
|
|
function isErrorStackTraceLimitWritable() { |
|
|
|
|
|
try { |
|
|
|
if (v8__default.startupSnapshot.isBuildingSnapshot()) { |
|
return false |
|
} |
|
} catch {} |
|
|
|
const desc = Object.getOwnPropertyDescriptor(Error, 'stackTraceLimit'); |
|
if (desc === undefined) { |
|
return Object.isExtensible(Error) |
|
} |
|
|
|
return own$1.call(desc, 'writable') && desc.writable !== undefined |
|
? desc.writable |
|
: desc.set !== undefined |
|
} |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
function hideStackFrames(fn) { |
|
|
|
|
|
const hidden = nodeInternalPrefix + fn.name; |
|
Object.defineProperty(fn, 'name', {value: hidden}); |
|
return fn |
|
} |
|
|
|
const captureLargerStackTrace = hideStackFrames( |
|
|
|
|
|
|
|
|
|
|
|
function (error) { |
|
const stackTraceLimitIsWritable = isErrorStackTraceLimitWritable(); |
|
if (stackTraceLimitIsWritable) { |
|
userStackTraceLimit = Error.stackTraceLimit; |
|
Error.stackTraceLimit = Number.POSITIVE_INFINITY; |
|
} |
|
|
|
Error.captureStackTrace(error); |
|
|
|
|
|
if (stackTraceLimitIsWritable) Error.stackTraceLimit = userStackTraceLimit; |
|
|
|
return error |
|
} |
|
); |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
function getMessage(key, args, self) { |
|
const message = messages.get(key); |
|
assert__default(message !== undefined, 'expected `message` to be found'); |
|
|
|
if (typeof message === 'function') { |
|
assert__default( |
|
message.length <= args.length, |
|
`Code: ${key}; The provided arguments length (${args.length}) does not ` + |
|
`match the required ones (${message.length}).` |
|
); |
|
return Reflect.apply(message, self, args) |
|
} |
|
|
|
const regex = /%[dfijoOs]/g; |
|
let expectedLength = 0; |
|
while (regex.exec(message) !== null) expectedLength++; |
|
assert__default( |
|
expectedLength === args.length, |
|
`Code: ${key}; The provided arguments length (${args.length}) does not ` + |
|
`match the required ones (${expectedLength}).` |
|
); |
|
if (args.length === 0) return message |
|
|
|
args.unshift(message); |
|
return Reflect.apply(node_util.format, null, args) |
|
} |
|
|
|
|
|
|
|
|
|
|
|
|
|
function determineSpecificType(value) { |
|
if (value === null || value === undefined) { |
|
return String(value) |
|
} |
|
|
|
if (typeof value === 'function' && value.name) { |
|
return `function ${value.name}` |
|
} |
|
|
|
if (typeof value === 'object') { |
|
if (value.constructor && value.constructor.name) { |
|
return `an instance of ${value.constructor.name}` |
|
} |
|
|
|
return `${node_util.inspect(value, {depth: -1})}` |
|
} |
|
|
|
let inspected = node_util.inspect(value, {colors: false}); |
|
|
|
if (inspected.length > 28) { |
|
inspected = `${inspected.slice(0, 25)}...`; |
|
} |
|
|
|
return `type ${typeof value} (${inspected})` |
|
} |
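// Illustrative example: determineSpecificType(42) === 'type number (42)',
// determineSpecificType(new Map()) === 'an instance of Map',
// and determineSpecificType(null) === 'null'.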
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
const hasOwnProperty$1 = {}.hasOwnProperty; |
|
|
|
const {ERR_INVALID_PACKAGE_CONFIG: ERR_INVALID_PACKAGE_CONFIG$1} = codes; |
|
|
|
|
|
const cache = new Map(); |
|
|
|
const reader = {read}; |
|
const packageJsonReader = reader; |
|
|
|
|
|
|
|
|
|
|
|
|
|
function read(jsonPath, {base, specifier}) { |
|
const existing = cache.get(jsonPath); |
|
|
|
if (existing) { |
|
return existing |
|
} |
|
|
|
|
|
let string; |
|
|
|
try { |
|
string = fs__default.readFileSync(path__default.toNamespacedPath(jsonPath), 'utf8'); |
|
} catch (error) { |
|
const exception = (error); |
|
|
|
if (exception.code !== 'ENOENT') { |
|
throw exception |
|
} |
|
} |
|
|
|
|
|
const result = { |
|
exists: false, |
|
pjsonPath: jsonPath, |
|
main: undefined, |
|
name: undefined, |
|
type: 'none', |
|
exports: undefined, |
|
imports: undefined |
|
}; |
|
|
|
if (string !== undefined) { |
|
|
|
let parsed; |
|
|
|
try { |
|
parsed = JSON.parse(string); |
|
} catch (error_) { |
|
const cause = (error_); |
|
const error = new ERR_INVALID_PACKAGE_CONFIG$1( |
|
jsonPath, |
|
(base ? `"${specifier}" from ` : '') + node_url.fileURLToPath(base || specifier), |
|
cause.message |
|
); |
|
|
|
error.cause = cause; |
|
throw error |
|
} |
|
|
|
result.exists = true; |
|
|
|
if ( |
|
hasOwnProperty$1.call(parsed, 'name') && |
|
typeof parsed.name === 'string' |
|
) { |
|
result.name = parsed.name; |
|
} |
|
|
|
if ( |
|
hasOwnProperty$1.call(parsed, 'main') && |
|
typeof parsed.main === 'string' |
|
) { |
|
result.main = parsed.main; |
|
} |
|
|
|
if (hasOwnProperty$1.call(parsed, 'exports')) { |
|
|
|
result.exports = parsed.exports; |
|
} |
|
|
|
if (hasOwnProperty$1.call(parsed, 'imports')) { |
|
|
|
result.imports = parsed.imports; |
|
} |
|
|
|
|
|
if ( |
|
hasOwnProperty$1.call(parsed, 'type') && |
|
(parsed.type === 'commonjs' || parsed.type === 'module') |
|
) { |
|
result.type = parsed.type; |
|
} |
|
} |
|
|
|
cache.set(jsonPath, result); |
|
|
|
return result |
|
} |
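// read() caches parsed package.json files by path and always returns the same
// normalized shape ({ exists, pjsonPath, main, name, type, exports, imports }),
// even when the file is missing, so callers never need their own try/catch.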
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
function getPackageScopeConfig(resolved) { |
|
let packageJSONUrl = new node_url.URL('package.json', resolved); |
|
|
|
while (true) { |
|
const packageJSONPath = packageJSONUrl.pathname; |
|
if (packageJSONPath.endsWith('node_modules/package.json')) { |
|
break |
|
} |
|
|
|
const packageConfig = packageJsonReader.read( |
|
node_url.fileURLToPath(packageJSONUrl), |
|
{specifier: resolved} |
|
); |
|
|
|
if (packageConfig.exists) { |
|
return packageConfig |
|
} |
|
|
|
const lastPackageJSONUrl = packageJSONUrl; |
|
packageJSONUrl = new node_url.URL('../package.json', packageJSONUrl); |
|
|
|
|
|
|
|
if (packageJSONUrl.pathname === lastPackageJSONUrl.pathname) { |
|
break |
|
} |
|
} |
|
|
|
const packageJSONPath = node_url.fileURLToPath(packageJSONUrl); |
|
|
|
return { |
|
pjsonPath: packageJSONPath, |
|
exists: false, |
|
main: undefined, |
|
name: undefined, |
|
type: 'none', |
|
exports: undefined, |
|
imports: undefined |
|
} |
|
} |
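// getPackageScopeConfig() walks up from `resolved` one directory at a time,
// reading each candidate package.json until one exists, the root is reached,
// or a node_modules boundary is crossed. Illustrative example: for
// file:///app/src/utils/helper.mjs it tries /app/src/utils/package.json,
// then /app/src/package.json, then /app/package.json, and so on.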
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
function getPackageType(url) { |
|
const packageConfig = getPackageScopeConfig(url); |
|
return packageConfig.type |
|
} |
|
|
|
|
|
|
|
|
|
|
|
|
|
const {ERR_UNKNOWN_FILE_EXTENSION} = codes; |
|
|
|
const hasOwnProperty = {}.hasOwnProperty; |
|
|
|
|
|
const extensionFormatMap = { |
|
|
|
__proto__: null, |
|
'.cjs': 'commonjs', |
|
'.js': 'module', |
|
'.json': 'json', |
|
'.mjs': 'module' |
|
}; |
|
|
|
|
|
|
|
|
|
|
|
function mimeToFormat(mime) { |
|
if ( |
|
mime && |
|
/\s*(text|application)\/javascript\s*(;\s*charset=utf-?8\s*)?/i.test(mime) |
|
) |
|
return 'module' |
|
if (mime === 'application/json') return 'json' |
|
return null |
|
} |
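// Illustrative example: mimeToFormat('text/javascript') === 'module' and
// mimeToFormat('application/json') === 'json'; any other MIME type yields null.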
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
const protocolHandlers = { |
|
|
|
__proto__: null, |
|
'data:': getDataProtocolModuleFormat, |
|
'file:': getFileProtocolModuleFormat, |
|
'http:': getHttpProtocolModuleFormat, |
|
'https:': getHttpProtocolModuleFormat, |
|
'node:'() { |
|
return 'builtin' |
|
} |
|
}; |
|
|
|
|
|
|
|
|
|
function getDataProtocolModuleFormat(parsed) { |
|
const {1: mime} = /^([^/]+\/[^;,]+)[^,]*?(;base64)?,/.exec( |
|
parsed.pathname |
|
) || [null, null, null]; |
|
return mimeToFormat(mime) |
|
} |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
function extname(url) { |
|
const pathname = url.pathname; |
|
let index = pathname.length; |
|
|
|
while (index--) { |
|
const code = pathname.codePointAt(index); |
|
|
|
if (code === 47 /* '/' */) {
|
return '' |
|
} |
|
|
|
if (code === 46 /* '.' */) {
|
return pathname.codePointAt(index - 1) === 47 |
|
? '' |
|
: pathname.slice(index) |
|
} |
|
} |
|
|
|
return '' |
|
} |
|
|
|
|
|
|
|
|
|
function getFileProtocolModuleFormat(url, _context, ignoreErrors) { |
|
const ext = extname(url); |
|
|
|
if (ext === '.js') { |
|
const packageType = getPackageType(url); |
|
|
|
if (packageType !== 'none') { |
|
return packageType |
|
} |
|
|
|
return 'commonjs' |
|
} |
|
|
|
if (ext === '') { |
|
const packageType = getPackageType(url); |
|
|
|
|
|
if (packageType === 'none' || packageType === 'commonjs') { |
|
return 'commonjs' |
|
} |
|
|
|
|
|
|
|
return 'module' |
|
} |
|
|
|
const format = extensionFormatMap[ext]; |
|
if (format) return format |
|
|
|
|
|
if (ignoreErrors) { |
|
return undefined |
|
} |
|
|
|
const filepath = node_url.fileURLToPath(url); |
|
throw new ERR_UNKNOWN_FILE_EXTENSION(ext, filepath) |
|
} |
|
|
|
function getHttpProtocolModuleFormat() { |
|
|
|
} |
|
|
|
|
|
|
|
|
|
|
|
|
|
function defaultGetFormatWithoutErrors(url, context) { |
|
const protocol = url.protocol; |
|
|
|
if (!hasOwnProperty.call(protocolHandlers, protocol)) { |
|
return null |
|
} |
|
|
|
return protocolHandlers[protocol](url, context, true) || null |
|
} |
|
|
|
|
|
|
|
|
|
|
|
|
|
const RegExpPrototypeSymbolReplace = RegExp.prototype[Symbol.replace]; |
|
|
|
const { |
|
ERR_NETWORK_IMPORT_DISALLOWED, |
|
ERR_INVALID_MODULE_SPECIFIER, |
|
ERR_INVALID_PACKAGE_CONFIG, |
|
ERR_INVALID_PACKAGE_TARGET, |
|
ERR_MODULE_NOT_FOUND, |
|
ERR_PACKAGE_IMPORT_NOT_DEFINED, |
|
ERR_PACKAGE_PATH_NOT_EXPORTED, |
|
ERR_UNSUPPORTED_DIR_IMPORT |
|
} = codes; |
|
|
|
const own = {}.hasOwnProperty; |
|
|
|
const invalidSegmentRegEx = |
|
/(^|\\|\/)((\.|%2e)(\.|%2e)?|(n|%6e|%4e)(o|%6f|%4f)(d|%64|%44)(e|%65|%45)(_|%5f)(m|%6d|%4d)(o|%6f|%4f)(d|%64|%44)(u|%75|%55)(l|%6c|%4c)(e|%65|%45)(s|%73|%53))?(\\|\/|$)/i; |
|
const deprecatedInvalidSegmentRegEx = |
|
/(^|\\|\/)((\.|%2e)(\.|%2e)?|(n|%6e|%4e)(o|%6f|%4f)(d|%64|%44)(e|%65|%45)(_|%5f)(m|%6d|%4d)(o|%6f|%4f)(d|%64|%44)(u|%75|%55)(l|%6c|%4c)(e|%65|%45)(s|%73|%53))(\\|\/|$)/i; |
|
const invalidPackageNameRegEx = /^\.|%|\\/; |
|
const patternRegEx = /\*/g; |
|
const encodedSepRegEx = /%2f|%5c/i; |
|
|
|
const emittedPackageWarnings = new Set(); |
|
|
|
const doubleSlashRegEx = /[/\\]{2}/; |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
function emitInvalidSegmentDeprecation( |
|
target, |
|
request, |
|
match, |
|
packageJsonUrl, |
|
internal, |
|
base, |
|
isTarget |
|
) { |
|
|
|
if (process__default.noDeprecation) { |
|
return |
|
} |
|
|
|
const pjsonPath = node_url.fileURLToPath(packageJsonUrl); |
|
const double = doubleSlashRegEx.exec(isTarget ? target : request) !== null; |
|
process__default.emitWarning( |
|
`Use of deprecated ${ |
|
double ? 'double slash' : 'leading or trailing slash matching' |
|
} resolving "${target}" for module ` + |
|
`request "${request}" ${ |
|
request === match ? '' : `matched to "${match}" ` |
|
}in the "${ |
|
internal ? 'imports' : 'exports' |
|
}" field module resolution of the package at ${pjsonPath}${ |
|
base ? ` imported from ${node_url.fileURLToPath(base)}` : '' |
|
}.`, |
|
'DeprecationWarning', |
|
'DEP0166' |
|
); |
|
} |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
function emitLegacyIndexDeprecation(url, packageJsonUrl, base, main) { |
|
|
|
if (process__default.noDeprecation) { |
|
return |
|
} |
|
|
|
const format = defaultGetFormatWithoutErrors(url, {parentURL: base.href}); |
|
if (format !== 'module') return |
|
const urlPath = node_url.fileURLToPath(url.href); |
|
const pkgPath = node_url.fileURLToPath(new node_url.URL('.', packageJsonUrl)); |
|
const basePath = node_url.fileURLToPath(base); |
|
if (!main) { |
|
process__default.emitWarning( |
|
`No "main" or "exports" field defined in the package.json for ${pkgPath} resolving the main entry point "${urlPath.slice( |
|
pkgPath.length |
|
)}", imported from ${basePath}.\nDefault "index" lookups for the main are deprecated for ES modules.`, |
|
'DeprecationWarning', |
|
'DEP0151' |
|
); |
|
} else if (path__default.resolve(pkgPath, main) !== urlPath) { |
|
process__default.emitWarning( |
|
`Package ${pkgPath} has a "main" field set to "${main}", ` + |
|
`excluding the full filename and extension to the resolved file at "${urlPath.slice( |
|
pkgPath.length |
|
)}", imported from ${basePath}.\n Automatic extension resolution of the "main" field is ` + |
|
'deprecated for ES modules.', |
|
'DeprecationWarning', |
|
'DEP0151' |
|
); |
|
} |
|
} |
|
|
|
|
|
|
|
|
|
|
|
function tryStatSync(path) { |
|
|
|
try { |
|
return fs.statSync(path) |
|
} catch { |
|
return new fs.Stats() |
|
} |
|
} |
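// tryStatSync() returns an empty Stats instance (for which isFile() and
// isDirectory() both report false) instead of throwing, so resolution code can
// probe paths without wrapping every stat call in try/catch.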
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
function fileExists(url) { |
|
const stats = fs.statSync(url, {throwIfNoEntry: false}); |
|
const isFile = stats ? stats.isFile() : undefined; |
|
return isFile === null || isFile === undefined ? false : isFile |
|
} |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
function legacyMainResolve(packageJsonUrl, packageConfig, base) { |
|
|
|
let guess; |
|
if (packageConfig.main !== undefined) { |
|
guess = new node_url.URL(packageConfig.main, packageJsonUrl); |
|
|
|
if (fileExists(guess)) return guess |
|
|
|
const tries = [ |
|
`./${packageConfig.main}.js`, |
|
`./${packageConfig.main}.json`, |
|
`./${packageConfig.main}.node`, |
|
`./${packageConfig.main}/index.js`, |
|
`./${packageConfig.main}/index.json`, |
|
`./${packageConfig.main}/index.node` |
|
]; |
|
let i = -1; |
|
|
|
while (++i < tries.length) { |
|
guess = new node_url.URL(tries[i], packageJsonUrl); |
|
if (fileExists(guess)) break |
|
guess = undefined; |
|
} |
|
|
|
if (guess) { |
|
emitLegacyIndexDeprecation( |
|
guess, |
|
packageJsonUrl, |
|
base, |
|
packageConfig.main |
|
); |
|
return guess |
|
} |
|
|
|
} |
|
|
|
const tries = ['./index.js', './index.json', './index.node']; |
|
let i = -1; |
|
|
|
while (++i < tries.length) { |
|
guess = new node_url.URL(tries[i], packageJsonUrl); |
|
if (fileExists(guess)) break |
|
guess = undefined; |
|
} |
|
|
|
if (guess) { |
|
emitLegacyIndexDeprecation(guess, packageJsonUrl, base, packageConfig.main); |
|
return guess |
|
} |
|
|
|
|
|
throw new ERR_MODULE_NOT_FOUND( |
|
node_url.fileURLToPath(new node_url.URL('.', packageJsonUrl)), |
|
node_url.fileURLToPath(base) |
|
) |
|
} |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
function finalizeResolution(resolved, base, preserveSymlinks) { |
|
if (encodedSepRegEx.exec(resolved.pathname) !== null) { |
|
throw new ERR_INVALID_MODULE_SPECIFIER( |
|
resolved.pathname, |
|
'must not include encoded "/" or "\\" characters', |
|
node_url.fileURLToPath(base) |
|
) |
|
} |
|
|
|
|
|
let filePath; |
|
|
|
try { |
|
filePath = node_url.fileURLToPath(resolved); |
|
} catch (error) { |
|
const cause = (error); |
|
Object.defineProperty(cause, 'input', {value: String(resolved)}); |
|
Object.defineProperty(cause, 'module', {value: String(base)}); |
|
throw cause |
|
} |
|
|
|
const stats = tryStatSync( |
|
filePath.endsWith('/') ? filePath.slice(0, -1) : filePath
|
); |
|
|
|
if (stats.isDirectory()) { |
|
const error = new ERR_UNSUPPORTED_DIR_IMPORT(filePath, node_url.fileURLToPath(base)); |
|
|
|
error.url = String(resolved); |
|
throw error |
|
} |
|
|
|
if (!stats.isFile()) { |
|
const error = new ERR_MODULE_NOT_FOUND( |
|
filePath || resolved.pathname, |
|
base && node_url.fileURLToPath(base), |
|
true |
|
); |
|
|
|
error.url = String(resolved); |
|
throw error |
|
} |
|
|
|
if (!preserveSymlinks) { |
|
const real = fs.realpathSync(filePath); |
|
const {search, hash} = resolved; |
|
resolved = node_url.pathToFileURL(real + (filePath.endsWith(path__default.sep) ? '/' : '')); |
|
resolved.search = search; |
|
resolved.hash = hash; |
|
} |
|
|
|
return resolved |
|
} |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
function importNotDefined(specifier, packageJsonUrl, base) { |
|
return new ERR_PACKAGE_IMPORT_NOT_DEFINED( |
|
specifier, |
|
packageJsonUrl && node_url.fileURLToPath(new node_url.URL('.', packageJsonUrl)), |
|
node_url.fileURLToPath(base) |
|
) |
|
} |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
function exportsNotFound(subpath, packageJsonUrl, base) { |
|
return new ERR_PACKAGE_PATH_NOT_EXPORTED( |
|
node_url.fileURLToPath(new node_url.URL('.', packageJsonUrl)), |
|
subpath, |
|
base && node_url.fileURLToPath(base) |
|
) |
|
} |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
function throwInvalidSubpath(request, match, packageJsonUrl, internal, base) { |
|
const reason = `request is not a valid match in pattern "${match}" for the "${ |
|
internal ? 'imports' : 'exports' |
|
}" resolution of ${node_url.fileURLToPath(packageJsonUrl)}`; |
|
throw new ERR_INVALID_MODULE_SPECIFIER( |
|
request, |
|
reason, |
|
base && node_url.fileURLToPath(base) |
|
) |
|
} |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
function invalidPackageTarget(subpath, target, packageJsonUrl, internal, base) { |
|
target = |
|
typeof target === 'object' && target !== null |
|
? JSON.stringify(target, null, '') |
|
: `${target}`; |
|
|
|
return new ERR_INVALID_PACKAGE_TARGET( |
|
node_url.fileURLToPath(new node_url.URL('.', packageJsonUrl)), |
|
subpath, |
|
target, |
|
internal, |
|
base && node_url.fileURLToPath(base) |
|
) |
|
} |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
function resolvePackageTargetString( |
|
target, |
|
subpath, |
|
match, |
|
packageJsonUrl, |
|
base, |
|
pattern, |
|
internal, |
|
isPathMap, |
|
conditions |
|
) { |
|
if (subpath !== '' && !pattern && target[target.length - 1] !== '/') |
|
throw invalidPackageTarget(match, target, packageJsonUrl, internal, base) |
|
|
|
if (!target.startsWith('./')) { |
|
if (internal && !target.startsWith('../') && !target.startsWith('/')) { |
|
let isURL = false; |
|
|
|
try { |
|
new node_url.URL(target); |
|
isURL = true; |
|
} catch { |
|
|
|
} |
|
|
|
if (!isURL) { |
|
const exportTarget = pattern |
|
? RegExpPrototypeSymbolReplace.call( |
|
patternRegEx, |
|
target, |
|
() => subpath |
|
) |
|
: target + subpath; |
|
|
|
return packageResolve(exportTarget, packageJsonUrl, conditions) |
|
} |
|
} |
|
|
|
throw invalidPackageTarget(match, target, packageJsonUrl, internal, base) |
|
} |
|
|
|
if (invalidSegmentRegEx.exec(target.slice(2)) !== null) { |
|
if (deprecatedInvalidSegmentRegEx.exec(target.slice(2)) === null) { |
|
if (!isPathMap) { |
|
const request = pattern |
|
? match.replace('*', () => subpath) |
|
: match + subpath; |
|
const resolvedTarget = pattern |
|
? RegExpPrototypeSymbolReplace.call( |
|
patternRegEx, |
|
target, |
|
() => subpath |
|
) |
|
: target; |
|
emitInvalidSegmentDeprecation( |
|
resolvedTarget, |
|
request, |
|
match, |
|
packageJsonUrl, |
|
internal, |
|
base, |
|
true |
|
); |
|
} |
|
} else { |
|
throw invalidPackageTarget(match, target, packageJsonUrl, internal, base) |
|
} |
|
} |
|
|
|
const resolved = new node_url.URL(target, packageJsonUrl); |
|
const resolvedPath = resolved.pathname; |
|
const packagePath = new node_url.URL('.', packageJsonUrl).pathname; |
|
|
|
if (!resolvedPath.startsWith(packagePath)) |
|
throw invalidPackageTarget(match, target, packageJsonUrl, internal, base) |
|
|
|
if (subpath === '') return resolved |
|
|
|
if (invalidSegmentRegEx.exec(subpath) !== null) { |
|
const request = pattern |
|
? match.replace('*', () => subpath) |
|
: match + subpath; |
|
if (deprecatedInvalidSegmentRegEx.exec(subpath) === null) { |
|
if (!isPathMap) { |
|
const resolvedTarget = pattern |
|
? RegExpPrototypeSymbolReplace.call( |
|
patternRegEx, |
|
target, |
|
() => subpath |
|
) |
|
: target; |
|
emitInvalidSegmentDeprecation( |
|
resolvedTarget, |
|
request, |
|
match, |
|
packageJsonUrl, |
|
internal, |
|
base, |
|
false |
|
); |
|
} |
|
} else { |
|
throwInvalidSubpath(request, match, packageJsonUrl, internal, base); |
|
} |
|
} |
|
|
|
if (pattern) { |
|
return new node_url.URL( |
|
RegExpPrototypeSymbolReplace.call( |
|
patternRegEx, |
|
resolved.href, |
|
() => subpath |
|
) |
|
) |
|
} |
|
|
|
return new node_url.URL(subpath, resolved) |
|
} |
|
|
|
|
|
|
|
|
|
|
|
function isArrayIndex(key) { |
|
const keyNumber = Number(key); |
|
if (`${keyNumber}` !== key) return false |
|
return keyNumber >= 0 && keyNumber < 0xff_ff_ff_ff |
|
} |
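// Illustrative example: isArrayIndex('1') === true while isArrayIndex('01') and
// isArrayIndex('-1') are false; such keys are rejected in "exports" objects below.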
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
function resolvePackageTarget( |
|
packageJsonUrl, |
|
target, |
|
subpath, |
|
packageSubpath, |
|
base, |
|
pattern, |
|
internal, |
|
isPathMap, |
|
conditions |
|
) { |
|
if (typeof target === 'string') { |
|
return resolvePackageTargetString( |
|
target, |
|
subpath, |
|
packageSubpath, |
|
packageJsonUrl, |
|
base, |
|
pattern, |
|
internal, |
|
isPathMap, |
|
conditions |
|
) |
|
} |
|
|
|
if (Array.isArray(target)) { |
|
|
|
const targetList = target; |
|
if (targetList.length === 0) return null |
|
|
|
|
|
let lastException; |
|
let i = -1; |
|
|
|
while (++i < targetList.length) { |
|
const targetItem = targetList[i]; |
|
|
|
let resolveResult; |
|
try { |
|
resolveResult = resolvePackageTarget( |
|
packageJsonUrl, |
|
targetItem, |
|
subpath, |
|
packageSubpath, |
|
base, |
|
pattern, |
|
internal, |
|
isPathMap, |
|
conditions |
|
); |
|
} catch (error) { |
|
const exception = (error); |
|
lastException = exception; |
|
if (exception.code === 'ERR_INVALID_PACKAGE_TARGET') continue |
|
throw error |
|
} |
|
|
|
if (resolveResult === undefined) continue |
|
|
|
if (resolveResult === null) { |
|
lastException = null; |
|
continue |
|
} |
|
|
|
return resolveResult |
|
} |
|
|
|
if (lastException === undefined || lastException === null) { |
|
return null |
|
} |
|
|
|
throw lastException |
|
} |
|
|
|
if (typeof target === 'object' && target !== null) { |
|
const keys = Object.getOwnPropertyNames(target); |
|
let i = -1; |
|
|
|
while (++i < keys.length) { |
|
const key = keys[i]; |
|
if (isArrayIndex(key)) { |
|
throw new ERR_INVALID_PACKAGE_CONFIG( |
|
node_url.fileURLToPath(packageJsonUrl), |
|
base, |
|
'"exports" cannot contain numeric property keys.' |
|
) |
|
} |
|
} |
|
|
|
i = -1; |
|
|
|
while (++i < keys.length) { |
|
const key = keys[i]; |
|
if (key === 'default' || (conditions && conditions.has(key))) { |
|
|
|
const conditionalTarget = (target[key]); |
|
const resolveResult = resolvePackageTarget( |
|
packageJsonUrl, |
|
conditionalTarget, |
|
subpath, |
|
packageSubpath, |
|
base, |
|
pattern, |
|
internal, |
|
isPathMap, |
|
conditions |
|
); |
|
if (resolveResult === undefined) continue |
|
return resolveResult |
|
} |
|
} |
|
|
|
return null |
|
} |
|
|
|
if (target === null) { |
|
return null |
|
} |
|
|
|
throw invalidPackageTarget( |
|
packageSubpath, |
|
target, |
|
packageJsonUrl, |
|
internal, |
|
base |
|
) |
|
} |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
function isConditionalExportsMainSugar(exports, packageJsonUrl, base) { |
|
if (typeof exports === 'string' || Array.isArray(exports)) return true |
|
if (typeof exports !== 'object' || exports === null) return false |
|
|
|
const keys = Object.getOwnPropertyNames(exports); |
|
let isConditionalSugar = false; |
|
let i = 0; |
|
let j = -1; |
|
while (++j < keys.length) { |
|
const key = keys[j]; |
|
const curIsConditionalSugar = key === '' || key[0] !== '.'; |
|
if (i++ === 0) { |
|
isConditionalSugar = curIsConditionalSugar; |
|
} else if (isConditionalSugar !== curIsConditionalSugar) { |
|
throw new ERR_INVALID_PACKAGE_CONFIG( |
|
node_url.fileURLToPath(packageJsonUrl), |
|
base, |
|
'"exports" cannot contain some keys starting with \'.\' and some not.' + |
|
' The exports object must either be an object of package subpath keys' + |
|
' or an object of main entry condition name keys only.' |
|
) |
|
} |
|
} |
|
|
|
return isConditionalSugar |
|
} |
|
|
|
|
|
|
|
|
|
|
|
|
|
function emitTrailingSlashPatternDeprecation(match, pjsonUrl, base) { |
|
|
|
if (process__default.noDeprecation) { |
|
return |
|
} |
|
|
|
const pjsonPath = node_url.fileURLToPath(pjsonUrl); |
|
if (emittedPackageWarnings.has(pjsonPath + '|' + match)) return |
|
emittedPackageWarnings.add(pjsonPath + '|' + match); |
|
process__default.emitWarning( |
|
`Use of deprecated trailing slash pattern mapping "${match}" in the ` + |
|
`"exports" field module resolution of the package at ${pjsonPath}${ |
|
base ? ` imported from ${node_url.fileURLToPath(base)}` : '' |
|
}. Mapping specifiers ending in "/" is no longer supported.`, |
|
'DeprecationWarning', |
|
'DEP0155' |
|
); |
|
} |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
function packageExportsResolve( |
|
packageJsonUrl, |
|
packageSubpath, |
|
packageConfig, |
|
base, |
|
conditions |
|
) { |
|
let exports = packageConfig.exports; |
|
|
|
if (isConditionalExportsMainSugar(exports, packageJsonUrl, base)) { |
|
exports = {'.': exports}; |
|
} |
|
|
|
if ( |
|
own.call(exports, packageSubpath) && |
|
!packageSubpath.includes('*') && |
|
!packageSubpath.endsWith('/') |
|
) { |
|
|
|
const target = exports[packageSubpath]; |
|
const resolveResult = resolvePackageTarget( |
|
packageJsonUrl, |
|
target, |
|
'', |
|
packageSubpath, |
|
base, |
|
false, |
|
false, |
|
false, |
|
conditions |
|
); |
|
if (resolveResult === null || resolveResult === undefined) { |
|
throw exportsNotFound(packageSubpath, packageJsonUrl, base) |
|
} |
|
|
|
return resolveResult |
|
} |
|
|
|
let bestMatch = ''; |
|
let bestMatchSubpath = ''; |
|
const keys = Object.getOwnPropertyNames(exports); |
|
let i = -1; |
|
|
|
while (++i < keys.length) { |
|
const key = keys[i]; |
|
const patternIndex = key.indexOf('*'); |
|
|
|
if ( |
|
patternIndex !== -1 && |
|
packageSubpath.startsWith(key.slice(0, patternIndex)) |
|
) { |
|
|
|
|
|
|
|
|
|
|
|
|
|
if (packageSubpath.endsWith('/')) { |
|
emitTrailingSlashPatternDeprecation( |
|
packageSubpath, |
|
packageJsonUrl, |
|
base |
|
); |
|
} |
|
|
|
const patternTrailer = key.slice(patternIndex + 1); |
|
|
|
if ( |
|
packageSubpath.length >= key.length && |
|
packageSubpath.endsWith(patternTrailer) && |
|
patternKeyCompare(bestMatch, key) === 1 && |
|
key.lastIndexOf('*') === patternIndex |
|
) { |
|
bestMatch = key; |
|
bestMatchSubpath = packageSubpath.slice( |
|
patternIndex, |
|
packageSubpath.length - patternTrailer.length |
|
); |
|
} |
|
} |
|
} |
|
|
|
if (bestMatch) { |
|
|
|
const target = (exports[bestMatch]); |
|
const resolveResult = resolvePackageTarget( |
|
packageJsonUrl, |
|
target, |
|
bestMatchSubpath, |
|
bestMatch, |
|
base, |
|
true, |
|
false, |
|
packageSubpath.endsWith('/'), |
|
conditions |
|
); |
|
|
|
if (resolveResult === null || resolveResult === undefined) { |
|
throw exportsNotFound(packageSubpath, packageJsonUrl, base) |
|
} |
|
|
|
return resolveResult |
|
} |
|
|
|
throw exportsNotFound(packageSubpath, packageJsonUrl, base) |
|
} |
|
|
|
|
|
|
|
|
|
|
|
function patternKeyCompare(a, b) { |
|
const aPatternIndex = a.indexOf('*'); |
|
const bPatternIndex = b.indexOf('*'); |
|
const baseLengthA = aPatternIndex === -1 ? a.length : aPatternIndex + 1; |
|
const baseLengthB = bPatternIndex === -1 ? b.length : bPatternIndex + 1; |
|
if (baseLengthA > baseLengthB) return -1 |
|
if (baseLengthB > baseLengthA) return 1 |
|
if (aPatternIndex === -1) return 1 |
|
if (bPatternIndex === -1) return -1 |
|
if (a.length > b.length) return -1 |
|
if (b.length > a.length) return 1 |
|
return 0 |
|
} |
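// patternKeyCompare() orders "exports"/"imports" pattern keys so that the key
// with the longer prefix before its '*' (the more specific pattern) wins.
// Illustrative example: patternKeyCompare('./features/*', './*') === -1, so
// './features/*' is preferred over './*' when both match a subpath.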
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
function packageImportsResolve(name, base, conditions) { |
|
if (name === '#' || name.startsWith('#/') || name.endsWith('/')) { |
|
const reason = 'is not a valid internal imports specifier name'; |
|
throw new ERR_INVALID_MODULE_SPECIFIER(name, reason, node_url.fileURLToPath(base)) |
|
} |
|
|
|
|
|
let packageJsonUrl; |
|
|
|
const packageConfig = getPackageScopeConfig(base); |
|
|
|
if (packageConfig.exists) { |
|
packageJsonUrl = node_url.pathToFileURL(packageConfig.pjsonPath); |
|
const imports = packageConfig.imports; |
|
if (imports) { |
|
if (own.call(imports, name) && !name.includes('*')) { |
|
const resolveResult = resolvePackageTarget( |
|
packageJsonUrl, |
|
imports[name], |
|
'', |
|
name, |
|
base, |
|
false, |
|
true, |
|
false, |
|
conditions |
|
); |
|
if (resolveResult !== null && resolveResult !== undefined) { |
|
return resolveResult |
|
} |
|
} else { |
|
let bestMatch = ''; |
|
let bestMatchSubpath = ''; |
|
const keys = Object.getOwnPropertyNames(imports); |
|
let i = -1; |
|
|
|
while (++i < keys.length) { |
|
const key = keys[i]; |
|
const patternIndex = key.indexOf('*'); |
|
|
|
if (patternIndex !== -1 && name.startsWith(key.slice(0, -1))) { |
|
const patternTrailer = key.slice(patternIndex + 1); |
|
if ( |
|
name.length >= key.length && |
|
name.endsWith(patternTrailer) && |
|
patternKeyCompare(bestMatch, key) === 1 && |
|
key.lastIndexOf('*') === patternIndex |
|
) { |
|
bestMatch = key; |
|
bestMatchSubpath = name.slice( |
|
patternIndex, |
|
name.length - patternTrailer.length |
|
); |
|
} |
|
} |
|
} |
|
|
|
if (bestMatch) { |
|
const target = imports[bestMatch]; |
|
const resolveResult = resolvePackageTarget( |
|
packageJsonUrl, |
|
target, |
|
bestMatchSubpath, |
|
bestMatch, |
|
base, |
|
true, |
|
true, |
|
false, |
|
conditions |
|
); |
|
|
|
if (resolveResult !== null && resolveResult !== undefined) { |
|
return resolveResult |
|
} |
|
} |
|
} |
|
} |
|
} |
|
|
|
throw importNotDefined(name, packageJsonUrl, base) |
|
} |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
function parsePackageName(specifier, base) { |
|
let separatorIndex = specifier.indexOf('/'); |
|
let validPackageName = true; |
|
let isScoped = false; |
|
if (specifier[0] === '@') { |
|
isScoped = true; |
|
if (separatorIndex === -1 || specifier.length === 0) { |
|
validPackageName = false; |
|
} else { |
|
separatorIndex = specifier.indexOf('/', separatorIndex + 1); |
|
} |
|
} |
|
|
|
const packageName = |
|
separatorIndex === -1 ? specifier : specifier.slice(0, separatorIndex); |
|
|
|
|
|
|
|
if (invalidPackageNameRegEx.exec(packageName) !== null) { |
|
validPackageName = false; |
|
} |
|
|
|
if (!validPackageName) { |
|
throw new ERR_INVALID_MODULE_SPECIFIER( |
|
specifier, |
|
'is not a valid package name', |
|
node_url.fileURLToPath(base) |
|
) |
|
} |
|
|
|
const packageSubpath = |
|
'.' + (separatorIndex === -1 ? '' : specifier.slice(separatorIndex)); |
|
|
|
return {packageName, packageSubpath, isScoped} |
|
} |
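// Illustrative example: parsePackageName('@scope/pkg/feature', base) returns
// { packageName: '@scope/pkg', packageSubpath: './feature', isScoped: true },
// while parsePackageName('lodash', base) returns
// { packageName: 'lodash', packageSubpath: '.', isScoped: false }.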
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
function packageResolve(specifier, base, conditions) { |
|
if (node_module.builtinModules.includes(specifier)) { |
|
return new node_url.URL('node:' + specifier) |
|
} |
|
|
|
const {packageName, packageSubpath, isScoped} = parsePackageName( |
|
specifier, |
|
base |
|
); |
|
|
|
|
|
const packageConfig = getPackageScopeConfig(base); |
|
|
|
|
|
|
|
if (packageConfig.exists) { |
|
const packageJsonUrl = node_url.pathToFileURL(packageConfig.pjsonPath); |
|
if ( |
|
packageConfig.name === packageName && |
|
packageConfig.exports !== undefined && |
|
packageConfig.exports !== null |
|
) { |
|
return packageExportsResolve( |
|
packageJsonUrl, |
|
packageSubpath, |
|
packageConfig, |
|
base, |
|
conditions |
|
) |
|
} |
|
} |
|
|
|
let packageJsonUrl = new node_url.URL( |
|
'./node_modules/' + packageName + '/package.json', |
|
base |
|
); |
|
let packageJsonPath = node_url.fileURLToPath(packageJsonUrl); |
|
|
|
let lastPath; |
|
do { |
|
const stat = tryStatSync(packageJsonPath.slice(0, -13)); |
|
if (!stat.isDirectory()) { |
|
lastPath = packageJsonPath; |
|
packageJsonUrl = new node_url.URL( |
|
(isScoped ? '../../../../node_modules/' : '../../../node_modules/') + |
|
packageName + |
|
'/package.json', |
|
packageJsonUrl |
|
); |
|
packageJsonPath = node_url.fileURLToPath(packageJsonUrl); |
|
continue |
|
} |
|
|
|
|
|
const packageConfig = packageJsonReader.read(packageJsonPath, { |
|
base, |
|
specifier |
|
}); |
|
if (packageConfig.exports !== undefined && packageConfig.exports !== null) { |
|
return packageExportsResolve( |
|
packageJsonUrl, |
|
packageSubpath, |
|
packageConfig, |
|
base, |
|
conditions |
|
) |
|
} |
|
|
|
if (packageSubpath === '.') { |
|
return legacyMainResolve(packageJsonUrl, packageConfig, base) |
|
} |
|
|
|
return new node_url.URL(packageSubpath, packageJsonUrl) |
|
|
|
} while (packageJsonPath.length !== lastPath.length) |
|
|
|
throw new ERR_MODULE_NOT_FOUND(packageName, node_url.fileURLToPath(base), false) |
|
} |
|
|
|
|
|
|
|
|
|
|
|
function isRelativeSpecifier(specifier) { |
|
if (specifier[0] === '.') { |
|
if (specifier.length === 1 || specifier[1] === '/') return true |
|
if ( |
|
specifier[1] === '.' && |
|
(specifier.length === 2 || specifier[2] === '/') |
|
) { |
|
return true |
|
} |
|
} |
|
|
|
return false |
|
} |
|
|
|
|
|
|
|
|
|
|
|
function shouldBeTreatedAsRelativeOrAbsolutePath(specifier) { |
|
if (specifier === '') return false |
|
if (specifier[0] === '/') return true |
|
return isRelativeSpecifier(specifier) |
|
} |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
function moduleResolve(specifier, base, conditions, preserveSymlinks) { |
|
const protocol = base.protocol; |
|
const isRemote = protocol === 'http:' || protocol === 'https:'; |
|
|
|
|
|
|
|
let resolved; |
|
|
|
if (shouldBeTreatedAsRelativeOrAbsolutePath(specifier)) { |
|
resolved = new node_url.URL(specifier, base); |
|
} else if (!isRemote && specifier[0] === '#') { |
|
resolved = packageImportsResolve(specifier, base, conditions); |
|
} else { |
|
try { |
|
resolved = new node_url.URL(specifier); |
|
} catch { |
|
if (!isRemote) { |
|
resolved = packageResolve(specifier, base, conditions); |
|
} |
|
} |
|
} |
|
|
|
assert__default(resolved !== undefined, 'expected to be defined'); |
|
|
|
if (resolved.protocol !== 'file:') { |
|
return resolved |
|
} |
|
|
|
return finalizeResolution(resolved, base, preserveSymlinks) |
|
} |
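// moduleResolve() mirrors Node's ESM resolution order: relative and absolute
// specifiers resolve against the base URL, '#' specifiers go through the
// package "imports" field, full URLs are used as-is, and everything else is
// treated as a bare package specifier. Only file: URLs are passed through
// finalizeResolution().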
|
|
|
function fileURLToPath(id) { |
|
if (typeof id === "string" && !id.startsWith("file://")) { |
|
return normalizeSlash(id); |
|
} |
|
return normalizeSlash(node_url.fileURLToPath(id)); |
|
} |
|
function pathToFileURL(id) { |
|
return node_url.pathToFileURL(fileURLToPath(id)).toString(); |
|
} |
|
const INVALID_CHAR_RE = /[\u0000-\u001F"#$&*+,/:;<=>?@[\]^`{|}\u007F]+/g; |
|
function sanitizeURIComponent(name = "", replacement = "_") { |
|
return name.replace(INVALID_CHAR_RE, replacement).replace(/%../g, replacement); |
|
} |
|
function sanitizeFilePath(filePath = "") { |
|
return filePath.replace(/\?.*$/, "").split(/[/\\]/g).map((p) => sanitizeURIComponent(p)).join("/").replace(/^([A-Za-z])_\//, "$1:/"); |
|
} |
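// Illustrative example: sanitizeFilePath('C:\\foo\\bar*baz?x=1') becomes
// 'C:/foo/bar_baz' (query stripped, separators normalized, invalid characters
// replaced, and the Windows drive prefix restored).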
|
function normalizeid(id) { |
|
if (typeof id !== "string") { |
|
id = id.toString(); |
|
} |
|
if (/(node|data|http|https|file):/.test(id)) { |
|
return id; |
|
} |
|
if (BUILTIN_MODULES.has(id)) { |
|
return "node:" + id; |
|
} |
|
return "file://" + encodeURI(normalizeSlash(id)); |
|
} |
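// Illustrative example: normalizeid('fs') === 'node:fs' and
// normalizeid('/home/user/app.mjs') === 'file:///home/user/app.mjs';
// ids that already carry a protocol are returned unchanged.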
|
async function loadURL(url) { |
|
const code = await fs.promises.readFile(fileURLToPath(url), "utf8"); |
|
return code; |
|
} |
|
function toDataURL(code) { |
|
const base64 = Buffer.from(code).toString("base64"); |
|
return `data:text/javascript;base64,${base64}`; |
|
} |
|
function isNodeBuiltin(id = "") { |
|
id = id.replace(/^node:/, "").split("/")[0]; |
|
return BUILTIN_MODULES.has(id); |
|
} |
|
const ProtocolRegex = /^(?<proto>.{2,}?):.+$/; |
|
function getProtocol(id) { |
|
const proto = id.match(ProtocolRegex); |
|
return proto ? proto.groups?.proto : void 0; |
|
} |
|
|
|
const DEFAULT_CONDITIONS_SET = new Set(["node", "import"]); |
|
const DEFAULT_EXTENSIONS = [".mjs", ".cjs", ".js", ".json"]; |
|
const NOT_FOUND_ERRORS = new Set([ |
|
"ERR_MODULE_NOT_FOUND", |
|
"ERR_UNSUPPORTED_DIR_IMPORT", |
|
"MODULE_NOT_FOUND", |
|
"ERR_PACKAGE_PATH_NOT_EXPORTED" |
|
]); |
|
function _tryModuleResolve(id, url, conditions) { |
|
try { |
|
return moduleResolve(id, url, conditions); |
|
} catch (error) { |
|
if (!NOT_FOUND_ERRORS.has(error?.code)) { |
|
throw error; |
|
} |
|
} |
|
} |
|
function _resolve(id, options = {}) { |
|
if (typeof id !== "string") { |
|
if (id instanceof URL) { |
|
id = fileURLToPath(id); |
|
} else { |
|
throw new TypeError("input must be a `string` or `URL`"); |
|
} |
|
} |
|
if (/(node|data|http|https):/.test(id)) { |
|
return id; |
|
} |
|
if (BUILTIN_MODULES.has(id)) { |
|
return "node:" + id; |
|
} |
|
if (id.startsWith("file://")) { |
|
id = fileURLToPath(id); |
|
} |
|
if (pathe.isAbsolute(id)) { |
|
try { |
|
const stat = fs.statSync(id); |
|
if (stat.isFile()) { |
|
return pathToFileURL(id); |
|
} |
|
} catch (error) { |
|
if (error?.code !== "ENOENT") { |
|
throw error; |
|
} |
|
} |
|
} |
|
const conditionsSet = options.conditions ? new Set(options.conditions) : DEFAULT_CONDITIONS_SET; |
|
const _urls = (Array.isArray(options.url) ? options.url : [options.url]).filter(Boolean).map((url) => new URL(normalizeid(url.toString()))); |
|
if (_urls.length === 0) { |
|
_urls.push(new URL(pathToFileURL(process.cwd()))); |
|
} |
|
const urls = [..._urls]; |
|
for (const url of _urls) { |
|
if (url.protocol === "file:") { |
|
urls.push( |
|
new URL("./", url), |
|
|
|
new URL(ufo.joinURL(url.pathname, "_index.js"), url), |
|
|
|
new URL("node_modules", url) |
|
); |
|
} |
|
} |
|
let resolved; |
|
for (const url of urls) { |
|
resolved = _tryModuleResolve(id, url, conditionsSet); |
|
if (resolved) { |
|
break; |
|
} |
|
for (const prefix of ["", "/index"]) { |
|
for (const extension of options.extensions || DEFAULT_EXTENSIONS) { |
|
resolved = _tryModuleResolve( |
|
id + prefix + extension, |
|
url, |
|
conditionsSet |
|
); |
|
if (resolved) { |
|
break; |
|
} |
|
} |
|
if (resolved) { |
|
break; |
|
} |
|
} |
|
if (resolved) { |
|
break; |
|
} |
|
} |
|
if (!resolved) { |
|
const error = new Error( |
|
`Cannot find module ${id} imported from ${urls.join(", ")}` |
|
); |
|
error.code = "ERR_MODULE_NOT_FOUND"; |
|
throw error; |
|
} |
|
return pathToFileURL(resolved); |
|
} |
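// _resolve() first handles full URLs, builtins, and existing absolute paths,
// then falls back to moduleResolve() from every base URL in options.url (plus
// the containing directory and its node_modules), retrying with the configured
// extensions and '/index' suffixes before giving up with ERR_MODULE_NOT_FOUND.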
|
function resolveSync(id, options) { |
|
return _resolve(id, options); |
|
} |
|
function resolve(id, options) { |
|
try { |
|
return Promise.resolve(resolveSync(id, options)); |
|
} catch (error) { |
|
return Promise.reject(error); |
|
} |
|
} |
|
function resolvePathSync(id, options) { |
|
return fileURLToPath(resolveSync(id, options)); |
|
} |
|
function resolvePath(id, options) { |
|
try { |
|
return Promise.resolve(resolvePathSync(id, options)); |
|
} catch (error) { |
|
return Promise.reject(error); |
|
} |
|
} |
|
function createResolve(defaults) { |
|
return (id, url) => { |
|
return resolve(id, { url, ...defaults }); |
|
}; |
|
} |
|
const NODE_MODULES_RE = /^(.+\/node_modules\/)([^/@]+|@[^/]+\/[^/]+)(\/?.*?)?$/; |
|
function parseNodeModulePath(path) { |
|
if (!path) { |
|
return {}; |
|
} |
|
path = pathe.normalize(fileURLToPath(path)); |
|
const match = NODE_MODULES_RE.exec(path); |
|
if (!match) { |
|
return {}; |
|
} |
|
const [, dir, name, subpath] = match; |
|
return { |
|
dir, |
|
name, |
|
subpath: subpath ? `.${subpath}` : void 0 |
|
}; |
|
} |
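// Illustrative example:
//   parseNodeModulePath('/app/node_modules/@scope/lib/dist/index.mjs')
// returns { dir: '/app/node_modules/', name: '@scope/lib', subpath: './dist/index.mjs' }.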
|
async function lookupNodeModuleSubpath(path) { |
|
path = pathe.normalize(fileURLToPath(path)); |
|
const { name, subpath } = parseNodeModulePath(path); |
|
if (!name || !subpath) { |
|
return subpath; |
|
} |
|
const { exports } = await pkgTypes.readPackageJSON(path).catch(() => { |
|
}) || {}; |
|
if (exports) { |
|
const resolvedSubpath = _findSubpath(subpath, exports); |
|
if (resolvedSubpath) { |
|
return resolvedSubpath; |
|
} |
|
} |
|
return subpath; |
|
} |
|
function _findSubpath(subpath, exports) { |
|
if (typeof exports === "string") { |
|
exports = { ".": exports }; |
|
} |
|
if (!subpath.startsWith(".")) { |
|
subpath = subpath.startsWith("/") ? `.${subpath}` : `./${subpath}`; |
|
} |
|
if (subpath in (exports || {})) { |
|
return subpath; |
|
} |
|
return _flattenExports(exports).find((p) => p.fsPath === subpath)?.subpath; |
|
} |
|
function _flattenExports(exports = {}, parentSubpath = "./") { |
|
return Object.entries(exports).flatMap(([key, value]) => { |
|
const [subpath, condition] = key.startsWith(".") ? [key.slice(1), void 0] : ["", key]; |
|
const _subPath = ufo.joinURL(parentSubpath, subpath); |
|
if (typeof value === "string") { |
|
return [{ subpath: _subPath, fsPath: value, condition }]; |
|
} else { |
|
return _flattenExports(value, _subPath); |
|
} |
|
}); |
|
} |
|
|
|
const ESM_STATIC_IMPORT_RE = /(?<=\s|^|;|\})import\s*([\s"']*(?<imports>[\p{L}\p{M}\w\t\n\r $*,/{}@.]+)from\s*)?["']\s*(?<specifier>(?<="\s*)[^"]*[^\s"](?=\s*")|(?<='\s*)[^']*[^\s'](?=\s*'))\s*["'][\s;]*/gmu; |
|
const DYNAMIC_IMPORT_RE = /import\s*\((?<expression>(?:[^()]+|\((?:[^()]+|\([^()]*\))*\))*)\)/gm; |
|
const IMPORT_NAMED_TYPE_RE = /(?<=\s|^|;|})import\s*type\s+([\s"']*(?<imports>[\w\t\n\r $*,/{}]+)from\s*)?["']\s*(?<specifier>(?<="\s*)[^"]*[^\s"](?=\s*")|(?<='\s*)[^']*[^\s'](?=\s*'))\s*["'][\s;]*/gm; |
|
const EXPORT_DECAL_RE = /\bexport\s+(?<declaration>(async function\s*\*?|function\s*\*?|let|const enum|const|enum|var|class))\s+\*?(?<name>[\w$]+)(?<extraNames>.*,\s*[\w$]+)*/g; |
|
const EXPORT_DECAL_TYPE_RE = /\bexport\s+(?<declaration>(interface|type|declare (async function|function|let|const enum|const|enum|var|class)))\s+(?<name>[\w$]+)/g; |
|
const EXPORT_NAMED_RE = /\bexport\s+{(?<exports>[^}]+?)[\s,]*}(\s*from\s*["']\s*(?<specifier>(?<="\s*)[^"]*[^\s"](?=\s*")|(?<='\s*)[^']*[^\s'](?=\s*'))\s*["'][^\n;]*)?/g; |
|
const EXPORT_NAMED_TYPE_RE = /\bexport\s+type\s+{(?<exports>[^}]+?)[\s,]*}(\s*from\s*["']\s*(?<specifier>(?<="\s*)[^"]*[^\s"](?=\s*")|(?<='\s*)[^']*[^\s'](?=\s*'))\s*["'][^\n;]*)?/g; |
|
const EXPORT_NAMED_DESTRUCT = /\bexport\s+(let|var|const)\s+(?:{(?<exports1>[^}]+?)[\s,]*}|\[(?<exports2>[^\]]+?)[\s,]*])\s+=/gm; |
|
const EXPORT_STAR_RE = /\bexport\s*(\*)(\s*as\s+(?<name>[\w$]+)\s+)?\s*(\s*from\s*["']\s*(?<specifier>(?<="\s*)[^"]*[^\s"](?=\s*")|(?<='\s*)[^']*[^\s'](?=\s*'))\s*["'][^\n;]*)?/g; |
|
const EXPORT_DEFAULT_RE = /\bexport\s+default\s+(async function|function|class|true|false|\W|\d)|\bexport\s+default\s+(?<defaultName>.*)/g; |
|
const TYPE_RE = /^\s*?type\s/; |
|
function findStaticImports(code) { |
|
return _filterStatement( |
|
_tryGetLocations(code, "import"), |
|
matchAll(ESM_STATIC_IMPORT_RE, code, { type: "static" }) |
|
); |
|
} |
|
function findDynamicImports(code) { |
|
return _filterStatement( |
|
_tryGetLocations(code, "import"), |
|
matchAll(DYNAMIC_IMPORT_RE, code, { type: "dynamic" }) |
|
); |
|
} |
|
function findTypeImports(code) { |
|
return [ |
|
...matchAll(IMPORT_NAMED_TYPE_RE, code, { type: "type" }), |
|
...matchAll(ESM_STATIC_IMPORT_RE, code, { type: "static" }).filter( |
|
(match) => /[^A-Za-z]type\s/.test(match.imports) |
|
) |
|
]; |
|
} |
|
function parseStaticImport(matched) { |
|
const cleanedImports = clearImports(matched.imports); |
|
const namedImports = {}; |
|
const _matches = cleanedImports.match(/{([^}]*)}/)?.[1]?.split(",") || []; |
|
for (const namedImport of _matches) { |
|
const _match = namedImport.match(/^\s*(\S*) as (\S*)\s*$/); |
|
const source = _match?.[1] || namedImport.trim(); |
|
const importName = _match?.[2] || source; |
|
if (source && !TYPE_RE.test(source)) { |
|
namedImports[source] = importName; |
|
} |
|
} |
|
const { namespacedImport, defaultImport } = getImportNames(cleanedImports); |
|
return { |
|
...matched, |
|
defaultImport, |
|
namespacedImport, |
|
namedImports |
|
}; |
|
} |
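// Illustrative example (assumed input): for the statement
//   import foo, { bar as b } from 'baz'
// parseStaticImport(findStaticImports(code)[0]) yields an object including
// { specifier: 'baz', defaultImport: 'foo', namedImports: { bar: 'b' } }.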
|
function parseTypeImport(matched) { |
|
if (matched.type === "type") { |
|
return parseStaticImport(matched); |
|
} |
|
const cleanedImports = clearImports(matched.imports); |
|
const namedImports = {}; |
|
const _matches = cleanedImports.match(/{([^}]*)}/)?.[1]?.split(",") || []; |
|
for (const namedImport of _matches) { |
|
const _match = /\s+as\s+/.test(namedImport) ? namedImport.match(/^\s*type\s+(\S*) as (\S*)\s*$/) : namedImport.match(/^\s*type\s+(\S*)\s*$/); |
|
const source = _match?.[1] || namedImport.trim(); |
|
const importName = _match?.[2] || source; |
|
if (source && TYPE_RE.test(namedImport)) { |
|
namedImports[source] = importName; |
|
} |
|
} |
|
const { namespacedImport, defaultImport } = getImportNames(cleanedImports); |
|
return { |
|
...matched, |
|
defaultImport, |
|
namespacedImport, |
|
namedImports |
|
}; |
|
} |
|
function findExports(code) { |
|
const declaredExports = matchAll(EXPORT_DECAL_RE, code, { |
|
type: "declaration" |
|
}); |
|
for (const declaredExport of declaredExports) { |
|
const extraNamesStr = declaredExport.extraNames; |
|
if (extraNamesStr) { |
|
const extraNames = matchAll(/,\s*(?<name>\w+)/g, extraNamesStr, {}).map( |
|
(m) => m.name |
|
); |
|
declaredExport.names = [declaredExport.name, ...extraNames]; |
|
} |
|
delete declaredExport.extraNames; |
|
} |
|
const namedExports = normalizeNamedExports( |
|
matchAll(EXPORT_NAMED_RE, code, { |
|
type: "named" |
|
}) |
|
); |
|
const destructuredExports = matchAll( |
|
EXPORT_NAMED_DESTRUCT, |
|
code, |
|
{ type: "named" } |
|
); |
|
for (const namedExport of destructuredExports) { |
|
namedExport.exports = namedExport.exports1 || namedExport.exports2; |
|
namedExport.names = namedExport.exports.replace(/^\r?\n?/, "").split(/\s*,\s*/g).filter((name) => !TYPE_RE.test(name)).map( |
|
(name) => name.replace(/^.*?\s*:\s*/, "").replace(/\s*=\s*.*$/, "").trim() |
|
); |
|
} |
|
const defaultExport = matchAll(EXPORT_DEFAULT_RE, code, { |
|
type: "default", |
|
name: "default" |
|
}); |
|
const starExports = matchAll(EXPORT_STAR_RE, code, { |
|
type: "star" |
|
}); |
|
const exports = normalizeExports([ |
|
...declaredExports, |
|
...namedExports, |
|
...destructuredExports, |
|
...defaultExport, |
|
...starExports |
|
]); |
|
if (exports.length === 0) { |
|
return []; |
|
} |
|
const exportLocations = _tryGetLocations(code, "export"); |
|
if (exportLocations && exportLocations.length === 0) { |
|
return []; |
|
} |
|
return ( |
|
|
|
_filterStatement(exportLocations, exports).filter((exp, index, exports2) => { |
|
const nextExport = exports2[index + 1]; |
|
return !nextExport || exp.type !== nextExport.type || !exp.name || exp.name !== nextExport.name; |
|
}) |
|
); |
|
} |
|
function findTypeExports(code) { |
|
const declaredExports = matchAll( |
|
EXPORT_DECAL_TYPE_RE, |
|
code, |
|
{ type: "declaration" } |
|
); |
|
const namedExports = normalizeNamedExports( |
|
matchAll(EXPORT_NAMED_TYPE_RE, code, { |
|
type: "named" |
|
}) |
|
); |
|
const exports = normalizeExports([ |
|
...declaredExports, |
|
...namedExports |
|
]); |
|
if (exports.length === 0) { |
|
return []; |
|
} |
|
const exportLocations = _tryGetLocations(code, "export"); |
|
if (exportLocations && exportLocations.length === 0) { |
|
return []; |
|
} |
|
return ( |
|
|
|
_filterStatement(exportLocations, exports).filter((exp, index, exports2) => { |
|
const nextExport = exports2[index + 1]; |
|
return !nextExport || exp.type !== nextExport.type || !exp.name || exp.name !== nextExport.name; |
|
}) |
|
); |
|
} |
|
function normalizeExports(exports) { |
|
for (const exp of exports) { |
|
if (!exp.name && exp.names && exp.names.length === 1) { |
|
exp.name = exp.names[0]; |
|
} |
|
if (exp.name === "default" && exp.type !== "default") { |
|
exp._type = exp.type; |
|
exp.type = "default"; |
|
} |
|
if (!exp.names && exp.name) { |
|
exp.names = [exp.name]; |
|
} |
|
if (exp.type === "declaration" && exp.declaration) { |
|
exp.declarationType = exp.declaration.replace( |
|
/^declare\s*/, |
|
"" |
|
); |
|
} |
|
} |
|
return exports; |
|
} |
|
function normalizeNamedExports(namedExports) { |
|
for (const namedExport of namedExports) { |
|
namedExport.names = namedExport.exports.replace(/^\r?\n?/, "").split(/\s*,\s*/g).filter((name) => !TYPE_RE.test(name)).map((name) => name.replace(/^.*?\sas\s/, "").trim()); |
|
} |
|
return namedExports; |
|
} |
|
function findExportNames(code) { |
|
return findExports(code).flatMap((exp) => exp.names).filter(Boolean); |
|
} |
|
async function resolveModuleExportNames(id, options) { |
|
const url = await resolvePath(id, options); |
|
const code = await loadURL(url); |
|
const exports = findExports(code); |
|
const exportNames = new Set( |
|
exports.flatMap((exp) => exp.names).filter(Boolean) |
|
); |
|
for (const exp of exports) { |
|
if (exp.type !== "star" || !exp.specifier) { |
|
continue; |
|
} |
|
const subExports = await resolveModuleExportNames(exp.specifier, { |
|
...options, |
|
url |
|
}); |
|
for (const subExport of subExports) { |
|
exportNames.add(subExport); |
|
} |
|
} |
|
return [...exportNames]; |
|
} |
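// resolveModuleExportNames() loads the resolved module source, collects its
// export names, and recursively follows `export * from '...'` statements so
// re-exported names are included as well.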
|
function _filterStatement(locations, statements) {
  return statements.filter((exp) => {
    return !locations || locations.some((location) => {
      return exp.start <= location.start && exp.end >= location.end;
    });
  });
}
function _tryGetLocations(code, label) {
  try {
    return _getLocations(code, label);
  } catch {
    // Tokenizing can throw on invalid syntax; callers treat `undefined` as
    // "locations unknown" and skip location-based filtering.
  }
}
function _getLocations(code, label) {
  const tokens = acorn.tokenizer(code, {
    ecmaVersion: "latest",
    sourceType: "module",
    allowHashBang: true,
    allowAwaitOutsideFunction: true,
    allowImportExportEverywhere: true
  });
  const locations = [];
  for (const token of tokens) {
    if (token.type.label === label) {
      locations.push({
        start: token.start,
        end: token.end
      });
    }
  }
  return locations;
}
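
// Illustrative sketch (internal helper, not exported): _getLocations walks
// acorn's token stream and records the offsets of every token whose label
// matches, here the `export` keyword.
//
//   _getLocations("export const a = 1;", "export");
//   // ~> [{ start: 0, end: 6 }]
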
function createCommonJS(url) {
  const __filename = fileURLToPath(url);
  const __dirname = path.dirname(__filename);
  let _nativeRequire;
  const getNativeRequire = () => {
    if (!_nativeRequire) {
      _nativeRequire = node_module.createRequire(url);
    }
    return _nativeRequire;
  };
  function require(id) {
    return getNativeRequire()(id);
  }
  require.resolve = function requireResolve(id, options) {
    return getNativeRequire().resolve(id, options);
  };
  return {
    __filename,
    __dirname,
    require
  };
}
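
// Illustrative sketch: createCommonJS builds the CommonJS globals that are
// missing inside an ES module; the native `require` is created lazily on
// first use. Hypothetical consumer usage, in an ESM file:
//
//   // const { __dirname, __filename, require } = createCommonJS(import.meta.url);
//   // const pkg = require("./package.json");
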
function interopDefault(sourceModule, opts = {}) {
  if (!isObject(sourceModule) || !("default" in sourceModule)) {
    return sourceModule;
  }
  const defaultValue = sourceModule.default;
  if (defaultValue === void 0 || defaultValue === null) {
    return sourceModule;
  }
  if (typeof defaultValue !== "object") {
    return opts.preferNamespace ? sourceModule : defaultValue;
  }
  for (const key in sourceModule) {
    if (key === "default") {
      try {
        if (!(key in defaultValue)) {
          Object.defineProperty(defaultValue, key, {
            enumerable: false,
            configurable: false,
            get() {
              return defaultValue;
            }
          });
        }
      } catch {
        // Ignore: the default export may be frozen or non-extensible.
      }
    } else {
      try {
        if (!(key in defaultValue)) {
          Object.defineProperty(defaultValue, key, {
            enumerable: true,
            configurable: true,
            get() {
              return sourceModule[key];
            }
          });
        }
      } catch {
        // Ignore: the default export may be frozen or non-extensible.
      }
    }
  }
  return defaultValue;
}
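
// Illustrative sketch: interopDefault unwraps a namespace object that carries
// a `default` export while proxying the other named exports (and `default`
// itself) onto the returned default object, so both access styles keep working.
//
//   const mod = { default: { a: 1 }, named: 2 };
//   interopDefault(mod);             // ~> { a: 1 } plus a lazy `named` getter -> 2
//   interopDefault({ default: 42 }); // ~> 42 (non-object default is returned directly)
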
const EVAL_ESM_IMPORT_RE = /(?<=import .* from ["'])([^"']+)(?=["'])|(?<=export .* from ["'])([^"']+)(?=["'])|(?<=import\s*["'])([^"']+)(?=["'])|(?<=import\s*\(["'])([^"']+)(?=["']\))/g;
async function loadModule(id, options = {}) {
  const url = await resolve(id, options);
  const code = await loadURL(url);
  return evalModule(code, { ...options, url });
}
async function evalModule(code, options = {}) {
  const transformed = await transformModule(code, options);
  const dataURL = toDataURL(transformed);
  return import(dataURL).catch((error) => {
    error.stack = error.stack.replace(
      new RegExp(dataURL, "g"),
      options.url || "_mlly_eval_"
    );
    throw error;
  });
}
function transformModule(code, options = {}) {
  if (options.url && options.url.endsWith(".json")) {
    return Promise.resolve("export default " + code);
  }
  if (options.url) {
    code = code.replace(/import\.meta\.url/g, `'${options.url}'`);
  }
  return Promise.resolve(code);
}
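
// Illustrative sketch: loadModule resolves an id to a URL and loads its
// source; evalModule then turns the (lightly transformed) source into a
// data: URL and dynamically imports it, so ESM code can be evaluated without
// a file on disk.
//
//   // const mod = await evalModule("export const answer = 42;");
//   // mod.answer === 42
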
async function resolveImports(code, options) {
  const imports = [...code.matchAll(EVAL_ESM_IMPORT_RE)].map((m) => m[0]);
  if (imports.length === 0) {
    return code;
  }
  const uniqueImports = [...new Set(imports)];
  const resolved = new Map();
  await Promise.all(
    uniqueImports.map(async (id) => {
      let url = await resolve(id, options);
      if (url.endsWith(".json")) {
        const code2 = await loadURL(url);
        url = toDataURL(await transformModule(code2, { url }));
      }
      resolved.set(id, url);
    })
  );
  const re = new RegExp(
    uniqueImports.map((index) => `(${index})`).join("|"),
    "g"
  );
  return code.replace(re, (id) => resolved.get(id));
}
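
// Illustrative sketch: resolveImports rewrites every specifier matched by
// EVAL_ESM_IMPORT_RE to its fully resolved URL, so a snippet becomes
// self-contained before it is evaluated. Paths below are hypothetical and the
// result assumes "./foo.mjs" actually exists next to the given URL.
//
//   // await resolveImports('import { foo } from "./foo.mjs";', { url: "file:///app/index.mjs" });
//   // ~> 'import { foo } from "file:///app/foo.mjs";'
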
const ESM_RE = /([\s;]|^)(import[\s\w*,{}]*from|import\s*["'*{]|export\b\s*(?:[*{]|default|class|type|function|const|var|let|async function)|import\.meta\b)/m;
const CJS_RE = /([\s;]|^)(module.exports\b|exports\.\w|require\s*\(|global\.\w)/m;
const COMMENT_RE = /\/\*.+?\*\/|\/\/.*(?=[nr])/g;
const BUILTIN_EXTENSIONS = new Set([".mjs", ".cjs", ".node", ".wasm"]);
function hasESMSyntax(code, opts = {}) {
  if (opts.stripComments) {
    code = code.replace(COMMENT_RE, "");
  }
  return ESM_RE.test(code);
}
function hasCJSSyntax(code, opts = {}) {
  if (opts.stripComments) {
    code = code.replace(COMMENT_RE, "");
  }
  return CJS_RE.test(code);
}
function detectSyntax(code, opts = {}) {
  if (opts.stripComments) {
    code = code.replace(COMMENT_RE, "");
  }
  const hasESM = hasESMSyntax(code, {});
  const hasCJS = hasCJSSyntax(code, {});
  return {
    hasESM,
    hasCJS,
    isMixed: hasESM && hasCJS
  };
}
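
// Illustrative sketch: detectSyntax runs both regex checks against the same
// source (comments are stripped once here, which is why the nested calls pass
// an empty options object).
//
//   detectSyntax("export default {}");
//   // ~> { hasESM: true, hasCJS: false, isMixed: false }
//   detectSyntax('import x from "y";\nmodule.exports = x;');
//   // ~> { hasESM: true, hasCJS: true, isMixed: true }
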
const validNodeImportDefaults = {
  allowedProtocols: ["node", "file", "data"]
};
async function isValidNodeImport(id, _options = {}) {
  if (isNodeBuiltin(id)) {
    return true;
  }
  const options = { ...validNodeImportDefaults, ..._options };
  const proto = getProtocol(id);
  if (proto && !options.allowedProtocols?.includes(proto)) {
    return false;
  }
  if (proto === "data") {
    return true;
  }
  const resolvedPath = await resolvePath(id, options);
  const extension = pathe.extname(resolvedPath);
  if (BUILTIN_EXTENSIONS.has(extension)) {
    return true;
  }
  if (extension !== ".js") {
    return false;
  }
  const package_ = await pkgTypes.readPackageJSON(resolvedPath).catch(() => {});
  if (package_?.type === "module") {
    return true;
  }
  if (/\.(\w+-)?esm?(-\w+)?\.js$|\/(esm?)\//.test(resolvedPath)) {
    return false;
  }
  const code = options.code || await fs.promises.readFile(resolvedPath, "utf8").catch(() => {}) || "";
  return !hasESMSyntax(code);
}
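
// Illustrative sketch: isValidNodeImport answers "can Node import this id
// natively?" Builtins, data: URLs and .mjs/.cjs/.node/.wasm files pass
// immediately; a plain .js file passes when its nearest package.json declares
// `"type": "module"`, or when its path does not look like an ESM build and
// its source shows no ESM syntax. The second specifier below is hypothetical.
//
//   // await isValidNodeImport("node:fs");           // ~> true
//   // await isValidNodeImport("some-esm-only-pkg"); // ~> likely false
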
exports.DYNAMIC_IMPORT_RE = DYNAMIC_IMPORT_RE;
exports.ESM_STATIC_IMPORT_RE = ESM_STATIC_IMPORT_RE;
exports.EXPORT_DECAL_RE = EXPORT_DECAL_RE;
exports.EXPORT_DECAL_TYPE_RE = EXPORT_DECAL_TYPE_RE;
exports.createCommonJS = createCommonJS;
exports.createResolve = createResolve;
exports.detectSyntax = detectSyntax;
exports.evalModule = evalModule;
exports.fileURLToPath = fileURLToPath;
exports.findDynamicImports = findDynamicImports;
exports.findExportNames = findExportNames;
exports.findExports = findExports;
exports.findStaticImports = findStaticImports;
exports.findTypeExports = findTypeExports;
exports.findTypeImports = findTypeImports;
exports.getProtocol = getProtocol;
exports.hasCJSSyntax = hasCJSSyntax;
exports.hasESMSyntax = hasESMSyntax;
exports.interopDefault = interopDefault;
exports.isNodeBuiltin = isNodeBuiltin;
exports.isValidNodeImport = isValidNodeImport;
exports.loadModule = loadModule;
exports.loadURL = loadURL;
exports.lookupNodeModuleSubpath = lookupNodeModuleSubpath;
exports.normalizeid = normalizeid;
exports.parseNodeModulePath = parseNodeModulePath;
exports.parseStaticImport = parseStaticImport;
exports.parseTypeImport = parseTypeImport;
exports.pathToFileURL = pathToFileURL;
exports.resolve = resolve;
exports.resolveImports = resolveImports;
exports.resolveModuleExportNames = resolveModuleExportNames;
exports.resolvePath = resolvePath;
exports.resolvePathSync = resolvePathSync;
exports.resolveSync = resolveSync;
exports.sanitizeFilePath = sanitizeFilePath;
exports.sanitizeURIComponent = sanitizeURIComponent;
exports.toDataURL = toDataURL;
exports.transformModule = transformModule;