diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/lib/index.js new file mode 100644 index 0000000000000000000000000000000000000000..5e40f48882cad4e622a9f78cb7d80619de9793c3 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/lib/index.js @@ -0,0 +1,189 @@ +const localeCompare = require('@isaacs/string-locale-compare')('en') +const { join, basename, resolve } = require('path') +const transformHTML = require('./transform-html.js') +const { version } = require('../../lib/npm.js') +const { aliases } = require('../../lib/utils/cmd-list') +const { shorthands, definitions } = require('@npmcli/config/lib/definitions') + +const DOC_EXT = '.md' + +const TAGS = { + CONFIG: '', + USAGE: '', + SHORTHANDS: '', +} + +const assertPlaceholder = (src, path, placeholder) => { + if (!src.includes(placeholder)) { + throw new Error( + `Cannot replace ${placeholder} in ${path} due to missing placeholder` + ) + } + return placeholder +} + +const getCommandByDoc = (docFile, docExt) => { + // Grab the command name from the *.md filename + // NOTE: We cannot use the name property command file because in the case of + // `npx` the file being used is `lib/commands/exec.js` + const name = basename(docFile, docExt).replace('npm-', '') + + if (name === 'npm') { + return { + name, + params: null, + usage: 'npm', + } + } + + // special case for `npx`: + // `npx` is not technically a command in and of itself, + // so it just needs the usage of npm exec + const srcName = name === 'npx' ? 'exec' : name + const { params, usage = [''], workspaces } = require(`../../lib/commands/${srcName}`) + const usagePrefix = name === 'npx' ? 'npx' : `npm ${name}` + if (params) { + for (const param of params) { + if (definitions[param].exclusive) { + for (const e of definitions[param].exclusive) { + if (!params.includes(e)) { + params.splice(params.indexOf(param) + 1, 0, e) + } + } + } + } + } + + return { + name, + workspaces, + params: name === 'npx' ? 
null : params, + usage: usage.map(u => `${usagePrefix} ${u}`.trim()).join('\n'), + } +} + +const replaceVersion = (src) => src.replace(/@VERSION@/g, version) + +const replaceUsage = (src, { path }) => { + const replacer = assertPlaceholder(src, path, TAGS.USAGE) + const { usage, name, workspaces } = getCommandByDoc(path, DOC_EXT) + + const synopsis = ['```bash', usage] + + const cmdAliases = Object.keys(aliases).reduce((p, c) => { + if (aliases[c] === name) { + p.push(c) + } + return p + }, []) + + if (cmdAliases.length === 1) { + synopsis.push('', `alias: ${cmdAliases[0]}`) + } else if (cmdAliases.length > 1) { + synopsis.push('', `aliases: ${cmdAliases.join(', ')}`) + } + + synopsis.push('```') + + if (!workspaces) { + synopsis.push('', 'Note: This command is unaware of workspaces.') + } + + return src.replace(replacer, synopsis.join('\n')) +} + +const replaceParams = (src, { path }) => { + const { params } = getCommandByDoc(path, DOC_EXT) + const replacer = params && assertPlaceholder(src, path, TAGS.CONFIG) + + if (!params) { + return src + } + + const paramsConfig = params.map((n) => definitions[n].describe()) + + return src.replace(replacer, paramsConfig.join('\n\n')) +} + +const replaceConfig = (src, { path }) => { + const replacer = assertPlaceholder(src, path, TAGS.CONFIG) + + // sort not-deprecated ones to the top + /* istanbul ignore next - typically already sorted in the definitions file, + * but this is here so that our help doc will stay consistent if we decide + * to move them around. */ + const sort = ([keya, { deprecated: depa }], [keyb, { deprecated: depb }]) => { + return depa && !depb ? 1 + : !depa && depb ? -1 + : localeCompare(keya, keyb) + } + + const allConfig = Object.entries(definitions).sort(sort) + .map(([, def]) => def.describe()) + .join('\n\n') + + return src.replace(replacer, allConfig) +} + +const replaceShorthands = (src, { path }) => { + const replacer = assertPlaceholder(src, path, TAGS.SHORTHANDS) + + const sh = Object.entries(shorthands) + .sort(([shorta, expansiona], [shortb, expansionb]) => + // sort by what they're short FOR + localeCompare(expansiona.join(' '), expansionb.join(' ')) || localeCompare(shorta, shortb) + ) + .map(([short, expansion]) => { + // XXX: this is incorrect. we have multicharacter flags like `-iwr` that + // can only be set with a single dash + const dash = short.length === 1 ? '-' : '--' + return `* \`${dash}${short}\`: \`${expansion.join(' ')}\`` + }) + + return src.replace(replacer, sh.join('\n')) +} + +const replaceHelpLinks = (src) => { + // replaces markdown links with equivalent-ish npm help commands + return src.replace( + /\[`?([\w\s-]+)`?\]\(\/(?:commands|configuring-npm|using-npm)\/(?:[\w\s-]+)\)/g, + (_, p1) => { + const term = p1.replace(/npm\s/g, '').replace(/\s+/g, ' ').trim() + const help = `npm help ${term.includes(' ') ? 
`"${term}"` : term}` + return help + } + ) +} + +const transformMan = (src, { data, unified, remarkParse, remarkMan }) => unified() + .use(remarkParse) + .use(remarkMan, { version: `NPM@${version}` }) + .processSync(`# ${data.title}(${data.section}) - ${data.description}\n\n${src}`) + .toString() + +const manPath = (name, { data }) => join(`man${data.section}`, `${name}.${data.section}`) + +const transformMd = (src, { frontmatter }) => ['---', frontmatter, '---', '', src].join('\n') + +module.exports = { + DOC_EXT, + TAGS, + paths: { + content: resolve(__dirname, 'content'), + nav: resolve(__dirname, 'content', 'nav.yml'), + template: resolve(__dirname, 'template.html'), + man: resolve(__dirname, '..', '..', 'man'), + html: resolve(__dirname, '..', 'output'), + md: resolve(__dirname, '..', 'content'), + }, + usage: replaceUsage, + params: replaceParams, + config: replaceConfig, + shorthands: replaceShorthands, + version: replaceVersion, + helpLinks: replaceHelpLinks, + man: transformMan, + manPath: manPath, + md: transformMd, + html: transformHTML, +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/output/configuring-npm/folders.html b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/output/configuring-npm/folders.html new file mode 100644 index 0000000000000000000000000000000000000000..5b0f7946324bdf746b7fff359f1d65767d1663cd --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/output/configuring-npm/folders.html @@ -0,0 +1,286 @@ + + +folders + + + + + +
+
+

+ folders + @11.6.2 +

+Folder Structures Used by npm +
+ +
+

Table of contents

+ +
+ +

Description

+

npm puts various things on your computer. +That's its job.

+

This document will tell you what it puts where.

+

tl;dr

+ +

prefix Configuration

+

The prefix config defaults to the location where node is installed. +On most systems, this is /usr/local. +On Windows, it's %AppData%\npm. +On Unix systems, it's one level up, since node is typically installed at {prefix}/bin/node rather than {prefix}/node.exe.

+

When the global flag is set, npm installs things into this prefix. +When it is not set, it uses the root of the current package, or the current working directory if not in a package already.

+
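You can inspect both locations with the prefix command:

npm prefix -g   # the global prefix
npm prefix      # the root of the current package, if any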

Node Modules

+

Packages are dropped into the node_modules folder under the prefix. +When installing locally, this means that you can require("packagename") to load its main module, or require("packagename/lib/path/to/sub/module") to load other modules.

+

Global installs on Unix systems go to {prefix}/lib/node_modules. +Global installs on Windows go to {prefix}/node_modules (that is, no lib folder.)

+

Scoped packages are installed the same way, except they are grouped together in a sub-folder of the relevant node_modules folder with the name of that scope prefixed by the @ symbol, e.g. npm install @myorg/package would place the package in {prefix}/node_modules/@myorg/package. +See scope for more details.

+

If you wish to require() a package, then install it locally.

+

Executables

+

When in global mode, executables are linked into {prefix}/bin on Unix, or directly into {prefix} on Windows. +Ensure that path is in your terminal's PATH environment variable to run them.

+

When in local mode, executables are linked into ./node_modules/.bin so that they can be made available to scripts run through npm. +(For example, so that a test runner will be in the path when you run npm test.)

+
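For example, a test runner installed as a dependency can be run directly, or through npm exec (the package name here is hypothetical):

./node_modules/.bin/mytest --help
npm exec -- mytest --help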

Man Pages

+

When in global mode, man pages are linked into {prefix}/share/man.

+

When in local mode, man pages are not installed.

+

Man pages are not installed on Windows systems.

+
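After a global install on such systems, the pages are available through man, for example:

man npm
man npm-install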

Cache

+

See npm cache. +Cache files are stored in ~/.npm on Posix, or %LocalAppData%/npm-cache on Windows.

+

This is controlled by the cache config param.

+
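For example:

npm config get cache   # print the cache location
npm cache verify       # verify cache contents and garbage-collect unneeded data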

More Information

+

When installing locally, npm first tries to find an appropriate prefix folder. +This is so that npm install foo@1.2.3 will install to the sensible root of your package, even if you happen to have cded into some other folder.

+

Starting at the $PWD, npm will walk up the folder tree checking for a folder that contains either a package.json file, or a node_modules folder. +If such a thing is found, then that is treated as the effective "current directory" for the purpose of running npm commands. +(This behavior is inspired by and similar to git's .git-folder seeking logic when running git commands in a working dir.)

+

If no package root is found, then the current folder is used.

+
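You can see which folder npm settled on with the prefix command (the paths here are illustrative):

cd ~/projects/my-app/src/components
npm prefix   # prints ~/projects/my-app, the package root npm found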

When you run npm install foo@1.2.3, then the package is loaded into the cache, and then unpacked into ./node_modules/foo. +Then, any of foo's dependencies are similarly unpacked into ./node_modules/foo/node_modules/....

+

Any bin files are symlinked to ./node_modules/.bin/, so that they may be found by npm scripts when necessary.

+

Global Installation

+

If the global config is set to true, then npm will install packages "globally".

+

For global installation, packages are installed roughly the same way, but using the folders described above.

+

Cycles, Conflicts, and Folder Parsimony

+

Cycles are handled using the property of node's module system that it walks up the directories looking for node_modules folders. +So, at every stage, if a package is already installed in an ancestor node_modules folder, then it is not installed at the current location.

+

Consider the case above, where foo -> bar -> baz. +Imagine if, in addition to that, baz depended on bar, so you'd have: +foo -> bar -> baz -> bar -> baz .... +However, since the folder structure is: foo/node_modules/bar/node_modules/baz, there's no need to put another copy of bar into .../baz/node_modules, since when baz calls require("bar"), it will get the copy that is installed in foo/node_modules/bar.

+

This shortcut is only used if the exact same version would be installed in multiple nested node_modules folders. +It is still possible to have a/node_modules/b/node_modules/a if the two "a" packages are different versions. +However, without repeating the exact same package multiple times, an infinite regress will always be prevented.

+

Another optimization can be made by installing dependencies at the highest level possible, below the localized "target" folder (hoisting). +Since version 3, npm hoists dependencies by default.

+

Example

+

Consider this dependency graph:

+
foo
++-- blerg@1.2.5
++-- bar@1.2.3
+|   +-- blerg@1.x (latest=1.3.7)
+|   +-- baz@2.x
+|   |   `-- quux@3.x
+|   |       `-- bar@1.2.3 (cycle)
+|   `-- asdf@*
+`-- baz@1.2.3
+    `-- quux@3.x
+        `-- bar
+
+

In this case, we might expect a folder structure like this (with all dependencies hoisted to the highest level possible):

+
foo
++-- node_modules
+    +-- blerg (1.2.5) <---[A]
+    +-- bar (1.2.3) <---[B]
+    |   +-- node_modules
+    |       +-- baz (2.0.2) <---[C]
+    +-- asdf (2.3.4)
+    +-- baz (1.2.3) <---[D]
+    +-- quux (3.2.0) <---[E]
+
+

Since foo depends directly on bar@1.2.3 and baz@1.2.3, those are installed in foo's node_modules folder.

+

Even though the latest copy of blerg is 1.3.7, foo has a specific dependency on version 1.2.5. +So, that gets installed at [A]. +Since the parent installation of blerg satisfies bar's dependency on blerg@1.x, it does not install another copy under [B].

+

Bar [B] also has dependencies on baz and asdf. +Because it depends on baz@2.x, it cannot re-use the baz@1.2.3 installed in the parent node_modules folder [D], and must install its own copy [C]. +In order to minimize duplication, npm hoists dependencies to the top level by default, so asdf is installed at the top level of foo's node_modules folder.

+

Underneath bar, the baz -> quux -> bar dependency creates a cycle. +However, because bar is already in quux's ancestry [B], it does not unpack another copy of bar into that folder. +Likewise, quux's [E] folder tree is empty, because its dependency on bar is satisfied by the parent folder copy installed at [B].

+

For a graphical breakdown of what is installed where, use npm ls.

+
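npm ls prints the logical dependency graph rather than the literal folder layout; its output looks roughly like this (shape is illustrative):

npm ls
foo@1.0.0 /path/to/foo
├── bar@1.2.3
└── baz@1.2.3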

Publishing

+

Upon publishing, npm will look in the node_modules folder. +If any of the items there are not in the bundleDependencies array, then they will not be included in the package tarball.

+

This allows a package maintainer to install all of their dependencies (and dev dependencies) locally, but only re-publish those items that cannot be found elsewhere. +See package.json for more information.

+

See also

+
+ + +
+ + + + \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/output/configuring-npm/npm-shrinkwrap-json.html b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/output/configuring-npm/npm-shrinkwrap-json.html new file mode 100644 index 0000000000000000000000000000000000000000..db174340c220bfd415d8b1a81aadc5187c8d325d --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/output/configuring-npm/npm-shrinkwrap-json.html @@ -0,0 +1,184 @@ + + +npm-shrinkwrap.json + + + + + +
+
+

+ npm-shrinkwrap.json + @11.6.2 +

+A publishable lockfile +
+ +
+

Table of contents

+ +
+ +

Description

+

npm-shrinkwrap.json is a file created by npm shrinkwrap. +It is identical to package-lock.json, with one major caveat: Unlike package-lock.json, +npm-shrinkwrap.json may be included when publishing a package.

+

The recommended use-case for npm-shrinkwrap.json is applications deployed through the publishing process on the registry: for example, daemons and command-line tools intended as global installs or devDependencies. +It's strongly discouraged for library authors to publish this file, since that would prevent end users from having control over transitive dependency updates.

+
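If you do decide to ship a lockfile with such a package, a minimal sketch of the workflow (assuming the project already has a package-lock.json):

npm shrinkwrap   # converts package-lock.json into npm-shrinkwrap.json
npm publish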

If both package-lock.json and npm-shrinkwrap.json are present in a package root, npm-shrinkwrap.json will be preferred over the package-lock.json file.

+

For full details and description of the npm-shrinkwrap.json file format, refer to the manual page for package-lock.json.

+

See also

+
+ + +
+ + + + \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/output/configuring-npm/package-json.html b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/output/configuring-npm/package-json.html new file mode 100644 index 0000000000000000000000000000000000000000..bd50e9c1f15907ba038322ecfa0fa45d675c1081 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/output/configuring-npm/package-json.html @@ -0,0 +1,981 @@ + + +package.json + + + + + +
+
+

+ package.json + @11.6.2 +

+Specifics of npm's package.json handling +
+ +
+

Table of contents

+ +
+ +

Description

+

This document is all you need to know about what's required in your package.json file. +It must be actual JSON, not just a JavaScript object literal.

+

A lot of the behavior described in this document is affected by the config settings described in config.

+

name

+

If you plan to publish your package, the most important things in your package.json are the name and version fields as they will be required. +The name and version together form an identifier that is assumed to be completely unique. +Changes to the package should come along with changes to the version. +If you don't plan to publish your package, the name and version fields are optional.

+

The name is what your thing is called.

+

Some rules:

+ +

Some tips:

+ +

A name can be optionally prefixed by a scope, e.g. @npm/example. +See scope for more detail.

+

version

+

If you plan to publish your package, the most important things in your package.json are the name and version fields as they will be required. +The name and version together form an identifier that is assumed to be completely unique. +Changes to the package should come along with changes to the version. +If you don't plan to publish your package, the name and version fields are optional.

+

Version must be parseable by node-semver, which is bundled with npm as a dependency. +(npm install semver to use it yourself.)

+

description

+

Put a description in it. +It's a string. +This helps people discover your package, as it's listed in npm search.

+

keywords

+

Put keywords in it. +It's an array of strings. +This helps people discover your package as it's listed in npm search.

+

homepage

+

The URL to the project homepage.

+

Example:

+
"homepage": "https://github.com/npm/example#readme"
+
+

bugs

+

The URL to your project's issue tracker and / or the email address to which issues should be reported. +These are helpful for people who encounter issues with your package.

+

It should look like this:

+
{
+  "bugs": {
+    "url": "https://github.com/npm/example/issues",
+    "email": "example@npmjs.com"
+  }
+}
+
+

You can specify either one or both values. +If you want to provide only a URL, you can specify the value for "bugs" as a simple string instead of an object.

+

If a URL is provided, it will be used by the npm bugs command.

+

license

+

You should specify a license for your package so that people know how they are permitted to use it, and any restrictions you're placing on it.

+

If you're using a common license such as BSD-2-Clause or MIT, add a current SPDX license identifier for the license you're using, like this:

+
{
+  "license" : "BSD-3-Clause"
+}
+
+

You can check the full list of SPDX license IDs. +Ideally, you should pick one that is OSI approved.

+

If your package is licensed under multiple common licenses, use an SPDX license expression syntax version 2.0 string, like this:

+
{
+  "license" : "(ISC OR GPL-3.0)"
+}
+
+

If you are using a license that hasn't been assigned an SPDX identifier, or if you are using a custom license, use a string value like this one:

+
{
+  "license" : "SEE LICENSE IN <filename>"
+}
+
+

Then include a file named <filename> at the top level of the package.

+

Some old packages used license objects or a "licenses" property containing an array of license objects:

+
// Not valid metadata
+{
+  "license" : {
+    "type" : "ISC",
+    "url" : "https://opensource.org/licenses/ISC"
+  }
+}
+
+// Not valid metadata
+{
+  "licenses" : [
+    {
+      "type": "MIT",
+      "url": "https://www.opensource.org/licenses/mit-license.php"
+    },
+    {
+      "type": "Apache-2.0",
+      "url": "https://opensource.org/licenses/apache2.0.php"
+    }
+  ]
+}
+
+

Those styles are now deprecated. +Instead, use SPDX expressions, like this:

+
{
+  "license": "ISC"
+}
+
+
{
+  "license": "(MIT OR Apache-2.0)"
+}
+
+

Finally, if you do not wish to grant others the right to use a private or unpublished package under any terms:

+
{
+  "license": "UNLICENSED"
+}
+
+

Consider also setting "private": true to prevent accidental publication.

+

people fields: author, contributors

+

The "author" is one person. +"contributors" is an array of people. +A "person" is an object with a "name" field and optionally "url" and "email", like this:

+
{
+  "name" : "Barney Rubble",
+  "email" : "barney@npmjs.com",
+  "url" : "http://barnyrubble.npmjs.com/"
+}
+
+

Or you can shorten that all into a single string, and npm will parse it for you:

+
{
+  "author": "Barney Rubble <barney@npmjs.com> (http://barnyrubble.npmjs.com/)"
+}
+
+

Both email and url are optional either way.

+

npm also sets a top-level "maintainers" field with your npm user info.

+

funding

+

You can specify an object containing a URL that provides up-to-date information about ways to help fund development of your package, a string URL, or an array of objects and string URLs:

+
{
+  "funding": {
+    "type" : "individual",
+    "url" : "http://npmjs.com/donate"
+  }
+}
+
+
{
+  "funding": {
+    "type" : "patreon",
+    "url" : "https://www.patreon.com/user"
+  }
+}
+
+
{
+  "funding": "http://npmjs.com/donate"
+}
+
+
{
+  "funding": [
+    {
+      "type" : "individual",
+      "url" : "http://npmjs.com/donate"
+    },
+    "http://npmjs.com/donate-also",
+    {
+      "type" : "patreon",
+      "url" : "https://www.patreon.com/user"
+    }
+  ]
+}
+
+

Users can use the npm fund subcommand to list the funding URLs of all dependencies of their project, direct and indirect. +A shortcut to visit each funding URL is also available when providing the project name such as: +npm fund <projectname> (when there are multiple URLs, the first one will be visited)

+
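For example:

npm fund                  # list funding info for the current project's dependency tree
npm fund example-package  # hypothetical name; opens that package's first funding URL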

files

+

The optional files field is an array of file patterns that describes the entries to be included when your package is installed as a dependency. +File patterns follow a similar syntax to .gitignore, but reversed: including a file, directory, or glob pattern (*, **/*, and such) will make it so that file is included in the tarball when it's packed. +Omitting the field will make it default to ["*"], which means it will include all files.

+
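For example (the patterns are illustrative):

{
  "files": [
    "dist/",
    "lib/**/*.js"
  ]
}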

Some special files and directories are also included or excluded regardless of whether they exist in the files array (see below).

+

You can also provide a .npmignore file in the root of your package or in subdirectories, which will keep files from being included. +At the root of your package it will not override the "files" field, but in subdirectories it will. +The .npmignore file works just like a .gitignore. +If there is a .gitignore file, and .npmignore is missing, .gitignore's contents will be used instead.

+

Certain files are always included, regardless of settings:

+ +

README & LICENSE can have any case and extension.

+

Some files are always ignored by default:

+ +

Most of these ignored files can be included specifically if included in the files globs. +Exceptions to this are:

+ +

These cannot be included.

+

exports

+

The "exports" provides a modern alternative to "main" allowing multiple entry points to be defined, conditional entry resolution support between environments, and preventing any other entry points besides those defined in "exports". This encapsulation allows module authors to clearly define the public interface for their package. +For more details see the node.js documentation on package entry points

+
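A minimal sketch, with hypothetical paths, defining a root entry point and one subpath:

{
  "exports": {
    ".": "./index.js",
    "./utils": "./lib/utils.js"
  }
}

With this, require("pkg/utils") resolves to ./lib/utils.js, while any subpath not listed in "exports" is not reachable.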

main

+

The main field is a module ID that is the primary entry point to your program. +That is, if your package is named foo, and a user installs it, and then does require("foo"), then your main module's exports object will be returned.

+

This should be a module relative to the root of your package folder.

+

For most modules, it makes the most sense to have a main script and often not much else.

+

If main is not set, it defaults to index.js in the package's root folder.

+
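For example (the path is hypothetical):

{
  "name": "foo",
  "main": "./lib/foo.js"
}

With this, require("foo") returns the exports of ./lib/foo.js.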

browser

+

If your module is meant to be used client-side the browser field should be used instead of the main field. +This is helpful to hint users that it might rely on primitives that aren't available in Node.js modules. +(e.g. window)

+
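A common shape, with hypothetical paths, is to point main at the Node.js entry and browser at the client entry; tools that honor the browser field will pick the latter:

{
  "main": "./lib/index.node.js",
  "browser": "./lib/index.browser.js"
}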

bin

+

A lot of packages have one or more executable files that they'd like to install into the PATH. npm makes this pretty easy (in fact, it uses this feature to install the "npm" executable.)

+

To use this, supply a bin field in your package.json which is a map of command name to local file name. +When this package is installed globally, that file will be either linked inside the global bins directory or a cmd (Windows Command File) will be created which executes the specified file in the bin field, so it is available to run by name or name.cmd (on Windows PowerShell). +When this package is installed as a dependency in another package, the file will be linked where it will be available to that package either directly by npm exec or by name in other scripts when invoking them via npm run.

+

For example, myapp could have this:

+
{
+  "bin": {
+    "myapp": "bin/cli.js"
+  }
+}
+
+

So, when you install myapp, in case of unix-like OS it'll create a symlink from the cli.js script to /usr/local/bin/myapp and in case of windows it will create a cmd file usually at C:\Users\{Username}\AppData\Roaming\npm\myapp.cmd which runs the cli.js script.

+

If you have a single executable, and its name should be the name of the package, then you can just supply it as a string. +For example:

+
{
+  "name": "my-program",
+  "version": "1.2.5",
+  "bin": "path/to/program"
+}
+
+

would be the same as this:

+
{
+  "name": "my-program",
+  "version": "1.2.5",
+  "bin": {
+    "my-program": "path/to/program"
+  }
+}
+
+

Please make sure that your file(s) referenced in bin starts with #!/usr/bin/env node; otherwise, the scripts are started without the node executable!

+

Note that you can also set the executable files using directories.bin.

+

See folders for more info on executables.

+

man

+

Specify either a single file or an array of filenames to put in place for the man program to find.

+

If only a single file is provided, then it's installed such that it is the result from man <pkgname>, regardless of its actual filename. +For example:

+
{
+  "name": "foo",
+  "version": "1.2.3",
+  "description": "A packaged foo fooer for fooing foos",
+  "main": "foo.js",
+  "man": "./man/doc.1"
+}
+
+

would link the ./man/doc.1 file such that it is the target for man foo

+

If the filename doesn't start with the package name, then it's prefixed. +So, this:

+
{
+  "name": "foo",
+  "version": "1.2.3",
+  "description": "A packaged foo fooer for fooing foos",
+  "main": "foo.js",
+  "man": [
+    "./man/foo.1",
+    "./man/bar.1"
+  ]
+}
+
+

will create files to do man foo and man foo-bar.

+

Man files must end with a number, and optionally a .gz suffix if they are compressed. +The number dictates which man section the file is installed into.

+
{
+  "name": "foo",
+  "version": "1.2.3",
+  "description": "A packaged foo fooer for fooing foos",
+  "main": "foo.js",
+  "man": [
+    "./man/foo.1",
+    "./man/foo.2"
+  ]
+}
+
+

will create entries for man foo and man 2 foo

+

directories

+

The CommonJS Packages spec details a few ways that you can indicate the structure of your package using a directories object. +If you look at npm's package.json, you'll see that it has directories for doc, lib, and man.

+

In the future, this information may be used in other creative ways.

+

directories.bin

+

If you specify a bin directory in directories.bin, all the files in that folder will be added.

+

Because of the way the bin directive works, specifying both a bin path and setting directories.bin is an error. +If you want to specify individual files, use bin, and for all the files in an existing bin directory, use directories.bin.

+

directories.man

+

A folder that is full of man pages. +Sugar to generate a "man" array by walking the folder.

+
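For example (folder names are illustrative):

{
  "directories": {
    "bin": "./bin",
    "man": "./man"
  }
}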

repository

+

Specify the place where your code lives. +This is helpful for people who want to contribute. +If the git repo is on GitHub, then the npm repo command will be able to find you.

+

Do it like this:

+
{
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/npm/cli.git"
+  }
+}
+
+

The URL should be a publicly available (perhaps read-only) URL that can be handed directly to a VCS program without any modification. +It should not be a URL to an html project page that you put in your browser. +It's for computers.

+

For GitHub, GitHub gist, Bitbucket, or GitLab repositories you can use the same shortcut syntax you use for npm install:

+
{
+  "repository": "npm/example",
+
+  "repository": "github:npm/example",
+
+  "repository": "gist:11081aaa281",
+
+  "repository": "bitbucket:user/repo",
+
+  "repository": "gitlab:user/repo"
+}
+
+

If the package.json for your package is not in the root directory (for example if it is part of a monorepo), you can specify the directory in which it lives:

+
{
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/npm/cli.git",
+    "directory": "workspaces/libnpmpublish"
+  }
+}
+
+

scripts

+

The "scripts" property is a dictionary containing script commands that are run at various times in the lifecycle of your package. +The key is the lifecycle event, and the value is the command to run at that point.

+

See scripts to find out more about writing package scripts.

+
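For example (the script bodies are hypothetical):

{
  "scripts": {
    "test": "node test/run.js",
    "start": "node server.js"
  }
}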

config

+

A "config" object can be used to set configuration parameters used in package scripts that persist across upgrades. +For instance, if a package had the following:

+
{
+  "name": "foo",
+  "config": {
+    "port": "8080"
+  }
+}
+
+

It could also have a "start" script that referenced the npm_package_config_port environment variable.

+
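A sketch of how this fits together (server.js is hypothetical):

{
  "name": "foo",
  "config": {
    "port": "8080"
  },
  "scripts": {
    "start": "node server.js"
  }
}

When npm start runs, server.js can read the value from process.env.npm_package_config_port.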

dependencies

+

Dependencies are specified in a simple object that maps a package name to a version range. +The version range is a string which has one or more space-separated descriptors. +Dependencies can also be identified with a tarball or git URL.

+

Please do not put test harnesses or transpilers or other "development" time tools in your dependencies object. +See devDependencies, below.

+

See semver for more details about specifying version ranges.

+ +

For example, these are all valid:

+
{
+  "dependencies": {
+    "foo": "1.0.0 - 2.9999.9999",
+    "bar": ">=1.0.2 <2.1.2",
+    "baz": ">1.0.2 <=2.3.4",
+    "boo": "2.0.1",
+    "qux": "<1.0.0 || >=2.3.1 <2.4.5 || >=2.5.2 <3.0.0",
+    "asd": "http://npmjs.com/example.tar.gz",
+    "til": "~1.2",
+    "elf": "~1.2.3",
+    "two": "2.x",
+    "thr": "3.3.x",
+    "lat": "latest",
+    "dyl": "file:../dyl",
+    "kpg": "npm:pkg@1.0.0"
+  }
+}
+
+

URLs as Dependencies

+

You may specify a tarball URL in place of a version range.

+

This tarball will be downloaded and installed locally to your package at install time.

+

Git URLs as Dependencies

+

Git URLs are of the form:

+
<protocol>://[<user>[:<password>]@]<hostname>[:<port>][:][/]<path>[#<commit-ish> | #semver:<semver>]
+
+

<protocol> is one of git, git+ssh, git+http, git+https, or git+file.

+

If #<commit-ish> is provided, it will be used to clone exactly that commit. +If the commit-ish has the format #semver:<semver>, <semver> can be any valid semver range or exact version, and npm will look for any tags or refs matching that range in the remote repository, much as it would for a registry dependency. +If neither #<commit-ish> nor #semver:<semver> is specified, then the default branch is used.

+

Examples:

+
git+ssh://git@github.com:npm/cli.git#v1.0.27
+git+ssh://git@github.com:npm/cli#semver:^5.0
+git+https://isaacs@github.com/npm/cli.git
+git://github.com/npm/cli.git#v1.0.27
+
+

When installing from a git repository, the presence of certain fields in the package.json will cause npm to believe it needs to perform a build. +To do so your repository will be cloned into a temporary directory, all of its deps installed, relevant scripts run, and the resulting directory packed and installed.

+

This flow will occur if your git dependency uses workspaces, or if any of the following scripts are present:

+ +

If your git repository includes pre-built artifacts, you will likely want to make sure that none of the above scripts are defined, or your dependency will be rebuilt for every installation.

+

GitHub URLs

+

As of version 1.1.65, you can refer to GitHub URLs as just "foo": +"user/foo-project". Just as with git URLs, a commit-ish suffix can be included. +For example:

+
{
+  "name": "foo",
+  "version": "0.0.0",
+  "dependencies": {
+    "express": "expressjs/express",
+    "mocha": "mochajs/mocha#4727d357ea",
+    "module": "npm/example-github-repo#feature\/branch"
+  }
+}
+
+

Local Paths

+

As of version 2.0.0 you can provide a path to a local directory that contains a package. +Local paths can be saved using npm install -S or npm install --save, using any of these forms:

+
../foo/bar
+~/foo/bar
+./foo/bar
+/foo/bar
+
+

in which case they will be normalized to a relative path and added to your package.json. +For example:

+
{
+  "name": "baz",
+  "dependencies": {
+    "bar": "file:../foo/bar"
+  }
+}
+
+

This feature is helpful for local offline development and creating tests that require npm installing where you don't want to hit an external server, but should not be used when publishing your package to the public registry.

+

note: Packages linked by local path will not have their own dependencies installed when npm install is run. +You must run npm install from inside the local path itself.

+

devDependencies

+

If someone is planning on downloading and using your module in their program, then they probably don't want or need to download and build the external test or documentation framework that you use.

+

In this case, it's best to map these additional items in a devDependencies object.

+

These things will be installed when doing npm link or npm install from the root of a package, and can be managed like any other npm configuration param. +See config for more on the topic.

+

For build steps that are not platform-specific, such as compiling CoffeeScript or other languages to JavaScript, use the prepare script to do this, and make the required package a devDependency.

+

For example:

+
{
+  "name": "@npm/ethopia-waza",
+  "description": "a delightfully fruity coffee varietal",
+  "version": "1.2.3",
+  "devDependencies": {
+    "coffee-script": "~1.6.3"
+  },
+  "scripts": {
+    "prepare": "coffee -o lib/ -c src/waza.coffee"
+  },
+  "main": "lib/waza.js"
+}
+
+

The prepare script will be run before publishing, so that users can consume the functionality without requiring them to compile it themselves. +In dev mode (ie, locally running npm install), it'll run this script as well, so that you can test it easily.

+

peerDependencies

+

In some cases, you want to express the compatibility of your package with a host tool or library, while not necessarily doing a require of this host. +This is usually referred to as a plugin. Notably, your module may be exposing a specific interface, expected and specified by the host documentation.

+

For example:

+
{
+  "name": "@npm/tea-latte",
+  "version": "1.3.5",
+  "peerDependencies": {
+    "@npm/tea": "2.x"
+  }
+}
+
+

This ensures your package @npm/tea-latte can be installed along with the second major version of the host package @npm/tea only. +npm install tea-latte could possibly yield the following dependency graph:

+
├── @npm/tea-latte@1.3.5
+└── @npm/tea@2.2.0
+
+

In npm versions 3 through 6, peerDependencies were not automatically installed, and would raise a warning if an invalid version of the peer dependency was found in the tree. +As of npm v7, peerDependencies are installed by default.

+

Trying to install another plugin with a conflicting requirement may cause an error if the tree cannot be resolved correctly. +For this reason, make sure your plugin requirement is as broad as possible, and not to lock it down to specific patch versions.

+

Assuming the host complies with semver, only changes in the host package's major version will break your plugin. +Thus, if you've worked with every 1.x version of the host package, use "^1.0" or "1.x" to express this. +If you depend on features introduced in 1.5.2, use "^1.5.2".

+

peerDependenciesMeta

+

The peerDependenciesMeta field serves to provide npm more information on how your peer dependencies are to be used. +Specifically, it allows peer dependencies to be marked as optional. +Npm will not automatically install optional peer dependencies. +This allows you to integrate and interact with a variety of host packages without requiring all of them to be installed.

+

For example:

+
{
+  "name": "@npm/tea-latte",
+  "version": "1.3.5",
+  "peerDependencies": {
+    "@npm/tea": "2.x",
+    "@npm/soy-milk": "1.2"
+  },
+  "peerDependenciesMeta": {
+    "@npm/soy-milk": {
+      "optional": true
+    }
+  }
+}
+
+

bundleDependencies

+

This defines an array of package names that will be bundled when publishing the package.

+

In cases where you need to preserve npm packages locally or have them available through a single file download, you can bundle the packages in a tarball file by specifying the package names in the bundleDependencies array and executing npm pack.

+

For example:

+

If we define a package.json like this:

+
{
+  "name": "@npm/awesome-web-framework",
+  "version": "1.0.0",
+  "bundleDependencies": [
+    "@npm/renderized",
+    "@npm/super-streams"
+  ]
+}
+
+

we can obtain @npm/awesome-web-framework-1.0.0.tgz file by running npm pack. +This file contains the dependencies @npm/renderized and @npm/super-streams which can be installed in a new project by executing npm install awesome-web-framework-1.0.0.tgz. +Note that the package names do not include any versions, as that information is specified in dependencies.

+

If this is spelled "bundledDependencies", then that is also honored.

+

Alternatively, "bundleDependencies" can be defined as a boolean value. +A value of true will bundle all dependencies, a value of false will bundle none.

+

optionalDependencies

+

If a dependency can be used, but you would like npm to proceed if it cannot be found or fails to install, then you may put it in the optionalDependencies object. +This is a map of package name to version or URL, just like the dependencies object. +The difference is that build failures do not cause installation to fail. +Running npm install --omit=optional will prevent these dependencies from being installed.

+

It is still your program's responsibility to handle the lack of the dependency. +For example, something like this:

+
try {
+  var foo = require('@npm/foo')
+  var fooVersion = require('@npm/foo/package.json').version
+} catch (er) {
+  foo = null
+}
+if ( notGoodFooVersion(fooVersion) ) {
+  foo = null
+}
+
+// .. then later in your program ..
+
+if (foo) {
+  foo.doFooThings()
+}
+
+

Entries in optionalDependencies will override entries of the same name in dependencies, so it's usually best to only put a dependency in one place.

+

overrides

+

If you need to make specific changes to dependencies of your dependencies, for example replacing the version of a dependency with a known security issue, replacing an existing dependency with a fork, or making sure that the same version of a package is used everywhere, then you may add an override.

+

Overrides provide a way to replace a package in your dependency tree with another version, or another package entirely. +These changes can be scoped as specific or as vague as desired.

+

Overrides are only considered in the root package.json file for a project. +Overrides in installed dependencies (including workspaces) are not considered in dependency tree resolution. +Published packages may dictate their resolutions by pinning dependencies or using an npm-shrinkwrap.json file.

+

To make sure the package @npm/foo is always installed as version 1.0.0 no matter what version your dependencies rely on:

+
{
+  "overrides": {
+    "@npm/foo": "1.0.0"
+  }
+}
+
+

The above is a shorthand notation; the full object form can be used to allow overriding a package itself as well as a child of the package. +This will cause @npm/foo to always be 1.0.0 while also making @npm/bar at any depth beyond @npm/foo also 1.0.0:

+
{
+  "overrides": {
+    "@npm/foo": {
+      ".": "1.0.0",
+      "@npm/bar": "1.0.0"
+    }
+  }
+}
+
+

To only override @npm/foo to be 1.0.0 when it's a child (or grandchild, or great grandchild, etc) of the package @npm/bar:

+
{
+  "overrides": {
+    "@npm/bar": {
+      "@npm/foo": "1.0.0"
+    }
+  }
+}
+
+

Keys can be nested to any arbitrary length. +To override @npm/foo only when it's a child of @npm/bar and only when @npm/bar is a child of @npm/baz:

+
{
+  "overrides": {
+    "@npm/baz": {
+      "@npm/bar": {
+        "@npm/foo": "1.0.0"
+      }
+    }
+  }
+}
+
+

The key of an override can also include a version, or range of versions. +To override @npm/foo to 1.0.0, but only when it's a child of @npm/bar@2.0.0:

+
{
+  "overrides": {
+    "@npm/bar@2.0.0": {
+      "@npm/foo": "1.0.0"
+    }
+  }
+}
+
+

You may not set an override for a package that you directly depend on unless both the dependency and the override itself share the exact same spec. +To make this limitation easier to deal with, overrides may also be defined as a reference to a spec for a direct dependency by prefixing the name of the package you wish the version to match with a $.

+
{
+  "dependencies": {
+    "@npm/foo": "^1.0.0"
+  },
+  "overrides": {
+    // BAD, will throw an EOVERRIDE error
+    // "foo": "^2.0.0"
+    // GOOD, specs match so override is allowed
+    // "foo": "^1.0.0"
+    // BEST, the override is defined as a reference to the dependency
+    "@npm/foo": "$foo",
+    // the referenced package does not need to match the overridden one
+    "@npm/bar": "$foo"
+  }
+}
+
+

engines

+

You can specify the version of node that your stuff works on:

+
{
+  "engines": {
+    "node": ">=0.10.3 <15"
+  }
+}
+
+

And, like with dependencies, if you don't specify the version (or if you specify "*" as the version), then any version of node will do.

+

You can also use the "engines" field to specify which versions of npm are capable of properly installing your program. +For example:

+
{
+  "engines": {
+    "npm": "~1.0.20"
+  }
+}
+
+

Unless the user has set the engine-strict config flag, this field is advisory only and will only produce warnings when your package is installed as a dependency.

+

os

+

You can specify which operating systems your module will run on:

+
{
+  "os": [
+    "darwin",
+    "linux"
+  ]
+}
+
+

You can also block instead of allowing operating systems, just prepend the blocked os with a '!':

+
{
+  "os": [
+    "!win32"
+  ]
+}
+
+

The host operating system is determined by process.platform

+

It is allowed to both block and allow an item, although there isn't any good reason to do this.

+

cpu

+

If your code only runs on certain cpu architectures, you can specify which ones.

+
{
+  "cpu": [
+    "x64",
+    "ia32"
+  ]
+}
+
+

Like the os option, you can also block architectures:

+
{
+  "cpu": [
+    "!arm",
+    "!mips"
+  ]
+}
+
+

The host architecture is determined by process.arch

+

libc

+

If your code only runs or builds in certain versions of libc, you can specify which ones. +This field only applies if os is linux.

+
{
+  "os": "linux",
+  "libc": "glibc"
+}
+
+

devEngines

+

The devEngines field helps ensure that all engineers working on a codebase use the same tooling.

+

You can specify a devEngines property in your package.json which will run before install, ci, and run commands.

+
+

Note: engines and devEngines differ in object shape. +They also function very differently. +engines is designed to alert the user when a dependency uses a different npm or node version than the project it's being used in, whereas devEngines is used to alert people interacting with the source code of a project.

+
+

The supported keys under the devEngines property are cpu, os, libc, runtime, and packageManager. +Each property can be an object or an array of objects. +Objects must contain name, and can optionally specify version and onFail. +onFail can be warn, error, or ignore, and defaults to error if left undefined. +npm will assume that you're running with node. +Here's an example of a project that will fail if the environment is not node and npm. +If you set runtime.name or packageManager.name to any other string, it will fail within the npm CLI.

+
{
+  "devEngines": {
+    "runtime": {
+      "name": "node",
+      "onFail": "error"
+    },
+    "packageManager": {
+      "name": "npm",
+      "onFail": "error"
+    }
+  }
+}
+
+

private

+

If you set "private": true in your package.json, then npm will refuse to publish it.

+

This is a way to prevent accidental publication of private repositories. +If you would like to ensure that a given package is only ever published to a specific registry (for example, an internal registry), then use the publishConfig dictionary described below to override the registry config param at publish-time.

+

publishConfig

+

This is a set of config values that will be used at publish-time. +It's especially handy if you want to set the tag, registry or access, so that you can ensure that a given package is not tagged with "latest", published to the global public registry or that a scoped module is private by default.

+

See config to see the list of config options that can be overridden.

+
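For example (the registry URL is a placeholder):

{
  "publishConfig": {
    "registry": "https://registry.example.com/",
    "tag": "beta",
    "access": "public"
  }
}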

workspaces

+

The optional workspaces field is an array of file patterns that describes locations within the local file system that the install client should look up to find each workspace that needs to be symlinked to the top level node_modules folder.

+

It can describe either the direct paths of the folders to be used as workspaces or it can define globs that will resolve to these same folders.

+

In the following example, all folders located inside the folder ./packages will be treated as workspaces as long as they have valid package.json files inside them:

+
{
+  "name": "workspace-example",
+  "workspaces": [
+    "./packages/*"
+  ]
+}
+
+

See workspaces for more examples.

+

DEFAULT VALUES

+

npm will default some values based on package contents.

+ +

SEE ALSO

+
+ + +
+ + + + \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/output/configuring-npm/package-lock-json.html b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/output/configuring-npm/package-lock-json.html new file mode 100644 index 0000000000000000000000000000000000000000..cfb5d95a82739532d61016d2592ac0335db131f7 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/output/configuring-npm/package-lock-json.html @@ -0,0 +1,335 @@ + + +package-lock.json + + + + + +
+
+

+ package-lock.json + @11.6.2 +

+A manifestation of the manifest +
+ +
+

Table of contents

+ +
+ +

Description

+

package-lock.json is automatically generated for any operations where npm modifies either the node_modules tree, or package.json. +It describes the exact tree that was generated, such that subsequent installs are able to generate identical trees, regardless of intermediate dependency updates.

+

This file is intended to be committed into source repositories, and serves various purposes:

+ +

When npm creates or updates package-lock.json, it will infer line endings and indentation from package.json so that the formatting of both files matches.

+

package-lock.json vs npm-shrinkwrap.json

+

Both of these files have the same format, and perform similar functions in the root of a project.

+

The difference is that package-lock.json cannot be published, and it will be ignored if found in any place other than the root project.

+

In contrast, npm-shrinkwrap.json allows publication, and defines the dependency tree from the point encountered. +This is not recommended unless deploying a CLI tool or otherwise using the publication process for producing production packages.

+

If both package-lock.json and npm-shrinkwrap.json are present in the root of a project, npm-shrinkwrap.json will take precedence and package-lock.json will be ignored.

+

Hidden Lockfiles

+

In order to avoid processing the node_modules folder repeatedly, npm as of v7 uses a "hidden" lockfile present in node_modules/.package-lock.json. +This contains information about the tree, and is used in lieu of reading the entire node_modules hierarchy provided that the following conditions are met:

+ +

That is, the hidden lockfile will only be relevant if it was created as part of the most recent update to the package tree. +If another CLI mutates the tree in any way, this will be detected, and the hidden lockfile will be ignored.

+

Note that it is possible to manually change the contents of a package in such a way that the modified time of the package folder is unaffected. +For example, if you add a file to node_modules/foo/lib/bar.js, then the modified time on node_modules/foo will not reflect this change. +If you are manually editing files in node_modules, it is generally best to delete the file at node_modules/.package-lock.json.

+

As the hidden lockfile is ignored by older npm versions, it does not contain the backwards compatibility affordances present in "normal" lockfiles. +That is, it is lockfileVersion: 3, rather than lockfileVersion: 2.

+

Handling Old Lockfiles

+

When npm detects a lockfile from npm v6 or before during the package installation process, it is automatically updated to fetch missing information from either the node_modules tree or (in the case of empty node_modules trees or very old lockfile formats) the npm registry.

+

File Format

+

name

+

The name of the package this is a package-lock for. +This will match what's in package.json.

+

version

+

The version of the package this is a package-lock for. +This will match what's in package.json.

+

lockfileVersion

+

An integer version, starting at 1 with the version number of this document whose semantics were used when generating this package-lock.json.

+

Note that the file format changed significantly in npm v7 to track information that would have otherwise required looking in node_modules or the npm registry. +Lockfiles generated by npm v7 will contain lockfileVersion: 2.

+ +

npm will always attempt to get whatever data it can out of a lockfile, even if it is not a version that it was designed to support.

+

packages

+

This is an object that maps package locations to an object containing the information about that package.

+

The root project is typically listed with a key of "", and all other packages are listed with their relative paths from the root project folder.

+

Package descriptors have the following fields:

+ +

dependencies

+

Legacy data for supporting versions of npm that use lockfileVersion: 1. +This is a mapping of package names to dependency objects. +Because the object structure is strictly hierarchical, symbolic link dependencies are somewhat challenging to represent in some cases.

+

npm v7 ignores this section entirely if a packages section is present, but does keep it up to date in order to support switching between npm v6 and npm v7.

+

Dependency objects have the following fields:

+ +

See also

+
+ + +
+ + + + \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/output/using-npm/config.html b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/output/using-npm/config.html new file mode 100644 index 0000000000000000000000000000000000000000..1cffacc56efad26df73f26212ddccd83824d2df9 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/output/using-npm/config.html @@ -0,0 +1,1686 @@ + + +config + + + + + +
+
+

+ config + @11.6.2 +

+More than you probably want to know about npm configuration +
+ +
+

Table of contents

+
+
+ +

Description

+

This article details npm configuration in general. +To learn about the config command, see npm config.

+

npm gets its configuration values from the following sources, sorted by priority:

+

Command Line Flags

+

Putting --foo bar on the command line sets the foo configuration parameter to "bar". +A -- argument tells the cli parser to stop reading flags. +Using --flag without specifying any value will set the value to true.

+

Example: --flag1 --flag2 will set both configuration parameters to true, while --flag1 --flag2 bar will set flag1 to true, and flag2 to bar. +Finally, --flag1 --flag2 -- bar will set both configuration parameters to true, and the bar is taken as a command argument.

+

Environment Variables

+

Any environment variables that start with npm_config_ will be interpreted as a configuration parameter. +For example, putting npm_config_foo=bar in your environment will set the foo configuration parameter to bar. +Any environment configurations that are not given a value will be given the value of true. +Config values are case-insensitive, so NPM_CONFIG_FOO=bar will work the same. +However, please note that inside scripts npm will set its own environment variables and Node will prefer those lowercase versions over any uppercase ones that you might set. +For details see this issue.

+

Notice that you need to use underscores instead of dashes, so --allow-same-version would become npm_config_allow_same_version=true.

+
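For example, both of these configure the same run:

npm_config_loglevel=verbose npm install
npm install --loglevel verbose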

npmrc Files

+

The four relevant files are:

+ +

See npmrc for more details.

+

Default Configs

+

Run npm config ls -l to see a set of configuration parameters that are internal to npm, and are defaults if nothing else is specified.

+

Shorthands and Other CLI Niceties

+

The following shorthands are parsed on the command-line:

+ +

If the specified configuration param resolves unambiguously to a known configuration parameter, then it is expanded to that configuration parameter. +For example:

+
npm ls --par
+# same as:
+npm ls --parseable
+
+

If multiple single-character shorthands are strung together, and the resulting combination is unambiguously not some other configuration param, then it is expanded to its various component pieces. +For example:

+
npm ls -gpld
+# same as:
+npm ls --global --parseable --long --loglevel info
+
+

Config Settings

+

_auth

+ +

A basic-auth string to use when authenticating against the npm +registry. This will ONLY be used to authenticate against the npm +registry. For other registries you will need to scope it like +"//other-registry.tld/:_auth"

+

Warning: This should generally not be set via a command-line option. +It is safer to use a registry-provided authentication bearer token +stored in the ~/.npmrc file by running npm login.

+
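In an .npmrc file, the scoped form looks like this (the value is a placeholder):

//other-registry.tld/:_auth=BASE64_ENCODED_CREDENTIALS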

access

+ +

If you do not want your scoped package to be publicly viewable (and +installable) set --access=restricted.

+

Unscoped packages cannot be set to restricted.

+

Note: This defaults to not changing the current access level for +existing packages. Specifying a value of restricted or public +during publish will change the access for an existing package the +same way that npm access set status would.

+

all

+ +

When running npm outdated and npm ls, setting --all will show +all outdated or installed packages, rather than only those directly +depended upon by the current project.

+

allow-same-version

+ +

Prevents throwing an error when npm version is used to set the new +version to the same value as the current version.

+

audit

+ +

When "true" submit audit reports alongside the current npm command to +the default registry and all registries configured for scopes. See +the documentation for npm audit for details +on what is submitted.

+

audit-level

+ +

The minimum level of vulnerability for npm audit to exit with a +non-zero exit code.

+

auth-type

+ +

What authentication strategy to use with login. Note that if an +otp config is given, this value will always be set to legacy.

+

before

+ +

If passed to npm install, will rebuild the npm tree such that only +versions that were available on or before the given date are +installed. If there are no versions available for the current set of +dependencies, the command will error.

+

If the requested version is a dist-tag and the given tag does not +pass the --before filter, the most recent version less than or +equal to that tag will be used. For example, foo@latest might +install foo@1.2 even though latest is 2.0.
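For example, to reconstruct the tree as it would have installed at the start of 2024 (the date is illustrative):

npm install --before 2024-01-01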

+

bin-links

+ +

Tells npm to create symlinks (or .cmd shims on Windows) for package +executables.

+

Set to false to have it not do this. This can be used to work around +the fact that some file systems don't support symlinks, even on +ostensibly Unix systems.

browser

The browser that is called by npm commands to open websites.

Set to false to suppress browser behavior and instead print urls to the terminal.

Set to true to use the default system URL opener.

ca

The Certificate Authority signing certificate that is trusted for SSL connections to the registry. Values should be in PEM format (Windows calls it "Base-64 encoded X.509 (.CER)") with newlines replaced by the string "\n". For example:

ca="-----BEGIN CERTIFICATE-----\nXXXX\nXXXX\n-----END CERTIFICATE-----"

Set to null to only allow "known" registrars, or to a specific CA cert to trust only that specific signing authority.

Multiple CAs can be trusted by specifying an array of certificates:

ca[]="..."
ca[]="..."

See also the strict-ssl config.

cache

The location of npm's cache directory.

cafile

A path to a file containing one or multiple Certificate Authority signing certificates. Similar to the ca setting, but allows for multiple CAs, as well as for the CA information to be stored in a file on disk.

call

Optional companion option for npm exec, npx that allows for specifying a custom command to be run along with the installed packages.

npm exec --package yo --package generator-node --call "yo node"

cidr

This is a list of CIDR addresses to be used when configuring limited access tokens with the npm token create command.

color

If false, never shows colors. If "always" then always shows colors. If true, then only prints color codes for tty file descriptors.

commit-hooks

Run git commit hooks when using the npm version command.

cpu

Override CPU architecture of native modules to install. Acceptable values are the same as the cpu field of package.json, which comes from process.arch.

depth

The depth to go when recursing packages for npm ls.

If not set, npm ls will show only the immediate dependencies of the root project. If --all is set, then npm will show all dependencies by default.

description

Show the description in npm search.

diff

Define arguments to compare in npm diff.

diff-dst-prefix

Destination prefix to be used in npm diff output.

diff-ignore-all-space

Ignore whitespace when comparing lines in npm diff.

diff-name-only

Prints only filenames when using npm diff.

diff-no-prefix

Do not show any source or destination prefix in npm diff output.

Note: this causes npm diff to ignore the --diff-src-prefix and --diff-dst-prefix configs.

diff-src-prefix

Source prefix to be used in npm diff output.

diff-text

Treat all files as text in npm diff.

diff-unified

The number of lines of context to print in npm diff.

dry-run

Indicates that you don't want npm to make any changes and that it should only report what it would have done. This can be passed into any of the commands that modify your local installation, e.g. install, update, dedupe, uninstall, as well as pack and publish.

Note: This is NOT honored by other network related commands, e.g. dist-tags, owner, etc.
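For instance (package name hypothetical), this previews an install without touching node_modules or any lockfile:

npm install some-pkg --dry-run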

editor

The command to run for npm edit and npm config edit.

engine-strict

If set to true, then npm will stubbornly refuse to install (or even consider installing) any package that claims to not be compatible with the current Node.js version.

This can be overridden by setting the --force flag.

expect-result-count

Tells npm to expect a specific number of results from the command.

This config cannot be used with: expect-results

expect-results

Tells npm whether or not to expect results from the command. Can be either true (expect some results) or false (expect no results).

This config cannot be used with: expect-result-count
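Both flags pair with npm query; a hedged sketch of asserting on query output in CI:

# fail unless the query returns exactly one result
npm query '#react' --expect-result-count=1
# fail if the query returns any results at all
npm query ':vuln' --no-expect-results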

fetch-retries

The "retries" config for the retry module to use when fetching packages from the registry.

npm will retry idempotent read requests to the registry in the case of network failures or 5xx HTTP errors.

fetch-retry-factor

The "factor" config for the retry module to use when fetching packages.

fetch-retry-maxtimeout

The "maxTimeout" config for the retry module to use when fetching packages.

fetch-retry-mintimeout

The "minTimeout" config for the retry module to use when fetching packages.

fetch-timeout

The maximum amount of time to wait for HTTP requests to complete.

force

Removes various protections against unfortunate side effects, common mistakes, unnecessary performance degradation, and malicious input.

If you don't have a clear idea of what you want to do, it is strongly recommended that you do not use this option!

foreground-scripts

Run all build scripts (i.e. preinstall, install, and postinstall) for installed packages in the foreground process, sharing standard input, output, and error with the main npm process.

Note that this will generally make installs run slower, and be much noisier, but can be useful for debugging.

format-package-lock

Format package-lock.json or npm-shrinkwrap.json as a human-readable file.

fund

When "true" displays the message at the end of each npm install acknowledging the number of dependencies looking for funding. See npm fund for details.

git

The command to use for git commands. If git is installed on the computer, but is not in the PATH, then set this to the full path to the git binary.

git-tag-version

Tag the commit when using the npm version command. Setting this to false results in no commit being made at all.

global

Operates in "global" mode, so that packages are installed into the prefix folder instead of the current working directory. See folders for more on the differences in behavior.

globalconfig

The config file to read for global config options.

heading

The string that starts all the debugging log output.

https-proxy

A proxy to use for outgoing https requests. If the HTTPS_PROXY or https_proxy or HTTP_PROXY or http_proxy environment variables are set, proxy settings will be honored by the underlying make-fetch-happen library.

if-present

If true, npm will not exit with an error code when run is invoked for a script that isn't defined in the scripts section of package.json. This option can be used when it's desirable to optionally run a script when it's present and fail if the script fails. This is useful, for example, when running scripts that may only apply for some builds in an otherwise generic CI setup.

This value is not exported to the environment for child processes.

ignore-scripts

If true, npm does not run scripts specified in package.json files.

Note that commands explicitly intended to run a particular script, such as npm start, npm stop, npm restart, npm test, and npm run will still run their intended script if ignore-scripts is set, but they will not run any pre- or post-scripts.

include

Option that allows for defining which types of dependencies to install.

This is the inverse of --omit=<type>.

Dependency types specified in --include will not be omitted, regardless of the order in which omit/include are specified on the command-line.
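For example, include wins over omit regardless of flag order:

npm install --omit=dev --include=dev
# dev dependencies are still installed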

include-staged

Allow installing "staged" published packages, as defined by npm RFC PR #92.

This is experimental, and not implemented by the npm public registry.

include-workspace-root

Include the workspace root when workspaces are enabled for a command.

When false, specifying individual workspaces via the workspace config, or all workspaces via the workspaces flag, will cause npm to operate only on the specified workspaces, and not on the root project.

This value is not exported to the environment for child processes.

init-author-email

The value npm init should use by default for the package author's email.

init-author-name

The value npm init should use by default for the package author's name.

init-author-url

The value npm init should use by default for the package author's homepage.

init-license

The value npm init should use by default for the package license.

init-module

A module that will be loaded by the npm init command. See the documentation for the init-package-json module for more information, or npm init.

init-private

The value npm init should use by default for the package's private flag.

init-type

The value that npm init should use by default for the package.json type field.

init-version

The value that npm init should use by default for the package version number, if not already set in package.json.

install-links

When set, file: protocol dependencies will be packed and installed as regular dependencies instead of creating a symlink. This option has no effect on workspaces.

install-strategy

Sets the strategy for installing packages in node_modules. hoisted (default): Install non-duplicated in top-level, and duplicated as necessary within directory structure. nested: (formerly --legacy-bundling) install in place, no hoisting. shallow: (formerly --global-style) only install direct deps at top-level. linked: (experimental) install in node_modules/.store, link in place, unhoisted.
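For instance, to reproduce the old unhoisted layout:

npm install --install-strategy=nested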

json

Whether or not to output JSON data, rather than the normal output.

Not supported by all npm commands.

legacy-peer-deps

Causes npm to completely ignore peerDependencies when building a package tree, as in npm versions 3 through 6.

If a package cannot be installed because of overly strict peerDependencies that collide, it provides a way to move forward resolving the situation.

This differs from --omit=peer, in that --omit=peer will avoid unpacking peerDependencies on disk, but will still design a tree such that peerDependencies could be unpacked in a correct place.

Use of legacy-peer-deps is not recommended, as it will not enforce the peerDependencies contract that meta-dependencies may rely on.

libc

Override libc of native modules to install. Acceptable values are the same as the libc field of package.json.

link

Used with npm ls, limiting output to only those packages that are linked.

local-address

The IP address of the local interface to use when making connections to the npm registry. Must be IPv4 in versions of Node prior to 0.12.

location

When passed to npm config this refers to which config file to use.

When set to "global" mode, packages are installed into the prefix folder instead of the current working directory. See folders for more on the differences in behavior.

lockfile-version

Set the lockfile format version to be used in package-lock.json and npm-shrinkwrap.json files. Possible options are:

1: The lockfile version used by npm versions 5 and 6. Lacks some data that is used during the install, resulting in slower and possibly less deterministic installs. Prevents lockfile churn when interoperating with older npm versions.

2: The default lockfile version used by npm versions 7 and 8. Includes both the version 1 lockfile data and version 3 lockfile data, for maximum determinism and interoperability, at the expense of more bytes on disk.

3: Only the new lockfile information introduced in npm version 7. Smaller on disk than lockfile version 2, but not interoperable with older npm versions. Ideal if all users are on npm version 7 and higher.
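A common recipe (a sketch using flags described on this page) upgrades an existing lockfile in place without touching node_modules:

npm install --package-lock-only --lockfile-version=3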

loglevel

What level of logs to report. All logs are written to a debug log, with the path to that file printed if the execution of a command fails.

Any logs of a higher level than the setting are shown. The default is "notice".

See also the foreground-scripts config.

logs-dir

The location of npm's log directory. See npm logging for more information.

logs-max

The maximum number of log files to store.

If set to 0, no log files will be written for the current run.

long

Show extended information in ls, search, and help-search.

maxsockets

The maximum number of connections to use per origin (protocol/host/port combination).

message

Commit message which is used by npm version when creating a version commit.

Any "%s" in the message will be replaced with the version number.

node-gyp

This is the location of the "node-gyp" bin. By default it uses the one that ships with npm itself.

You can use this config to specify your own "node-gyp" to run when it is required to build a package.

node-options

Options to pass through to Node.js via the NODE_OPTIONS environment variable. This does not impact how npm itself is executed but it does impact how lifecycle scripts are called.

noproxy

Domain extensions that should bypass any proxies.

Also accepts a comma-delimited string.

offline

Force offline mode: no network requests will be done during install. To allow the CLI to fill in missing cache data, see --prefer-offline.

omit

Dependency types to omit from the installation tree on disk.

Note that these dependencies are still resolved and added to the package-lock.json or npm-shrinkwrap.json file. They are just not physically installed on disk.

If a package type appears in both the --include and --omit lists, then it will be included.

If the resulting omit list includes 'dev', then the NODE_ENV environment variable will be set to 'production' for all lifecycle scripts.
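For example, a production install that skips both dev and optional dependencies:

npm install --omit=dev --omit=optional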

omit-lockfile-registry-resolved

This option causes npm to create lock files without a resolved key for registry dependencies. Subsequent installs will need to resolve tarball endpoints with the configured registry, likely resulting in a longer install time.

os

Override OS of native modules to install. Acceptable values are the same as the os field of package.json, which comes from process.platform.

otp

This is a one-time password from a two-factor authenticator. It's needed when publishing or changing package permissions with npm access.

If not set, and a registry response fails with a challenge for a one-time password, npm will prompt on the command line for one.

pack-destination

Directory in which npm pack will save tarballs.

package

The package or packages to install for npm exec.

package-lock

If set to false, then ignore package-lock.json files when installing. This will also prevent writing package-lock.json if save is true.

package-lock-only

If set to true, the current operation will only use the package-lock.json, ignoring node_modules.

For update this means only the package-lock.json will be updated, instead of checking node_modules and downloading dependencies.

For list this means the output will be based on the tree described by the package-lock.json, rather than the contents of node_modules.

parseable

Output parseable results from commands that write to standard output. For npm search, this will be tab-separated table format.

prefer-dedupe

Prefer to deduplicate packages if possible, rather than choosing a newer version of a dependency.

prefer-offline

If true, staleness checks for cached data will be bypassed, but missing data will be requested from the server. To force full offline mode, use --offline.

prefer-online

If true, staleness checks for cached data will be forced, making the CLI look for updates immediately even for fresh package data.

prefix

The location to install global items. If set on the command line, then it forces non-global commands to run in the specified folder.

preid

The "prerelease identifier" to use as a prefix for the "prerelease" part of a semver. Like the rc in 1.2.0-rc.8.

progress

When set to true, npm will display a progress bar during time intensive operations, if process.stderr and process.stdout are a TTY.

Set to false to suppress the progress bar.

provenance

When publishing from a supported cloud CI/CD system, the package will be publicly linked to where it was built and published from.

This config cannot be used with: provenance-file

provenance-file

When publishing, the provenance bundle at the given path will be used.

This config cannot be used with: provenance

proxy

A proxy to use for outgoing http requests. If the HTTP_PROXY or http_proxy environment variables are set, proxy settings will be honored by the underlying request library.

read-only

This is used to mark a token as unable to publish when configuring limited access tokens with the npm token create command.

rebuild-bundle

Rebuild bundled dependencies after installation.

registry

The base URL of the npm registry.

replace-registry-host

Defines behavior for replacing the registry host in a lockfile with the configured registry.

The default behavior is to replace package dist URLs from the default registry (https://registry.npmjs.org) with the configured registry. If set to "never", then use the registry value. If set to "always", then replace the registry host with the configured host every time.

You may also specify a bare hostname (e.g., "registry.npmjs.org").

save

Save installed packages to a package.json file as dependencies.

When used with the npm rm command, removes the dependency from package.json.

Will also prevent writing to package-lock.json if set to false.

save-bundle

If a package would be saved at install time by the use of --save, --save-dev, or --save-optional, then also put it in the bundleDependencies list.

Ignored if --save-peer is set, since peerDependencies cannot be bundled.

save-dev

Save installed packages to a package.json file as devDependencies.

This config cannot be used with: save-optional, save-peer, save-prod

save-exact

Dependencies saved to package.json will be configured with an exact version rather than using npm's default semver range operator.

save-optional

Save installed packages to a package.json file as optionalDependencies.

This config cannot be used with: save-dev, save-peer, save-prod

save-peer

Save installed packages to a package.json file as peerDependencies.

This config cannot be used with: save-dev, save-optional, save-prod

save-prefix

Configure how versions of packages installed to a package.json file via --save or --save-dev get prefixed.

For example if a package has version 1.2.3, by default its version is set to ^1.2.3 which allows minor upgrades for that package, but after npm config set save-prefix='~' it would be set to ~1.2.3 which only allows patch upgrades.
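A quick sketch (the package name and resulting version are hypothetical):

npm config set save-prefix='~'
npm install some-pkg    # saved to package.json as "~1.2.3"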

save-prod

Save installed packages into dependencies specifically. This is useful if a package already exists in devDependencies or optionalDependencies, but you want to move it to be a non-optional production dependency.

This is the default behavior if --save is true, and neither --save-dev nor --save-optional is true.

This config cannot be used with: save-dev, save-optional, save-peer

sbom-format

SBOM format to use when generating SBOMs.

sbom-type

The type of package described by the generated SBOM. For SPDX, this is the value for the primaryPackagePurpose field. For CycloneDX, this is the value for the type field.

scope

Associate an operation with a scope for a scoped registry.

Useful when logging in to or out of a private registry:

# log in, linking the scope to the custom registry
npm login --scope=@mycorp --registry=https://registry.mycorp.com

# log out, removing the link and the auth token
npm logout --scope=@mycorp

This will cause @mycorp to be mapped to the registry for future installation of packages specified according to the pattern @mycorp/package.

This will also cause npm init to create a scoped package.

# accept all defaults, and create a package named "@foo/whatever",
# instead of just named "whatever"
npm init --scope=@foo --yes

script-shell

The shell to use for scripts run with the npm exec, npm run and npm init <package-spec> commands.

searchexclude

Space-separated options that limit the results from search.

searchlimit

Number of items to limit search results to. Will not apply at all to legacy searches.

searchopts

Space-separated options that are always passed to search.

searchstaleness

The age of the cache, in seconds, before another registry request is made if using the legacy search endpoint.

shell

The shell to run for the npm explore command.

sign-git-commit

If set to true, then the npm version command will commit the new package version using -S to add a signature.

Note that git requires you to have set up GPG keys in your git configs for this to work properly.

sign-git-tag

If set to true, then the npm version command will tag the version using -s to add a signature.

Note that git requires you to have set up GPG keys in your git configs for this to work properly.

strict-peer-deps

If set to true, and --legacy-peer-deps is not set, then any conflicting peerDependencies will be treated as an install failure, even if npm could reasonably guess the appropriate resolution based on non-peer dependency relationships.

By default, conflicting peerDependencies deep in the dependency graph will be resolved using the nearest non-peer dependency specification, even if doing so will result in some packages receiving a peer dependency outside the range set in their package's peerDependencies object.

When such an override is performed, a warning is printed, explaining the conflict and the packages involved. If --strict-peer-deps is set, then this warning is treated as a failure.

strict-ssl

Whether or not to do SSL key validation when making requests to the registry via https.

See also the ca config.

tag

If you ask npm to install a package and don't tell it a specific version, then it will install the specified tag.

It is the tag added to the package@version specified in the npm dist-tag add command, if no explicit tag is given.

When used by the npm diff command, this is the tag used to fetch the tarball that will be compared with the local files by default.

If used in the npm publish command, this is the tag that will be added to the package submitted to the registry.

tag-version-prefix

If set, alters the prefix used when tagging a new version when performing a version increment using npm version. To remove the prefix altogether, set it to the empty string: "".

Because other tools may rely on the convention that npm version tags look like v1.0.0, only use this property if it is absolutely necessary. In particular, use care when overriding this setting for public packages.

timing

If true, writes timing information to a process specific json file in the cache or logs-dir. The file name ends with -timing.json.

You can quickly view it with this json command line: cat ~/.npm/_logs/*-timing.json | npm exec -- json -g.

Timing information will also be reported in the terminal. To suppress this while still writing the timing file, use --silent.

umask

The "umask" value to use when setting the file creation mode on files and folders.

Folders and executables are given a mode which is 0o777 masked against this value. Other files are given a mode which is 0o666 masked against this value.

Note that the underlying system will also apply its own umask value to files and folders that are created, and npm does not circumvent this, but rather adds the --umask config to it.

Thus, the effective default umask value on most POSIX systems is 0o22, meaning that folders and executables are created with a mode of 0o755 and other files are created with a mode of 0o644.
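The masking arithmetic, spelled out with the common defaults:

# directories/executables: 0o777 & ~0o022 = 0o755
# other files:             0o666 & ~0o022 = 0o644
npm config set umask=0022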

unicode

When set to true, npm uses unicode characters in the tree output. When false, it uses ascii characters instead of unicode glyphs.

update-notifier

Set to false to suppress the update notification when using an older version of npm than the latest.

usage

Show short usage output about the command specified.

user-agent

Sets the User-Agent request header. The following fields are replaced with their actual counterparts:

userconfig

The location of user-level configuration settings.

This may be overridden by the npm_config_userconfig environment variable or the --userconfig command line option, but may not be overridden by settings in the globalconfig file.

version

If true, output the npm version and exit successfully.

Only relevant when specified explicitly on the command line.

versions

If true, output the npm version as well as node's process.versions map and the version in the current working directory's package.json file if one exists, and exit successfully.

Only relevant when specified explicitly on the command line.

viewer

The program to use to view help content.

Set to "browser" to view html help content in the default web browser.

which

If there are multiple funding sources, which 1-indexed source URL to open.

workspace

Enable running a command in the context of the configured workspaces of the current project while filtering by running only the workspaces defined by this configuration option.

Valid values for the workspace config are either workspace names, a path to a workspace directory, or a path to a parent workspace directory (which will result in selecting all workspaces within that folder).

When set for the npm init command, this may be set to the folder of a workspace which does not yet exist, to create the folder and set it up as a brand new workspace within the project.

This value is not exported to the environment for child processes.
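For example, scoping a script run to a single workspace (the workspace path is hypothetical):

npm run test --workspace=packages/foo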

workspaces

Set to true to run the command in the context of all configured workspaces.

Explicitly setting this to false will cause commands like install to ignore workspaces altogether. When not set explicitly, commands that operate on the node_modules tree (install, update, etc.) will link workspaces into the node_modules folder, and commands that do other things (test, exec, publish, etc.) will operate on the root project, unless one or more workspaces are specified in the workspace config.

This value is not exported to the environment for child processes.

workspaces-update

If set to true, the npm cli will run an update after operations that may possibly change the workspaces installed to the node_modules folder.

yes

Automatically answer "yes" to any prompts that npm might print on the command line.

also

When set to dev or development, this is an alias for --include=dev.

cache-max

--cache-max=0 is an alias for --prefer-online.

cache-min

--cache-min=9999 (or bigger) is an alias for --prefer-offline.

cert

A client certificate to pass when accessing the registry. Values should be in PEM format (Windows calls it "Base-64 encoded X.509 (.CER)") with newlines replaced by the string "\n". For example:

cert="-----BEGIN CERTIFICATE-----\nXXXX\nXXXX\n-----END CERTIFICATE-----"

It is not the path to a certificate file, though you can set a registry-scoped "certfile" path like "//other-registry.tld/:certfile=/path/to/cert.pem".

dev

Alias for --include=dev.

global-style

Only install direct dependencies in the top level node_modules, but hoist on deeper dependencies. Sets --install-strategy=shallow.

init.author.email

Alias for --init-author-email.

init.author.name

Alias for --init-author-name.

init.author.url

Alias for --init-author-url.

init.license

Alias for --init-license.

init.module

Alias for --init-module.

init.version

Alias for --init-version.

key

A client key to pass when accessing the registry. Values should be in PEM format with newlines replaced by the string "\n". For example:

key="-----BEGIN PRIVATE KEY-----\nXXXX\nXXXX\n-----END PRIVATE KEY-----"

It is not the path to a key file, though you can set a registry-scoped "keyfile" path like "//other-registry.tld/:keyfile=/path/to/key.pem".

legacy-bundling

Instead of hoisting package installs in node_modules, install packages in the same manner that they are depended on. This may cause very deep directory structures and duplicate package installs as there is no de-duplicating. Sets --install-strategy=nested.

only

When set to prod or production, this is an alias for --omit=dev.

optional

Default value does install optional deps unless otherwise omitted.

Alias for --include=optional or --omit=optional.

production

Alias for --omit=dev.

shrinkwrap

Alias for --package-lock.

See also
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/output/using-npm/dependency-selectors.html b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/output/using-npm/dependency-selectors.html
new file mode 100644

Dependency Selector Syntax & Querying @11.6.2

Description

The npm query command exposes a new dependency selector syntax (informed by & respecting many aspects of the CSS Selectors 4 Spec) which:

Dependency Selector Syntax

Overview:

Combinators

Selectors

Dependency Type Selectors

Pseudo Selectors

:semver(<spec>, [selector], [function])

The :semver() pseudo selector allows comparing fields from each node's package.json using semver methods. It accepts up to 3 parameters, all but the first of which are optional.

When the special infer function is used, the spec and the actual value from the node are compared. If both are versions, according to semver.valid(), eq is used. If both values are ranges, according to !semver.valid(), intersects is used. If the values are mixed types, satisfies is used.

Some examples:
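A couple of hedged sketches that follow the signature above:

# dependencies whose version satisfies ^1.0.0
npm query ':semver(^1.0.0)'
# compare an explicit package.json field with an explicit comparison function
npm query ':semver(1.0.0, [version], eq)'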
:outdated(<type>)

The :outdated pseudo selector retrieves data from the registry and returns information about which of your dependencies are outdated. The type parameter may be one of the following:

In addition to the filtering performed by the pseudo selector, some extra data is added to the resulting objects. The following data can be found under the queryContext property of each node.

Some examples:
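For instance (a hedged sketch):

# direct dependencies with a newer major version available
npm query ':root > :outdated(major)'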
:vuln

The :vuln pseudo selector retrieves data from the registry and returns information about which of your dependencies has a known vulnerability. Only dependencies whose current version matches a vulnerability will be returned. For example if you have semver@7.6.0 in your tree, a vulnerability for semver which affects versions <=6.3.1 will not match.

You can also filter results by certain attributes in advisories. Currently that includes severity and cwe. Note that severity filtering is done per severity, it does not include severities "higher" or "lower" than the one specified.

In addition to the filtering performed by the pseudo selector, info about each relevant advisory will be added to the queryContext attribute of each node under the advisories attribute.

Some examples:
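Hedged sketches (the attribute-filter form mirrors the attribute selectors described below):

# any dependency with a known advisory
npm query ':vuln'
# only advisories with high severity
npm query ':vuln([severity=high])'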

Attribute Selectors

The attribute selector evaluates the key/value pairs in package.json if they are Strings.

Array & Object Attribute Selectors

The generic :attr() pseudo selector standardizes a pattern which can be used for attribute selection of Objects, Arrays or Arrays of Objects accessible via Arborist's Node.package metadata. This allows for iterative attribute selection beyond top-level String evaluation. The last argument passed to :attr() must be an attribute selector or a nested :attr(). See examples below:

Objects

/* return dependencies that have a `scripts.test` containing `"tap"` */
*:attr(scripts, [test~=tap])

Nested Objects

Nested objects are expressed as sequential arguments to :attr().

/* return dependencies that have a testling config (https://ci.testling.com/guide/advanced_configuration) for opera browsers */
*:attr(testling, browsers, [~=opera])

Arrays

Arrays specifically use a special/reserved . character in place of a typical attribute name. Arrays also support exact value matching when a String is passed to the selector.

Example of an Array Attribute Selection:

/* removes the distinction between properties & arrays */
/* ie. we'd have to check the property & iterate to match selection */
*:attr([keywords^=react])
*:attr(contributors, :attr([name~=Jordan]))

Example of an Array matching directly to a value:

/* return dependencies that have the exact keyword "react" */
/* this is equivalent to `*:keywords([value="react"])` */
*:attr([keywords=react])

Example of an Array of Objects:

/* returns dependencies with a matching contributor email */
*:attr(contributors, [email=ruyadorno@github.com])

Groups

Dependency groups are defined by the package relationships to their ancestors (ie. the dependency types that are defined in package.json). This approach is user-centric as the ecosystem has been taught to think about dependencies in these groups first-and-foremost. Dependencies are allowed to be included in multiple groups (ex. a prod dependency may also be a dev dependency, in that it's also required by another dev dependency, and may also be bundled; a selector for that type of dependency would look like: *.prod.dev.bundled).

Please note that currently workspace deps are always prod dependencies. Additionally the .root dependency is also considered a prod dependency.

Programmatic Usage

const Arborist = require('@npmcli/arborist')
const arb = new Arborist({})

// root-level
arb.loadActual().then(async (tree) => {
  // query all production dependencies
  const results = await tree.querySelectorAll('.prod')
  console.log(results)
})

// iterative
arb.loadActual().then(async (tree) => {
  // query for the deduped version of react
  const results = await tree.querySelectorAll('#react:not(:deduped)')
  // query the deduped react for git deps
  const deps = await results[0].querySelectorAll(':type(git)')
  console.log(deps)
})

See Also
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/output/using-npm/developers.html b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/output/using-npm/developers.html
new file mode 100644

developers @11.6.2

Developer Guide

Description

So, you've decided to use npm to develop (and maybe publish/deploy) your project.

Fantastic!

There are a few things that you need to do above the simple steps that your users will do to install your program.

About These Documents

These are man pages. If you install npm, you should be able to then do man npm-thing to get the documentation on a particular topic, or npm help thing to see the same information.

What is a Package

A package is:

Even if you never publish your package, you can still get a lot of benefits of using npm if you just want to write a node program (a), and perhaps if you also want to be able to easily install it elsewhere after packing it up into a tarball (b).

Git urls can be of the form:

git://github.com/user/project.git#commit-ish
git+ssh://user@hostname:project.git#commit-ish
git+http://user@hostname/project/blah.git#commit-ish
git+https://user@hostname/project/blah.git#commit-ish

The commit-ish can be any tag, sha, or branch which can be supplied as an argument to git checkout. The default is whatever the repository uses as its default branch.

The package.json File

You need to have a package.json file in the root of your project to do much of anything with npm. That is basically the whole interface.

See package.json for details about what goes in that file. At the very least, you need:

You can use npm init in the root of your package in order to get you started with a pretty basic package.json file. See npm init for more info.

Keeping files out of your Package

Use a .npmignore file to keep stuff out of your package. If there's no .npmignore file, but there is a .gitignore file, then npm will ignore the stuff matched by the .gitignore file. If you want to include something that is excluded by your .gitignore file, you can create an empty .npmignore file to override it. Like git, npm looks for .npmignore and .gitignore files in all subdirectories of your package, not only the root directory.

.npmignore files follow the same pattern rules as .gitignore files:

By default, some paths and files are ignored, so there's no need to add them to .npmignore explicitly. Some examples are:

Additionally, everything in node_modules is ignored, except for bundled dependencies. npm automatically handles this for you, so don't bother adding node_modules to .npmignore.

The following paths and files are never ignored, so adding them to .npmignore is pointless:

If, given the structure of your project, you find .npmignore to be a maintenance headache, you might instead try populating the files property of package.json, which is an array of file or directory names that should be included in your package. Sometimes manually picking which items to allow is easier to manage than building a block list.

See package.json for more info on what can and can't be ignored.

Testing whether your .npmignore or files config works

If you want to double check that your package will include only the files you intend it to when published, you can run the npm pack command locally which will generate a tarball in the working directory, the same way it does for publishing.
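A quick way to inspect the result (the tarball name is hypothetical):

npm pack
tar -tf my-package-1.0.0.tgz    # list the files that would be published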

Link Packages

npm link is designed to install a development package and see the changes in real time without having to keep re-installing it. (You do need to either re-link or npm rebuild -g to update compiled packages, of course.)

More info at npm link.

Before Publishing: Make Sure Your Package Installs and Works

This is important.

If you cannot install it locally, you'll have problems trying to publish it. Or, worse yet, you'll be able to publish it, but you'll be publishing a broken or pointless package. So don't do that.

In the root of your package, do this:

npm install . -g

That'll show you that it's working. If you'd rather just create a symlink package that points to your working directory, then do this:

npm link

Use npm ls -g to see if it's there.

To test a local install, go into some other folder, and then do:

cd ../some-other-folder
npm install ../my-package

to install it locally into the node_modules folder in that other place.

Then go into the node-repl, and try using require("my-thing") to bring in your module's main module.

Create a User Account

Create a user with the adduser command. It works like this:

npm adduser

and then follow the prompts.

This is documented better in npm adduser.

Publish your Package

This part's easy. In the root of your folder, do this:

npm publish

You can give publish a url to a tarball, or a filename of a tarball, or a path to a folder.

Note that pretty much everything in that folder will be exposed by default. So, if you have secret stuff in there, use a .npmignore file to list out the globs to ignore, or publish from a fresh checkout.

Brag about it

Send emails, write blogs, blab in IRC.

Tell the world how easy it is to install your program!

See also
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/output/using-npm/logging.html b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/output/using-npm/logging.html
new file mode 100644

Logging @11.6.2

Why, What & How We Log
Description

The npm CLI has various mechanisms for showing different levels of information back to end-users for certain commands, configurations & environments.

Setting Log File Location

All logs are written to a debug log, with the path to that file printed if the execution of a command fails.

The default location of the logs directory is a directory named _logs inside the npm cache. This can be changed with the logs-dir config option.

For example, if you wanted to write all your logs to the current working directory, you could run: npm install --logs-dir=. This is especially helpful in debugging a specific npm issue as you can run a command multiple times with different config values and then diff all the log files.

Log files will be removed from the logs-dir when the number of log files exceeds logs-max, with the oldest logs being deleted first.

To turn off logs completely set --logs-max=0.

Setting Log Levels

loglevel

loglevel is a global argument/config that can be set to determine the type of information to be displayed.

The default value of loglevel is "notice" but there are several levels/types of logs available, including:

All logs pertaining to a level preceding the current setting will be shown.

Aliases

The log levels listed above have various corresponding aliases, including:

foreground-scripts

The npm CLI began hiding the output of lifecycle scripts for npm install as of v7. Notably, this means you will not see logs/output from packages that may be using "install scripts" to display information back to you or from your own project's scripts defined in package.json. If you'd like to change this behavior & log this output you can set foreground-scripts to true.

Timing Information

The --timing config can be set which does a few things:

1. Always shows the full path to the debug log regardless of command exit status
2. Writes timing information to a process specific timing file in the cache or logs-dir
3. Outputs timing information to the terminal

This file contains a timers object where the keys are an identifier for the portion of the process being timed and the value is the number of milliseconds it took to complete.

Sometimes it is helpful to get timing information without outputting anything to the terminal. For example, the performance might be affected by writing to the terminal. In this case you can use --timing --silent which will still write the timing file, but not output anything to the terminal while running.

Registry Response Headers

npm-notice

The npm CLI reads from & logs any npm-notice headers that are returned from the configured registry. This mechanism can be used by third-party registries to provide useful information when network-dependent requests occur.

This header is not cached, and will not be logged if the request is served from the cache.

Logs and Sensitive Information

The npm CLI makes a best effort to redact the following from terminal output and log files:

However, this behavior should not be relied on to keep all possible sensitive information redacted. If you are concerned about secrets in your log file or terminal output, you can use --loglevel=silent and --logs-max=0 to ensure no logs are written to your terminal or filesystem.

See also
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/output/using-npm/orgs.html b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/output/using-npm/orgs.html
new file mode 100644

orgs @11.6.2

Working with Teams & Orgs

Description

There are three levels of org users:

1. Super admin, controls billing & adding people to the org.
2. Team admin, manages team membership & package access.
3. Developer, works on packages they are given access to.

The super admin is the only person who can add users to the org because it impacts the monthly bill. The super admin will use the website to manage membership. Every org has a developers team that all users are automatically added to.

The team admin is the person who manages team creation, team membership, and package access for teams. The team admin grants package access to teams, not individuals.

The developer will be able to access packages based on the teams they are on. Access is either read-write or read-only.

There are two main commands:

1. npm team: see npm team for more details
2. npm access: see npm access for more details
Team Admins create teams

To list the members of your org's developers team:

npm team ls <org>:developers

To create a new team:

npm team create <org:team>

To add a user to a team:

npm team add <org:team> <user>

Publish a package and adjust package access

Start a new package with:

npm init --scope=<org>

to scope it for your org & publish as usual.

To grant a team access to a package, or revoke it:

npm access grant <read-only|read-write> <org:team> [<package>]

npm access revoke <org:team> [<package>]

Monitor your package access

To see what packages a user has access to:

npm access list packages <org> <user>

To see what packages a team has access to:

npm access list packages <org:team>

To see who has access to a package:

npm access list collaborators <pkg>

See also
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/output/using-npm/package-spec.html b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/output/using-npm/package-spec.html
new file mode 100644

package-spec @11.6.2

Package name specifier

Description

Commands like npm install and the dependency sections in the package.json use a package name specifier. This can be many different things that all refer to a "package". Examples include a package name, git url, tarball, or local directory. These will generally be referred to as <package-spec> in the help output for the npm commands that use this package name specifier.

Package name

Refers to a package by name, with or without a scope, and optionally a tag, version, or version range. This is typically used in combination with the registry config to refer to a package in a registry.

Examples:

Aliases

Primarily used by commands like npm install and in the dependency sections in the package.json, this refers to a package by an alias. The <alias> is the name of the package as it is reified in the node_modules folder, and the <name> refers to a package name as found in the configured registry.

See Package name above for more info on referring to a package by name, and registry for configuring which registry is used when referring to a package by name.

Examples:
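One hedged sketch of the alias form (the alias name is hypothetical):

# installs the registry package "semver", reified locally as node_modules/my-semver
npm install my-semver@npm:semver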

Folders

This refers to a package on the local filesystem. Specifically this is a folder with a package.json file in it. This should always be prefixed with a / or ./ (or your OS equivalent) to reduce confusion. npm currently will parse a string with more than one / in it as a folder, but this is legacy behavior that may be removed in a future version.

Examples:

Tarballs

Refers to a package in a tarball format, either on the local filesystem or remotely via url. This is the format that packages exist in when uploaded to a registry.

Examples:

git urls

Refers to a package in a git repo. This can be a full git url, git shorthand, or a username/package on GitHub. You can specify a git tag, branch, or other git ref by appending #ref.

Examples:

See also
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/output/using-npm/registry.html b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/output/using-npm/registry.html
new file mode 100644

registry @11.6.2

The JavaScript Package Registry

Description

To resolve packages by name and version, npm talks to a registry website that implements the CommonJS Package Registry specification for reading package info.

npm is configured to use the npm public registry at https://registry.npmjs.org by default. Use of the npm public registry is subject to terms of use available at https://docs.npmjs.com/policies/terms.

You can configure npm to use any compatible registry you like, and even run your own registry. Use of someone else's registry may be governed by their terms of use.

npm's package registry implementation supports several write APIs as well, to allow for publishing packages and managing user account information.

The registry URL used is determined by the scope of the package (see scope). If no scope is specified, the default registry is used, which is supplied by the registry config parameter. See npm config, npmrc, and config for more on managing npm's configuration. Authentication configuration such as auth tokens and certificates are configured specifically scoped to an individual registry. See Auth Related Configuration.

When the default registry is used in a package-lock or shrinkwrap it has the special meaning of "the currently configured registry". If you create a lock file while using the default registry you can switch to another registry and npm will install packages from the new registry, but if you create a lock file while using a custom registry packages will be installed from that registry even after you change to another registry.

Does npm send any information about me back to the registry?

Yes.

When making requests of the registry npm adds two headers with information about your environment:

The npm registry does not try to correlate the information in these headers with any authenticated accounts that may be used in the same requests.

How can I prevent my package from being published in the official registry?

Set "private": true in your package.json to prevent it from being published at all, or "publishConfig":{"registry":"http://my-internal-registry.local"} to force it to be published only to your internal/private registry.

See package.json for more info on what goes in the package.json file.

Where can I find my (and others') published packages?

https://www.npmjs.com/

See also
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/output/using-npm/removal.html b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/output/using-npm/removal.html
new file mode 100644

removal @11.6.2

Cleaning the Slate

Synopsis

So sad to see you go.

sudo npm uninstall npm -g

Or, if that fails, please proceed to more severe uninstalling methods.

More Severe Uninstalling

Usually, the above instructions are sufficient. That will remove npm, but leave behind anything you've installed.

If that doesn't work, or if you require more drastic measures, continue reading.

Note that this is only necessary for globally-installed packages. Local installs are completely contained within a project's node_modules folder. Delete that folder, and everything is gone unless a package's install script is particularly ill-behaved.

This assumes that you installed node and npm in the default place. If you configured node with a different --prefix, or installed npm with a different prefix setting, then adjust the paths accordingly, replacing /usr/local with your install prefix.

To remove everything npm-related manually:

rm -rf /usr/local/{lib/node{,/.npm,_modules},bin,share/man}/npm*

If you installed things with npm, then your best bet is to uninstall them with npm first, and then install them again once you have a proper install. This can help find any symlinks that are lying around:

ls -laF /usr/local/{lib/node{,/.npm},bin,share/man} | grep npm

Prior to version 0.3, npm used shim files for executables and node modules. To track those down, you can do the following:

find /usr/local/{lib/node,bin} -exec grep -l npm \{\} \; ;

See also
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/output/using-npm/scope.html b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/output/using-npm/scope.html
new file mode 100644

scope @11.6.2

Scoped packages

Description

+

All npm packages have a name. +Some package names also have a scope. +A scope follows the usual rules for package names (URL-safe characters, no leading dots or underscores). +When used in package names, scopes are preceded by an @ symbol and followed by a slash, e.g.

+
@somescope/somepackagename
+
+

Scopes are a way of grouping related packages together, and also affect a few things about the way npm treats the package.

+

Each npm user/organization has their own scope, and only you can add packages in your scope. +This means you don't have to worry about someone taking your package name ahead of you. +Thus it is also a good way to signal official packages for organizations.

+

Scoped packages can be published and installed as of npm@2 and are supported by the primary npm registry. +Unscoped packages can depend on scoped packages and vice versa. +The npm client is backwards-compatible with unscoped registries, so it can be used to work with scoped and unscoped registries at the same time.

+

Installing scoped packages

+

Scoped packages are installed to a sub-folder of the regular installation folder, e.g. if your other packages are installed in node_modules/packagename, scoped modules will be installed in node_modules/@myorg/packagename. +The scope folder (@myorg) is simply the name of the scope preceded by an @ symbol, and can contain any number of scoped packages.

+

A scoped package is installed by referencing it by name, preceded by an +@ symbol, in npm install:

+
npm install @myorg/mypackage
+
+

Or in package.json:

+
"dependencies": {
+  "@myorg/mypackage": "^1.3.0"
+}
+
+

Note that if the @ symbol is omitted, in either case, npm will instead attempt to install from GitHub; see npm install.

+

Requiring scoped packages

+

Because scoped packages are installed into a scope folder, you have to include the name of the scope when requiring them in your code, e.g.

+
require('@myorg/mypackage')
+
+

There is nothing special about the way Node treats scope folders. +This simply requires the mypackage module in the folder named @myorg.

+

Publishing scoped packages

+

Scoped packages can be published from the CLI as of npm@2 and can be published to any registry that supports them, including the primary npm registry.

+

(As of 2015-04-19, and with npm 2.0 or better, the primary npm registry +does support scoped packages.)

+

If you wish, you may associate a scope with a registry; see below.

+

Publishing public scoped packages to the primary npm registry

+

Publishing to a scope, you have two options:

+ +

If publishing a public module to an organization scope, you must first either create an organization with the name of the scope that you'd like to publish to or be added to an existing organization with the appropriate permissions. +For example, if you'd like to publish to @org, you would need to create the org organization on npmjs.com prior to trying to publish.

+

Scoped packages are not public by default. You will need to specify --access public with the initial npm publish command. This will publish the package and set access to public as if you had run npm access public after publishing. You do not need to do this when publishing new versions of an existing scoped package.

+
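For example, the first publish of a new public scoped package (package name hypothetical) might look like:

npm publish --access public

Later version bumps of the same package can then be published with a plain npm publish.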

Publishing private scoped packages to the npm registry

+

To publish a private scoped package to the npm registry, you must have an npm Private Modules account.

+

You can then publish the module with npm publish or npm publish --access restricted, and it will be present in the npm registry, with restricted access. +You can then change the access permissions, if desired, with npm access or on the npmjs.com website.

+

Associating a scope with a registry

+

Scopes can be associated with a separate registry. +This allows you to seamlessly use a mix of packages from the primary npm registry and one or more private registries, such as GitHub Packages or the open source Verdaccio project.

+

You can associate a scope with a registry at login, e.g.

+
npm login --registry=http://reg.example.com --scope=@myco
+
+

Scopes have a many-to-one relationship with registries: one registry can host multiple scopes, but a scope only ever points to one registry.

+

You can also associate a scope with a registry using npm config:

+
npm config set @myco:registry=http://reg.example.com
+
+

Once a scope is associated with a registry, any npm install for a package with that scope will request packages from that registry instead. Any npm publish for a package name that contains the scope will be published to that registry instead.

+
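Either way, the mapping ends up in your .npmrc; a minimal sketch of the resulting entry, reusing the example registry URL from above:

@myco:registry=http://reg.example.com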

See also

+
+ + +
+ + + + \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/output/using-npm/scripts.html b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/output/using-npm/scripts.html new file mode 100644 index 0000000000000000000000000000000000000000..1e76b65876390724de7d0f4d83f924f6241b7c5b --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/output/using-npm/scripts.html @@ -0,0 +1,470 @@ + + +scripts + + + + + +
+
+

+ scripts + @11.6.2 +

+How npm handles the "scripts" field +
+ +
+

Table of contents

+ +
+ +

Description

+

The "scripts" property of your package.json file supports a number of built-in scripts and their preset life cycle events as well as arbitrary scripts. +These all can be executed by running npm run <stage>. +Pre and post commands with matching names will be run for those as well (e.g. +premyscript, +myscript, postmyscript). +Scripts from dependencies can be run with npm explore <pkg> -- npm run <stage>.

+

Pre & Post Scripts

+

To create "pre" or "post" scripts for any scripts defined in the +"scripts" section of the package.json, simply create another script +with a matching name and add "pre" or "post" to the beginning of them.

+
{
+  "scripts": {
+    "precompress": "{{ executes BEFORE the `compress` script }}",
+    "compress": "{{ run command to compress files }}",
+    "postcompress": "{{ executes AFTER `compress` script }}"
+  }
+}
+
+

In this example npm run compress would execute these scripts as described.

+

Life Cycle Scripts

+

There are some special life cycle scripts that happen only in certain situations. These scripts happen in addition to the pre<event>, post<event>, and <event> scripts.

+ +

prepare (since npm@4.0.0)

+ +

prepublish (DEPRECATED)

+ +

prepublishOnly

+ +

prepack

+ +

postpack

+ +

dependencies

+ +

Prepare and Prepublish

+

Deprecation Note: prepublish

+

Since npm@1.1.71, the npm CLI has run the prepublish script for both npm publish and npm install, because it's a convenient way to prepare a package for use (some common use cases are described in the section below). It has also turned out to be, in practice, very confusing. As of npm@4.0.0, a new event has been introduced, prepare, that preserves this existing behavior. A new event, prepublishOnly, has been added as a transitional strategy to allow users to avoid the confusing behavior of existing npm versions and only run on npm publish (for instance, running the tests one last time to ensure they're in good shape).

+

See https://github.com/npm/npm/issues/10074 for a much lengthier justification, with further reading, for this change.

+

Use Cases

+

If you need to perform operations on your package before it is used, in a way that is not dependent on the operating system or architecture of the target system, use a prepublish script. +This includes tasks such as:

+ +

The advantage of doing these things at prepublish time is that they can be done once, in a single place, thus reducing complexity and variability. +Additionally, this means that:

+ +

Dependencies

+

The dependencies script is run any time an npm command causes changes to the node_modules directory. +It is run AFTER the changes have been applied and the package.json and package-lock.json files have been updated.

+
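As a minimal sketch (the script body is illustrative, not something npm ships), a package could log every time its tree changes:

{
  "scripts": {
    "dependencies": "node -e \"console.log('node_modules was updated')\""
  }
}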

Life Cycle Operation Order

+

npm cache add

+ +

npm ci

+ +

These all run after the actual installation of modules into node_modules, in order, with no internal actions happening in between.

+

npm diff

+ +

npm install

+

These also run when you run npm install -g <pkg-name>.

+ +

If there is a binding.gyp file in the root of your package and you haven't defined your own install or preinstall scripts, npm will default the install command to compile using node-gyp via node-gyp rebuild.

+
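In other words, when a binding.gyp is present and no install or preinstall script is declared, npm behaves as if the package had defined:

{
  "scripts": {
    "install": "node-gyp rebuild"
  }
}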

These are run from the scripts of <pkg-name>.

+

npm pack

+ +

npm publish

+ +

npm rebuild

+ +

prepare is only run if the current directory is a symlink (e.g. with linked packages).

+

npm restart

+

If there is a restart script defined, these events are run; otherwise, stop and start are both run if present (including their pre and post iterations).

+ +

npm run <user defined>

+ +

npm start

+ +

If there is a server.js file in the root of your package, then npm will default the start command to node server.js. +prestart and poststart will still run in this case.

+
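That default is equivalent to declaring the following (only applied when no start script is defined):

{
  "scripts": {
    "start": "node server.js"
  }
}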

npm stop

+ +

npm test

+ +

npm version

+ +

A Note on a lack of npm uninstall scripts

+

While npm v6 had uninstall lifecycle scripts, npm v7 does not. Removal of a package can happen for a wide variety of reasons, and there's currently no clear way to give the script enough context to be useful.

+

Reasons for a package removal include:

+ +

Due to the lack of necessary context, uninstall lifecycle scripts are not implemented and will not function.

+

Working Directory for Scripts

+

Scripts are always run from the root of the package folder, regardless of what the current working directory is when npm is invoked. +This means your scripts can reliably assume they are running in the package root.

+

If you want your script to behave differently based on the directory you were in when you ran npm, you can use the INIT_CWD environment variable, which holds the full path you were in when you ran npm run.

+
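A minimal sketch (the script name where is illustrative) that surfaces both directories on a POSIX shell:

{
  "scripts": {
    "where": "echo \"invoked from $INIT_CWD, running in $PWD\""
  }
}

Running npm run where from any subdirectory prints the invocation directory alongside the package root.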

Historical Behavior in Older npm Versions

+

For npm v6 and earlier, scripts were generally run from the root of the package, but there were rare cases and bugs in older versions where this was not guaranteed. +If your package must support very old npm versions, you may wish to add a safeguard in your scripts (for example, by checking process.cwd()).

+

For more details, see:

+ +

User

+

When npm is run as root, scripts are always run with the effective uid and gid of the working directory owner.

+

Environment

+

Package scripts run in an environment where many pieces of information are made available regarding the setup of npm and the current state of the process.

+

path

+

If you depend on modules that define executable scripts, like test suites, then those executables will be added to the PATH for executing the scripts. +So, if your package.json has this:

+
{
+  "name" : "foo",
+  "dependencies" : {
+    "bar" : "0.1.x"
+  },
+  "scripts": {
+    "start" : "bar ./test"
+  }
+}
+
+

then you could run npm start to execute the bar script, which is exported into the node_modules/.bin directory on npm install.

+

package.json vars

+

The package.json fields are tacked onto the npm_package_ prefix. +So, for instance, if you had {"name":"foo", "version":"1.2.5"} in your package.json file, then your package scripts would have the npm_package_name environment variable set to "foo", and the npm_package_version set to "1.2.5". You can access these variables in your code with process.env.npm_package_name and process.env.npm_package_version, and so on for other fields.

+
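A quick way to see this in action (the script name show-name is illustrative):

{
  "scripts": {
    "show-name": "node -e \"console.log(process.env.npm_package_name)\""
  }
}

For the example manifest above, npm run show-name prints foo.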

See package.json for more on package configs.

+

current lifecycle event

+

Lastly, the npm_lifecycle_event environment variable is set to whichever stage of the cycle is being executed. +So, you could have a single script used for different parts of the process which switches based on what's currently happening.

+

Objects are flattened following this format, so if you had {"scripts":{"install":"foo.js"}} in your package.json, then you'd see this in the script:

+
process.env.npm_package_scripts_install === "foo.js"
+
+

Examples

+

For example, if your package.json contains this:

+
{
+  "scripts" : {
+    "install" : "scripts/install.js",
+    "postinstall" : "scripts/install.js"
+  }
+}
+
+

then scripts/install.js will be called for the install and post-install stages of the lifecycle. Since scripts/install.js is running for two different phases, it would be wise in this case to look at the npm_lifecycle_event environment variable.

+
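A minimal sketch of such a scripts/install.js (the log messages are illustrative):

// scripts/install.js
const stage = process.env.npm_lifecycle_event

if (stage === 'install') {
  console.log('running the install stage')
} else if (stage === 'postinstall') {
  console.log('running the postinstall stage')
}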

If you want to run a make command, you can do so. +This works just fine:

+
{
+  "scripts" : {
+    "preinstall" : "./configure",
+    "install" : "make && make install",
+    "test" : "make test"
+  }
+}
+
+

Exiting

+

Scripts are run by passing the line as a script argument to /bin/sh on POSIX systems or cmd.exe on Windows. +You can control which shell is used by setting the script-shell configuration option.

+
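For example, to run scripts with bash instead of the default shell (the path may differ per system):

npm config set script-shell /bin/bash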

If the script exits with a code other than 0, then this will abort the process.

+
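A sketch of a deliberately failing script (the name check is illustrative):

{
  "scripts": {
    "check": "exit 1"
  }
}

npm run check will itself exit with a non-zero status.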

Note that these script files don't have to be Node.js or even JavaScript programs. +They just have to be some kind of executable file.

+

Best Practices

+ +

See Also

+
+ + +
+ + + + \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/output/using-npm/workspaces.html b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/output/using-npm/workspaces.html new file mode 100644 index 0000000000000000000000000000000000000000..f1062e2d28399f083cb6cdc4804a09786a6b8cab --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/docs/output/using-npm/workspaces.html @@ -0,0 +1,293 @@ + + +workspaces + + + + + +
+
+

+ workspaces + @11.6.2 +

+Working with workspaces +
+ +
+

Table of contents

+ +
+ +

Description

+

Workspaces is a generic term for the set of features in the npm CLI that provide support for managing multiple packages from your local file system from within a single top-level root package.

+

This set of features makes for a much more streamlined workflow when handling linked packages from the local file system. It automates the linking process as part of npm install and removes the need to manually use npm link in order to add references to packages that should be symlinked into the current node_modules folder.

+

We also refer to each of these packages that gets auto-symlinked during npm install as a workspace: a nested package within the current local file system that is explicitly defined in the package.json workspaces configuration.

+

Defining workspaces

+

Workspaces are usually defined via the workspaces property of the package.json file, e.g.:

+
{
+  "name": "my-workspaces-powered-project",
+  "workspaces": [
+    "packages/a"
+  ]
+}
+
+

Given the above package.json example living at a current working directory . that contains a folder named packages/a, which itself contains a package.json defining a Node.js package, e.g.:

+
.
++-- package.json
+`-- packages
+   +-- a
+   |   `-- package.json
+
+

The expected result after running npm install in this current working directory . is that the folder packages/a will get symlinked to the node_modules folder of the current working dir.

+

Below is a post npm install example, given the same structure of files and folders from the previous example:

+
.
++-- node_modules
+|  `-- a -> ../packages/a
++-- package-lock.json
++-- package.json
+`-- packages
+   +-- a
+   |   `-- package.json
+
+

Getting started with workspaces

+

You may automate the required steps to define a new workspace using npm init. For example, in a project that already has a package.json defined, you can run:

+
npm init -w ./packages/a
+
+

This command will create the missing folders and a new package.json file (if needed) while also making sure to properly configure the "workspaces" property of your root project package.json.

+

Adding dependencies to a workspace

+

It's possible to directly add/remove/update dependencies of your workspaces using the workspace config.

+

For example, assuming the following structure:

+
.
++-- package.json
+`-- packages
+   +-- a
+   |   `-- package.json
+   `-- b
+       `-- package.json
+
+

If you want to add a package named abbrev from the registry as a dependency of your workspace a, you may use the workspace config to tell the npm installer that the package should be added as a dependency of the provided workspace:

+
npm install abbrev -w a
+
+

Note: other installing commands such as uninstall, ci, etc. will also respect the provided workspace configuration.

+
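For example, removing that same dependency from workspace a mirrors the install above:

npm uninstall abbrev -w a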

Using workspaces

+

Given the specifics of how Node.js handles module resolution, it's possible to consume any defined workspace by its declared package.json name. Continuing from the example defined above, let's also create a Node.js script that will require the workspace a example module, e.g.:

+
// ./packages/a/index.js
+module.exports = 'a'
+
+// ./lib/index.js
+const moduleA = require('a')
+console.log(moduleA) // -> a
+
+

When running it with:

+

node lib/index.js

+

This demonstrates how the nature of node_modules resolution allows workspaces to enable a portable workflow: each workspace can be required by name, and these nested workspaces can just as easily be published for consumption elsewhere.

+

Running commands in the context of workspaces

+

You can use the workspace configuration option to run commands in the context of a configured workspace. +Additionally, if your current directory is in a workspace, the workspace configuration is implicitly set, and prefix is set to the root workspace.

+

Following is a quick example of how to use the npm run command in the context of nested workspaces. For a project containing multiple workspaces, e.g.:

+
.
++-- package.json
+`-- packages
+   +-- a
+   |   `-- package.json
+   `-- b
+       `-- package.json
+
+

By running a command using the workspace option, it's possible to run the given command in the context of that specific workspace, e.g.:

+
npm run test --workspace=a
+
+

You could also run the command within the workspace.

+
cd packages/a && npm run test
+
+

Either will run the test script defined within the ./packages/a/package.json file.

+

Please note that you can also specify this argument multiple times on the command line in order to target multiple workspaces, e.g.:

+
npm run test --workspace=a --workspace=b
+
+

Or run the command for each workspace within the 'packages' folder:

+
npm run test --workspace=packages
+
+

It's also possible to use the workspaces (plural) configuration option to enable the same behavior, running that command in the context of all configured workspaces, e.g.:

+
npm run test --workspaces
+
+

This will run the test script in both ./packages/a and ./packages/b.

+

Commands will be run in each workspace in the order they appear in your package.json:

+
{
+  "workspaces": [ "packages/a", "packages/b" ]
+}
+
+

The order of execution differs with:

+
{
+  "workspaces": [ "packages/b", "packages/a" ]
+}
+
+

Ignoring missing scripts

+

Not all of the workspaces are required to implement the script being run with the npm run command.

+

By running the command with the --if-present flag, npm will ignore workspaces that are missing the target script.

+
npm run test --workspaces --if-present
+
+

See also

+
+ + +
+ + + + \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/balanced-match/LICENSE.md b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/balanced-match/LICENSE.md new file mode 100644 index 0000000000000000000000000000000000000000..61ece8cc92afb41fdb1552ec1f72ac147b9eaceb --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/balanced-match/LICENSE.md @@ -0,0 +1,23 @@ +(MIT) + +Original code Copyright Julian Gruber + +Port to TypeScript Copyright Isaac Z. Schlueter + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/balanced-match/dist/commonjs/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/balanced-match/dist/commonjs/index.js new file mode 100644 index 0000000000000000000000000000000000000000..0c9014bac153187d5b0e3d1d214a5d1c71eefd32 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/balanced-match/dist/commonjs/index.js @@ -0,0 +1,59 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.range = exports.balanced = void 0; +const balanced = (a, b, str) => { + const ma = a instanceof RegExp ? maybeMatch(a, str) : a; + const mb = b instanceof RegExp ? maybeMatch(b, str) : b; + const r = ma !== null && mb != null && (0, exports.range)(ma, mb, str); + return (r && { + start: r[0], + end: r[1], + pre: str.slice(0, r[0]), + body: str.slice(r[0] + ma.length, r[1]), + post: str.slice(r[1] + mb.length), + }); +}; +exports.balanced = balanced; +const maybeMatch = (reg, str) => { + const m = str.match(reg); + return m ? m[0] : null; +}; +const range = (a, b, str) => { + let begs, beg, left, right = undefined, result; + let ai = str.indexOf(a); + let bi = str.indexOf(b, ai + 1); + let i = ai; + if (ai >= 0 && bi > 0) { + if (a === b) { + return [ai, bi]; + } + begs = []; + left = str.length; + while (i >= 0 && !result) { + if (i === ai) { + begs.push(i); + ai = str.indexOf(a, i + 1); + } + else if (begs.length === 1) { + const r = begs.pop(); + if (r !== undefined) + result = [r, bi]; + } + else { + beg = begs.pop(); + if (beg !== undefined && beg < left) { + left = beg; + right = bi; + } + bi = str.indexOf(b, i + 1); + } + i = ai < bi && ai >= 0 ? 
ai : bi; + } + if (begs.length && right !== undefined) { + result = [left, right]; + } + } + return result; +}; +exports.range = range; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/balanced-match/dist/commonjs/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/balanced-match/dist/commonjs/package.json new file mode 100644 index 0000000000000000000000000000000000000000..5bbefffbabee392d1855491b84dc0a716b6a3bf2 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/balanced-match/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/balanced-match/dist/esm/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/balanced-match/dist/esm/index.js new file mode 100644 index 0000000000000000000000000000000000000000..fe81200f9d676da19bf1bf088ecef7944290e38a --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/balanced-match/dist/esm/index.js @@ -0,0 +1,54 @@ +export const balanced = (a, b, str) => { + const ma = a instanceof RegExp ? maybeMatch(a, str) : a; + const mb = b instanceof RegExp ? maybeMatch(b, str) : b; + const r = ma !== null && mb != null && range(ma, mb, str); + return (r && { + start: r[0], + end: r[1], + pre: str.slice(0, r[0]), + body: str.slice(r[0] + ma.length, r[1]), + post: str.slice(r[1] + mb.length), + }); +}; +const maybeMatch = (reg, str) => { + const m = str.match(reg); + return m ? m[0] : null; +}; +export const range = (a, b, str) => { + let begs, beg, left, right = undefined, result; + let ai = str.indexOf(a); + let bi = str.indexOf(b, ai + 1); + let i = ai; + if (ai >= 0 && bi > 0) { + if (a === b) { + return [ai, bi]; + } + begs = []; + left = str.length; + while (i >= 0 && !result) { + if (i === ai) { + begs.push(i); + ai = str.indexOf(a, i + 1); + } + else if (begs.length === 1) { + const r = begs.pop(); + if (r !== undefined) + result = [r, bi]; + } + else { + beg = begs.pop(); + if (beg !== undefined && beg < left) { + left = beg; + right = bi; + } + bi = str.indexOf(b, i + 1); + } + i = ai < bi && ai >= 0 ? 
ai : bi; + } + if (begs.length && right !== undefined) { + result = [left, right]; + } + } + return result; +}; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/balanced-match/dist/esm/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/balanced-match/dist/esm/package.json new file mode 100644 index 0000000000000000000000000000000000000000..3dbc1ca591c0557e35b6004aeba250e6a70b56e3 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/balanced-match/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/balanced-match/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/balanced-match/package.json new file mode 100644 index 0000000000000000000000000000000000000000..49296e6af443c4c424844aa2c73081c3e34b8c8c --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/balanced-match/package.json @@ -0,0 +1,79 @@ +{ + "name": "@isaacs/balanced-match", + "description": "Match balanced character pairs, like \"{\" and \"}\"", + "version": "4.0.1", + "files": [ + "dist" + ], + "repository": { + "type": "git", + "url": "git://github.com/isaacs/balanced-match.git" + }, + "exports": { + "./package.json": "./package.json", + ".": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "type": "module", + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "prepare": "tshy", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "tap", + "snap": "tap", + "format": "prettier --write . --loglevel warn", + "benchmark": "node benchmark/index.js", + "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts" + }, + "prettier": { + "semi": false, + "printWidth": 80, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "devDependencies": { + "@types/brace-expansion": "^1.1.2", + "@types/node": "^24.0.0", + "mkdirp": "^3.0.1", + "prettier": "^3.3.2", + "tap": "^21.1.0", + "tshy": "^3.0.2", + "typedoc": "^0.28.5" + }, + "keywords": [ + "match", + "regexp", + "test", + "balanced", + "parse" + ], + "license": "MIT", + "engines": { + "node": "20 || >=22" + }, + "tshy": { + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + } + }, + "main": "./dist/commonjs/index.js", + "types": "./dist/commonjs/index.d.ts", + "module": "./dist/esm/index.js" +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/brace-expansion/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/brace-expansion/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..46e7b75c91ced041ae473299398af2b0472dc352 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/brace-expansion/LICENSE @@ -0,0 +1,23 @@ +MIT License + +Copyright Julian Gruber + +TypeScript port Copyright Isaac Z. 
Schlueter + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/brace-expansion/dist/commonjs/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/brace-expansion/dist/commonjs/index.js new file mode 100644 index 0000000000000000000000000000000000000000..99cee69d560e24a30934a93db3c116532e257417 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/brace-expansion/dist/commonjs/index.js @@ -0,0 +1,196 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.expand = expand; +const balanced_match_1 = require("@isaacs/balanced-match"); +const escSlash = '\0SLASH' + Math.random() + '\0'; +const escOpen = '\0OPEN' + Math.random() + '\0'; +const escClose = '\0CLOSE' + Math.random() + '\0'; +const escComma = '\0COMMA' + Math.random() + '\0'; +const escPeriod = '\0PERIOD' + Math.random() + '\0'; +const escSlashPattern = new RegExp(escSlash, 'g'); +const escOpenPattern = new RegExp(escOpen, 'g'); +const escClosePattern = new RegExp(escClose, 'g'); +const escCommaPattern = new RegExp(escComma, 'g'); +const escPeriodPattern = new RegExp(escPeriod, 'g'); +const slashPattern = /\\\\/g; +const openPattern = /\\{/g; +const closePattern = /\\}/g; +const commaPattern = /\\,/g; +const periodPattern = /\\./g; +function numeric(str) { + return !isNaN(str) ? 
parseInt(str, 10) : str.charCodeAt(0); +} +function escapeBraces(str) { + return str + .replace(slashPattern, escSlash) + .replace(openPattern, escOpen) + .replace(closePattern, escClose) + .replace(commaPattern, escComma) + .replace(periodPattern, escPeriod); +} +function unescapeBraces(str) { + return str + .replace(escSlashPattern, '\\') + .replace(escOpenPattern, '{') + .replace(escClosePattern, '}') + .replace(escCommaPattern, ',') + .replace(escPeriodPattern, '.'); +} +/** + * Basically just str.split(","), but handling cases + * where we have nested braced sections, which should be + * treated as individual members, like {a,{b,c},d} + */ +function parseCommaParts(str) { + if (!str) { + return ['']; + } + const parts = []; + const m = (0, balanced_match_1.balanced)('{', '}', str); + if (!m) { + return str.split(','); + } + const { pre, body, post } = m; + const p = pre.split(','); + p[p.length - 1] += '{' + body + '}'; + const postParts = parseCommaParts(post); + if (post.length) { + ; + p[p.length - 1] += postParts.shift(); + p.push.apply(p, postParts); + } + parts.push.apply(parts, p); + return parts; +} +function expand(str) { + if (!str) { + return []; + } + // I don't know why Bash 4.3 does this, but it does. + // Anything starting with {} will have the first two bytes preserved + // but *only* at the top level, so {},a}b will not expand to anything, + // but a{},b}c will be expanded to [a}c,abc]. + // One could argue that this is a bug in Bash, but since the goal of + // this module is to match Bash's rules, we escape a leading {} + if (str.slice(0, 2) === '{}') { + str = '\\{\\}' + str.slice(2); + } + return expand_(escapeBraces(str), true).map(unescapeBraces); +} +function embrace(str) { + return '{' + str + '}'; +} +function isPadded(el) { + return /^-?0\d/.test(el); +} +function lte(i, y) { + return i <= y; +} +function gte(i, y) { + return i >= y; +} +function expand_(str, isTop) { + /** @type {string[]} */ + const expansions = []; + const m = (0, balanced_match_1.balanced)('{', '}', str); + if (!m) + return [str]; + // no need to expand pre, since it is guaranteed to be free of brace-sets + const pre = m.pre; + const post = m.post.length ? expand_(m.post, false) : ['']; + if (/\$$/.test(m.pre)) { + for (let k = 0; k < post.length; k++) { + const expansion = pre + '{' + m.body + '}' + post[k]; + expansions.push(expansion); + } + } + else { + const isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + const isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + const isSequence = isNumericSequence || isAlphaSequence; + const isOptions = m.body.indexOf(',') >= 0; + if (!isSequence && !isOptions) { + // {a},b} + if (m.post.match(/,(?!,).*\}/)) { + str = m.pre + '{' + m.body + escClose + m.post; + return expand_(str); + } + return [str]; + } + let n; + if (isSequence) { + n = m.body.split(/\.\./); + } + else { + n = parseCommaParts(m.body); + if (n.length === 1 && n[0] !== undefined) { + // x{{a,b}}y ==> x{a}y x{b}y + n = expand_(n[0], false).map(embrace); + //XXX is this necessary? Can't seem to hit it in tests. + /* c8 ignore start */ + if (n.length === 1) { + return post.map(p => m.pre + n[0] + p); + } + /* c8 ignore stop */ + } + } + // at this point, n is the parts, and we know it's not a comma set + // with a single entry. 
+ let N; + if (isSequence && n[0] !== undefined && n[1] !== undefined) { + const x = numeric(n[0]); + const y = numeric(n[1]); + const width = Math.max(n[0].length, n[1].length); + let incr = n.length === 3 && n[2] !== undefined ? Math.abs(numeric(n[2])) : 1; + let test = lte; + const reverse = y < x; + if (reverse) { + incr *= -1; + test = gte; + } + const pad = n.some(isPadded); + N = []; + for (let i = x; test(i, y); i += incr) { + let c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === '\\') { + c = ''; + } + } + else { + c = String(i); + if (pad) { + const need = width - c.length; + if (need > 0) { + const z = new Array(need + 1).join('0'); + if (i < 0) { + c = '-' + z + c.slice(1); + } + else { + c = z + c; + } + } + } + } + N.push(c); + } + } + else { + N = []; + for (let j = 0; j < n.length; j++) { + N.push.apply(N, expand_(n[j], false)); + } + } + for (let j = 0; j < N.length; j++) { + for (let k = 0; k < post.length; k++) { + const expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) { + expansions.push(expansion); + } + } + } + } + return expansions; +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/brace-expansion/dist/commonjs/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/brace-expansion/dist/commonjs/package.json new file mode 100644 index 0000000000000000000000000000000000000000..5bbefffbabee392d1855491b84dc0a716b6a3bf2 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/brace-expansion/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/brace-expansion/dist/esm/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/brace-expansion/dist/esm/index.js new file mode 100644 index 0000000000000000000000000000000000000000..ebb88ed4117c87e40d30ee9418c9b2278435de1c --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/brace-expansion/dist/esm/index.js @@ -0,0 +1,193 @@ +import { balanced } from '@isaacs/balanced-match'; +const escSlash = '\0SLASH' + Math.random() + '\0'; +const escOpen = '\0OPEN' + Math.random() + '\0'; +const escClose = '\0CLOSE' + Math.random() + '\0'; +const escComma = '\0COMMA' + Math.random() + '\0'; +const escPeriod = '\0PERIOD' + Math.random() + '\0'; +const escSlashPattern = new RegExp(escSlash, 'g'); +const escOpenPattern = new RegExp(escOpen, 'g'); +const escClosePattern = new RegExp(escClose, 'g'); +const escCommaPattern = new RegExp(escComma, 'g'); +const escPeriodPattern = new RegExp(escPeriod, 'g'); +const slashPattern = /\\\\/g; +const openPattern = /\\{/g; +const closePattern = /\\}/g; +const commaPattern = /\\,/g; +const periodPattern = /\\./g; +function numeric(str) { + return !isNaN(str) ? 
parseInt(str, 10) : str.charCodeAt(0); +} +function escapeBraces(str) { + return str + .replace(slashPattern, escSlash) + .replace(openPattern, escOpen) + .replace(closePattern, escClose) + .replace(commaPattern, escComma) + .replace(periodPattern, escPeriod); +} +function unescapeBraces(str) { + return str + .replace(escSlashPattern, '\\') + .replace(escOpenPattern, '{') + .replace(escClosePattern, '}') + .replace(escCommaPattern, ',') + .replace(escPeriodPattern, '.'); +} +/** + * Basically just str.split(","), but handling cases + * where we have nested braced sections, which should be + * treated as individual members, like {a,{b,c},d} + */ +function parseCommaParts(str) { + if (!str) { + return ['']; + } + const parts = []; + const m = balanced('{', '}', str); + if (!m) { + return str.split(','); + } + const { pre, body, post } = m; + const p = pre.split(','); + p[p.length - 1] += '{' + body + '}'; + const postParts = parseCommaParts(post); + if (post.length) { + ; + p[p.length - 1] += postParts.shift(); + p.push.apply(p, postParts); + } + parts.push.apply(parts, p); + return parts; +} +export function expand(str) { + if (!str) { + return []; + } + // I don't know why Bash 4.3 does this, but it does. + // Anything starting with {} will have the first two bytes preserved + // but *only* at the top level, so {},a}b will not expand to anything, + // but a{},b}c will be expanded to [a}c,abc]. + // One could argue that this is a bug in Bash, but since the goal of + // this module is to match Bash's rules, we escape a leading {} + if (str.slice(0, 2) === '{}') { + str = '\\{\\}' + str.slice(2); + } + return expand_(escapeBraces(str), true).map(unescapeBraces); +} +function embrace(str) { + return '{' + str + '}'; +} +function isPadded(el) { + return /^-?0\d/.test(el); +} +function lte(i, y) { + return i <= y; +} +function gte(i, y) { + return i >= y; +} +function expand_(str, isTop) { + /** @type {string[]} */ + const expansions = []; + const m = balanced('{', '}', str); + if (!m) + return [str]; + // no need to expand pre, since it is guaranteed to be free of brace-sets + const pre = m.pre; + const post = m.post.length ? expand_(m.post, false) : ['']; + if (/\$$/.test(m.pre)) { + for (let k = 0; k < post.length; k++) { + const expansion = pre + '{' + m.body + '}' + post[k]; + expansions.push(expansion); + } + } + else { + const isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + const isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + const isSequence = isNumericSequence || isAlphaSequence; + const isOptions = m.body.indexOf(',') >= 0; + if (!isSequence && !isOptions) { + // {a},b} + if (m.post.match(/,(?!,).*\}/)) { + str = m.pre + '{' + m.body + escClose + m.post; + return expand_(str); + } + return [str]; + } + let n; + if (isSequence) { + n = m.body.split(/\.\./); + } + else { + n = parseCommaParts(m.body); + if (n.length === 1 && n[0] !== undefined) { + // x{{a,b}}y ==> x{a}y x{b}y + n = expand_(n[0], false).map(embrace); + //XXX is this necessary? Can't seem to hit it in tests. + /* c8 ignore start */ + if (n.length === 1) { + return post.map(p => m.pre + n[0] + p); + } + /* c8 ignore stop */ + } + } + // at this point, n is the parts, and we know it's not a comma set + // with a single entry. + let N; + if (isSequence && n[0] !== undefined && n[1] !== undefined) { + const x = numeric(n[0]); + const y = numeric(n[1]); + const width = Math.max(n[0].length, n[1].length); + let incr = n.length === 3 && n[2] !== undefined ? 
Math.abs(numeric(n[2])) : 1; + let test = lte; + const reverse = y < x; + if (reverse) { + incr *= -1; + test = gte; + } + const pad = n.some(isPadded); + N = []; + for (let i = x; test(i, y); i += incr) { + let c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === '\\') { + c = ''; + } + } + else { + c = String(i); + if (pad) { + const need = width - c.length; + if (need > 0) { + const z = new Array(need + 1).join('0'); + if (i < 0) { + c = '-' + z + c.slice(1); + } + else { + c = z + c; + } + } + } + } + N.push(c); + } + } + else { + N = []; + for (let j = 0; j < n.length; j++) { + N.push.apply(N, expand_(n[j], false)); + } + } + for (let j = 0; j < N.length; j++) { + for (let k = 0; k < post.length; k++) { + const expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) { + expansions.push(expansion); + } + } + } + } + return expansions; +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/brace-expansion/dist/esm/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/brace-expansion/dist/esm/package.json new file mode 100644 index 0000000000000000000000000000000000000000..3dbc1ca591c0557e35b6004aeba250e6a70b56e3 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/brace-expansion/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/brace-expansion/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/brace-expansion/package.json new file mode 100644 index 0000000000000000000000000000000000000000..cf1035688398b9b4d136c21b9de1e9ace6089b5c --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/brace-expansion/package.json @@ -0,0 +1,71 @@ +{ + "name": "@isaacs/brace-expansion", + "description": "Brace expansion as known from sh/bash", + "version": "5.0.0", + "files": [ + "dist" + ], + "exports": { + "./package.json": "./package.json", + ".": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "type": "module", + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "prepare": "tshy", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "tap", + "snap": "tap", + "format": "prettier --write . 
--loglevel warn", + "benchmark": "node benchmark/index.js", + "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts" + }, + "prettier": { + "semi": false, + "printWidth": 80, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "devDependencies": { + "@types/brace-expansion": "^1.1.2", + "@types/node": "^24.0.0", + "mkdirp": "^3.0.1", + "prettier": "^3.3.2", + "tap": "^21.1.0", + "tshy": "^3.0.2", + "typedoc": "^0.28.5" + }, + "dependencies": { + "@isaacs/balanced-match": "^4.0.1" + }, + "license": "MIT", + "engines": { + "node": "20 || >=22" + }, + "tshy": { + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + } + }, + "main": "./dist/commonjs/index.js", + "types": "./dist/commonjs/index.d.ts", + "module": "./dist/esm/index.js" +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/LICENSE.txt b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/LICENSE.txt new file mode 100644 index 0000000000000000000000000000000000000000..c7e27478a3eff8862ca150f10d1b93a5ac866af2 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/LICENSE.txt @@ -0,0 +1,14 @@ +Copyright (c) 2015, Contributors + +Permission to use, copy, modify, and/or distribute this software +for any purpose with or without fee is hereby granted, provided +that the above copyright notice and this permission notice +appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE +LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES +OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, +ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/build/index.cjs b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/build/index.cjs new file mode 100644 index 0000000000000000000000000000000000000000..aca2b8507ac0f34d914ea50268da62158cf935cf --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/build/index.cjs @@ -0,0 +1,317 @@ +'use strict'; + +const align = { + right: alignRight, + center: alignCenter +}; +const top = 0; +const right = 1; +const bottom = 2; +const left = 3; +class UI { + constructor(opts) { + var _a; + this.width = opts.width; + /* c8 ignore start */ + this.wrap = (_a = opts.wrap) !== null && _a !== void 0 ? 
_a : true; + /* c8 ignore stop */ + this.rows = []; + } + span(...args) { + const cols = this.div(...args); + cols.span = true; + } + resetOutput() { + this.rows = []; + } + div(...args) { + if (args.length === 0) { + this.div(''); + } + if (this.wrap && this.shouldApplyLayoutDSL(...args) && typeof args[0] === 'string') { + return this.applyLayoutDSL(args[0]); + } + const cols = args.map(arg => { + if (typeof arg === 'string') { + return this.colFromString(arg); + } + return arg; + }); + this.rows.push(cols); + return cols; + } + shouldApplyLayoutDSL(...args) { + return args.length === 1 && typeof args[0] === 'string' && + /[\t\n]/.test(args[0]); + } + applyLayoutDSL(str) { + const rows = str.split('\n').map(row => row.split('\t')); + let leftColumnWidth = 0; + // simple heuristic for layout, make sure the + // second column lines up along the left-hand. + // don't allow the first column to take up more + // than 50% of the screen. + rows.forEach(columns => { + if (columns.length > 1 && mixin.stringWidth(columns[0]) > leftColumnWidth) { + leftColumnWidth = Math.min(Math.floor(this.width * 0.5), mixin.stringWidth(columns[0])); + } + }); + // generate a table: + // replacing ' ' with padding calculations. + // using the algorithmically generated width. + rows.forEach(columns => { + this.div(...columns.map((r, i) => { + return { + text: r.trim(), + padding: this.measurePadding(r), + width: (i === 0 && columns.length > 1) ? leftColumnWidth : undefined + }; + })); + }); + return this.rows[this.rows.length - 1]; + } + colFromString(text) { + return { + text, + padding: this.measurePadding(text) + }; + } + measurePadding(str) { + // measure padding without ansi escape codes + const noAnsi = mixin.stripAnsi(str); + return [0, noAnsi.match(/\s*$/)[0].length, 0, noAnsi.match(/^\s*/)[0].length]; + } + toString() { + const lines = []; + this.rows.forEach(row => { + this.rowToString(row, lines); + }); + // don't display any lines with the + // hidden flag set. + return lines + .filter(line => !line.hidden) + .map(line => line.text) + .join('\n'); + } + rowToString(row, lines) { + this.rasterize(row).forEach((rrow, r) => { + let str = ''; + rrow.forEach((col, c) => { + const { width } = row[c]; // the width with padding. + const wrapWidth = this.negatePadding(row[c]); // the width without padding. + let ts = col; // temporary string used during alignment/padding. + if (wrapWidth > mixin.stringWidth(col)) { + ts += ' '.repeat(wrapWidth - mixin.stringWidth(col)); + } + // align the string within its column. + if (row[c].align && row[c].align !== 'left' && this.wrap) { + const fn = align[row[c].align]; + ts = fn(ts, wrapWidth); + if (mixin.stringWidth(ts) < wrapWidth) { + /* c8 ignore start */ + const w = width || 0; + /* c8 ignore stop */ + ts += ' '.repeat(w - mixin.stringWidth(ts) - 1); + } + } + // apply border and padding to string. + const padding = row[c].padding || [0, 0, 0, 0]; + if (padding[left]) { + str += ' '.repeat(padding[left]); + } + str += addBorder(row[c], ts, '| '); + str += ts; + str += addBorder(row[c], ts, ' |'); + if (padding[right]) { + str += ' '.repeat(padding[right]); + } + // if prior row is span, try to render the + // current row on the prior line. + if (r === 0 && lines.length > 0) { + str = this.renderInline(str, lines[lines.length - 1]); + } + }); + // remove trailing whitespace. + lines.push({ + text: str.replace(/ +$/, ''), + span: row.span + }); + }); + return lines; + } + // if the full 'source' can render in + // the target line, do so. 
+ renderInline(source, previousLine) { + const match = source.match(/^ */); + /* c8 ignore start */ + const leadingWhitespace = match ? match[0].length : 0; + /* c8 ignore stop */ + const target = previousLine.text; + const targetTextWidth = mixin.stringWidth(target.trimEnd()); + if (!previousLine.span) { + return source; + } + // if we're not applying wrapping logic, + // just always append to the span. + if (!this.wrap) { + previousLine.hidden = true; + return target + source; + } + if (leadingWhitespace < targetTextWidth) { + return source; + } + previousLine.hidden = true; + return target.trimEnd() + ' '.repeat(leadingWhitespace - targetTextWidth) + source.trimStart(); + } + rasterize(row) { + const rrows = []; + const widths = this.columnWidths(row); + let wrapped; + // word wrap all columns, and create + // a data-structure that is easy to rasterize. + row.forEach((col, c) => { + // leave room for left and right padding. + col.width = widths[c]; + if (this.wrap) { + wrapped = mixin.wrap(col.text, this.negatePadding(col), { hard: true }).split('\n'); + } + else { + wrapped = col.text.split('\n'); + } + if (col.border) { + wrapped.unshift('.' + '-'.repeat(this.negatePadding(col) + 2) + '.'); + wrapped.push("'" + '-'.repeat(this.negatePadding(col) + 2) + "'"); + } + // add top and bottom padding. + if (col.padding) { + wrapped.unshift(...new Array(col.padding[top] || 0).fill('')); + wrapped.push(...new Array(col.padding[bottom] || 0).fill('')); + } + wrapped.forEach((str, r) => { + if (!rrows[r]) { + rrows.push([]); + } + const rrow = rrows[r]; + for (let i = 0; i < c; i++) { + if (rrow[i] === undefined) { + rrow.push(''); + } + } + rrow.push(str); + }); + }); + return rrows; + } + negatePadding(col) { + /* c8 ignore start */ + let wrapWidth = col.width || 0; + /* c8 ignore stop */ + if (col.padding) { + wrapWidth -= (col.padding[left] || 0) + (col.padding[right] || 0); + } + if (col.border) { + wrapWidth -= 4; + } + return wrapWidth; + } + columnWidths(row) { + if (!this.wrap) { + return row.map(col => { + return col.width || mixin.stringWidth(col.text); + }); + } + let unset = row.length; + let remainingWidth = this.width; + // column widths can be set in config. + const widths = row.map(col => { + if (col.width) { + unset--; + remainingWidth -= col.width; + return col.width; + } + return undefined; + }); + // any unset widths should be calculated. + /* c8 ignore start */ + const unsetWidth = unset ? Math.floor(remainingWidth / unset) : 0; + /* c8 ignore stop */ + return widths.map((w, i) => { + if (w === undefined) { + return Math.max(unsetWidth, _minWidth(row[i])); + } + return w; + }); + } +} +function addBorder(col, ts, style) { + if (col.border) { + if (/[.']-+[.']/.test(ts)) { + return ''; + } + if (ts.trim().length !== 0) { + return style; + } + return ' '; + } + return ''; +} +// calculates the minimum width of +// a column, based on padding preferences. 
+function _minWidth(col) { + const padding = col.padding || []; + const minWidth = 1 + (padding[left] || 0) + (padding[right] || 0); + if (col.border) { + return minWidth + 4; + } + return minWidth; +} +function getWindowWidth() { + /* c8 ignore start */ + if (typeof process === 'object' && process.stdout && process.stdout.columns) { + return process.stdout.columns; + } + return 80; +} +/* c8 ignore stop */ +function alignRight(str, width) { + str = str.trim(); + const strWidth = mixin.stringWidth(str); + if (strWidth < width) { + return ' '.repeat(width - strWidth) + str; + } + return str; +} +function alignCenter(str, width) { + str = str.trim(); + const strWidth = mixin.stringWidth(str); + /* c8 ignore start */ + if (strWidth >= width) { + return str; + } + /* c8 ignore stop */ + return ' '.repeat((width - strWidth) >> 1) + str; +} +let mixin; +function cliui(opts, _mixin) { + mixin = _mixin; + return new UI({ + /* c8 ignore start */ + width: (opts === null || opts === void 0 ? void 0 : opts.width) || getWindowWidth(), + wrap: opts === null || opts === void 0 ? void 0 : opts.wrap + /* c8 ignore stop */ + }); +} + +// Bootstrap cliui with CommonJS dependencies: +const stringWidth = require('string-width-cjs'); +const stripAnsi = require('strip-ansi-cjs'); +const wrap = require('wrap-ansi-cjs'); +function ui(opts) { + return cliui(opts, { + stringWidth, + stripAnsi, + wrap + }); +} + +module.exports = ui; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/build/index.d.cts b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/build/index.d.cts new file mode 100644 index 0000000000000000000000000000000000000000..4567f945e81a73dc2e589f153913910fa2f5fea9 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/build/index.d.cts @@ -0,0 +1,43 @@ +interface UIOptions { + width: number; + wrap?: boolean; + rows?: string[]; +} +interface Column { + text: string; + width?: number; + align?: "right" | "left" | "center"; + padding: number[]; + border?: boolean; +} +interface ColumnArray extends Array { + span: boolean; +} +interface Line { + hidden?: boolean; + text: string; + span?: boolean; +} +declare class UI { + width: number; + wrap: boolean; + rows: ColumnArray[]; + constructor(opts: UIOptions); + span(...args: ColumnArray): void; + resetOutput(): void; + div(...args: (Column | string)[]): ColumnArray; + private shouldApplyLayoutDSL; + private applyLayoutDSL; + private colFromString; + private measurePadding; + toString(): string; + rowToString(row: ColumnArray, lines: Line[]): Line[]; + // if the full 'source' can render in + // the target line, do so. 
+ private renderInline; + private rasterize; + private negatePadding; + private columnWidths; +} +declare function ui(opts: UIOptions): UI; +export { ui as default }; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/build/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/build/lib/index.js new file mode 100644 index 0000000000000000000000000000000000000000..587b5ecd3e773b9f5aeb07720b13407a5974b0cc --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/build/lib/index.js @@ -0,0 +1,302 @@ +'use strict'; +const align = { + right: alignRight, + center: alignCenter +}; +const top = 0; +const right = 1; +const bottom = 2; +const left = 3; +export class UI { + constructor(opts) { + var _a; + this.width = opts.width; + /* c8 ignore start */ + this.wrap = (_a = opts.wrap) !== null && _a !== void 0 ? _a : true; + /* c8 ignore stop */ + this.rows = []; + } + span(...args) { + const cols = this.div(...args); + cols.span = true; + } + resetOutput() { + this.rows = []; + } + div(...args) { + if (args.length === 0) { + this.div(''); + } + if (this.wrap && this.shouldApplyLayoutDSL(...args) && typeof args[0] === 'string') { + return this.applyLayoutDSL(args[0]); + } + const cols = args.map(arg => { + if (typeof arg === 'string') { + return this.colFromString(arg); + } + return arg; + }); + this.rows.push(cols); + return cols; + } + shouldApplyLayoutDSL(...args) { + return args.length === 1 && typeof args[0] === 'string' && + /[\t\n]/.test(args[0]); + } + applyLayoutDSL(str) { + const rows = str.split('\n').map(row => row.split('\t')); + let leftColumnWidth = 0; + // simple heuristic for layout, make sure the + // second column lines up along the left-hand. + // don't allow the first column to take up more + // than 50% of the screen. + rows.forEach(columns => { + if (columns.length > 1 && mixin.stringWidth(columns[0]) > leftColumnWidth) { + leftColumnWidth = Math.min(Math.floor(this.width * 0.5), mixin.stringWidth(columns[0])); + } + }); + // generate a table: + // replacing ' ' with padding calculations. + // using the algorithmically generated width. + rows.forEach(columns => { + this.div(...columns.map((r, i) => { + return { + text: r.trim(), + padding: this.measurePadding(r), + width: (i === 0 && columns.length > 1) ? leftColumnWidth : undefined + }; + })); + }); + return this.rows[this.rows.length - 1]; + } + colFromString(text) { + return { + text, + padding: this.measurePadding(text) + }; + } + measurePadding(str) { + // measure padding without ansi escape codes + const noAnsi = mixin.stripAnsi(str); + return [0, noAnsi.match(/\s*$/)[0].length, 0, noAnsi.match(/^\s*/)[0].length]; + } + toString() { + const lines = []; + this.rows.forEach(row => { + this.rowToString(row, lines); + }); + // don't display any lines with the + // hidden flag set. + return lines + .filter(line => !line.hidden) + .map(line => line.text) + .join('\n'); + } + rowToString(row, lines) { + this.rasterize(row).forEach((rrow, r) => { + let str = ''; + rrow.forEach((col, c) => { + const { width } = row[c]; // the width with padding. + const wrapWidth = this.negatePadding(row[c]); // the width without padding. + let ts = col; // temporary string used during alignment/padding. + if (wrapWidth > mixin.stringWidth(col)) { + ts += ' '.repeat(wrapWidth - mixin.stringWidth(col)); + } + // align the string within its column. 
+ if (row[c].align && row[c].align !== 'left' && this.wrap) { + const fn = align[row[c].align]; + ts = fn(ts, wrapWidth); + if (mixin.stringWidth(ts) < wrapWidth) { + /* c8 ignore start */ + const w = width || 0; + /* c8 ignore stop */ + ts += ' '.repeat(w - mixin.stringWidth(ts) - 1); + } + } + // apply border and padding to string. + const padding = row[c].padding || [0, 0, 0, 0]; + if (padding[left]) { + str += ' '.repeat(padding[left]); + } + str += addBorder(row[c], ts, '| '); + str += ts; + str += addBorder(row[c], ts, ' |'); + if (padding[right]) { + str += ' '.repeat(padding[right]); + } + // if prior row is span, try to render the + // current row on the prior line. + if (r === 0 && lines.length > 0) { + str = this.renderInline(str, lines[lines.length - 1]); + } + }); + // remove trailing whitespace. + lines.push({ + text: str.replace(/ +$/, ''), + span: row.span + }); + }); + return lines; + } + // if the full 'source' can render in + // the target line, do so. + renderInline(source, previousLine) { + const match = source.match(/^ */); + /* c8 ignore start */ + const leadingWhitespace = match ? match[0].length : 0; + /* c8 ignore stop */ + const target = previousLine.text; + const targetTextWidth = mixin.stringWidth(target.trimEnd()); + if (!previousLine.span) { + return source; + } + // if we're not applying wrapping logic, + // just always append to the span. + if (!this.wrap) { + previousLine.hidden = true; + return target + source; + } + if (leadingWhitespace < targetTextWidth) { + return source; + } + previousLine.hidden = true; + return target.trimEnd() + ' '.repeat(leadingWhitespace - targetTextWidth) + source.trimStart(); + } + rasterize(row) { + const rrows = []; + const widths = this.columnWidths(row); + let wrapped; + // word wrap all columns, and create + // a data-structure that is easy to rasterize. + row.forEach((col, c) => { + // leave room for left and right padding. + col.width = widths[c]; + if (this.wrap) { + wrapped = mixin.wrap(col.text, this.negatePadding(col), { hard: true }).split('\n'); + } + else { + wrapped = col.text.split('\n'); + } + if (col.border) { + wrapped.unshift('.' + '-'.repeat(this.negatePadding(col) + 2) + '.'); + wrapped.push("'" + '-'.repeat(this.negatePadding(col) + 2) + "'"); + } + // add top and bottom padding. + if (col.padding) { + wrapped.unshift(...new Array(col.padding[top] || 0).fill('')); + wrapped.push(...new Array(col.padding[bottom] || 0).fill('')); + } + wrapped.forEach((str, r) => { + if (!rrows[r]) { + rrows.push([]); + } + const rrow = rrows[r]; + for (let i = 0; i < c; i++) { + if (rrow[i] === undefined) { + rrow.push(''); + } + } + rrow.push(str); + }); + }); + return rrows; + } + negatePadding(col) { + /* c8 ignore start */ + let wrapWidth = col.width || 0; + /* c8 ignore stop */ + if (col.padding) { + wrapWidth -= (col.padding[left] || 0) + (col.padding[right] || 0); + } + if (col.border) { + wrapWidth -= 4; + } + return wrapWidth; + } + columnWidths(row) { + if (!this.wrap) { + return row.map(col => { + return col.width || mixin.stringWidth(col.text); + }); + } + let unset = row.length; + let remainingWidth = this.width; + // column widths can be set in config. + const widths = row.map(col => { + if (col.width) { + unset--; + remainingWidth -= col.width; + return col.width; + } + return undefined; + }); + // any unset widths should be calculated. + /* c8 ignore start */ + const unsetWidth = unset ? 
Math.floor(remainingWidth / unset) : 0; + /* c8 ignore stop */ + return widths.map((w, i) => { + if (w === undefined) { + return Math.max(unsetWidth, _minWidth(row[i])); + } + return w; + }); + } +} +function addBorder(col, ts, style) { + if (col.border) { + if (/[.']-+[.']/.test(ts)) { + return ''; + } + if (ts.trim().length !== 0) { + return style; + } + return ' '; + } + return ''; +} +// calculates the minimum width of +// a column, based on padding preferences. +function _minWidth(col) { + const padding = col.padding || []; + const minWidth = 1 + (padding[left] || 0) + (padding[right] || 0); + if (col.border) { + return minWidth + 4; + } + return minWidth; +} +function getWindowWidth() { + /* c8 ignore start */ + if (typeof process === 'object' && process.stdout && process.stdout.columns) { + return process.stdout.columns; + } + return 80; +} +/* c8 ignore stop */ +function alignRight(str, width) { + str = str.trim(); + const strWidth = mixin.stringWidth(str); + if (strWidth < width) { + return ' '.repeat(width - strWidth) + str; + } + return str; +} +function alignCenter(str, width) { + str = str.trim(); + const strWidth = mixin.stringWidth(str); + /* c8 ignore start */ + if (strWidth >= width) { + return str; + } + /* c8 ignore stop */ + return ' '.repeat((width - strWidth) >> 1) + str; +} +let mixin; +export function cliui(opts, _mixin) { + mixin = _mixin; + return new UI({ + /* c8 ignore start */ + width: (opts === null || opts === void 0 ? void 0 : opts.width) || getWindowWidth(), + wrap: opts === null || opts === void 0 ? void 0 : opts.wrap + /* c8 ignore stop */ + }); +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/index.mjs b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/index.mjs new file mode 100644 index 0000000000000000000000000000000000000000..5177519af372221f252cda2d75ac66385f44f753 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/index.mjs @@ -0,0 +1,14 @@ +// Bootstrap cliui with ESM dependencies: +import { cliui } from './build/lib/index.js' + +import stringWidth from 'string-width' +import stripAnsi from 'strip-ansi' +import wrap from 'wrap-ansi' + +export default function ui (opts) { + return cliui(opts, { + stringWidth, + stripAnsi, + wrap + }) +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/ansi-regex/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/ansi-regex/index.js new file mode 100644 index 0000000000000000000000000000000000000000..2cc5ca2419f1b2f191826b449192354ad5c4a065 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/ansi-regex/index.js @@ -0,0 +1,14 @@ +export default function ansiRegex({onlyFirst = false} = {}) { + // Valid string terminator sequences are BEL, ESC\, and 0x9c + const ST = '(?:\\u0007|\\u001B\\u005C|\\u009C)'; + + // OSC sequences only: ESC ] ... ST (non-greedy until the first ST) + const osc = `(?:\\u001B\\][\\s\\S]*?${ST})`; + + // CSI and related: ESC/C1, optional intermediates, optional params (supports ; and :) then final byte + const csi = '[\\u001B\\u009B][[\\]()#;?]*(?:\\d{1,4}(?:[;:]\\d{0,4})*)?[\\dA-PR-TZcf-nq-uy=><~]'; + + const pattern = `${osc}|${csi}`; + + return new RegExp(pattern, onlyFirst ? 
undefined : 'g'); +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/ansi-regex/license b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/ansi-regex/license new file mode 100644 index 0000000000000000000000000000000000000000..fa7ceba3eb4a9657a9db7f3ffca4e4e97a9019de --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/ansi-regex/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (https://sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/ansi-regex/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/ansi-regex/package.json new file mode 100644 index 0000000000000000000000000000000000000000..2efe9ebbe66be160939aa5e4a1c4e4ada16cad6c --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/ansi-regex/package.json @@ -0,0 +1,61 @@ +{ + "name": "ansi-regex", + "version": "6.2.2", + "description": "Regular expression for matching ANSI escape codes", + "license": "MIT", + "repository": "chalk/ansi-regex", + "funding": "https://github.com/chalk/ansi-regex?sponsor=1", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "type": "module", + "exports": "./index.js", + "types": "./index.d.ts", + "sideEffects": false, + "engines": { + "node": ">=12" + }, + "scripts": { + "test": "xo && ava && tsd", + "view-supported": "node fixtures/view-codes.js" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "ansi", + "styles", + "color", + "colour", + "colors", + "terminal", + "console", + "cli", + "string", + "tty", + "escape", + "formatting", + "rgb", + "256", + "shell", + "xterm", + "command-line", + "text", + "regex", + "regexp", + "re", + "match", + "test", + "find", + "pattern" + ], + "devDependencies": { + "ansi-escapes": "^5.0.0", + "ava": "^3.15.0", + "tsd": "^0.21.0", + "xo": "^0.54.2" + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/emoji-regex/LICENSE-MIT.txt b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/emoji-regex/LICENSE-MIT.txt new file mode 100644 index 
0000000000000000000000000000000000000000..a41e0a7ef970ecdd83d82cd99bda97b22077bc62 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/emoji-regex/LICENSE-MIT.txt @@ -0,0 +1,20 @@ +Copyright Mathias Bynens + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/emoji-regex/RGI_Emoji.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/emoji-regex/RGI_Emoji.js new file mode 100644 index 0000000000000000000000000000000000000000..3fbe92410063f1dbd449de68973c54feee187c8e --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/emoji-regex/RGI_Emoji.js @@ -0,0 +1,6 @@ +"use strict"; + +module.exports = function () { + // https://mths.be/emoji + return 
/\uD83C\uDFF4\uDB40\uDC67\uDB40\uDC62(?:\uDB40\uDC77\uDB40\uDC6C\uDB40\uDC73|\uDB40\uDC73\uDB40\uDC63\uDB40\uDC74|\uDB40\uDC65\uDB40\uDC6E\uDB40\uDC67)\uDB40\uDC7F|(?:\uD83E\uDDD1\uD83C\uDFFF\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFF\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB-\uDFFE])|(?:\uD83E\uDDD1\uD83C\uDFFE\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFE\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB-\uDFFD\uDFFF])|(?:\uD83E\uDDD1\uD83C\uDFFD\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFD\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])|(?:\uD83E\uDDD1\uD83C\uDFFC\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFC\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB\uDFFD-\uDFFF])|(?:\uD83E\uDDD1\uD83C\uDFFB\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFB\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFC-\uDFFF])|\uD83D\uDC68(?:\uD83C\uDFFB(?:\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFF])|\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFF]))|\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFC-\uDFFF])|[\u2695\u2696\u2708]\uFE0F|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD]))?|(?:\uD83C[\uDFFC-\uDFFF])\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFF])|\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFF]))|\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83D\uDC68|(?:\uD83D[\uDC68\uDC69])\u200D(?:\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67]))|\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFE])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFE\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFD\uDFFF])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFC\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFD-\uDFFF])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|(?:\uD83C\uDFFF\u200D[\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])\uFE0F|\u200D(?:(?:\uD83D[\uDC68\uDC69])\u200D(?:\uD83D[\uDC66\uDC67])|\uD83D[\uDC66\uDC67])|\uD83C\uDFFF|\uD83C\uDFFE|\uD83C\uDFFD|\uD83C\uDFFC)?|(?:\uD83D\uDC69(?:\uD83C\uDFFB\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D(?:\uD83D[\uDC68\uDC69])|\uD83D[\uDC68\uDC69])|(?:\uD83C[\uDFFC-\uDFFF])\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D(?:\uD83D[\uDC68\uDC69])|\uD83D[\uDC68\uDC69]))|\uD83E\uDDD1(?:\uD83C[\uDFFB-\uDFFF])\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1)(?:\uD
83C[\uDFFB-\uDFFF])|\uD83D\uDC69\u200D\uD83D\uDC69\u200D(?:\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67]))|\uD83D\uDC69(?:\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D(?:\uD83D[\uDC68\uDC69])|\uD83D[\uDC68\uDC69])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFE\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFC\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFB\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD]))|\uD83E\uDDD1(?:\u200D(?:\uD83E\uDD1D\u200D\uD83E\uDDD1|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFE\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFC\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFB\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD]))|\uD83D\uDC69\u200D\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC69\u200D\uD83D\uDC69\u200D(?:\uD83D[\uDC66\uDC67])|\uD83D\uDC69\u200D\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67])|(?:\uD83D\uDC41\uFE0F\u200D\uD83D\uDDE8|\uD83E\uDDD1(?:\uD83C\uDFFF\u200D[\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708]|\uD83C\uDFFB\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|\uD83D\uDC69(?:\uD83C\uDFFF\u200D[\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708]|\uD83C\uDFFB\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|\uD83D\uDE36\u200D\uD83C\uDF2B|\uD83C\uDFF3\uFE0F\u200D\u26A7|\uD83D\uDC3B\u200D\u2744|(?:(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC70\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD4\uDDD6-\uDDDD])(?:\uD83C[\uDFFB-\uDFFF])|\uD83D\uDC6F|\uD83E[\uDD3C\uDDDE\uDDDF])\u200D[\u2640\u2642]|(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\uDD75)(?:\uFE0F|\uD83C[\uDFFB-\uDFFF])\u200D[\u2640\u2642]|\uD83C\uDFF4\u200D\u2620|(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC70\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\u
D83E[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD4\uDDD6-\uDDDD])\u200D[\u2640\u2642]|[\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u2328\u23CF\u23ED-\u23EF\u23F1\u23F2\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB\u25FC\u2600-\u2604\u260E\u2611\u2618\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u2692\u2694-\u2697\u2699\u269B\u269C\u26A0\u26A7\u26B0\u26B1\u26C8\u26CF\u26D1\u26D3\u26E9\u26F0\u26F1\u26F4\u26F7\u26F8\u2702\u2708\u2709\u270F\u2712\u2714\u2716\u271D\u2721\u2733\u2734\u2744\u2747\u2763\u27A1\u2934\u2935\u2B05-\u2B07\u3030\u303D\u3297\u3299]|\uD83C[\uDD70\uDD71\uDD7E\uDD7F\uDE02\uDE37\uDF21\uDF24-\uDF2C\uDF36\uDF7D\uDF96\uDF97\uDF99-\uDF9B\uDF9E\uDF9F\uDFCD\uDFCE\uDFD4-\uDFDF\uDFF5\uDFF7]|\uD83D[\uDC3F\uDCFD\uDD49\uDD4A\uDD6F\uDD70\uDD73\uDD76-\uDD79\uDD87\uDD8A-\uDD8D\uDDA5\uDDA8\uDDB1\uDDB2\uDDBC\uDDC2-\uDDC4\uDDD1-\uDDD3\uDDDC-\uDDDE\uDDE1\uDDE3\uDDE8\uDDEF\uDDF3\uDDFA\uDECB\uDECD-\uDECF\uDEE0-\uDEE5\uDEE9\uDEF0\uDEF3])\uFE0F|\uD83C\uDFF3\uFE0F\u200D\uD83C\uDF08|\uD83D\uDC69\u200D\uD83D\uDC67|\uD83D\uDC69\u200D\uD83D\uDC66|\uD83D\uDE35\u200D\uD83D\uDCAB|\uD83D\uDE2E\u200D\uD83D\uDCA8|\uD83D\uDC15\u200D\uD83E\uDDBA|\uD83E\uDDD1(?:\uD83C\uDFFF|\uD83C\uDFFE|\uD83C\uDFFD|\uD83C\uDFFC|\uD83C\uDFFB)?|\uD83D\uDC69(?:\uD83C\uDFFF|\uD83C\uDFFE|\uD83C\uDFFD|\uD83C\uDFFC|\uD83C\uDFFB)?|\uD83C\uDDFD\uD83C\uDDF0|\uD83C\uDDF6\uD83C\uDDE6|\uD83C\uDDF4\uD83C\uDDF2|\uD83D\uDC08\u200D\u2B1B|\u2764\uFE0F\u200D(?:\uD83D\uDD25|\uD83E\uDE79)|\uD83D\uDC41\uFE0F|\uD83C\uDFF3\uFE0F|\uD83C\uDDFF(?:\uD83C[\uDDE6\uDDF2\uDDFC])|\uD83C\uDDFE(?:\uD83C[\uDDEA\uDDF9])|\uD83C\uDDFC(?:\uD83C[\uDDEB\uDDF8])|\uD83C\uDDFB(?:\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDEE\uDDF3\uDDFA])|\uD83C\uDDFA(?:\uD83C[\uDDE6\uDDEC\uDDF2\uDDF3\uDDF8\uDDFE\uDDFF])|\uD83C\uDDF9(?:\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDED\uDDEF-\uDDF4\uDDF7\uDDF9\uDDFB\uDDFC\uDDFF])|\uD83C\uDDF8(?:\uD83C[\uDDE6-\uDDEA\uDDEC-\uDDF4\uDDF7-\uDDF9\uDDFB\uDDFD-\uDDFF])|\uD83C\uDDF7(?:\uD83C[\uDDEA\uDDF4\uDDF8\uDDFA\uDDFC])|\uD83C\uDDF5(?:\uD83C[\uDDE6\uDDEA-\uDDED\uDDF0-\uDDF3\uDDF7-\uDDF9\uDDFC\uDDFE])|\uD83C\uDDF3(?:\uD83C[\uDDE6\uDDE8\uDDEA-\uDDEC\uDDEE\uDDF1\uDDF4\uDDF5\uDDF7\uDDFA\uDDFF])|\uD83C\uDDF2(?:\uD83C[\uDDE6\uDDE8-\uDDED\uDDF0-\uDDFF])|\uD83C\uDDF1(?:\uD83C[\uDDE6-\uDDE8\uDDEE\uDDF0\uDDF7-\uDDFB\uDDFE])|\uD83C\uDDF0(?:\uD83C[\uDDEA\uDDEC-\uDDEE\uDDF2\uDDF3\uDDF5\uDDF7\uDDFC\uDDFE\uDDFF])|\uD83C\uDDEF(?:\uD83C[\uDDEA\uDDF2\uDDF4\uDDF5])|\uD83C\uDDEE(?:\uD83C[\uDDE8-\uDDEA\uDDF1-\uDDF4\uDDF6-\uDDF9])|\uD83C\uDDED(?:\uD83C[\uDDF0\uDDF2\uDDF3\uDDF7\uDDF9\uDDFA])|\uD83C\uDDEC(?:\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEE\uDDF1-\uDDF3\uDDF5-\uDDFA\uDDFC\uDDFE])|\uD83C\uDDEB(?:\uD83C[\uDDEE-\uDDF0\uDDF2\uDDF4\uDDF7])|\uD83C\uDDEA(?:\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDED\uDDF7-\uDDFA])|\uD83C\uDDE9(?:\uD83C[\uDDEA\uDDEC\uDDEF\uDDF0\uDDF2\uDDF4\uDDFF])|\uD83C\uDDE8(?:\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDEE\uDDF0-\uDDF5\uDDF7\uDDFA-\uDDFF])|\uD83C\uDDE7(?:\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEF\uDDF1-\uDDF4\uDDF6-\uDDF9\uDDFB\uDDFC\uDDFE\uDDFF])|\uD83C\uDDE6(?:\uD83C[\uDDE8-\uDDEC\uDDEE\uDDF1\uDDF2\uDDF4\uDDF6-\uDDFA\uDDFC\uDDFD\uDDFF])|[#\*0-9]\uFE0F\u20E3|\u2764\uFE0F|(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC70\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD4\uDDD6-\uDDDD])(?:\uD83C[\uDFFB-\uDFFF])|(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\uDD75)(?:\uF
E0F|\uD83C[\uDFFB-\uDFFF])|\uD83C\uDFF4|(?:[\u270A\u270B]|\uD83C[\uDF85\uDFC2\uDFC7]|\uD83D[\uDC42\uDC43\uDC46-\uDC50\uDC66\uDC67\uDC6B-\uDC6D\uDC72\uDC74-\uDC76\uDC78\uDC7C\uDC83\uDC85\uDC8F\uDC91\uDCAA\uDD7A\uDD95\uDD96\uDE4C\uDE4F\uDEC0\uDECC]|\uD83E[\uDD0C\uDD0F\uDD18-\uDD1C\uDD1E\uDD1F\uDD30-\uDD34\uDD36\uDD77\uDDB5\uDDB6\uDDBB\uDDD2\uDDD3\uDDD5])(?:\uD83C[\uDFFB-\uDFFF])|(?:[\u261D\u270C\u270D]|\uD83D[\uDD74\uDD90])(?:\uFE0F|\uD83C[\uDFFB-\uDFFF])|[\u270A\u270B]|\uD83C[\uDF85\uDFC2\uDFC7]|\uD83D[\uDC08\uDC15\uDC3B\uDC42\uDC43\uDC46-\uDC50\uDC66\uDC67\uDC6B-\uDC6D\uDC72\uDC74-\uDC76\uDC78\uDC7C\uDC83\uDC85\uDC8F\uDC91\uDCAA\uDD7A\uDD95\uDD96\uDE2E\uDE35\uDE36\uDE4C\uDE4F\uDEC0\uDECC]|\uD83E[\uDD0C\uDD0F\uDD18-\uDD1C\uDD1E\uDD1F\uDD30-\uDD34\uDD36\uDD77\uDDB5\uDDB6\uDDBB\uDDD2\uDDD3\uDDD5]|\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC70\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD4\uDDD6-\uDDDD]|\uD83D\uDC6F|\uD83E[\uDD3C\uDDDE\uDDDF]|[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B\u2B1C\u2B50\u2B55]|\uD83C[\uDC04\uDCCF\uDD8E\uDD91-\uDD9A\uDE01\uDE1A\uDE2F\uDE32-\uDE36\uDE38-\uDE3A\uDE50\uDE51\uDF00-\uDF20\uDF2D-\uDF35\uDF37-\uDF7C\uDF7E-\uDF84\uDF86-\uDF93\uDFA0-\uDFC1\uDFC5\uDFC6\uDFC8\uDFC9\uDFCF-\uDFD3\uDFE0-\uDFF0\uDFF8-\uDFFF]|\uD83D[\uDC00-\uDC07\uDC09-\uDC14\uDC16-\uDC3A\uDC3C-\uDC3E\uDC40\uDC44\uDC45\uDC51-\uDC65\uDC6A\uDC79-\uDC7B\uDC7D-\uDC80\uDC84\uDC88-\uDC8E\uDC90\uDC92-\uDCA9\uDCAB-\uDCFC\uDCFF-\uDD3D\uDD4B-\uDD4E\uDD50-\uDD67\uDDA4\uDDFB-\uDE2D\uDE2F-\uDE34\uDE37-\uDE44\uDE48-\uDE4A\uDE80-\uDEA2\uDEA4-\uDEB3\uDEB7-\uDEBF\uDEC1-\uDEC5\uDED0-\uDED2\uDED5-\uDED7\uDEEB\uDEEC\uDEF4-\uDEFC\uDFE0-\uDFEB]|\uD83E[\uDD0D\uDD0E\uDD10-\uDD17\uDD1D\uDD20-\uDD25\uDD27-\uDD2F\uDD3A\uDD3F-\uDD45\uDD47-\uDD76\uDD78\uDD7A-\uDDB4\uDDB7\uDDBA\uDDBC-\uDDCB\uDDD0\uDDE0-\uDDFF\uDE70-\uDE74\uDE78-\uDE7A\uDE80-\uDE86\uDE90-\uDEA8\uDEB0-\uDEB6\uDEC0-\uDEC2\uDED0-\uDED6]/g; +}; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/emoji-regex/es2015/RGI_Emoji.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/emoji-regex/es2015/RGI_Emoji.js new file mode 100644 index 0000000000000000000000000000000000000000..ecf32f177908c15bd0faf4f20aeaff2f59f4e973 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/emoji-regex/es2015/RGI_Emoji.js @@ -0,0 +1,6 @@ +"use strict"; + +module.exports = () => { + // https://mths.be/emoji + return 
/\u{1F3F4}\u{E0067}\u{E0062}(?:\u{E0077}\u{E006C}\u{E0073}|\u{E0073}\u{E0063}\u{E0074}|\u{E0065}\u{E006E}\u{E0067})\u{E007F}|(?:\u{1F9D1}\u{1F3FF}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FF}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}-\u{1F3FE}]|(?:\u{1F9D1}\u{1F3FE}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FE}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}-\u{1F3FD}\u{1F3FF}]|(?:\u{1F9D1}\u{1F3FD}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FD}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}\u{1F3FC}\u{1F3FE}\u{1F3FF}]|(?:\u{1F9D1}\u{1F3FC}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FC}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}\u{1F3FD}-\u{1F3FF}]|(?:\u{1F9D1}\u{1F3FB}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FB}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FC}-\u{1F3FF}]|\u{1F468}(?:\u{1F3FB}(?:\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FF}]|\u{1F468}[\u{1F3FB}-\u{1F3FF}])|\u{1F91D}\u200D\u{1F468}[\u{1F3FC}-\u{1F3FF}]|[\u2695\u2696\u2708]\uFE0F|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]))?|[\u{1F3FC}-\u{1F3FF}]\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FF}]|\u{1F468}[\u{1F3FB}-\u{1F3FF}])|\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F468}|[\u{1F468}\u{1F469}]\u200D(?:\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}])|\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FE}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FE}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FD}\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FD}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FC}\u{1F3FE}\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FC}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FD}-\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|(?:\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])\uFE0F|\u200D(?:[\u{1F468}\u{1F469}]\u200D[\u{1F466}\u{1F467}]|[\u{1F466}\u{1F467}])|\u{1F3FF}|\u{1F3FE}|\u{1F3FD}|\u{1F3FC})?|(?:\u{1F469}(?:\u{1F3FB}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D[\u{1F468}\u{1F469}]|[\u{1F468}\u{1F469}])|[\u{1F3FC}-\u{1F3FF}]\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D[\u{1F468}\u{1F469}]|[\u{1F468}\u{1F469}]))|\u{1F9D1}[\u{1F3FB}-\u{1F3FF}]\u200D\u{1F91D}\u200D\u{1F9D1})[\u{1F3FB}-\u{1F3FF}]|\u{1F469}\u200D\u{1F469}\u200D(?:\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}])|\u{1F469}(?:\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D[\u{1F468}\u{1F469}]|[\
u{1F468}\u{1F469}])|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FE}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FD}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FC}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FB}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F9D1}(?:\u200D(?:\u{1F91D}\u200D\u{1F9D1}|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FE}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FD}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FC}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FB}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F469}\u200D\u{1F466}\u200D\u{1F466}|\u{1F469}\u200D\u{1F469}\u200D[\u{1F466}\u{1F467}]|\u{1F469}\u200D\u{1F467}\u200D[\u{1F466}\u{1F467}]|(?:\u{1F441}\uFE0F\u200D\u{1F5E8}|\u{1F9D1}(?:\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708]|\u{1F3FB}\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|\u{1F469}(?:\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708]|\u{1F3FB}\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|\u{1F636}\u200D\u{1F32B}|\u{1F3F3}\uFE0F\u200D\u26A7|\u{1F43B}\u200D\u2744|(?:[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F470}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F935}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D4}\u{1F9D6}-\u{1F9DD}][\u{1F3FB}-\u{1F3FF}]|[\u{1F46F}\u{1F93C}\u{1F9DE}\u{1F9DF}])\u200D[\u2640\u2642]|[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}][\uFE0F\u{1F3FB}-\u{1F3FF}]\u200D[\u2640\u2642]|\u{1F3F4}\u200D\u2620|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F470}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1
F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F935}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D4}\u{1F9D6}-\u{1F9DD}]\u200D[\u2640\u2642]|[\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u2328\u23CF\u23ED-\u23EF\u23F1\u23F2\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB\u25FC\u2600-\u2604\u260E\u2611\u2618\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u2692\u2694-\u2697\u2699\u269B\u269C\u26A0\u26A7\u26B0\u26B1\u26C8\u26CF\u26D1\u26D3\u26E9\u26F0\u26F1\u26F4\u26F7\u26F8\u2702\u2708\u2709\u270F\u2712\u2714\u2716\u271D\u2721\u2733\u2734\u2744\u2747\u2763\u27A1\u2934\u2935\u2B05-\u2B07\u3030\u303D\u3297\u3299\u{1F170}\u{1F171}\u{1F17E}\u{1F17F}\u{1F202}\u{1F237}\u{1F321}\u{1F324}-\u{1F32C}\u{1F336}\u{1F37D}\u{1F396}\u{1F397}\u{1F399}-\u{1F39B}\u{1F39E}\u{1F39F}\u{1F3CD}\u{1F3CE}\u{1F3D4}-\u{1F3DF}\u{1F3F5}\u{1F3F7}\u{1F43F}\u{1F4FD}\u{1F549}\u{1F54A}\u{1F56F}\u{1F570}\u{1F573}\u{1F576}-\u{1F579}\u{1F587}\u{1F58A}-\u{1F58D}\u{1F5A5}\u{1F5A8}\u{1F5B1}\u{1F5B2}\u{1F5BC}\u{1F5C2}-\u{1F5C4}\u{1F5D1}-\u{1F5D3}\u{1F5DC}-\u{1F5DE}\u{1F5E1}\u{1F5E3}\u{1F5E8}\u{1F5EF}\u{1F5F3}\u{1F5FA}\u{1F6CB}\u{1F6CD}-\u{1F6CF}\u{1F6E0}-\u{1F6E5}\u{1F6E9}\u{1F6F0}\u{1F6F3}])\uFE0F|\u{1F3F3}\uFE0F\u200D\u{1F308}|\u{1F469}\u200D\u{1F467}|\u{1F469}\u200D\u{1F466}|\u{1F635}\u200D\u{1F4AB}|\u{1F62E}\u200D\u{1F4A8}|\u{1F415}\u200D\u{1F9BA}|\u{1F9D1}(?:\u{1F3FF}|\u{1F3FE}|\u{1F3FD}|\u{1F3FC}|\u{1F3FB})?|\u{1F469}(?:\u{1F3FF}|\u{1F3FE}|\u{1F3FD}|\u{1F3FC}|\u{1F3FB})?|\u{1F1FD}\u{1F1F0}|\u{1F1F6}\u{1F1E6}|\u{1F1F4}\u{1F1F2}|\u{1F408}\u200D\u2B1B|\u2764\uFE0F\u200D[\u{1F525}\u{1FA79}]|\u{1F441}\uFE0F|\u{1F3F3}\uFE0F|\u{1F1FF}[\u{1F1E6}\u{1F1F2}\u{1F1FC}]|\u{1F1FE}[\u{1F1EA}\u{1F1F9}]|\u{1F1FC}[\u{1F1EB}\u{1F1F8}]|\u{1F1FB}[\u{1F1E6}\u{1F1E8}\u{1F1EA}\u{1F1EC}\u{1F1EE}\u{1F1F3}\u{1F1FA}]|\u{1F1FA}[\u{1F1E6}\u{1F1EC}\u{1F1F2}\u{1F1F3}\u{1F1F8}\u{1F1FE}\u{1F1FF}]|\u{1F1F9}[\u{1F1E6}\u{1F1E8}\u{1F1E9}\u{1F1EB}-\u{1F1ED}\u{1F1EF}-\u{1F1F4}\u{1F1F7}\u{1F1F9}\u{1F1FB}\u{1F1FC}\u{1F1FF}]|\u{1F1F8}[\u{1F1E6}-\u{1F1EA}\u{1F1EC}-\u{1F1F4}\u{1F1F7}-\u{1F1F9}\u{1F1FB}\u{1F1FD}-\u{1F1FF}]|\u{1F1F7}[\u{1F1EA}\u{1F1F4}\u{1F1F8}\u{1F1FA}\u{1F1FC}]|\u{1F1F5}[\u{1F1E6}\u{1F1EA}-\u{1F1ED}\u{1F1F0}-\u{1F1F3}\u{1F1F7}-\u{1F1F9}\u{1F1FC}\u{1F1FE}]|\u{1F1F3}[\u{1F1E6}\u{1F1E8}\u{1F1EA}-\u{1F1EC}\u{1F1EE}\u{1F1F1}\u{1F1F4}\u{1F1F5}\u{1F1F7}\u{1F1FA}\u{1F1FF}]|\u{1F1F2}[\u{1F1E6}\u{1F1E8}-\u{1F1ED}\u{1F1F0}-\u{1F1FF}]|\u{1F1F1}[\u{1F1E6}-\u{1F1E8}\u{1F1EE}\u{1F1F0}\u{1F1F7}-\u{1F1FB}\u{1F1FE}]|\u{1F1F0}[\u{1F1EA}\u{1F1EC}-\u{1F1EE}\u{1F1F2}\u{1F1F3}\u{1F1F5}\u{1F1F7}\u{1F1FC}\u{1F1FE}\u{1F1FF}]|\u{1F1EF}[\u{1F1EA}\u{1F1F2}\u{1F1F4}\u{1F1F5}]|\u{1F1EE}[\u{1F1E8}-\u{1F1EA}\u{1F1F1}-\u{1F1F4}\u{1F1F6}-\u{1F1F9}]|\u{1F1ED}[\u{1F1F0}\u{1F1F2}\u{1F1F3}\u{1F1F7}\u{1F1F9}\u{1F1FA}]|\u{1F1EC}[\u{1F1E6}\u{1F1E7}\u{1F1E9}-\u{1F1EE}\u{1F1F1}-\u{1F1F3}\u{1F1F5}-\u{1F1FA}\u{1F1FC}\u{1F1FE}]|\u{1F1EB}[\u{1F1EE}-\u{1F1F0}\u{1F1F2}\u{1F1F4}\u{1F1F7}]|\u{1F1EA}[\u{1F1E6}\u{1F1E8}\u{1F1EA}\u{1F1EC}\u{1F1ED}\u{1F1F7}-\u{1F1FA}]|\u{1F1E9}[\u{1F1EA}\u{1F1EC}\u{1F1EF}\u{1F1F0}\u{1F1F2}\u{1F1F4}\u{1F1FF}]|\u{1F1E8}[\u{1F1E6}\u{1F1E8}\u{1F1E9}\u{1F1EB}-\u{1F1EE}\u{1F1F0}-\u{1F1F5}\u{1F1F7}\u{1F1FA}-\u{1F1FF}]|\u{1F1E7}[\u{1F1E6}\u{1F1E7}\u{1F1E9}-\u{1F1EF}\u{1F1F1}-\u{1F1F4}\u{1F1F6}-\u{1F1F9}\u{1F1FB}\u{1F1FC}\u{1F1FE}\u{1F1FF}]|\u{1F1E6}[\u{1F1E8}-\u{1F1EC}\u{1F1EE}\u{1F1F1}\u{1F1F2}\u{1F1F4}\u{1F1F6}-\u{1F1FA}\u{1
F1FC}\u{1F1FD}\u{1F1FF}]|[#\*0-9]\uFE0F\u20E3|\u2764\uFE0F|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F470}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F935}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D4}\u{1F9D6}-\u{1F9DD}][\u{1F3FB}-\u{1F3FF}]|[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}][\uFE0F\u{1F3FB}-\u{1F3FF}]|\u{1F3F4}|[\u270A\u270B\u{1F385}\u{1F3C2}\u{1F3C7}\u{1F442}\u{1F443}\u{1F446}-\u{1F450}\u{1F466}\u{1F467}\u{1F46B}-\u{1F46D}\u{1F472}\u{1F474}-\u{1F476}\u{1F478}\u{1F47C}\u{1F483}\u{1F485}\u{1F48F}\u{1F491}\u{1F4AA}\u{1F57A}\u{1F595}\u{1F596}\u{1F64C}\u{1F64F}\u{1F6C0}\u{1F6CC}\u{1F90C}\u{1F90F}\u{1F918}-\u{1F91C}\u{1F91E}\u{1F91F}\u{1F930}-\u{1F934}\u{1F936}\u{1F977}\u{1F9B5}\u{1F9B6}\u{1F9BB}\u{1F9D2}\u{1F9D3}\u{1F9D5}][\u{1F3FB}-\u{1F3FF}]|[\u261D\u270C\u270D\u{1F574}\u{1F590}][\uFE0F\u{1F3FB}-\u{1F3FF}]|[\u270A\u270B\u{1F385}\u{1F3C2}\u{1F3C7}\u{1F408}\u{1F415}\u{1F43B}\u{1F442}\u{1F443}\u{1F446}-\u{1F450}\u{1F466}\u{1F467}\u{1F46B}-\u{1F46D}\u{1F472}\u{1F474}-\u{1F476}\u{1F478}\u{1F47C}\u{1F483}\u{1F485}\u{1F48F}\u{1F491}\u{1F4AA}\u{1F57A}\u{1F595}\u{1F596}\u{1F62E}\u{1F635}\u{1F636}\u{1F64C}\u{1F64F}\u{1F6C0}\u{1F6CC}\u{1F90C}\u{1F90F}\u{1F918}-\u{1F91C}\u{1F91E}\u{1F91F}\u{1F930}-\u{1F934}\u{1F936}\u{1F977}\u{1F9B5}\u{1F9B6}\u{1F9BB}\u{1F9D2}\u{1F9D3}\u{1F9D5}]|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F470}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F935}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D4}\u{1F9D6}-\u{1F9DD}]|[\u{1F46F}\u{1F93C}\u{1F9DE}\u{1F9DF}]|[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B\u2B1C\u2B50\u2B55\u{1F004}\u{1F0CF}\u{1F18E}\u{1F191}-\u{1F19A}\u{1F201}\u{1F21A}\u{1F22F}\u{1F232}-\u{1F236}\u{1F238}-\u{1F23A}\u{1F250}\u{1F251}\u{1F300}-\u{1F320}\u{1F32D}-\u{1F335}\u{1F337}-\u{1F37C}\u{1F37E}-\u{1F384}\u{1F386}-\u{1F393}\u{1F3A0}-\u{1F3C1}\u{1F3C5}\u{1F3C6}\u{1F3C8}\u{1F3C9}\u{1F3CF}-\u{1F3D3}\u{1F3E0}-\u{1F3F0}\u{1F3F8}-\u{1F407}\u{1F409}-\u{1F414}\u{1F416}-\u{1F43A}\u{1F43C}-\u{1F43E}\u{1F440}\u{1F444}\u{1F445}\u{1F451}-\u{1F465}\u{1F46A}\u{1F479}-\u{1F47B}\u{1F47D}-\u{1F480}\u{1F484}\u{1F488}-\u{1F48E}\u{1F490}\u{1F492}-\u{1F4A9}\u{1F4AB}-\u{1F4FC}\u{1F4FF}-\u{1F53D}\u{1F54B}-\u{1F54E}\u{1F550}-\u{1F567}\u{1F5A4}\u{1F5FB}-\u{1F62D}\u{1F62F}-\u{1F634}\u{1F637}-\u{1F644}\u{1F648}-\u{1F64A}\u{1F680}-\u{1F6A2}\u{1F6A4}-\u{1F6B3}\u{1F6B7}-\u{1F6BF}\u{1F6C1}-\u{1F6C5}\u{1F6D0}-\u{1F6D2}\u{1F6D5}-\u{1F6D7}\u{1F6EB}\u{1F6EC}\u{1F6F4}-\u{1F6FC}\u{1F7E0}-\u{1F7EB}\u{1F90D}\u{1F90E}\u{1F910}-\u{1F917}\u{1F91D}\u{1F920}-\u{1F925}\u{1F927}-\u{1F92F}\u{1F93A}\u{1F93F}-\u{1F945}\u{1F947}-\u{1F976}\u{1F978}\u{1F97A}-\u{1F9B4}\u{1F9B7}\u{1F9BA}\u{1F9BC}-\u{1F9CB}\u{1F9D0}\u{1F9E0}-\u{1F9FF}\u{1FA70}-\u{1FA74}\u{1FA78}-\u{1FA7A}\u{1FA80}-\u{1FA86}\u{1FA90}-\u{1FAA8}\u{1FAB0}-\u{1FAB6}\u{1FAC0}-\u{1FAC2}\u{1FAD0}-\u{1FAD6}]/gu; +}; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/emoji-regex/es2015/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/emoji-regex/es2015/index.js new 
file mode 100644 index 0000000000000000000000000000000000000000..1a4fc8d0dcc3226557510213268f72fd5c6ccfdb --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/emoji-regex/es2015/index.js @@ -0,0 +1,6 @@ +"use strict"; + +module.exports = () => { + // https://mths.be/emoji + return /\u{1F3F4}\u{E0067}\u{E0062}(?:\u{E0077}\u{E006C}\u{E0073}|\u{E0073}\u{E0063}\u{E0074}|\u{E0065}\u{E006E}\u{E0067})\u{E007F}|(?:\u{1F9D1}\u{1F3FF}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FF}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}-\u{1F3FE}]|(?:\u{1F9D1}\u{1F3FE}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FE}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}-\u{1F3FD}\u{1F3FF}]|(?:\u{1F9D1}\u{1F3FD}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FD}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}\u{1F3FC}\u{1F3FE}\u{1F3FF}]|(?:\u{1F9D1}\u{1F3FC}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FC}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}\u{1F3FD}-\u{1F3FF}]|(?:\u{1F9D1}\u{1F3FB}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FB}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FC}-\u{1F3FF}]|\u{1F468}(?:\u{1F3FB}(?:\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FF}]|\u{1F468}[\u{1F3FB}-\u{1F3FF}])|\u{1F91D}\u200D\u{1F468}[\u{1F3FC}-\u{1F3FF}]|[\u2695\u2696\u2708]\uFE0F|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]))?|[\u{1F3FC}-\u{1F3FF}]\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FF}]|\u{1F468}[\u{1F3FB}-\u{1F3FF}])|\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F468}|[\u{1F468}\u{1F469}]\u200D(?:\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}])|\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FE}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FE}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FD}\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FD}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FC}\u{1F3FE}\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FC}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FD}-\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|(?:\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])\uFE0F|\u200D(?:[\u{1F468}\u{1F469}]\u200D[\u{1F466}\u{1F467}]|[\u{1F466}\u{1F467}])|\u{1F3FF}|\u{1F3FE}|\u{1F3FD}|\u{1F3FC})?|(?:\u{1F469}(?:\u{1F3FB}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D[\u{1F468}\u{1F469}]|[\u{1F468}\u{1F469}])|[\u{1F3FC}-\u{1F3F
F}]\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D[\u{1F468}\u{1F469}]|[\u{1F468}\u{1F469}]))|\u{1F9D1}[\u{1F3FB}-\u{1F3FF}]\u200D\u{1F91D}\u200D\u{1F9D1})[\u{1F3FB}-\u{1F3FF}]|\u{1F469}\u200D\u{1F469}\u200D(?:\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}])|\u{1F469}(?:\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D[\u{1F468}\u{1F469}]|[\u{1F468}\u{1F469}])|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FE}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FD}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FC}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FB}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F9D1}(?:\u200D(?:\u{1F91D}\u200D\u{1F9D1}|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FE}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FD}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FC}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FB}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F469}\u200D\u{1F466}\u200D\u{1F466}|\u{1F469}\u200D\u{1F469}\u200D[\u{1F466}\u{1F467}]|\u{1F469}\u200D\u{1F467}\u200D[\u{1F466}\u{1F467}]|(?:\u{1F441}\uFE0F\u200D\u{1F5E8}|\u{1F9D1}(?:\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708]|\u{1F3FB}\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|\u{1F469}(?:\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708]|\u{1F3FB}\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|\u{1F636}\u200D\u{1F32B}|\u{1F3F3}\uFE0F\u200D\u26A7|\u{1F43B}\u200D\u2744|(?:[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F470}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F935}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\
u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D4}\u{1F9D6}-\u{1F9DD}][\u{1F3FB}-\u{1F3FF}]|[\u{1F46F}\u{1F93C}\u{1F9DE}\u{1F9DF}])\u200D[\u2640\u2642]|[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}][\uFE0F\u{1F3FB}-\u{1F3FF}]\u200D[\u2640\u2642]|\u{1F3F4}\u200D\u2620|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F470}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F935}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D4}\u{1F9D6}-\u{1F9DD}]\u200D[\u2640\u2642]|[\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u2328\u23CF\u23ED-\u23EF\u23F1\u23F2\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB\u25FC\u2600-\u2604\u260E\u2611\u2618\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u2692\u2694-\u2697\u2699\u269B\u269C\u26A0\u26A7\u26B0\u26B1\u26C8\u26CF\u26D1\u26D3\u26E9\u26F0\u26F1\u26F4\u26F7\u26F8\u2702\u2708\u2709\u270F\u2712\u2714\u2716\u271D\u2721\u2733\u2734\u2744\u2747\u2763\u27A1\u2934\u2935\u2B05-\u2B07\u3030\u303D\u3297\u3299\u{1F170}\u{1F171}\u{1F17E}\u{1F17F}\u{1F202}\u{1F237}\u{1F321}\u{1F324}-\u{1F32C}\u{1F336}\u{1F37D}\u{1F396}\u{1F397}\u{1F399}-\u{1F39B}\u{1F39E}\u{1F39F}\u{1F3CD}\u{1F3CE}\u{1F3D4}-\u{1F3DF}\u{1F3F5}\u{1F3F7}\u{1F43F}\u{1F4FD}\u{1F549}\u{1F54A}\u{1F56F}\u{1F570}\u{1F573}\u{1F576}-\u{1F579}\u{1F587}\u{1F58A}-\u{1F58D}\u{1F5A5}\u{1F5A8}\u{1F5B1}\u{1F5B2}\u{1F5BC}\u{1F5C2}-\u{1F5C4}\u{1F5D1}-\u{1F5D3}\u{1F5DC}-\u{1F5DE}\u{1F5E1}\u{1F5E3}\u{1F5E8}\u{1F5EF}\u{1F5F3}\u{1F5FA}\u{1F6CB}\u{1F6CD}-\u{1F6CF}\u{1F6E0}-\u{1F6E5}\u{1F6E9}\u{1F6F0}\u{1F6F3}])\uFE0F|\u{1F3F3}\uFE0F\u200D\u{1F308}|\u{1F469}\u200D\u{1F467}|\u{1F469}\u200D\u{1F466}|\u{1F635}\u200D\u{1F4AB}|\u{1F62E}\u200D\u{1F4A8}|\u{1F415}\u200D\u{1F9BA}|\u{1F9D1}(?:\u{1F3FF}|\u{1F3FE}|\u{1F3FD}|\u{1F3FC}|\u{1F3FB})?|\u{1F469}(?:\u{1F3FF}|\u{1F3FE}|\u{1F3FD}|\u{1F3FC}|\u{1F3FB})?|\u{1F1FD}\u{1F1F0}|\u{1F1F6}\u{1F1E6}|\u{1F1F4}\u{1F1F2}|\u{1F408}\u200D\u2B1B|\u2764\uFE0F\u200D[\u{1F525}\u{1FA79}]|\u{1F441}\uFE0F|\u{1F3F3}\uFE0F|\u{1F1FF}[\u{1F1E6}\u{1F1F2}\u{1F1FC}]|\u{1F1FE}[\u{1F1EA}\u{1F1F9}]|\u{1F1FC}[\u{1F1EB}\u{1F1F8}]|\u{1F1FB}[\u{1F1E6}\u{1F1E8}\u{1F1EA}\u{1F1EC}\u{1F1EE}\u{1F1F3}\u{1F1FA}]|\u{1F1FA}[\u{1F1E6}\u{1F1EC}\u{1F1F2}\u{1F1F3}\u{1F1F8}\u{1F1FE}\u{1F1FF}]|\u{1F1F9}[\u{1F1E6}\u{1F1E8}\u{1F1E9}\u{1F1EB}-\u{1F1ED}\u{1F1EF}-\u{1F1F4}\u{1F1F7}\u{1F1F9}\u{1F1FB}\u{1F1FC}\u{1F1FF}]|\u{1F1F8}[\u{1F1E6}-\u{1F1EA}\u{1F1EC}-\u{1F1F4}\u{1F1F7}-\u{1F1F9}\u{1F1FB}\u{1F1FD}-\u{1F1FF}]|\u{1F1F7}[\u{1F1EA}\u{1F1F4}\u{1F1F8}\u{1F1FA}\u{1F1FC}]|\u{1F1F5}[\u{1F1E6}\u{1F1EA}-\u{1F1ED}\u{1F1F0}-\u{1F1F3}\u{1F1F7}-\u{1F1F9}\u{1F1FC}\u{1F1FE}]|\u{1F1F3}[\u{1F1E6}\u{1F1E8}\u{1F1EA}-\u{1F1EC}\u{1F1EE}\u{1F1F1}\u{1F1F4}\u{1F1F5}\u{1F1F7}\u{1F1FA}\u{1F1FF}]|\u{1F1F2}[\u{1F1E6}\u{1F1E8}-\u{1F1ED}\u{1F1F0}-\u{1F1FF}]|\u{1F1F1}[\u{1F1E6}-\u{1F1E8}\u{1F1EE}\u{1F1F0}\u{1F1F7}-\u{1F1FB}\u{1F1FE}]|\u{1F1F0}[\u{1F1EA}\u{1F1EC}-\u{1F1EE}\u{1F1F2}\u{1F1F3}\u{1F1F5}\u{1F1F7}\u{1F1FC}\u{1F1FE}\u{1F1FF}]|\u{1F1EF}[\u{1F1EA}\u{1F1F2}\u{1F1F4}\u{1F1F5}]|\u{1F1EE}[\u{1F1E8}-\u{1F1EA}\u{1F1F1}-\u{1F1F4}\u{1F1F6}-\u{1F1F9}]|\u{1F1ED}[\u{1F1F0}\u{1F1F2}\u{1F1F3}\u{1F1F7}\u{1F1F9}\u{1F1FA}]|\u{1F1EC}[\u{1F1E6}\u{1F1E7}\u{1F1E9}-\u{1F1EE}\u{1F1F1}-\u{1F1F3}\u{1F1F5}-\u{1F1FA}\u{1F1FC}\u{1F1FE}]|\u{1F1EB}[\u{1F1EE}-\u{1F1F0}\u{1F1F2}\u{1F1F4}\u{1F1F7}]|\u{1F1EA}[\u{1F1E6}\u{1F1E8}\u{1F1EA}\u{1F1EC}\u{1F1ED}\u{1F1F7}-\u{1F1FA}]|\u{1F1E9}[\u{1F1EA}\u{1F1EC}\u{1F1EF}\u{1F1F
0}\u{1F1F2}\u{1F1F4}\u{1F1FF}]|\u{1F1E8}[\u{1F1E6}\u{1F1E8}\u{1F1E9}\u{1F1EB}-\u{1F1EE}\u{1F1F0}-\u{1F1F5}\u{1F1F7}\u{1F1FA}-\u{1F1FF}]|\u{1F1E7}[\u{1F1E6}\u{1F1E7}\u{1F1E9}-\u{1F1EF}\u{1F1F1}-\u{1F1F4}\u{1F1F6}-\u{1F1F9}\u{1F1FB}\u{1F1FC}\u{1F1FE}\u{1F1FF}]|\u{1F1E6}[\u{1F1E8}-\u{1F1EC}\u{1F1EE}\u{1F1F1}\u{1F1F2}\u{1F1F4}\u{1F1F6}-\u{1F1FA}\u{1F1FC}\u{1F1FD}\u{1F1FF}]|[#\*0-9]\uFE0F\u20E3|\u2764\uFE0F|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F470}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F935}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D4}\u{1F9D6}-\u{1F9DD}][\u{1F3FB}-\u{1F3FF}]|[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}][\uFE0F\u{1F3FB}-\u{1F3FF}]|\u{1F3F4}|[\u270A\u270B\u{1F385}\u{1F3C2}\u{1F3C7}\u{1F442}\u{1F443}\u{1F446}-\u{1F450}\u{1F466}\u{1F467}\u{1F46B}-\u{1F46D}\u{1F472}\u{1F474}-\u{1F476}\u{1F478}\u{1F47C}\u{1F483}\u{1F485}\u{1F48F}\u{1F491}\u{1F4AA}\u{1F57A}\u{1F595}\u{1F596}\u{1F64C}\u{1F64F}\u{1F6C0}\u{1F6CC}\u{1F90C}\u{1F90F}\u{1F918}-\u{1F91C}\u{1F91E}\u{1F91F}\u{1F930}-\u{1F934}\u{1F936}\u{1F977}\u{1F9B5}\u{1F9B6}\u{1F9BB}\u{1F9D2}\u{1F9D3}\u{1F9D5}][\u{1F3FB}-\u{1F3FF}]|[\u261D\u270C\u270D\u{1F574}\u{1F590}][\uFE0F\u{1F3FB}-\u{1F3FF}]|[\u270A\u270B\u{1F385}\u{1F3C2}\u{1F3C7}\u{1F408}\u{1F415}\u{1F43B}\u{1F442}\u{1F443}\u{1F446}-\u{1F450}\u{1F466}\u{1F467}\u{1F46B}-\u{1F46D}\u{1F472}\u{1F474}-\u{1F476}\u{1F478}\u{1F47C}\u{1F483}\u{1F485}\u{1F48F}\u{1F491}\u{1F4AA}\u{1F57A}\u{1F595}\u{1F596}\u{1F62E}\u{1F635}\u{1F636}\u{1F64C}\u{1F64F}\u{1F6C0}\u{1F6CC}\u{1F90C}\u{1F90F}\u{1F918}-\u{1F91C}\u{1F91E}\u{1F91F}\u{1F930}-\u{1F934}\u{1F936}\u{1F977}\u{1F9B5}\u{1F9B6}\u{1F9BB}\u{1F9D2}\u{1F9D3}\u{1F9D5}]|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F470}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F935}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D4}\u{1F9D6}-\u{1F9DD}]|[\u{1F46F}\u{1F93C}\u{1F9DE}\u{1F9DF}]|[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B\u2B1C\u2B50\u2B55\u{1F004}\u{1F0CF}\u{1F18E}\u{1F191}-\u{1F19A}\u{1F201}\u{1F21A}\u{1F22F}\u{1F232}-\u{1F236}\u{1F238}-\u{1F23A}\u{1F250}\u{1F251}\u{1F300}-\u{1F320}\u{1F32D}-\u{1F335}\u{1F337}-\u{1F37C}\u{1F37E}-\u{1F384}\u{1F386}-\u{1F393}\u{1F3A0}-\u{1F3C1}\u{1F3C5}\u{1F3C6}\u{1F3C8}\u{1F3C9}\u{1F3CF}-\u{1F3D3}\u{1F3E0}-\u{1F3F0}\u{1F3F8}-\u{1F407}\u{1F409}-\u{1F414}\u{1F416}-\u{1F43A}\u{1F43C}-\u{1F43E}\u{1F440}\u{1F444}\u{1F445}\u{1F451}-\u{1F465}\u{1F46A}\u{1F479}-\u{1F47B}\u{1F47D}-\u{1F480}\u{1F484}\u{1F488}-\u{1F48E}\u{1F490}\u{1F492}-\u{1F4A9}\u{1F4AB}-\u{1F4FC}\u{1F4FF}-\u{1F53D}\u{1F54B}-\u{1F54E}\u{1F550}-\u{1F567}\u{1F5A4}\u{1F5FB}-\u{1F62D}\u{1F62F}-\u{1F634}\u{1F637}-\u{1F644}\u{1F648}-\u{1F64A}\u{1F680}-\u{1F6A2}\u{1F6A4}-\u{1F6B3}\u{1F6B7}-\u{1F6BF}\u{1F6C1}-\u{1F6C5}\u{1F6D0}-\u{1F6D2}\u{1F6D5}-\u{1F6D7}\u{1F6EB}\u{1F6EC}\u{1F6F4}-\u{1F6FC}\u{1F7E0}-\u{1F7EB}\u{1F90D}\u{1F90E}\u{1F910}-\u{1F917}\u{1F91D}\u{1F920}-\u{1F925}\u{1F927}-\u{1F92F}\u{1F93A}\u{1F93F}-\u{1F945}\u{1F947}-\u{1F976}\u{1F978}\u{1F97A}-\u{1F9B4}\u{1F9B7}\u{1F9BA}\u{1F9BC}-\u{1F9CB}\u{1F9D0}\u{1F9E0}-\u{1F9FF}\u{1FA70}-\u{1FA74}\u{1FA78}-\u{1FA7A}\u{1FA80}-\u{1FA86}\u{1FA90}-
\u{1FAA8}\u{1FAB0}-\u{1FAB6}\u{1FAC0}-\u{1FAC2}\u{1FAD0}-\u{1FAD6}]|[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u270A\u270B\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B\u2B1C\u2B50\u2B55\u{1F004}\u{1F0CF}\u{1F18E}\u{1F191}-\u{1F19A}\u{1F1E6}-\u{1F1FF}\u{1F201}\u{1F21A}\u{1F22F}\u{1F232}-\u{1F236}\u{1F238}-\u{1F23A}\u{1F250}\u{1F251}\u{1F300}-\u{1F320}\u{1F32D}-\u{1F335}\u{1F337}-\u{1F37C}\u{1F37E}-\u{1F393}\u{1F3A0}-\u{1F3CA}\u{1F3CF}-\u{1F3D3}\u{1F3E0}-\u{1F3F0}\u{1F3F4}\u{1F3F8}-\u{1F43E}\u{1F440}\u{1F442}-\u{1F4FC}\u{1F4FF}-\u{1F53D}\u{1F54B}-\u{1F54E}\u{1F550}-\u{1F567}\u{1F57A}\u{1F595}\u{1F596}\u{1F5A4}\u{1F5FB}-\u{1F64F}\u{1F680}-\u{1F6C5}\u{1F6CC}\u{1F6D0}-\u{1F6D2}\u{1F6D5}-\u{1F6D7}\u{1F6EB}\u{1F6EC}\u{1F6F4}-\u{1F6FC}\u{1F7E0}-\u{1F7EB}\u{1F90C}-\u{1F93A}\u{1F93C}-\u{1F945}\u{1F947}-\u{1F978}\u{1F97A}-\u{1F9CB}\u{1F9CD}-\u{1F9FF}\u{1FA70}-\u{1FA74}\u{1FA78}-\u{1FA7A}\u{1FA80}-\u{1FA86}\u{1FA90}-\u{1FAA8}\u{1FAB0}-\u{1FAB6}\u{1FAC0}-\u{1FAC2}\u{1FAD0}-\u{1FAD6}]|[#\*0-9\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u231A\u231B\u2328\u23CF\u23E9-\u23F3\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB-\u25FE\u2600-\u2604\u260E\u2611\u2614\u2615\u2618\u261D\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u2648-\u2653\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u267F\u2692-\u2697\u2699\u269B\u269C\u26A0\u26A1\u26A7\u26AA\u26AB\u26B0\u26B1\u26BD\u26BE\u26C4\u26C5\u26C8\u26CE\u26CF\u26D1\u26D3\u26D4\u26E9\u26EA\u26F0-\u26F5\u26F7-\u26FA\u26FD\u2702\u2705\u2708-\u270D\u270F\u2712\u2714\u2716\u271D\u2721\u2728\u2733\u2734\u2744\u2747\u274C\u274E\u2753-\u2755\u2757\u2763\u2764\u2795-\u2797\u27A1\u27B0\u27BF\u2934\u2935\u2B05-\u2B07\u2B1B\u2B1C\u2B50\u2B55\u3030\u303D\u3297\u3299\u{1F004}\u{1F0CF}\u{1F170}\u{1F171}\u{1F17E}\u{1F17F}\u{1F18E}\u{1F191}-\u{1F19A}\u{1F1E6}-\u{1F1FF}\u{1F201}\u{1F202}\u{1F21A}\u{1F22F}\u{1F232}-\u{1F23A}\u{1F250}\u{1F251}\u{1F300}-\u{1F321}\u{1F324}-\u{1F393}\u{1F396}\u{1F397}\u{1F399}-\u{1F39B}\u{1F39E}-\u{1F3F0}\u{1F3F3}-\u{1F3F5}\u{1F3F7}-\u{1F4FD}\u{1F4FF}-\u{1F53D}\u{1F549}-\u{1F54E}\u{1F550}-\u{1F567}\u{1F56F}\u{1F570}\u{1F573}-\u{1F57A}\u{1F587}\u{1F58A}-\u{1F58D}\u{1F590}\u{1F595}\u{1F596}\u{1F5A4}\u{1F5A5}\u{1F5A8}\u{1F5B1}\u{1F5B2}\u{1F5BC}\u{1F5C2}-\u{1F5C4}\u{1F5D1}-\u{1F5D3}\u{1F5DC}-\u{1F5DE}\u{1F5E1}\u{1F5E3}\u{1F5E8}\u{1F5EF}\u{1F5F3}\u{1F5FA}-\u{1F64F}\u{1F680}-\u{1F6C5}\u{1F6CB}-\u{1F6D2}\u{1F6D5}-\u{1F6D7}\u{1F6E0}-\u{1F6E5}\u{1F6E9}\u{1F6EB}\u{1F6EC}\u{1F6F0}\u{1F6F3}-\u{1F6FC}\u{1F7E0}-\u{1F7EB}\u{1F90C}-\u{1F93A}\u{1F93C}-\u{1F945}\u{1F947}-\u{1F978}\u{1F97A}-\u{1F9CB}\u{1F9CD}-\u{1F9FF}\u{1FA70}-\u{1FA74}\u{1FA78}-\u{1FA7A}\u{1FA80}-\u{1FA86}\u{1FA90}-\u{1FAA8}\u{1FAB0}-\u{1FAB6}\u{1FAC0}-\u{1FAC2}\u{1FAD0}-\u{1FAD6}]\uFE0F|[\u261D\u26F9\u270A-\u270D\u{1F385}\u{1F3C2}-\u{1F3C4}\u{1F3C7}\u{1F3CA}-\u{1F3CC}\u{1F442}\u{1F443}\u{1F446}-\u{1F450}\u{1F466}-\u{1F478}\u{1F47C}\u{1F481}-\u{1F483}\u{1F485}-\u{1F487}\u{1F48F}\u{1F491}\u{1F4AA}\u{1F574}\u{1F575}\u{1F57A}\u{1F590}\u{1F595}\u{1F596}\u{1F645}-\u{1F647}\u{1F64B}-\u{1F64F}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F6C0}\u{1F6CC}\u{1F90C}\u{1F90F}\u{1F918}-\u{1F91F}\u{1F926}\u{1F930}-\u{1F939}\u{1F93C}-\u{1F93E}\u{1F977}\u{1F9B5}\u{1F9B6}\u{1F9B8}\u{1F9B9}\u{1F9BB}\u{1F9CD}-\u{1F9CF}\u{1F9D1}-\u{1F9DD}]/gu; +}; diff --git 
a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/emoji-regex/es2015/text.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/emoji-regex/es2015/text.js new file mode 100644 index 0000000000000000000000000000000000000000..8e9f9857583146a8496d3d46fadb480038e2e2dd --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/emoji-regex/es2015/text.js @@ -0,0 +1,6 @@ +"use strict"; + +module.exports = () => { + // https://mths.be/emoji + return /\u{1F3F4}\u{E0067}\u{E0062}(?:\u{E0077}\u{E006C}\u{E0073}|\u{E0073}\u{E0063}\u{E0074}|\u{E0065}\u{E006E}\u{E0067})\u{E007F}|(?:\u{1F9D1}\u{1F3FF}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FF}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}-\u{1F3FE}]|(?:\u{1F9D1}\u{1F3FE}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FE}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}-\u{1F3FD}\u{1F3FF}]|(?:\u{1F9D1}\u{1F3FD}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FD}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}\u{1F3FC}\u{1F3FE}\u{1F3FF}]|(?:\u{1F9D1}\u{1F3FC}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FC}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}\u{1F3FD}-\u{1F3FF}]|(?:\u{1F9D1}\u{1F3FB}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FB}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FC}-\u{1F3FF}]|\u{1F468}(?:\u{1F3FB}(?:\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FF}]|\u{1F468}[\u{1F3FB}-\u{1F3FF}])|\u{1F91D}\u200D\u{1F468}[\u{1F3FC}-\u{1F3FF}]|[\u2695\u2696\u2708]\uFE0F|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]))?|[\u{1F3FC}-\u{1F3FF}]\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FF}]|\u{1F468}[\u{1F3FB}-\u{1F3FF}])|\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F468}|[\u{1F468}\u{1F469}]\u200D(?:\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}])|\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FE}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FE}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FD}\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FD}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FC}\u{1F3FE}\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FC}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FD}-\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|(?:\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u270
8])\uFE0F|\u200D(?:[\u{1F468}\u{1F469}]\u200D[\u{1F466}\u{1F467}]|[\u{1F466}\u{1F467}])|\u{1F3FF}|\u{1F3FE}|\u{1F3FD}|\u{1F3FC})?|(?:\u{1F469}(?:\u{1F3FB}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D[\u{1F468}\u{1F469}]|[\u{1F468}\u{1F469}])|[\u{1F3FC}-\u{1F3FF}]\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D[\u{1F468}\u{1F469}]|[\u{1F468}\u{1F469}]))|\u{1F9D1}[\u{1F3FB}-\u{1F3FF}]\u200D\u{1F91D}\u200D\u{1F9D1})[\u{1F3FB}-\u{1F3FF}]|\u{1F469}\u200D\u{1F469}\u200D(?:\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}])|\u{1F469}(?:\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D[\u{1F468}\u{1F469}]|[\u{1F468}\u{1F469}])|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FE}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FD}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FC}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FB}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F9D1}(?:\u200D(?:\u{1F91D}\u200D\u{1F9D1}|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FE}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FD}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FC}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FB}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F469}\u200D\u{1F466}\u200D\u{1F466}|\u{1F469}\u200D\u{1F469}\u200D[\u{1F466}\u{1F467}]|\u{1F469}\u200D\u{1F467}\u200D[\u{1F466}\u{1F467}]|(?:\u{1F441}\uFE0F\u200D\u{1F5E8}|\u{1F9D1}(?:\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708]|\u{1F3FB}\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|\u{1F469}(?:\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708]|\u{1F3FB}\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|\u{1F636}\u200D\u{1F32B}|\u{1F3F3}\uFE0F\u200D\u26A7|\u{1F43B}\u200D\
u2744|(?:[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F470}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F935}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D4}\u{1F9D6}-\u{1F9DD}][\u{1F3FB}-\u{1F3FF}]|[\u{1F46F}\u{1F93C}\u{1F9DE}\u{1F9DF}])\u200D[\u2640\u2642]|[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}][\uFE0F\u{1F3FB}-\u{1F3FF}]\u200D[\u2640\u2642]|\u{1F3F4}\u200D\u2620|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F470}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F935}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D4}\u{1F9D6}-\u{1F9DD}]\u200D[\u2640\u2642]|[\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u2328\u23CF\u23ED-\u23EF\u23F1\u23F2\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB\u25FC\u2600-\u2604\u260E\u2611\u2618\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u2692\u2694-\u2697\u2699\u269B\u269C\u26A0\u26A7\u26B0\u26B1\u26C8\u26CF\u26D1\u26D3\u26E9\u26F0\u26F1\u26F4\u26F7\u26F8\u2702\u2708\u2709\u270F\u2712\u2714\u2716\u271D\u2721\u2733\u2734\u2744\u2747\u2763\u27A1\u2934\u2935\u2B05-\u2B07\u3030\u303D\u3297\u3299\u{1F170}\u{1F171}\u{1F17E}\u{1F17F}\u{1F202}\u{1F237}\u{1F321}\u{1F324}-\u{1F32C}\u{1F336}\u{1F37D}\u{1F396}\u{1F397}\u{1F399}-\u{1F39B}\u{1F39E}\u{1F39F}\u{1F3CD}\u{1F3CE}\u{1F3D4}-\u{1F3DF}\u{1F3F5}\u{1F3F7}\u{1F43F}\u{1F4FD}\u{1F549}\u{1F54A}\u{1F56F}\u{1F570}\u{1F573}\u{1F576}-\u{1F579}\u{1F587}\u{1F58A}-\u{1F58D}\u{1F5A5}\u{1F5A8}\u{1F5B1}\u{1F5B2}\u{1F5BC}\u{1F5C2}-\u{1F5C4}\u{1F5D1}-\u{1F5D3}\u{1F5DC}-\u{1F5DE}\u{1F5E1}\u{1F5E3}\u{1F5E8}\u{1F5EF}\u{1F5F3}\u{1F5FA}\u{1F6CB}\u{1F6CD}-\u{1F6CF}\u{1F6E0}-\u{1F6E5}\u{1F6E9}\u{1F6F0}\u{1F6F3}])\uFE0F|\u{1F3F3}\uFE0F\u200D\u{1F308}|\u{1F469}\u200D\u{1F467}|\u{1F469}\u200D\u{1F466}|\u{1F635}\u200D\u{1F4AB}|\u{1F62E}\u200D\u{1F4A8}|\u{1F415}\u200D\u{1F9BA}|\u{1F9D1}(?:\u{1F3FF}|\u{1F3FE}|\u{1F3FD}|\u{1F3FC}|\u{1F3FB})?|\u{1F469}(?:\u{1F3FF}|\u{1F3FE}|\u{1F3FD}|\u{1F3FC}|\u{1F3FB})?|\u{1F1FD}\u{1F1F0}|\u{1F1F6}\u{1F1E6}|\u{1F1F4}\u{1F1F2}|\u{1F408}\u200D\u2B1B|\u2764\uFE0F\u200D[\u{1F525}\u{1FA79}]|\u{1F441}\uFE0F|\u{1F3F3}\uFE0F|\u{1F1FF}[\u{1F1E6}\u{1F1F2}\u{1F1FC}]|\u{1F1FE}[\u{1F1EA}\u{1F1F9}]|\u{1F1FC}[\u{1F1EB}\u{1F1F8}]|\u{1F1FB}[\u{1F1E6}\u{1F1E8}\u{1F1EA}\u{1F1EC}\u{1F1EE}\u{1F1F3}\u{1F1FA}]|\u{1F1FA}[\u{1F1E6}\u{1F1EC}\u{1F1F2}\u{1F1F3}\u{1F1F8}\u{1F1FE}\u{1F1FF}]|\u{1F1F9}[\u{1F1E6}\u{1F1E8}\u{1F1E9}\u{1F1EB}-\u{1F1ED}\u{1F1EF}-\u{1F1F4}\u{1F1F7}\u{1F1F9}\u{1F1FB}\u{1F1FC}\u{1F1FF}]|\u{1F1F8}[\u{1F1E6}-\u{1F1EA}\u{1F1EC}-\u{1F1F4}\u{1F1F7}-\u{1F1F9}\u{1F1FB}\u{1F1FD}-\u{1F1FF}]|\u{1F1F7}[\u{1F1EA}\u{1F1F4}\u{1F1F8}\u{1F1FA}\u{1F1FC}]|\u{1F1F5}[\u{1F1E6}\u{1F1EA}-\u{1F1ED}\u{1F1F0}-\u{1F1F3}\u{1F1F7}-\u{1F1F9}\u{1F1FC}\u{1F1FE}]|\u{1F1F3}[\u{1F1E6}\u{1F1E8}\u{1F1EA}-\u{1F1EC}\u{1F1EE}\u{1F1F1}\u{1F1F4}\u{1F1F5}\u{1F1F7}\u{1F1FA}\u{1F1FF}]|\u{1F1F2}[\u{1F1E6}\u{1F1E8}-\u{1F1ED}\u{1F1F0}-\u{1F1FF}]|\u{1F1F1}[\u{1F1E6}-\u{1F1E8}\u{1F1EE}\u{1F1F0}\u{1F1F7}-\u{1F1FB}\u{1F1FE}]|\u{1F1F0}[\u{1F1EA}\u{1F1EC}-\u{1F1EE}\u{1F1F2}\u{1F1F3}\u{1F1F5}\u{1F1F7}\u{1F1FC}\u{1F1FE}\u{1F1FF}]|\u{1F1EF}[\u{1F1EA}\u{1F1F2}\u{1F1F4}\u{1F1F5}]|\u{1F1EE}[\u{1F1E8}-\u{1F1EA}\u{1F1F1}-\u{1F1F4}\u{1F1F6}-\u{1F1F9}]|\u{1F1ED}[\u{1F1F0}\u{1F1F2}\u{1F1F3}\u{1F1F7}\u{1F1F9}\u{1F1FA}]|\u{1F1EC}[\u{1F1E6}\u{1F1E
7}\u{1F1E9}-\u{1F1EE}\u{1F1F1}-\u{1F1F3}\u{1F1F5}-\u{1F1FA}\u{1F1FC}\u{1F1FE}]|\u{1F1EB}[\u{1F1EE}-\u{1F1F0}\u{1F1F2}\u{1F1F4}\u{1F1F7}]|\u{1F1EA}[\u{1F1E6}\u{1F1E8}\u{1F1EA}\u{1F1EC}\u{1F1ED}\u{1F1F7}-\u{1F1FA}]|\u{1F1E9}[\u{1F1EA}\u{1F1EC}\u{1F1EF}\u{1F1F0}\u{1F1F2}\u{1F1F4}\u{1F1FF}]|\u{1F1E8}[\u{1F1E6}\u{1F1E8}\u{1F1E9}\u{1F1EB}-\u{1F1EE}\u{1F1F0}-\u{1F1F5}\u{1F1F7}\u{1F1FA}-\u{1F1FF}]|\u{1F1E7}[\u{1F1E6}\u{1F1E7}\u{1F1E9}-\u{1F1EF}\u{1F1F1}-\u{1F1F4}\u{1F1F6}-\u{1F1F9}\u{1F1FB}\u{1F1FC}\u{1F1FE}\u{1F1FF}]|\u{1F1E6}[\u{1F1E8}-\u{1F1EC}\u{1F1EE}\u{1F1F1}\u{1F1F2}\u{1F1F4}\u{1F1F6}-\u{1F1FA}\u{1F1FC}\u{1F1FD}\u{1F1FF}]|[#\*0-9]\uFE0F\u20E3|\u2764\uFE0F|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F470}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F935}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D4}\u{1F9D6}-\u{1F9DD}][\u{1F3FB}-\u{1F3FF}]|[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}][\uFE0F\u{1F3FB}-\u{1F3FF}]|\u{1F3F4}|[\u270A\u270B\u{1F385}\u{1F3C2}\u{1F3C7}\u{1F442}\u{1F443}\u{1F446}-\u{1F450}\u{1F466}\u{1F467}\u{1F46B}-\u{1F46D}\u{1F472}\u{1F474}-\u{1F476}\u{1F478}\u{1F47C}\u{1F483}\u{1F485}\u{1F48F}\u{1F491}\u{1F4AA}\u{1F57A}\u{1F595}\u{1F596}\u{1F64C}\u{1F64F}\u{1F6C0}\u{1F6CC}\u{1F90C}\u{1F90F}\u{1F918}-\u{1F91C}\u{1F91E}\u{1F91F}\u{1F930}-\u{1F934}\u{1F936}\u{1F977}\u{1F9B5}\u{1F9B6}\u{1F9BB}\u{1F9D2}\u{1F9D3}\u{1F9D5}][\u{1F3FB}-\u{1F3FF}]|[\u261D\u270C\u270D\u{1F574}\u{1F590}][\uFE0F\u{1F3FB}-\u{1F3FF}]|[\u270A\u270B\u{1F385}\u{1F3C2}\u{1F3C7}\u{1F408}\u{1F415}\u{1F43B}\u{1F442}\u{1F443}\u{1F446}-\u{1F450}\u{1F466}\u{1F467}\u{1F46B}-\u{1F46D}\u{1F472}\u{1F474}-\u{1F476}\u{1F478}\u{1F47C}\u{1F483}\u{1F485}\u{1F48F}\u{1F491}\u{1F4AA}\u{1F57A}\u{1F595}\u{1F596}\u{1F62E}\u{1F635}\u{1F636}\u{1F64C}\u{1F64F}\u{1F6C0}\u{1F6CC}\u{1F90C}\u{1F90F}\u{1F918}-\u{1F91C}\u{1F91E}\u{1F91F}\u{1F930}-\u{1F934}\u{1F936}\u{1F977}\u{1F9B5}\u{1F9B6}\u{1F9BB}\u{1F9D2}\u{1F9D3}\u{1F9D5}]|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F470}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F935}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D4}\u{1F9D6}-\u{1F9DD}]|[\u{1F46F}\u{1F93C}\u{1F9DE}\u{1F9DF}]|[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B\u2B1C\u2B50\u2B55\u{1F004}\u{1F0CF}\u{1F18E}\u{1F191}-\u{1F19A}\u{1F201}\u{1F21A}\u{1F22F}\u{1F232}-\u{1F236}\u{1F238}-\u{1F23A}\u{1F250}\u{1F251}\u{1F300}-\u{1F320}\u{1F32D}-\u{1F335}\u{1F337}-\u{1F37C}\u{1F37E}-\u{1F384}\u{1F386}-\u{1F393}\u{1F3A0}-\u{1F3C1}\u{1F3C5}\u{1F3C6}\u{1F3C8}\u{1F3C9}\u{1F3CF}-\u{1F3D3}\u{1F3E0}-\u{1F3F0}\u{1F3F8}-\u{1F407}\u{1F409}-\u{1F414}\u{1F416}-\u{1F43A}\u{1F43C}-\u{1F43E}\u{1F440}\u{1F444}\u{1F445}\u{1F451}-\u{1F465}\u{1F46A}\u{1F479}-\u{1F47B}\u{1F47D}-\u{1F480}\u{1F484}\u{1F488}-\u{1F48E}\u{1F490}\u{1F492}-\u{1F4A9}\u{1F4AB}-\u{1F4FC}\u{1F4FF}-\u{1F53D}\u{1F54B}-\u{1F54E}\u{1F550}-\u{1F567}\u{1F5A4}\u{1F5FB}-\u{1F62D}\u{1F62F}-\u{1F634}\u{1F637}-\u{1F644}\u{1F648}-\u{1F64A}\u{1F680}-\u{1F6A2}\u{1F6A4}-\u{1F6B3}\u{1F6B7}-\u{1F6BF}\u{1F6C1}-\u{1F6C5}\u{1F6D0}-\u{1F6D2}\u{1F6D5}-\u{1F6D7}\u{1F6EB}\u{1F6EC}\u{1F6F4}-\u{1F6FC}\u{1F7E0}-\u{1F7EB}\u{1F90D}\u{1F90E}\u{1F910}-\u{1F9
17}\u{1F91D}\u{1F920}-\u{1F925}\u{1F927}-\u{1F92F}\u{1F93A}\u{1F93F}-\u{1F945}\u{1F947}-\u{1F976}\u{1F978}\u{1F97A}-\u{1F9B4}\u{1F9B7}\u{1F9BA}\u{1F9BC}-\u{1F9CB}\u{1F9D0}\u{1F9E0}-\u{1F9FF}\u{1FA70}-\u{1FA74}\u{1FA78}-\u{1FA7A}\u{1FA80}-\u{1FA86}\u{1FA90}-\u{1FAA8}\u{1FAB0}-\u{1FAB6}\u{1FAC0}-\u{1FAC2}\u{1FAD0}-\u{1FAD6}]|[#\*0-9\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u231A\u231B\u2328\u23CF\u23E9-\u23F3\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB-\u25FE\u2600-\u2604\u260E\u2611\u2614\u2615\u2618\u261D\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u2648-\u2653\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u267F\u2692-\u2697\u2699\u269B\u269C\u26A0\u26A1\u26A7\u26AA\u26AB\u26B0\u26B1\u26BD\u26BE\u26C4\u26C5\u26C8\u26CE\u26CF\u26D1\u26D3\u26D4\u26E9\u26EA\u26F0-\u26F5\u26F7-\u26FA\u26FD\u2702\u2705\u2708-\u270D\u270F\u2712\u2714\u2716\u271D\u2721\u2728\u2733\u2734\u2744\u2747\u274C\u274E\u2753-\u2755\u2757\u2763\u2764\u2795-\u2797\u27A1\u27B0\u27BF\u2934\u2935\u2B05-\u2B07\u2B1B\u2B1C\u2B50\u2B55\u3030\u303D\u3297\u3299\u{1F004}\u{1F0CF}\u{1F170}\u{1F171}\u{1F17E}\u{1F17F}\u{1F18E}\u{1F191}-\u{1F19A}\u{1F1E6}-\u{1F1FF}\u{1F201}\u{1F202}\u{1F21A}\u{1F22F}\u{1F232}-\u{1F23A}\u{1F250}\u{1F251}\u{1F300}-\u{1F321}\u{1F324}-\u{1F393}\u{1F396}\u{1F397}\u{1F399}-\u{1F39B}\u{1F39E}-\u{1F3F0}\u{1F3F3}-\u{1F3F5}\u{1F3F7}-\u{1F4FD}\u{1F4FF}-\u{1F53D}\u{1F549}-\u{1F54E}\u{1F550}-\u{1F567}\u{1F56F}\u{1F570}\u{1F573}-\u{1F57A}\u{1F587}\u{1F58A}-\u{1F58D}\u{1F590}\u{1F595}\u{1F596}\u{1F5A4}\u{1F5A5}\u{1F5A8}\u{1F5B1}\u{1F5B2}\u{1F5BC}\u{1F5C2}-\u{1F5C4}\u{1F5D1}-\u{1F5D3}\u{1F5DC}-\u{1F5DE}\u{1F5E1}\u{1F5E3}\u{1F5E8}\u{1F5EF}\u{1F5F3}\u{1F5FA}-\u{1F64F}\u{1F680}-\u{1F6C5}\u{1F6CB}-\u{1F6D2}\u{1F6D5}-\u{1F6D7}\u{1F6E0}-\u{1F6E5}\u{1F6E9}\u{1F6EB}\u{1F6EC}\u{1F6F0}\u{1F6F3}-\u{1F6FC}\u{1F7E0}-\u{1F7EB}\u{1F90C}-\u{1F93A}\u{1F93C}-\u{1F945}\u{1F947}-\u{1F978}\u{1F97A}-\u{1F9CB}\u{1F9CD}-\u{1F9FF}\u{1FA70}-\u{1FA74}\u{1FA78}-\u{1FA7A}\u{1FA80}-\u{1FA86}\u{1FA90}-\u{1FAA8}\u{1FAB0}-\u{1FAB6}\u{1FAC0}-\u{1FAC2}\u{1FAD0}-\u{1FAD6}]\uFE0F?/gu; +}; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/emoji-regex/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/emoji-regex/index.js new file mode 100644 index 0000000000000000000000000000000000000000..c0490d4c95ac303ae0554c07267a1f6059e10f0a --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/emoji-regex/index.js @@ -0,0 +1,6 @@ +"use strict"; + +module.exports = function () { + // https://mths.be/emoji + return 
/\uD83C\uDFF4\uDB40\uDC67\uDB40\uDC62(?:\uDB40\uDC77\uDB40\uDC6C\uDB40\uDC73|\uDB40\uDC73\uDB40\uDC63\uDB40\uDC74|\uDB40\uDC65\uDB40\uDC6E\uDB40\uDC67)\uDB40\uDC7F|(?:\uD83E\uDDD1\uD83C\uDFFF\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFF\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB-\uDFFE])|(?:\uD83E\uDDD1\uD83C\uDFFE\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFE\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB-\uDFFD\uDFFF])|(?:\uD83E\uDDD1\uD83C\uDFFD\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFD\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])|(?:\uD83E\uDDD1\uD83C\uDFFC\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFC\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB\uDFFD-\uDFFF])|(?:\uD83E\uDDD1\uD83C\uDFFB\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFB\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFC-\uDFFF])|\uD83D\uDC68(?:\uD83C\uDFFB(?:\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFF])|\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFF]))|\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFC-\uDFFF])|[\u2695\u2696\u2708]\uFE0F|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD]))?|(?:\uD83C[\uDFFC-\uDFFF])\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFF])|\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFF]))|\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83D\uDC68|(?:\uD83D[\uDC68\uDC69])\u200D(?:\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67]))|\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFE])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFE\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFD\uDFFF])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFC\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFD-\uDFFF])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|(?:\uD83C\uDFFF\u200D[\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])\uFE0F|\u200D(?:(?:\uD83D[\uDC68\uDC69])\u200D(?:\uD83D[\uDC66\uDC67])|\uD83D[\uDC66\uDC67])|\uD83C\uDFFF|\uD83C\uDFFE|\uD83C\uDFFD|\uD83C\uDFFC)?|(?:\uD83D\uDC69(?:\uD83C\uDFFB\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D(?:\uD83D[\uDC68\uDC69])|\uD83D[\uDC68\uDC69])|(?:\uD83C[\uDFFC-\uDFFF])\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D(?:\uD83D[\uDC68\uDC69])|\uD83D[\uDC68\uDC69]))|\uD83E\uDDD1(?:\uD83C[\uDFFB-\uDFFF])\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1)(?:\uD
83C[\uDFFB-\uDFFF])|\uD83D\uDC69\u200D\uD83D\uDC69\u200D(?:\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67]))|\uD83D\uDC69(?:\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D(?:\uD83D[\uDC68\uDC69])|\uD83D[\uDC68\uDC69])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFE\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFC\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFB\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD]))|\uD83E\uDDD1(?:\u200D(?:\uD83E\uDD1D\u200D\uD83E\uDDD1|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFE\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFC\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFB\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD]))|\uD83D\uDC69\u200D\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC69\u200D\uD83D\uDC69\u200D(?:\uD83D[\uDC66\uDC67])|\uD83D\uDC69\u200D\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67])|(?:\uD83D\uDC41\uFE0F\u200D\uD83D\uDDE8|\uD83E\uDDD1(?:\uD83C\uDFFF\u200D[\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708]|\uD83C\uDFFB\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|\uD83D\uDC69(?:\uD83C\uDFFF\u200D[\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708]|\uD83C\uDFFB\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|\uD83D\uDE36\u200D\uD83C\uDF2B|\uD83C\uDFF3\uFE0F\u200D\u26A7|\uD83D\uDC3B\u200D\u2744|(?:(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC70\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD4\uDDD6-\uDDDD])(?:\uD83C[\uDFFB-\uDFFF])|\uD83D\uDC6F|\uD83E[\uDD3C\uDDDE\uDDDF])\u200D[\u2640\u2642]|(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\uDD75)(?:\uFE0F|\uD83C[\uDFFB-\uDFFF])\u200D[\u2640\u2642]|\uD83C\uDFF4\u200D\u2620|(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC70\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\u
D83E[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD4\uDDD6-\uDDDD])\u200D[\u2640\u2642]|[\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u2328\u23CF\u23ED-\u23EF\u23F1\u23F2\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB\u25FC\u2600-\u2604\u260E\u2611\u2618\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u2692\u2694-\u2697\u2699\u269B\u269C\u26A0\u26A7\u26B0\u26B1\u26C8\u26CF\u26D1\u26D3\u26E9\u26F0\u26F1\u26F4\u26F7\u26F8\u2702\u2708\u2709\u270F\u2712\u2714\u2716\u271D\u2721\u2733\u2734\u2744\u2747\u2763\u27A1\u2934\u2935\u2B05-\u2B07\u3030\u303D\u3297\u3299]|\uD83C[\uDD70\uDD71\uDD7E\uDD7F\uDE02\uDE37\uDF21\uDF24-\uDF2C\uDF36\uDF7D\uDF96\uDF97\uDF99-\uDF9B\uDF9E\uDF9F\uDFCD\uDFCE\uDFD4-\uDFDF\uDFF5\uDFF7]|\uD83D[\uDC3F\uDCFD\uDD49\uDD4A\uDD6F\uDD70\uDD73\uDD76-\uDD79\uDD87\uDD8A-\uDD8D\uDDA5\uDDA8\uDDB1\uDDB2\uDDBC\uDDC2-\uDDC4\uDDD1-\uDDD3\uDDDC-\uDDDE\uDDE1\uDDE3\uDDE8\uDDEF\uDDF3\uDDFA\uDECB\uDECD-\uDECF\uDEE0-\uDEE5\uDEE9\uDEF0\uDEF3])\uFE0F|\uD83C\uDFF3\uFE0F\u200D\uD83C\uDF08|\uD83D\uDC69\u200D\uD83D\uDC67|\uD83D\uDC69\u200D\uD83D\uDC66|\uD83D\uDE35\u200D\uD83D\uDCAB|\uD83D\uDE2E\u200D\uD83D\uDCA8|\uD83D\uDC15\u200D\uD83E\uDDBA|\uD83E\uDDD1(?:\uD83C\uDFFF|\uD83C\uDFFE|\uD83C\uDFFD|\uD83C\uDFFC|\uD83C\uDFFB)?|\uD83D\uDC69(?:\uD83C\uDFFF|\uD83C\uDFFE|\uD83C\uDFFD|\uD83C\uDFFC|\uD83C\uDFFB)?|\uD83C\uDDFD\uD83C\uDDF0|\uD83C\uDDF6\uD83C\uDDE6|\uD83C\uDDF4\uD83C\uDDF2|\uD83D\uDC08\u200D\u2B1B|\u2764\uFE0F\u200D(?:\uD83D\uDD25|\uD83E\uDE79)|\uD83D\uDC41\uFE0F|\uD83C\uDFF3\uFE0F|\uD83C\uDDFF(?:\uD83C[\uDDE6\uDDF2\uDDFC])|\uD83C\uDDFE(?:\uD83C[\uDDEA\uDDF9])|\uD83C\uDDFC(?:\uD83C[\uDDEB\uDDF8])|\uD83C\uDDFB(?:\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDEE\uDDF3\uDDFA])|\uD83C\uDDFA(?:\uD83C[\uDDE6\uDDEC\uDDF2\uDDF3\uDDF8\uDDFE\uDDFF])|\uD83C\uDDF9(?:\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDED\uDDEF-\uDDF4\uDDF7\uDDF9\uDDFB\uDDFC\uDDFF])|\uD83C\uDDF8(?:\uD83C[\uDDE6-\uDDEA\uDDEC-\uDDF4\uDDF7-\uDDF9\uDDFB\uDDFD-\uDDFF])|\uD83C\uDDF7(?:\uD83C[\uDDEA\uDDF4\uDDF8\uDDFA\uDDFC])|\uD83C\uDDF5(?:\uD83C[\uDDE6\uDDEA-\uDDED\uDDF0-\uDDF3\uDDF7-\uDDF9\uDDFC\uDDFE])|\uD83C\uDDF3(?:\uD83C[\uDDE6\uDDE8\uDDEA-\uDDEC\uDDEE\uDDF1\uDDF4\uDDF5\uDDF7\uDDFA\uDDFF])|\uD83C\uDDF2(?:\uD83C[\uDDE6\uDDE8-\uDDED\uDDF0-\uDDFF])|\uD83C\uDDF1(?:\uD83C[\uDDE6-\uDDE8\uDDEE\uDDF0\uDDF7-\uDDFB\uDDFE])|\uD83C\uDDF0(?:\uD83C[\uDDEA\uDDEC-\uDDEE\uDDF2\uDDF3\uDDF5\uDDF7\uDDFC\uDDFE\uDDFF])|\uD83C\uDDEF(?:\uD83C[\uDDEA\uDDF2\uDDF4\uDDF5])|\uD83C\uDDEE(?:\uD83C[\uDDE8-\uDDEA\uDDF1-\uDDF4\uDDF6-\uDDF9])|\uD83C\uDDED(?:\uD83C[\uDDF0\uDDF2\uDDF3\uDDF7\uDDF9\uDDFA])|\uD83C\uDDEC(?:\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEE\uDDF1-\uDDF3\uDDF5-\uDDFA\uDDFC\uDDFE])|\uD83C\uDDEB(?:\uD83C[\uDDEE-\uDDF0\uDDF2\uDDF4\uDDF7])|\uD83C\uDDEA(?:\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDED\uDDF7-\uDDFA])|\uD83C\uDDE9(?:\uD83C[\uDDEA\uDDEC\uDDEF\uDDF0\uDDF2\uDDF4\uDDFF])|\uD83C\uDDE8(?:\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDEE\uDDF0-\uDDF5\uDDF7\uDDFA-\uDDFF])|\uD83C\uDDE7(?:\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEF\uDDF1-\uDDF4\uDDF6-\uDDF9\uDDFB\uDDFC\uDDFE\uDDFF])|\uD83C\uDDE6(?:\uD83C[\uDDE8-\uDDEC\uDDEE\uDDF1\uDDF2\uDDF4\uDDF6-\uDDFA\uDDFC\uDDFD\uDDFF])|[#\*0-9]\uFE0F\u20E3|\u2764\uFE0F|(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC70\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD4\uDDD6-\uDDDD])(?:\uD83C[\uDFFB-\uDFFF])|(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\uDD75)(?:\uF
E0F|\uD83C[\uDFFB-\uDFFF])|\uD83C\uDFF4|(?:[\u270A\u270B]|\uD83C[\uDF85\uDFC2\uDFC7]|\uD83D[\uDC42\uDC43\uDC46-\uDC50\uDC66\uDC67\uDC6B-\uDC6D\uDC72\uDC74-\uDC76\uDC78\uDC7C\uDC83\uDC85\uDC8F\uDC91\uDCAA\uDD7A\uDD95\uDD96\uDE4C\uDE4F\uDEC0\uDECC]|\uD83E[\uDD0C\uDD0F\uDD18-\uDD1C\uDD1E\uDD1F\uDD30-\uDD34\uDD36\uDD77\uDDB5\uDDB6\uDDBB\uDDD2\uDDD3\uDDD5])(?:\uD83C[\uDFFB-\uDFFF])|(?:[\u261D\u270C\u270D]|\uD83D[\uDD74\uDD90])(?:\uFE0F|\uD83C[\uDFFB-\uDFFF])|[\u270A\u270B]|\uD83C[\uDF85\uDFC2\uDFC7]|\uD83D[\uDC08\uDC15\uDC3B\uDC42\uDC43\uDC46-\uDC50\uDC66\uDC67\uDC6B-\uDC6D\uDC72\uDC74-\uDC76\uDC78\uDC7C\uDC83\uDC85\uDC8F\uDC91\uDCAA\uDD7A\uDD95\uDD96\uDE2E\uDE35\uDE36\uDE4C\uDE4F\uDEC0\uDECC]|\uD83E[\uDD0C\uDD0F\uDD18-\uDD1C\uDD1E\uDD1F\uDD30-\uDD34\uDD36\uDD77\uDDB5\uDDB6\uDDBB\uDDD2\uDDD3\uDDD5]|\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC70\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD4\uDDD6-\uDDDD]|\uD83D\uDC6F|\uD83E[\uDD3C\uDDDE\uDDDF]|[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B\u2B1C\u2B50\u2B55]|\uD83C[\uDC04\uDCCF\uDD8E\uDD91-\uDD9A\uDE01\uDE1A\uDE2F\uDE32-\uDE36\uDE38-\uDE3A\uDE50\uDE51\uDF00-\uDF20\uDF2D-\uDF35\uDF37-\uDF7C\uDF7E-\uDF84\uDF86-\uDF93\uDFA0-\uDFC1\uDFC5\uDFC6\uDFC8\uDFC9\uDFCF-\uDFD3\uDFE0-\uDFF0\uDFF8-\uDFFF]|\uD83D[\uDC00-\uDC07\uDC09-\uDC14\uDC16-\uDC3A\uDC3C-\uDC3E\uDC40\uDC44\uDC45\uDC51-\uDC65\uDC6A\uDC79-\uDC7B\uDC7D-\uDC80\uDC84\uDC88-\uDC8E\uDC90\uDC92-\uDCA9\uDCAB-\uDCFC\uDCFF-\uDD3D\uDD4B-\uDD4E\uDD50-\uDD67\uDDA4\uDDFB-\uDE2D\uDE2F-\uDE34\uDE37-\uDE44\uDE48-\uDE4A\uDE80-\uDEA2\uDEA4-\uDEB3\uDEB7-\uDEBF\uDEC1-\uDEC5\uDED0-\uDED2\uDED5-\uDED7\uDEEB\uDEEC\uDEF4-\uDEFC\uDFE0-\uDFEB]|\uD83E[\uDD0D\uDD0E\uDD10-\uDD17\uDD1D\uDD20-\uDD25\uDD27-\uDD2F\uDD3A\uDD3F-\uDD45\uDD47-\uDD76\uDD78\uDD7A-\uDDB4\uDDB7\uDDBA\uDDBC-\uDDCB\uDDD0\uDDE0-\uDDFF\uDE70-\uDE74\uDE78-\uDE7A\uDE80-\uDE86\uDE90-\uDEA8\uDEB0-\uDEB6\uDEC0-\uDEC2\uDED0-\uDED6]|(?:[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u270A\u270B\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B\u2B1C\u2B50\u2B55]|\uD83C[\uDC04\uDCCF\uDD8E\uDD91-\uDD9A\uDDE6-\uDDFF\uDE01\uDE1A\uDE2F\uDE32-\uDE36\uDE38-\uDE3A\uDE50\uDE51\uDF00-\uDF20\uDF2D-\uDF35\uDF37-\uDF7C\uDF7E-\uDF93\uDFA0-\uDFCA\uDFCF-\uDFD3\uDFE0-\uDFF0\uDFF4\uDFF8-\uDFFF]|\uD83D[\uDC00-\uDC3E\uDC40\uDC42-\uDCFC\uDCFF-\uDD3D\uDD4B-\uDD4E\uDD50-\uDD67\uDD7A\uDD95\uDD96\uDDA4\uDDFB-\uDE4F\uDE80-\uDEC5\uDECC\uDED0-\uDED2\uDED5-\uDED7\uDEEB\uDEEC\uDEF4-\uDEFC\uDFE0-\uDFEB]|\uD83E[\uDD0C-\uDD3A\uDD3C-\uDD45\uDD47-\uDD78\uDD7A-\uDDCB\uDDCD-\uDDFF\uDE70-\uDE74\uDE78-\uDE7A\uDE80-\uDE86\uDE90-\uDEA8\uDEB0-\uDEB6\uDEC0-\uDEC2\uDED0-\uDED6])|(?:[#\*0-9\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u231A\u231B\u2328\u23CF\u23E9-\u23F3\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB-\u25FE\u2600-\u2604\u260E\u2611\u2614\u2615\u2618\u261D\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u2648-\u2653\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u267F\u2692-\u2697\u2699\u269B\u269C\u26A0\u26A1\u26A7\u26AA\u26AB\u26B0\u26B1\u26BD\u26BE\u26C4\u26C5\u26C8\u26CE\u26CF\u2
6D1\u26D3\u26D4\u26E9\u26EA\u26F0-\u26F5\u26F7-\u26FA\u26FD\u2702\u2705\u2708-\u270D\u270F\u2712\u2714\u2716\u271D\u2721\u2728\u2733\u2734\u2744\u2747\u274C\u274E\u2753-\u2755\u2757\u2763\u2764\u2795-\u2797\u27A1\u27B0\u27BF\u2934\u2935\u2B05-\u2B07\u2B1B\u2B1C\u2B50\u2B55\u3030\u303D\u3297\u3299]|\uD83C[\uDC04\uDCCF\uDD70\uDD71\uDD7E\uDD7F\uDD8E\uDD91-\uDD9A\uDDE6-\uDDFF\uDE01\uDE02\uDE1A\uDE2F\uDE32-\uDE3A\uDE50\uDE51\uDF00-\uDF21\uDF24-\uDF93\uDF96\uDF97\uDF99-\uDF9B\uDF9E-\uDFF0\uDFF3-\uDFF5\uDFF7-\uDFFF]|\uD83D[\uDC00-\uDCFD\uDCFF-\uDD3D\uDD49-\uDD4E\uDD50-\uDD67\uDD6F\uDD70\uDD73-\uDD7A\uDD87\uDD8A-\uDD8D\uDD90\uDD95\uDD96\uDDA4\uDDA5\uDDA8\uDDB1\uDDB2\uDDBC\uDDC2-\uDDC4\uDDD1-\uDDD3\uDDDC-\uDDDE\uDDE1\uDDE3\uDDE8\uDDEF\uDDF3\uDDFA-\uDE4F\uDE80-\uDEC5\uDECB-\uDED2\uDED5-\uDED7\uDEE0-\uDEE5\uDEE9\uDEEB\uDEEC\uDEF0\uDEF3-\uDEFC\uDFE0-\uDFEB]|\uD83E[\uDD0C-\uDD3A\uDD3C-\uDD45\uDD47-\uDD78\uDD7A-\uDDCB\uDDCD-\uDDFF\uDE70-\uDE74\uDE78-\uDE7A\uDE80-\uDE86\uDE90-\uDEA8\uDEB0-\uDEB6\uDEC0-\uDEC2\uDED0-\uDED6])\uFE0F|(?:[\u261D\u26F9\u270A-\u270D]|\uD83C[\uDF85\uDFC2-\uDFC4\uDFC7\uDFCA-\uDFCC]|\uD83D[\uDC42\uDC43\uDC46-\uDC50\uDC66-\uDC78\uDC7C\uDC81-\uDC83\uDC85-\uDC87\uDC8F\uDC91\uDCAA\uDD74\uDD75\uDD7A\uDD90\uDD95\uDD96\uDE45-\uDE47\uDE4B-\uDE4F\uDEA3\uDEB4-\uDEB6\uDEC0\uDECC]|\uD83E[\uDD0C\uDD0F\uDD18-\uDD1F\uDD26\uDD30-\uDD39\uDD3C-\uDD3E\uDD77\uDDB5\uDDB6\uDDB8\uDDB9\uDDBB\uDDCD-\uDDCF\uDDD1-\uDDDD])/g; +}; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/emoji-regex/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/emoji-regex/package.json new file mode 100644 index 0000000000000000000000000000000000000000..eac892a16a253be133fbfe7c176840e8e75e9bb4 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/emoji-regex/package.json @@ -0,0 +1,52 @@ +{ + "name": "emoji-regex", + "version": "9.2.2", + "description": "A regular expression to match all Emoji-only symbols as per the Unicode Standard.", + "homepage": "https://mths.be/emoji-regex", + "main": "index.js", + "types": "index.d.ts", + "keywords": [ + "unicode", + "regex", + "regexp", + "regular expressions", + "code points", + "symbols", + "characters", + "emoji" + ], + "license": "MIT", + "author": { + "name": "Mathias Bynens", + "url": "https://mathiasbynens.be/" + }, + "repository": { + "type": "git", + "url": "https://github.com/mathiasbynens/emoji-regex.git" + }, + "bugs": "https://github.com/mathiasbynens/emoji-regex/issues", + "files": [ + "LICENSE-MIT.txt", + "index.js", + "index.d.ts", + "RGI_Emoji.js", + "RGI_Emoji.d.ts", + "text.js", + "text.d.ts", + "es2015" + ], + "scripts": { + "build": "rm -rf -- es2015; babel src -d .; NODE_ENV=es2015 babel src es2015_types -D -d ./es2015; node script/inject-sequences.js", + "test": "mocha", + "test:watch": "npm run test -- --watch" + }, + "devDependencies": { + "@babel/cli": "^7.4.4", + "@babel/core": "^7.4.4", + "@babel/plugin-proposal-unicode-property-regex": "^7.4.4", + "@babel/preset-env": "^7.4.4", + "@unicode/unicode-13.0.0": "^1.0.3", + "mocha": "^6.1.4", + "regexgen": "^1.3.0" + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/emoji-regex/text.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/emoji-regex/text.js new file mode 100644 index 
0000000000000000000000000000000000000000..9bc63ce74753f9a436a995148f53382df77f956e --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/emoji-regex/text.js @@ -0,0 +1,6 @@ +"use strict"; + +module.exports = function () { + // https://mths.be/emoji + return /\uD83C\uDFF4\uDB40\uDC67\uDB40\uDC62(?:\uDB40\uDC77\uDB40\uDC6C\uDB40\uDC73|\uDB40\uDC73\uDB40\uDC63\uDB40\uDC74|\uDB40\uDC65\uDB40\uDC6E\uDB40\uDC67)\uDB40\uDC7F|(?:\uD83E\uDDD1\uD83C\uDFFF\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFF\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB-\uDFFE])|(?:\uD83E\uDDD1\uD83C\uDFFE\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFE\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB-\uDFFD\uDFFF])|(?:\uD83E\uDDD1\uD83C\uDFFD\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFD\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])|(?:\uD83E\uDDD1\uD83C\uDFFC\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFC\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB\uDFFD-\uDFFF])|(?:\uD83E\uDDD1\uD83C\uDFFB\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFB\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFC-\uDFFF])|\uD83D\uDC68(?:\uD83C\uDFFB(?:\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFF])|\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFF]))|\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFC-\uDFFF])|[\u2695\u2696\u2708]\uFE0F|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD]))?|(?:\uD83C[\uDFFC-\uDFFF])\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFF])|\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFF]))|\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83D\uDC68|(?:\uD83D[\uDC68\uDC69])\u200D(?:\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67]))|\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFE])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFE\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFD\uDFFF])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFC\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFD-\uDFFF])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|(?:\uD83C\uDFFF\u200D[\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])\uFE0F|\u200D(?:(?:\uD83D[\uDC68\uDC69])\u200D(?:\uD83D[\uDC66\uDC67])|\uD83D[\uDC66\uDC67])|\uD83C\uDFFF|\uD83C\uDFFE|\uD83C\uDFFD|\uD83C\uDFFC)
?|(?:\uD83D\uDC69(?:\uD83C\uDFFB\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D(?:\uD83D[\uDC68\uDC69])|\uD83D[\uDC68\uDC69])|(?:\uD83C[\uDFFC-\uDFFF])\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D(?:\uD83D[\uDC68\uDC69])|\uD83D[\uDC68\uDC69]))|\uD83E\uDDD1(?:\uD83C[\uDFFB-\uDFFF])\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1)(?:\uD83C[\uDFFB-\uDFFF])|\uD83D\uDC69\u200D\uD83D\uDC69\u200D(?:\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67]))|\uD83D\uDC69(?:\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D(?:\uD83D[\uDC68\uDC69])|\uD83D[\uDC68\uDC69])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFE\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFC\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFB\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD]))|\uD83E\uDDD1(?:\u200D(?:\uD83E\uDD1D\u200D\uD83E\uDDD1|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFE\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFC\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFB\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD]))|\uD83D\uDC69\u200D\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC69\u200D\uD83D\uDC69\u200D(?:\uD83D[\uDC66\uDC67])|\uD83D\uDC69\u200D\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67])|(?:\uD83D\uDC41\uFE0F\u200D\uD83D\uDDE8|\uD83E\uDDD1(?:\uD83C\uDFFF\u200D[\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708]|\uD83C\uDFFB\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|\uD83D\uDC69(?:\uD83C\uDFFF\u200D[\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708]|\uD83C\uDFFB\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|\uD83D\uDE36\u200D\uD83C\uDF2B|\uD83C\uDFF3\uFE0F\u200D\u26A7|\uD83D\uDC3B\u200D\u2744|(?:(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC70\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD4\uDDD6-\uDDDD])(?:\uD83C[\uDFFB-\uDFFF])|\uD83D
\uDC6F|\uD83E[\uDD3C\uDDDE\uDDDF])\u200D[\u2640\u2642]|(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\uDD75)(?:\uFE0F|\uD83C[\uDFFB-\uDFFF])\u200D[\u2640\u2642]|\uD83C\uDFF4\u200D\u2620|(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC70\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD4\uDDD6-\uDDDD])\u200D[\u2640\u2642]|[\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u2328\u23CF\u23ED-\u23EF\u23F1\u23F2\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB\u25FC\u2600-\u2604\u260E\u2611\u2618\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u2692\u2694-\u2697\u2699\u269B\u269C\u26A0\u26A7\u26B0\u26B1\u26C8\u26CF\u26D1\u26D3\u26E9\u26F0\u26F1\u26F4\u26F7\u26F8\u2702\u2708\u2709\u270F\u2712\u2714\u2716\u271D\u2721\u2733\u2734\u2744\u2747\u2763\u27A1\u2934\u2935\u2B05-\u2B07\u3030\u303D\u3297\u3299]|\uD83C[\uDD70\uDD71\uDD7E\uDD7F\uDE02\uDE37\uDF21\uDF24-\uDF2C\uDF36\uDF7D\uDF96\uDF97\uDF99-\uDF9B\uDF9E\uDF9F\uDFCD\uDFCE\uDFD4-\uDFDF\uDFF5\uDFF7]|\uD83D[\uDC3F\uDCFD\uDD49\uDD4A\uDD6F\uDD70\uDD73\uDD76-\uDD79\uDD87\uDD8A-\uDD8D\uDDA5\uDDA8\uDDB1\uDDB2\uDDBC\uDDC2-\uDDC4\uDDD1-\uDDD3\uDDDC-\uDDDE\uDDE1\uDDE3\uDDE8\uDDEF\uDDF3\uDDFA\uDECB\uDECD-\uDECF\uDEE0-\uDEE5\uDEE9\uDEF0\uDEF3])\uFE0F|\uD83C\uDFF3\uFE0F\u200D\uD83C\uDF08|\uD83D\uDC69\u200D\uD83D\uDC67|\uD83D\uDC69\u200D\uD83D\uDC66|\uD83D\uDE35\u200D\uD83D\uDCAB|\uD83D\uDE2E\u200D\uD83D\uDCA8|\uD83D\uDC15\u200D\uD83E\uDDBA|\uD83E\uDDD1(?:\uD83C\uDFFF|\uD83C\uDFFE|\uD83C\uDFFD|\uD83C\uDFFC|\uD83C\uDFFB)?|\uD83D\uDC69(?:\uD83C\uDFFF|\uD83C\uDFFE|\uD83C\uDFFD|\uD83C\uDFFC|\uD83C\uDFFB)?|\uD83C\uDDFD\uD83C\uDDF0|\uD83C\uDDF6\uD83C\uDDE6|\uD83C\uDDF4\uD83C\uDDF2|\uD83D\uDC08\u200D\u2B1B|\u2764\uFE0F\u200D(?:\uD83D\uDD25|\uD83E\uDE79)|\uD83D\uDC41\uFE0F|\uD83C\uDFF3\uFE0F|\uD83C\uDDFF(?:\uD83C[\uDDE6\uDDF2\uDDFC])|\uD83C\uDDFE(?:\uD83C[\uDDEA\uDDF9])|\uD83C\uDDFC(?:\uD83C[\uDDEB\uDDF8])|\uD83C\uDDFB(?:\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDEE\uDDF3\uDDFA])|\uD83C\uDDFA(?:\uD83C[\uDDE6\uDDEC\uDDF2\uDDF3\uDDF8\uDDFE\uDDFF])|\uD83C\uDDF9(?:\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDED\uDDEF-\uDDF4\uDDF7\uDDF9\uDDFB\uDDFC\uDDFF])|\uD83C\uDDF8(?:\uD83C[\uDDE6-\uDDEA\uDDEC-\uDDF4\uDDF7-\uDDF9\uDDFB\uDDFD-\uDDFF])|\uD83C\uDDF7(?:\uD83C[\uDDEA\uDDF4\uDDF8\uDDFA\uDDFC])|\uD83C\uDDF5(?:\uD83C[\uDDE6\uDDEA-\uDDED\uDDF0-\uDDF3\uDDF7-\uDDF9\uDDFC\uDDFE])|\uD83C\uDDF3(?:\uD83C[\uDDE6\uDDE8\uDDEA-\uDDEC\uDDEE\uDDF1\uDDF4\uDDF5\uDDF7\uDDFA\uDDFF])|\uD83C\uDDF2(?:\uD83C[\uDDE6\uDDE8-\uDDED\uDDF0-\uDDFF])|\uD83C\uDDF1(?:\uD83C[\uDDE6-\uDDE8\uDDEE\uDDF0\uDDF7-\uDDFB\uDDFE])|\uD83C\uDDF0(?:\uD83C[\uDDEA\uDDEC-\uDDEE\uDDF2\uDDF3\uDDF5\uDDF7\uDDFC\uDDFE\uDDFF])|\uD83C\uDDEF(?:\uD83C[\uDDEA\uDDF2\uDDF4\uDDF5])|\uD83C\uDDEE(?:\uD83C[\uDDE8-\uDDEA\uDDF1-\uDDF4\uDDF6-\uDDF9])|\uD83C\uDDED(?:\uD83C[\uDDF0\uDDF2\uDDF3\uDDF7\uDDF9\uDDFA])|\uD83C\uDDEC(?:\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEE\uDDF1-\uDDF3\uDDF5-\uDDFA\uDDFC\uDDFE])|\uD83C\uDDEB(?:\uD83C[\uDDEE-\uDDF0\uDDF2\uDDF4\uDDF7])|\uD83C\uDDEA(?:\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDED\uDDF7-\uDDFA])|\uD83C\uDDE9(?:\uD83C[\uDDEA\uDDEC\uDDEF\uDDF0\uDDF2\uDDF4\uDDFF])|\uD83C\uDDE8(?:\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDEE\uDDF0-\uDDF5\uDDF7\uDDFA-\uDDFF])|\uD83C\uDDE7(?:\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEF\uDDF1-\uDDF4\uDDF6-\uDDF9\uDDFB\uDDFC\uDDFE\uDDFF])|\uD83C\uDDE6(?:\uD83C[\uDDE8-\uDDEC\uDDEE\uDDF1\uDDF2\uDDF4\uDDF6-\uDDFA\uDDFC\uDDFD\uDDFF])|[#\*0-9]\uFE0F\u20E3|
\u2764\uFE0F|(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC70\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD4\uDDD6-\uDDDD])(?:\uD83C[\uDFFB-\uDFFF])|(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\uDD75)(?:\uFE0F|\uD83C[\uDFFB-\uDFFF])|\uD83C\uDFF4|(?:[\u270A\u270B]|\uD83C[\uDF85\uDFC2\uDFC7]|\uD83D[\uDC42\uDC43\uDC46-\uDC50\uDC66\uDC67\uDC6B-\uDC6D\uDC72\uDC74-\uDC76\uDC78\uDC7C\uDC83\uDC85\uDC8F\uDC91\uDCAA\uDD7A\uDD95\uDD96\uDE4C\uDE4F\uDEC0\uDECC]|\uD83E[\uDD0C\uDD0F\uDD18-\uDD1C\uDD1E\uDD1F\uDD30-\uDD34\uDD36\uDD77\uDDB5\uDDB6\uDDBB\uDDD2\uDDD3\uDDD5])(?:\uD83C[\uDFFB-\uDFFF])|(?:[\u261D\u270C\u270D]|\uD83D[\uDD74\uDD90])(?:\uFE0F|\uD83C[\uDFFB-\uDFFF])|[\u270A\u270B]|\uD83C[\uDF85\uDFC2\uDFC7]|\uD83D[\uDC08\uDC15\uDC3B\uDC42\uDC43\uDC46-\uDC50\uDC66\uDC67\uDC6B-\uDC6D\uDC72\uDC74-\uDC76\uDC78\uDC7C\uDC83\uDC85\uDC8F\uDC91\uDCAA\uDD7A\uDD95\uDD96\uDE2E\uDE35\uDE36\uDE4C\uDE4F\uDEC0\uDECC]|\uD83E[\uDD0C\uDD0F\uDD18-\uDD1C\uDD1E\uDD1F\uDD30-\uDD34\uDD36\uDD77\uDDB5\uDDB6\uDDBB\uDDD2\uDDD3\uDDD5]|\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC70\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD4\uDDD6-\uDDDD]|\uD83D\uDC6F|\uD83E[\uDD3C\uDDDE\uDDDF]|[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B\u2B1C\u2B50\u2B55]|\uD83C[\uDC04\uDCCF\uDD8E\uDD91-\uDD9A\uDE01\uDE1A\uDE2F\uDE32-\uDE36\uDE38-\uDE3A\uDE50\uDE51\uDF00-\uDF20\uDF2D-\uDF35\uDF37-\uDF7C\uDF7E-\uDF84\uDF86-\uDF93\uDFA0-\uDFC1\uDFC5\uDFC6\uDFC8\uDFC9\uDFCF-\uDFD3\uDFE0-\uDFF0\uDFF8-\uDFFF]|\uD83D[\uDC00-\uDC07\uDC09-\uDC14\uDC16-\uDC3A\uDC3C-\uDC3E\uDC40\uDC44\uDC45\uDC51-\uDC65\uDC6A\uDC79-\uDC7B\uDC7D-\uDC80\uDC84\uDC88-\uDC8E\uDC90\uDC92-\uDCA9\uDCAB-\uDCFC\uDCFF-\uDD3D\uDD4B-\uDD4E\uDD50-\uDD67\uDDA4\uDDFB-\uDE2D\uDE2F-\uDE34\uDE37-\uDE44\uDE48-\uDE4A\uDE80-\uDEA2\uDEA4-\uDEB3\uDEB7-\uDEBF\uDEC1-\uDEC5\uDED0-\uDED2\uDED5-\uDED7\uDEEB\uDEEC\uDEF4-\uDEFC\uDFE0-\uDFEB]|\uD83E[\uDD0D\uDD0E\uDD10-\uDD17\uDD1D\uDD20-\uDD25\uDD27-\uDD2F\uDD3A\uDD3F-\uDD45\uDD47-\uDD76\uDD78\uDD7A-\uDDB4\uDDB7\uDDBA\uDDBC-\uDDCB\uDDD0\uDDE0-\uDDFF\uDE70-\uDE74\uDE78-\uDE7A\uDE80-\uDE86\uDE90-\uDEA8\uDEB0-\uDEB6\uDEC0-\uDEC2\uDED0-\uDED6]|(?:[#\*0-9\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u231A\u231B\u2328\u23CF\u23E9-\u23F3\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB-\u25FE\u2600-\u2604\u260E\u2611\u2614\u2615\u2618\u261D\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u2648-\u2653\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u267F\u2692-\u2697\u2699\u269B\u269C\u26A0\u26A1\u26A7\u26AA\u26AB\u26B0\u26B1\u26BD\u26BE\u26C4\u26C5\u26C8\u26CE\u26CF\u26D1\u26D3\u26D4\u26E9\u26EA\u26F0-\u26F5\u26F7-\u26FA\u26FD\u2702\u2705\u2708-\u270D\u270F\u2712\u2714\u2716\u271D\u2721\u2728\u2733\u2734\u2744\u2747\u274C\u274E\u2753-\u2755\u2757\u2763\u2764\u2795-\u2797\u27A1\u27B0\u27BF\u2934\u2935\u2B05-\u2B07\u2B1B\u2B1C\u2B50\u2B55\u3030\u303D\u3297\u3299]|\uD83C[\uDC04\uDCCF\uDD70\uDD71\uDD7E\uDD7F\uDD8E\uDD91-\uDD9A\uDDE6-\uDDFF\uDE01\uDE02\uDE1A\uDE2F\uDE32-\uDE3A\uDE50\uDE51\uDF00-\uDF21\uDF24-\uDF93\uDF96\uDF97\uDF99-\uDF9B\uDF9E-\uDFF0\uDFF3-\uDFF5\uDFF7-\uDFFF]|\uD83D[\uDC00-\uDCFD\uDCFF-\uDD3D\
uDD49-\uDD4E\uDD50-\uDD67\uDD6F\uDD70\uDD73-\uDD7A\uDD87\uDD8A-\uDD8D\uDD90\uDD95\uDD96\uDDA4\uDDA5\uDDA8\uDDB1\uDDB2\uDDBC\uDDC2-\uDDC4\uDDD1-\uDDD3\uDDDC-\uDDDE\uDDE1\uDDE3\uDDE8\uDDEF\uDDF3\uDDFA-\uDE4F\uDE80-\uDEC5\uDECB-\uDED2\uDED5-\uDED7\uDEE0-\uDEE5\uDEE9\uDEEB\uDEEC\uDEF0\uDEF3-\uDEFC\uDFE0-\uDFEB]|\uD83E[\uDD0C-\uDD3A\uDD3C-\uDD45\uDD47-\uDD78\uDD7A-\uDDCB\uDDCD-\uDDFF\uDE70-\uDE74\uDE78-\uDE7A\uDE80-\uDE86\uDE90-\uDEA8\uDEB0-\uDEB6\uDEC0-\uDEC2\uDED0-\uDED6])\uFE0F?/g; +}; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/string-width/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/string-width/index.js new file mode 100644 index 0000000000000000000000000000000000000000..9294488f8848827dd983a82f0c3f5f0a04ec7570 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/string-width/index.js @@ -0,0 +1,54 @@ +import stripAnsi from 'strip-ansi'; +import eastAsianWidth from 'eastasianwidth'; +import emojiRegex from 'emoji-regex'; + +export default function stringWidth(string, options = {}) { + if (typeof string !== 'string' || string.length === 0) { + return 0; + } + + options = { + ambiguousIsNarrow: true, + ...options + }; + + string = stripAnsi(string); + + if (string.length === 0) { + return 0; + } + + string = string.replace(emojiRegex(), ' '); + + const ambiguousCharacterWidth = options.ambiguousIsNarrow ? 1 : 2; + let width = 0; + + for (const character of string) { + const codePoint = character.codePointAt(0); + + // Ignore control characters + if (codePoint <= 0x1F || (codePoint >= 0x7F && codePoint <= 0x9F)) { + continue; + } + + // Ignore combining characters + if (codePoint >= 0x300 && codePoint <= 0x36F) { + continue; + } + + const code = eastAsianWidth.eastAsianWidth(character); + switch (code) { + case 'F': + case 'W': + width += 2; + break; + case 'A': + width += ambiguousCharacterWidth; + break; + default: + width += 1; + } + } + + return width; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/string-width/license b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/string-width/license new file mode 100644 index 0000000000000000000000000000000000000000..fa7ceba3eb4a9657a9db7f3ffca4e4e97a9019de --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/string-width/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (https://sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/string-width/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/string-width/package.json new file mode 100644 index 0000000000000000000000000000000000000000..f46d6770f9ebb20e518ce2a123a953895e3d654a --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/string-width/package.json @@ -0,0 +1,59 @@ +{ + "name": "string-width", + "version": "5.1.2", + "description": "Get the visual width of a string - the number of columns required to display it", + "license": "MIT", + "repository": "sindresorhus/string-width", + "funding": "https://github.com/sponsors/sindresorhus", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "type": "module", + "exports": "./index.js", + "engines": { + "node": ">=12" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "string", + "character", + "unicode", + "width", + "visual", + "column", + "columns", + "fullwidth", + "full-width", + "full", + "ansi", + "escape", + "codes", + "cli", + "command-line", + "terminal", + "console", + "cjk", + "chinese", + "japanese", + "korean", + "fixed-width" + ], + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + }, + "devDependencies": { + "ava": "^3.15.0", + "tsd": "^0.14.0", + "xo": "^0.38.2" + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/strip-ansi/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/strip-ansi/index.js new file mode 100644 index 0000000000000000000000000000000000000000..ba19750e64e061f4f7afa8dbd468234b1770efbc --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/strip-ansi/index.js @@ -0,0 +1,14 @@ +import ansiRegex from 'ansi-regex'; + +const regex = ansiRegex(); + +export default function stripAnsi(string) { + if (typeof string !== 'string') { + throw new TypeError(`Expected a \`string\`, got \`${typeof string}\``); + } + + // Even though the regex is global, we don't need to reset the `.lastIndex` + // because unlike `.exec()` and `.test()`, `.replace()` does it automatically + // and doing it manually has a performance penalty. 
+ return string.replace(regex, ''); +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/strip-ansi/license b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/strip-ansi/license new file mode 100644 index 0000000000000000000000000000000000000000..fa7ceba3eb4a9657a9db7f3ffca4e4e97a9019de --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/strip-ansi/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (https://sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/strip-ansi/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/strip-ansi/package.json new file mode 100644 index 0000000000000000000000000000000000000000..2a59216e424fcb8382ed9bc51dd42866de194d81 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/node_modules/strip-ansi/package.json @@ -0,0 +1,59 @@ +{ + "name": "strip-ansi", + "version": "7.1.2", + "description": "Strip ANSI escape codes from a string", + "license": "MIT", + "repository": "chalk/strip-ansi", + "funding": "https://github.com/chalk/strip-ansi?sponsor=1", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "type": "module", + "exports": "./index.js", + "types": "./index.d.ts", + "sideEffects": false, + "engines": { + "node": ">=12" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "strip", + "trim", + "remove", + "ansi", + "styles", + "color", + "colour", + "colors", + "terminal", + "console", + "string", + "tty", + "escape", + "formatting", + "rgb", + "256", + "shell", + "xterm", + "log", + "logging", + "command-line", + "text" + ], + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "devDependencies": { + "ava": "^3.15.0", + "tsd": "^0.17.0", + "xo": "^0.44.0" + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/package.json new file mode 100644 index 0000000000000000000000000000000000000000..7a952532def5d499f626274e49fa8cc3032b6429 --- /dev/null +++ 
b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/cliui/package.json @@ -0,0 +1,86 @@ +{ + "name": "@isaacs/cliui", + "version": "8.0.2", + "description": "easily create complex multi-column command-line-interfaces", + "main": "build/index.cjs", + "exports": { + ".": [ + { + "import": "./index.mjs", + "require": "./build/index.cjs" + }, + "./build/index.cjs" + ] + }, + "type": "module", + "module": "./index.mjs", + "scripts": { + "check": "standardx '**/*.ts' && standardx '**/*.js' && standardx '**/*.cjs'", + "fix": "standardx --fix '**/*.ts' && standardx --fix '**/*.js' && standardx --fix '**/*.cjs'", + "pretest": "rimraf build && tsc -p tsconfig.test.json && cross-env NODE_ENV=test npm run build:cjs", + "test": "c8 mocha ./test/*.cjs", + "test:esm": "c8 mocha ./test/**/*.mjs", + "postest": "check", + "coverage": "c8 report --check-coverage", + "precompile": "rimraf build", + "compile": "tsc", + "postcompile": "npm run build:cjs", + "build:cjs": "rollup -c", + "prepare": "npm run compile" + }, + "repository": "yargs/cliui", + "standard": { + "ignore": [ + "**/example/**" + ], + "globals": [ + "it" + ] + }, + "keywords": [ + "cli", + "command-line", + "layout", + "design", + "console", + "wrap", + "table" + ], + "author": "Ben Coe ", + "license": "ISC", + "dependencies": { + "string-width": "^5.1.2", + "string-width-cjs": "npm:string-width@^4.2.0", + "strip-ansi": "^7.0.1", + "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", + "wrap-ansi": "^8.1.0", + "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" + }, + "devDependencies": { + "@types/node": "^14.0.27", + "@typescript-eslint/eslint-plugin": "^4.0.0", + "@typescript-eslint/parser": "^4.0.0", + "c8": "^7.3.0", + "chai": "^4.2.0", + "chalk": "^4.1.0", + "cross-env": "^7.0.2", + "eslint": "^7.6.0", + "eslint-plugin-import": "^2.22.0", + "eslint-plugin-node": "^11.1.0", + "gts": "^3.0.0", + "mocha": "^10.0.0", + "rimraf": "^3.0.2", + "rollup": "^2.23.1", + "rollup-plugin-ts": "^3.0.2", + "standardx": "^7.0.0", + "typescript": "^4.0.0" + }, + "files": [ + "build", + "index.mjs", + "!*.d.ts" + ], + "engines": { + "node": ">=12" + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/fs-minipass/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/fs-minipass/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..19129e315fe593965a2fdd50ec0d1253bcbd2ece --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/fs-minipass/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
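Note on the cluster above: @isaacs/cliui's package.json declares string-width@^5 and strip-ansi@^7 (pure-ESM releases) alongside the *-cjs aliases of the older CommonJS versions, which is why these nested ESM copies are vendored under its node_modules. As the index.js sources show, string-width first strips ANSI escapes, collapses emoji sequences (via emoji-regex) to a fixed-width placeholder, then scores the remainder per code point: 0 columns for C0/DEL/C1 controls and combining marks, 2 for East Asian fullwidth/wide characters, 1 or 2 for ambiguous-width characters depending on options.ambiguousIsNarrow, and 1 otherwise. A minimal usage sketch of the two ESM modules as vendored here (the sample strings are illustrative, not taken from npm's sources):

import stringWidth from 'string-width';
import stripAnsi from 'strip-ansi';

// Escape sequences are stripped before measuring, so styling adds no columns.
stripAnsi('\u001B[1mbold\u001B[22m');   // 'bold'
stringWidth('\u001B[1mbold\u001B[22m'); // 4

// East Asian fullwidth/wide characters occupy two terminal columns each.
stringWidth('古池や');                  // 6

// Ambiguous-width characters default to narrow; opt in to wide rendering.
stringWidth('§', { ambiguousIsNarrow: false }); // 2

This measurement is what lets cliui (and npm's own column layout built on it) pad and wrap styled CLI text without escape codes throwing off the alignment.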
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/fs-minipass/dist/commonjs/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/fs-minipass/dist/commonjs/index.js new file mode 100644 index 0000000000000000000000000000000000000000..2b3178c5263b4c17d25973002dbb5f0f2389be0b --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/fs-minipass/dist/commonjs/index.js @@ -0,0 +1,430 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.WriteStreamSync = exports.WriteStream = exports.ReadStreamSync = exports.ReadStream = void 0; +const events_1 = __importDefault(require("events")); +const fs_1 = __importDefault(require("fs")); +const minipass_1 = require("minipass"); +const writev = fs_1.default.writev; +const _autoClose = Symbol('_autoClose'); +const _close = Symbol('_close'); +const _ended = Symbol('_ended'); +const _fd = Symbol('_fd'); +const _finished = Symbol('_finished'); +const _flags = Symbol('_flags'); +const _flush = Symbol('_flush'); +const _handleChunk = Symbol('_handleChunk'); +const _makeBuf = Symbol('_makeBuf'); +const _mode = Symbol('_mode'); +const _needDrain = Symbol('_needDrain'); +const _onerror = Symbol('_onerror'); +const _onopen = Symbol('_onopen'); +const _onread = Symbol('_onread'); +const _onwrite = Symbol('_onwrite'); +const _open = Symbol('_open'); +const _path = Symbol('_path'); +const _pos = Symbol('_pos'); +const _queue = Symbol('_queue'); +const _read = Symbol('_read'); +const _readSize = Symbol('_readSize'); +const _reading = Symbol('_reading'); +const _remain = Symbol('_remain'); +const _size = Symbol('_size'); +const _write = Symbol('_write'); +const _writing = Symbol('_writing'); +const _defaultFlag = Symbol('_defaultFlag'); +const _errored = Symbol('_errored'); +class ReadStream extends minipass_1.Minipass { + [_errored] = false; + [_fd]; + [_path]; + [_readSize]; + [_reading] = false; + [_size]; + [_remain]; + [_autoClose]; + constructor(path, opt) { + opt = opt || {}; + super(opt); + this.readable = true; + this.writable = false; + if (typeof path !== 'string') { + throw new TypeError('path must be a string'); + } + this[_errored] = false; + this[_fd] = typeof opt.fd === 'number' ? opt.fd : undefined; + this[_path] = path; + this[_readSize] = opt.readSize || 16 * 1024 * 1024; + this[_reading] = false; + this[_size] = typeof opt.size === 'number' ? opt.size : Infinity; + this[_remain] = this[_size]; + this[_autoClose] = + typeof opt.autoClose === 'boolean' ? 
opt.autoClose : true; + if (typeof this[_fd] === 'number') { + this[_read](); + } + else { + this[_open](); + } + } + get fd() { + return this[_fd]; + } + get path() { + return this[_path]; + } + //@ts-ignore + write() { + throw new TypeError('this is a readable stream'); + } + //@ts-ignore + end() { + throw new TypeError('this is a readable stream'); + } + [_open]() { + fs_1.default.open(this[_path], 'r', (er, fd) => this[_onopen](er, fd)); + } + [_onopen](er, fd) { + if (er) { + this[_onerror](er); + } + else { + this[_fd] = fd; + this.emit('open', fd); + this[_read](); + } + } + [_makeBuf]() { + return Buffer.allocUnsafe(Math.min(this[_readSize], this[_remain])); + } + [_read]() { + if (!this[_reading]) { + this[_reading] = true; + const buf = this[_makeBuf](); + /* c8 ignore start */ + if (buf.length === 0) { + return process.nextTick(() => this[_onread](null, 0, buf)); + } + /* c8 ignore stop */ + fs_1.default.read(this[_fd], buf, 0, buf.length, null, (er, br, b) => this[_onread](er, br, b)); + } + } + [_onread](er, br, buf) { + this[_reading] = false; + if (er) { + this[_onerror](er); + } + else if (this[_handleChunk](br, buf)) { + this[_read](); + } + } + [_close]() { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd]; + this[_fd] = undefined; + fs_1.default.close(fd, er => er ? this.emit('error', er) : this.emit('close')); + } + } + [_onerror](er) { + this[_reading] = true; + this[_close](); + this.emit('error', er); + } + [_handleChunk](br, buf) { + let ret = false; + // no effect if infinite + this[_remain] -= br; + if (br > 0) { + ret = super.write(br < buf.length ? buf.subarray(0, br) : buf); + } + if (br === 0 || this[_remain] <= 0) { + ret = false; + this[_close](); + super.end(); + } + return ret; + } + emit(ev, ...args) { + switch (ev) { + case 'prefinish': + case 'finish': + return false; + case 'drain': + if (typeof this[_fd] === 'number') { + this[_read](); + } + return false; + case 'error': + if (this[_errored]) { + return false; + } + this[_errored] = true; + return super.emit(ev, ...args); + default: + return super.emit(ev, ...args); + } + } +} +exports.ReadStream = ReadStream; +class ReadStreamSync extends ReadStream { + [_open]() { + let threw = true; + try { + this[_onopen](null, fs_1.default.openSync(this[_path], 'r')); + threw = false; + } + finally { + if (threw) { + this[_close](); + } + } + } + [_read]() { + let threw = true; + try { + if (!this[_reading]) { + this[_reading] = true; + do { + const buf = this[_makeBuf](); + /* c8 ignore start */ + const br = buf.length === 0 + ? 0 + : fs_1.default.readSync(this[_fd], buf, 0, buf.length, null); + /* c8 ignore stop */ + if (!this[_handleChunk](br, buf)) { + break; + } + } while (true); + this[_reading] = false; + } + threw = false; + } + finally { + if (threw) { + this[_close](); + } + } + } + [_close]() { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd]; + this[_fd] = undefined; + fs_1.default.closeSync(fd); + this.emit('close'); + } + } +} +exports.ReadStreamSync = ReadStreamSync; +class WriteStream extends events_1.default { + readable = false; + writable = true; + [_errored] = false; + [_writing] = false; + [_ended] = false; + [_queue] = []; + [_needDrain] = false; + [_path]; + [_mode]; + [_autoClose]; + [_fd]; + [_defaultFlag]; + [_flags]; + [_finished] = false; + [_pos]; + constructor(path, opt) { + opt = opt || {}; + super(opt); + this[_path] = path; + this[_fd] = typeof opt.fd === 'number' ? 
opt.fd : undefined; + this[_mode] = opt.mode === undefined ? 0o666 : opt.mode; + this[_pos] = typeof opt.start === 'number' ? opt.start : undefined; + this[_autoClose] = + typeof opt.autoClose === 'boolean' ? opt.autoClose : true; + // truncating makes no sense when writing into the middle + const defaultFlag = this[_pos] !== undefined ? 'r+' : 'w'; + this[_defaultFlag] = opt.flags === undefined; + this[_flags] = opt.flags === undefined ? defaultFlag : opt.flags; + if (this[_fd] === undefined) { + this[_open](); + } + } + emit(ev, ...args) { + if (ev === 'error') { + if (this[_errored]) { + return false; + } + this[_errored] = true; + } + return super.emit(ev, ...args); + } + get fd() { + return this[_fd]; + } + get path() { + return this[_path]; + } + [_onerror](er) { + this[_close](); + this[_writing] = true; + this.emit('error', er); + } + [_open]() { + fs_1.default.open(this[_path], this[_flags], this[_mode], (er, fd) => this[_onopen](er, fd)); + } + [_onopen](er, fd) { + if (this[_defaultFlag] && + this[_flags] === 'r+' && + er && + er.code === 'ENOENT') { + this[_flags] = 'w'; + this[_open](); + } + else if (er) { + this[_onerror](er); + } + else { + this[_fd] = fd; + this.emit('open', fd); + if (!this[_writing]) { + this[_flush](); + } + } + } + end(buf, enc) { + if (buf) { + //@ts-ignore + this.write(buf, enc); + } + this[_ended] = true; + // synthetic after-write logic, where drain/finish live + if (!this[_writing] && + !this[_queue].length && + typeof this[_fd] === 'number') { + this[_onwrite](null, 0); + } + return this; + } + write(buf, enc) { + if (typeof buf === 'string') { + buf = Buffer.from(buf, enc); + } + if (this[_ended]) { + this.emit('error', new Error('write() after end()')); + return false; + } + if (this[_fd] === undefined || this[_writing] || this[_queue].length) { + this[_queue].push(buf); + this[_needDrain] = true; + return false; + } + this[_writing] = true; + this[_write](buf); + return true; + } + [_write](buf) { + fs_1.default.write(this[_fd], buf, 0, buf.length, this[_pos], (er, bw) => this[_onwrite](er, bw)); + } + [_onwrite](er, bw) { + if (er) { + this[_onerror](er); + } + else { + if (this[_pos] !== undefined && typeof bw === 'number') { + this[_pos] += bw; + } + if (this[_queue].length) { + this[_flush](); + } + else { + this[_writing] = false; + if (this[_ended] && !this[_finished]) { + this[_finished] = true; + this[_close](); + this.emit('finish'); + } + else if (this[_needDrain]) { + this[_needDrain] = false; + this.emit('drain'); + } + } + } + } + [_flush]() { + if (this[_queue].length === 0) { + if (this[_ended]) { + this[_onwrite](null, 0); + } + } + else if (this[_queue].length === 1) { + this[_write](this[_queue].pop()); + } + else { + const iovec = this[_queue]; + this[_queue] = []; + writev(this[_fd], iovec, this[_pos], (er, bw) => this[_onwrite](er, bw)); + } + } + [_close]() { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd]; + this[_fd] = undefined; + fs_1.default.close(fd, er => er ? this.emit('error', er) : this.emit('close')); + } + } +} +exports.WriteStream = WriteStream; +class WriteStreamSync extends WriteStream { + [_open]() { + let fd; + // only wrap in a try{} block if we know we'll retry, to avoid + // the rethrow obscuring the error's source frame in most cases. 
+ if (this[_defaultFlag] && this[_flags] === 'r+') { + try { + fd = fs_1.default.openSync(this[_path], this[_flags], this[_mode]); + } + catch (er) { + if (er?.code === 'ENOENT') { + this[_flags] = 'w'; + return this[_open](); + } + else { + throw er; + } + } + } + else { + fd = fs_1.default.openSync(this[_path], this[_flags], this[_mode]); + } + this[_onopen](null, fd); + } + [_close]() { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd]; + this[_fd] = undefined; + fs_1.default.closeSync(fd); + this.emit('close'); + } + } + [_write](buf) { + // throw the original, but try to close if it fails + let threw = true; + try { + this[_onwrite](null, fs_1.default.writeSync(this[_fd], buf, 0, buf.length, this[_pos])); + threw = false; + } + finally { + if (threw) { + try { + this[_close](); + } + catch { + // ok error + } + } + } + } +} +exports.WriteStreamSync = WriteStreamSync; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/fs-minipass/dist/commonjs/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/fs-minipass/dist/commonjs/package.json new file mode 100644 index 0000000000000000000000000000000000000000..5bbefffbabee392d1855491b84dc0a716b6a3bf2 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/fs-minipass/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/fs-minipass/dist/esm/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/fs-minipass/dist/esm/index.js new file mode 100644 index 0000000000000000000000000000000000000000..287a0f614dcc6543de243533cd22512136f738f1 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/fs-minipass/dist/esm/index.js @@ -0,0 +1,420 @@ +import EE from 'events'; +import fs from 'fs'; +import { Minipass } from 'minipass'; +const writev = fs.writev; +const _autoClose = Symbol('_autoClose'); +const _close = Symbol('_close'); +const _ended = Symbol('_ended'); +const _fd = Symbol('_fd'); +const _finished = Symbol('_finished'); +const _flags = Symbol('_flags'); +const _flush = Symbol('_flush'); +const _handleChunk = Symbol('_handleChunk'); +const _makeBuf = Symbol('_makeBuf'); +const _mode = Symbol('_mode'); +const _needDrain = Symbol('_needDrain'); +const _onerror = Symbol('_onerror'); +const _onopen = Symbol('_onopen'); +const _onread = Symbol('_onread'); +const _onwrite = Symbol('_onwrite'); +const _open = Symbol('_open'); +const _path = Symbol('_path'); +const _pos = Symbol('_pos'); +const _queue = Symbol('_queue'); +const _read = Symbol('_read'); +const _readSize = Symbol('_readSize'); +const _reading = Symbol('_reading'); +const _remain = Symbol('_remain'); +const _size = Symbol('_size'); +const _write = Symbol('_write'); +const _writing = Symbol('_writing'); +const _defaultFlag = Symbol('_defaultFlag'); +const _errored = Symbol('_errored'); +export class ReadStream extends Minipass { + [_errored] = false; + [_fd]; + [_path]; + [_readSize]; + [_reading] = false; + [_size]; + [_remain]; + [_autoClose]; + constructor(path, opt) { + opt = opt || {}; + super(opt); + this.readable = true; + this.writable = false; + if (typeof path !== 'string') { + throw new TypeError('path must be a string'); + } + this[_errored] = false; + this[_fd] = typeof opt.fd === 
'number' ? opt.fd : undefined; + this[_path] = path; + this[_readSize] = opt.readSize || 16 * 1024 * 1024; + this[_reading] = false; + this[_size] = typeof opt.size === 'number' ? opt.size : Infinity; + this[_remain] = this[_size]; + this[_autoClose] = + typeof opt.autoClose === 'boolean' ? opt.autoClose : true; + if (typeof this[_fd] === 'number') { + this[_read](); + } + else { + this[_open](); + } + } + get fd() { + return this[_fd]; + } + get path() { + return this[_path]; + } + //@ts-ignore + write() { + throw new TypeError('this is a readable stream'); + } + //@ts-ignore + end() { + throw new TypeError('this is a readable stream'); + } + [_open]() { + fs.open(this[_path], 'r', (er, fd) => this[_onopen](er, fd)); + } + [_onopen](er, fd) { + if (er) { + this[_onerror](er); + } + else { + this[_fd] = fd; + this.emit('open', fd); + this[_read](); + } + } + [_makeBuf]() { + return Buffer.allocUnsafe(Math.min(this[_readSize], this[_remain])); + } + [_read]() { + if (!this[_reading]) { + this[_reading] = true; + const buf = this[_makeBuf](); + /* c8 ignore start */ + if (buf.length === 0) { + return process.nextTick(() => this[_onread](null, 0, buf)); + } + /* c8 ignore stop */ + fs.read(this[_fd], buf, 0, buf.length, null, (er, br, b) => this[_onread](er, br, b)); + } + } + [_onread](er, br, buf) { + this[_reading] = false; + if (er) { + this[_onerror](er); + } + else if (this[_handleChunk](br, buf)) { + this[_read](); + } + } + [_close]() { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd]; + this[_fd] = undefined; + fs.close(fd, er => er ? this.emit('error', er) : this.emit('close')); + } + } + [_onerror](er) { + this[_reading] = true; + this[_close](); + this.emit('error', er); + } + [_handleChunk](br, buf) { + let ret = false; + // no effect if infinite + this[_remain] -= br; + if (br > 0) { + ret = super.write(br < buf.length ? buf.subarray(0, br) : buf); + } + if (br === 0 || this[_remain] <= 0) { + ret = false; + this[_close](); + super.end(); + } + return ret; + } + emit(ev, ...args) { + switch (ev) { + case 'prefinish': + case 'finish': + return false; + case 'drain': + if (typeof this[_fd] === 'number') { + this[_read](); + } + return false; + case 'error': + if (this[_errored]) { + return false; + } + this[_errored] = true; + return super.emit(ev, ...args); + default: + return super.emit(ev, ...args); + } + } +} +export class ReadStreamSync extends ReadStream { + [_open]() { + let threw = true; + try { + this[_onopen](null, fs.openSync(this[_path], 'r')); + threw = false; + } + finally { + if (threw) { + this[_close](); + } + } + } + [_read]() { + let threw = true; + try { + if (!this[_reading]) { + this[_reading] = true; + do { + const buf = this[_makeBuf](); + /* c8 ignore start */ + const br = buf.length === 0 + ? 
0 + : fs.readSync(this[_fd], buf, 0, buf.length, null); + /* c8 ignore stop */ + if (!this[_handleChunk](br, buf)) { + break; + } + } while (true); + this[_reading] = false; + } + threw = false; + } + finally { + if (threw) { + this[_close](); + } + } + } + [_close]() { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd]; + this[_fd] = undefined; + fs.closeSync(fd); + this.emit('close'); + } + } +} +export class WriteStream extends EE { + readable = false; + writable = true; + [_errored] = false; + [_writing] = false; + [_ended] = false; + [_queue] = []; + [_needDrain] = false; + [_path]; + [_mode]; + [_autoClose]; + [_fd]; + [_defaultFlag]; + [_flags]; + [_finished] = false; + [_pos]; + constructor(path, opt) { + opt = opt || {}; + super(opt); + this[_path] = path; + this[_fd] = typeof opt.fd === 'number' ? opt.fd : undefined; + this[_mode] = opt.mode === undefined ? 0o666 : opt.mode; + this[_pos] = typeof opt.start === 'number' ? opt.start : undefined; + this[_autoClose] = + typeof opt.autoClose === 'boolean' ? opt.autoClose : true; + // truncating makes no sense when writing into the middle + const defaultFlag = this[_pos] !== undefined ? 'r+' : 'w'; + this[_defaultFlag] = opt.flags === undefined; + this[_flags] = opt.flags === undefined ? defaultFlag : opt.flags; + if (this[_fd] === undefined) { + this[_open](); + } + } + emit(ev, ...args) { + if (ev === 'error') { + if (this[_errored]) { + return false; + } + this[_errored] = true; + } + return super.emit(ev, ...args); + } + get fd() { + return this[_fd]; + } + get path() { + return this[_path]; + } + [_onerror](er) { + this[_close](); + this[_writing] = true; + this.emit('error', er); + } + [_open]() { + fs.open(this[_path], this[_flags], this[_mode], (er, fd) => this[_onopen](er, fd)); + } + [_onopen](er, fd) { + if (this[_defaultFlag] && + this[_flags] === 'r+' && + er && + er.code === 'ENOENT') { + this[_flags] = 'w'; + this[_open](); + } + else if (er) { + this[_onerror](er); + } + else { + this[_fd] = fd; + this.emit('open', fd); + if (!this[_writing]) { + this[_flush](); + } + } + } + end(buf, enc) { + if (buf) { + //@ts-ignore + this.write(buf, enc); + } + this[_ended] = true; + // synthetic after-write logic, where drain/finish live + if (!this[_writing] && + !this[_queue].length && + typeof this[_fd] === 'number') { + this[_onwrite](null, 0); + } + return this; + } + write(buf, enc) { + if (typeof buf === 'string') { + buf = Buffer.from(buf, enc); + } + if (this[_ended]) { + this.emit('error', new Error('write() after end()')); + return false; + } + if (this[_fd] === undefined || this[_writing] || this[_queue].length) { + this[_queue].push(buf); + this[_needDrain] = true; + return false; + } + this[_writing] = true; + this[_write](buf); + return true; + } + [_write](buf) { + fs.write(this[_fd], buf, 0, buf.length, this[_pos], (er, bw) => this[_onwrite](er, bw)); + } + [_onwrite](er, bw) { + if (er) { + this[_onerror](er); + } + else { + if (this[_pos] !== undefined && typeof bw === 'number') { + this[_pos] += bw; + } + if (this[_queue].length) { + this[_flush](); + } + else { + this[_writing] = false; + if (this[_ended] && !this[_finished]) { + this[_finished] = true; + this[_close](); + this.emit('finish'); + } + else if (this[_needDrain]) { + this[_needDrain] = false; + this.emit('drain'); + } + } + } + } + [_flush]() { + if (this[_queue].length === 0) { + if (this[_ended]) { + this[_onwrite](null, 0); + } + } + else if (this[_queue].length === 1) { + this[_write](this[_queue].pop()); + } + 
else { + const iovec = this[_queue]; + this[_queue] = []; + writev(this[_fd], iovec, this[_pos], (er, bw) => this[_onwrite](er, bw)); + } + } + [_close]() { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd]; + this[_fd] = undefined; + fs.close(fd, er => er ? this.emit('error', er) : this.emit('close')); + } + } +} +export class WriteStreamSync extends WriteStream { + [_open]() { + let fd; + // only wrap in a try{} block if we know we'll retry, to avoid + // the rethrow obscuring the error's source frame in most cases. + if (this[_defaultFlag] && this[_flags] === 'r+') { + try { + fd = fs.openSync(this[_path], this[_flags], this[_mode]); + } + catch (er) { + if (er?.code === 'ENOENT') { + this[_flags] = 'w'; + return this[_open](); + } + else { + throw er; + } + } + } + else { + fd = fs.openSync(this[_path], this[_flags], this[_mode]); + } + this[_onopen](null, fd); + } + [_close]() { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd]; + this[_fd] = undefined; + fs.closeSync(fd); + this.emit('close'); + } + } + [_write](buf) { + // throw the original, but try to close if it fails + let threw = true; + try { + this[_onwrite](null, fs.writeSync(this[_fd], buf, 0, buf.length, this[_pos])); + threw = false; + } + finally { + if (threw) { + try { + this[_close](); + } + catch { + // ok error + } + } + } + } +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/fs-minipass/dist/esm/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/fs-minipass/dist/esm/package.json new file mode 100644 index 0000000000000000000000000000000000000000..3dbc1ca591c0557e35b6004aeba250e6a70b56e3 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/fs-minipass/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/fs-minipass/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/fs-minipass/package.json new file mode 100644 index 0000000000000000000000000000000000000000..cc4576c4afe776908d56c98c690156f25db77ab7 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/fs-minipass/package.json @@ -0,0 +1,72 @@ +{ + "name": "@isaacs/fs-minipass", + "version": "4.0.1", + "main": "./dist/commonjs/index.js", + "scripts": { + "prepare": "tshy", + "pretest": "npm run prepare", + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "format": "prettier --write . --loglevel warn", + "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts" + }, + "keywords": [], + "author": "Isaac Z. 
Schlueter", + "license": "ISC", + "repository": { + "type": "git", + "url": "https://github.com/npm/fs-minipass.git" + }, + "description": "fs read and write streams based on minipass", + "dependencies": { + "minipass": "^7.0.4" + }, + "devDependencies": { + "@types/node": "^20.11.30", + "mutate-fs": "^2.1.1", + "prettier": "^3.2.5", + "tap": "^18.7.1", + "tshy": "^1.12.0", + "typedoc": "^0.25.12" + }, + "files": [ + "dist" + ], + "engines": { + "node": ">=18.0.0" + }, + "tshy": { + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + } + }, + "exports": { + "./package.json": "./package.json", + ".": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "types": "./dist/commonjs/index.d.ts", + "type": "module", + "prettier": { + "semi": false, + "printWidth": 75, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/string-locale-compare/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/string-locale-compare/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..05eeeb88c2ef4cc9bacbcc46d6cfcb6962f8b385 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/string-locale-compare/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
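Before moving on, note that the `@isaacs/fs-minipass` streams above differ from core `fs` streams: `ReadStream` is a Minipass (chunks are buffered until a consumer attaches), while `WriteStream` is a bare `EventEmitter` with its own write queue. A minimal round-trip sketch, assuming a local `input.txt` exists; none of this is from the package's own docs:

```js
const { ReadStreamSync, WriteStreamSync } = require('@isaacs/fs-minipass')

// the Sync variants perform all fs calls synchronously, but still deliver
// data through Minipass events; chunks buffered during construction are
// flushed once a 'data' listener attaches
const chunks = []
const reader = new ReadStreamSync('input.txt', { readSize: 1024 })
reader.on('data', chunk => chunks.push(chunk))
reader.on('end', () => {
  const writer = new WriteStreamSync('input.copy.txt')
  writer.write(Buffer.concat(chunks)) // fs.writeSync under the hood
  writer.end()                        // emits 'finish', then 'close' via autoClose
})
```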
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/string-locale-compare/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/string-locale-compare/index.js new file mode 100644 index 0000000000000000000000000000000000000000..0f68ab6774e1a1ea6120e1a74ab3590dffe9dbe0 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/string-locale-compare/index.js @@ -0,0 +1,42 @@ +const hasIntl = typeof Intl === 'object' && !!Intl +const Collator = hasIntl && Intl.Collator +const cache = new Map() + +const collatorCompare = (locale, opts) => { + const collator = new Collator(locale, opts) + return (a, b) => collator.compare(a, b) +} + +const localeCompare = (locale, opts) => (a, b) => a.localeCompare(b, locale, opts) + +const knownOptions = [ + 'sensitivity', + 'numeric', + 'ignorePunctuation', + 'caseFirst', +] + +const { hasOwnProperty } = Object.prototype + +module.exports = (locale, options = {}) => { + if (!locale || typeof locale !== 'string') + throw new TypeError('locale required') + + const opts = knownOptions.reduce((opts, k) => { + if (hasOwnProperty.call(options, k)) { + opts[k] = options[k] + } + return opts + }, {}) + const key = `${locale}\n${JSON.stringify(opts)}` + + if (cache.has(key)) + return cache.get(key) + + const compare = hasIntl + ? collatorCompare(locale, opts) + : localeCompare(locale, opts) + cache.set(key, compare) + + return compare +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/string-locale-compare/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/string-locale-compare/package.json new file mode 100644 index 0000000000000000000000000000000000000000..58de848a00377d41b2c9377718eedbfc2c4b34a8 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@isaacs/string-locale-compare/package.json @@ -0,0 +1,28 @@ +{ + "name": "@isaacs/string-locale-compare", + "version": "1.1.0", + "files": [ + "index.js" + ], + "main": "index.js", + "description": "Compare strings with Intl.Collator if available, falling back to String.localeCompare otherwise", + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/string-locale-compare" + }, + "author": "Isaac Z. 
Schlueter (https://izs.me)", + "license": "ISC", + "scripts": { + "test": "tap", + "snap": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags" + }, + "tap": { + "check-coverage": true + }, + "devDependencies": { + "tap": "^15.0.9" + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/agent/lib/agents.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/agent/lib/agents.js new file mode 100644 index 0000000000000000000000000000000000000000..c541b93001517e2722e5c73c577dc5a5e53c5234 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/agent/lib/agents.js @@ -0,0 +1,206 @@ +'use strict' + +const net = require('net') +const tls = require('tls') +const { once } = require('events') +const timers = require('timers/promises') +const { normalizeOptions, cacheOptions } = require('./options') +const { getProxy, getProxyAgent, proxyCache } = require('./proxy.js') +const Errors = require('./errors.js') +const { Agent: AgentBase } = require('agent-base') + +module.exports = class Agent extends AgentBase { + #options + #timeouts + #proxy + #noProxy + #ProxyAgent + + constructor (options = {}) { + const { timeouts, proxy, noProxy, ...normalizedOptions } = normalizeOptions(options) + + super(normalizedOptions) + + this.#options = normalizedOptions + this.#timeouts = timeouts + + if (proxy) { + this.#proxy = new URL(proxy) + this.#noProxy = noProxy + this.#ProxyAgent = getProxyAgent(proxy) + } + } + + get proxy () { + return this.#proxy ? { url: this.#proxy } : {} + } + + #getProxy (options) { + if (!this.#proxy) { + return + } + + const proxy = getProxy(`${options.protocol}//${options.host}:${options.port}`, { + proxy: this.#proxy, + noProxy: this.#noProxy, + }) + + if (!proxy) { + return + } + + const cacheKey = cacheOptions({ + ...options, + ...this.#options, + timeouts: this.#timeouts, + proxy, + }) + + if (proxyCache.has(cacheKey)) { + return proxyCache.get(cacheKey) + } + + let ProxyAgent = this.#ProxyAgent + if (Array.isArray(ProxyAgent)) { + ProxyAgent = this.isSecureEndpoint(options) ? ProxyAgent[1] : ProxyAgent[0] + } + + const proxyAgent = new ProxyAgent(proxy, { + ...this.#options, + socketOptions: { family: this.#options.family }, + }) + proxyCache.set(cacheKey, proxyAgent) + + return proxyAgent + } + + // takes an array of promises and races them against the connection timeout + // which will throw the necessary error if it is hit. This will return the + // result of the promise race. 
+ async #timeoutConnection ({ promises, options, timeout }, ac = new AbortController()) { + if (timeout) { + const connectionTimeout = timers.setTimeout(timeout, null, { signal: ac.signal }) + .then(() => { + throw new Errors.ConnectionTimeoutError(`${options.host}:${options.port}`) + }).catch((err) => { + if (err.name === 'AbortError') { + return + } + throw err + }) + promises.push(connectionTimeout) + } + + let result + try { + result = await Promise.race(promises) + ac.abort() + } catch (err) { + ac.abort() + throw err + } + return result + } + + async connect (request, options) { + // if the connection does not have its own lookup function + // set, then use the one from our options + options.lookup ??= this.#options.lookup + + let socket + let timeout = this.#timeouts.connection + const isSecureEndpoint = this.isSecureEndpoint(options) + + const proxy = this.#getProxy(options) + if (proxy) { + // some of the proxies will wait for the socket to fully connect before + // returning so we have to await this while also racing it against the + // connection timeout. + const start = Date.now() + socket = await this.#timeoutConnection({ + options, + timeout, + promises: [proxy.connect(request, options)], + }) + // see how much time proxy.connect took and subtract it from + // the timeout + if (timeout) { + timeout = timeout - (Date.now() - start) + } + } else { + socket = (isSecureEndpoint ? tls : net).connect(options) + } + + socket.setKeepAlive(this.keepAlive, this.keepAliveMsecs) + socket.setNoDelay(this.keepAlive) + + const abortController = new AbortController() + const { signal } = abortController + + const connectPromise = socket[isSecureEndpoint ? 'secureConnecting' : 'connecting'] + ? once(socket, isSecureEndpoint ? 'secureConnect' : 'connect', { signal }) + : Promise.resolve() + + await this.#timeoutConnection({ + options, + timeout, + promises: [ + connectPromise, + once(socket, 'error', { signal }).then((err) => { + throw err[0] + }), + ], + }, abortController) + + if (this.#timeouts.idle) { + socket.setTimeout(this.#timeouts.idle, () => { + socket.destroy(new Errors.IdleTimeoutError(`${options.host}:${options.port}`)) + }) + } + + return socket + } + + addRequest (request, options) { + const proxy = this.#getProxy(options) + // it would be better to call proxy.addRequest here but this causes the + // http-proxy-agent to call its super.addRequest which causes the request + // to be added to the agent twice. since we only support 3 agents + // currently (see the required agents in proxy.js) we have manually + // checked that the only public methods we need to call are called in the + // next block. this could change in the future and presumably we would get + // failing tests until we have properly called the necessary methods on + // each of our proxy agents + if (proxy?.setRequestProps) { + proxy.setRequestProps(request, options) + } + + request.setHeader('connection', this.keepAlive ? 
'keep-alive' : 'close') + + if (this.#timeouts.response) { + let responseTimeout + request.once('finish', () => { + // keep a handle on the timer so the response listener can clear it + responseTimeout = setTimeout(() => { + request.destroy(new Errors.ResponseTimeoutError(request, this.#proxy)) + }, this.#timeouts.response) + }) + request.once('response', () => { + clearTimeout(responseTimeout) + }) + } + + if (this.#timeouts.transfer) { + let transferTimeout + request.once('response', (res) => { + // likewise, store the timer so the close listener can clear it + transferTimeout = setTimeout(() => { + res.destroy(new Errors.TransferTimeoutError(request, this.#proxy)) + }, this.#timeouts.transfer) + res.once('close', () => { + clearTimeout(transferTimeout) + }) + }) + } + + return super.addRequest(request, options) + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/agent/lib/dns.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/agent/lib/dns.js new file mode 100644 index 0000000000000000000000000000000000000000..3c6946c566d736f3ce4ab54773885185e6ad7048 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/agent/lib/dns.js @@ -0,0 +1,53 @@ +'use strict' + +const { LRUCache } = require('lru-cache') +const dns = require('dns') + +// this is a factory so that each request can have its own opts (i.e. ttl) +// while still sharing the cache across all requests +const cache = new LRUCache({ max: 50 }) + +const getOptions = ({ + family = 0, + hints = dns.ADDRCONFIG, + all = false, + verbatim = undefined, + ttl = 5 * 60 * 1000, + lookup = dns.lookup, +}) => ({ + // hints and lookup are returned since both are top level properties to (net|tls).connect + hints, + lookup: (hostname, ...args) => { + const callback = args.pop() // callback is always last arg + const lookupOptions = args[0] ?? {} + + const options = { + family, + hints, + all, + verbatim, + ...(typeof lookupOptions === 'number' ? 
{ family: lookupOptions } : lookupOptions), + } + + const key = JSON.stringify({ hostname, ...options }) + + if (cache.has(key)) { + const cached = cache.get(key) + return process.nextTick(callback, null, ...cached) + } + + lookup(hostname, options, (err, ...result) => { + if (err) { + return callback(err) + } + + cache.set(key, result, { ttl }) + return callback(null, ...result) + }) + }, +}) + +module.exports = { + cache, + getOptions, +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/agent/lib/errors.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/agent/lib/errors.js new file mode 100644 index 0000000000000000000000000000000000000000..70475aec8eb3575fc499b6e77ad0c15dd0db11b3 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/agent/lib/errors.js @@ -0,0 +1,61 @@ +'use strict' + +class InvalidProxyProtocolError extends Error { + constructor (url) { + super(`Invalid protocol \`${url.protocol}\` connecting to proxy \`${url.host}\``) + this.code = 'EINVALIDPROXY' + this.proxy = url + } +} + +class ConnectionTimeoutError extends Error { + constructor (host) { + super(`Timeout connecting to host \`${host}\``) + this.code = 'ECONNECTIONTIMEOUT' + this.host = host + } +} + +class IdleTimeoutError extends Error { + constructor (host) { + super(`Idle timeout reached for host \`${host}\``) + this.code = 'EIDLETIMEOUT' + this.host = host + } +} + +class ResponseTimeoutError extends Error { + constructor (request, proxy) { + let msg = 'Response timeout ' + if (proxy) { + msg += `from proxy \`${proxy.host}\` ` + } + msg += `connecting to host \`${request.host}\`` + super(msg) + this.code = 'ERESPONSETIMEOUT' + this.proxy = proxy + this.request = request + } +} + +class TransferTimeoutError extends Error { + constructor (request, proxy) { + let msg = 'Transfer timeout ' + if (proxy) { + msg += `from proxy \`${proxy.host}\` ` + } + msg += `for \`${request.host}\`` + super(msg) + this.code = 'ETRANSFERTIMEOUT' + this.proxy = proxy + this.request = request + } +} + +module.exports = { + InvalidProxyProtocolError, + ConnectionTimeoutError, + IdleTimeoutError, + ResponseTimeoutError, + TransferTimeoutError, +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/agent/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/agent/lib/index.js new file mode 100644 index 0000000000000000000000000000000000000000..b33d6eaef07a21d090b0800420799cda4dd7aa6a --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/agent/lib/index.js @@ -0,0 +1,56 @@ +'use strict' + +const { LRUCache } = require('lru-cache') +const { normalizeOptions, cacheOptions } = require('./options') +const { getProxy, proxyCache } = require('./proxy.js') +const dns = require('./dns.js') +const Agent = require('./agents.js') + +const agentCache = new LRUCache({ max: 20 }) + +const getAgent = (url, { agent, proxy, noProxy, ...options } = {}) => { + // false has meaning so this can't be a simple truthiness check + if (agent != null) { + return agent + } + + url = new URL(url) + + const proxyForUrl = getProxy(url, { proxy, noProxy }) + const normalizedOptions = { + ...normalizeOptions(options), + proxy: proxyForUrl, + } + + const cacheKey = cacheOptions({ + ...normalizedOptions, + secureEndpoint: url.protocol === 'https:', + }) + + if (agentCache.has(cacheKey)) { + return agentCache.get(cacheKey) + } + + 
const newAgent = new Agent(normalizedOptions) + agentCache.set(cacheKey, newAgent) + + return newAgent +} + +module.exports = { + getAgent, + Agent, + // these are exported for backwards compatibility + HttpAgent: Agent, + HttpsAgent: Agent, + cache: { + proxy: proxyCache, + agent: agentCache, + dns: dns.cache, + clear: () => { + proxyCache.clear() + agentCache.clear() + dns.cache.clear() + }, + }, +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/agent/lib/options.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/agent/lib/options.js new file mode 100644 index 0000000000000000000000000000000000000000..0bf53f725f0846029fb0e62b219616152f6a294e --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/agent/lib/options.js @@ -0,0 +1,86 @@ +'use strict' + +const dns = require('./dns') + +const normalizeOptions = (opts) => { + const family = parseInt(opts.family ?? '0', 10) + const keepAlive = opts.keepAlive ?? true + + const normalized = { + // nodejs http agent options. these are all the defaults + // but kept here to increase the likelihood of cache hits + // https://nodejs.org/api/http.html#new-agentoptions + keepAliveMsecs: keepAlive ? 1000 : undefined, + maxSockets: opts.maxSockets ?? 15, + maxTotalSockets: Infinity, + maxFreeSockets: keepAlive ? 256 : undefined, + scheduling: 'fifo', + // then spread the rest of the options + ...opts, + // we already set these to their defaults that we want + family, + keepAlive, + // our custom timeout options + timeouts: { + // the standard timeout option is mapped to our idle timeout + // and then deleted below + idle: opts.timeout ?? 0, + connection: 0, + response: 0, + transfer: 0, + ...opts.timeouts, + }, + // get the dns options that go at the top level of socket connection + ...dns.getOptions({ family, ...opts.dns }), + } + + // remove timeout since we already used it to set our own idle timeout + delete normalized.timeout + + return normalized +} + +const createKey = (obj) => { + let key = '' + // the keys are strings; subtracting them would yield NaN and leave the + // entries unsorted, so compare them as strings + const sorted = Object.entries(obj).sort((a, b) => a[0].localeCompare(b[0])) + for (let [k, v] of sorted) { + if (v == null) { + v = 'null' + } else if (v instanceof URL) { + v = v.toString() + } else if (typeof v === 'object') { + v = createKey(v) + } + key += `${k}:${v}:` + } + return key +} + +const cacheOptions = ({ secureEndpoint, ...options }) => createKey({ + secureEndpoint: !!secureEndpoint, + // socket connect options + family: options.family, + hints: options.hints, + localAddress: options.localAddress, + // tls specific connect options + strictSsl: secureEndpoint ? !!options.rejectUnauthorized : false, + ca: secureEndpoint ? options.ca : null, + cert: secureEndpoint ? options.cert : null, + key: secureEndpoint ? 
options.key : null, + // http agent options + keepAlive: options.keepAlive, + keepAliveMsecs: options.keepAliveMsecs, + maxSockets: options.maxSockets, + maxTotalSockets: options.maxTotalSockets, + maxFreeSockets: options.maxFreeSockets, + scheduling: options.scheduling, + // timeout options + timeouts: options.timeouts, + // proxy + proxy: options.proxy, +}) + +module.exports = { + normalizeOptions, + cacheOptions, +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/agent/lib/proxy.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/agent/lib/proxy.js new file mode 100644 index 0000000000000000000000000000000000000000..6272e929e57bcf4d1105fe171ba631a5fbbfd586 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/agent/lib/proxy.js @@ -0,0 +1,88 @@ +'use strict' + +const { HttpProxyAgent } = require('http-proxy-agent') +const { HttpsProxyAgent } = require('https-proxy-agent') +const { SocksProxyAgent } = require('socks-proxy-agent') +const { LRUCache } = require('lru-cache') +const { InvalidProxyProtocolError } = require('./errors.js') + +const PROXY_CACHE = new LRUCache({ max: 20 }) + +const SOCKS_PROTOCOLS = new Set(SocksProxyAgent.protocols) + +const PROXY_ENV_KEYS = new Set(['https_proxy', 'http_proxy', 'proxy', 'no_proxy']) + +const PROXY_ENV = Object.entries(process.env).reduce((acc, [key, value]) => { + key = key.toLowerCase() + if (PROXY_ENV_KEYS.has(key)) { + acc[key] = value + } + return acc +}, {}) + +const getProxyAgent = (url) => { + url = new URL(url) + + const protocol = url.protocol.slice(0, -1) + if (SOCKS_PROTOCOLS.has(protocol)) { + return SocksProxyAgent + } + if (protocol === 'https' || protocol === 'http') { + return [HttpProxyAgent, HttpsProxyAgent] + } + + throw new InvalidProxyProtocolError(url) +} + +const isNoProxy = (url, noProxy) => { + if (typeof noProxy === 'string') { + noProxy = noProxy.split(',').map((p) => p.trim()).filter(Boolean) + } + + if (!noProxy || !noProxy.length) { + return false + } + + const hostSegments = url.hostname.split('.').reverse() + + return noProxy.some((no) => { + const noSegments = no.split('.').filter(Boolean).reverse() + if (!noSegments.length) { + return false + } + + for (let i = 0; i < noSegments.length; i++) { + if (hostSegments[i] !== noSegments[i]) { + return false + } + } + + return true + }) +} + +const getProxy = (url, { proxy, noProxy }) => { + url = new URL(url) + + if (!proxy) { + proxy = url.protocol === 'https:' + ? 
PROXY_ENV.https_proxy + : PROXY_ENV.https_proxy || PROXY_ENV.http_proxy || PROXY_ENV.proxy + } + + if (!noProxy) { + noProxy = PROXY_ENV.no_proxy + } + + if (!proxy || isNoProxy(url, noProxy)) { + return null + } + + return new URL(proxy) +} + +module.exports = { + getProxyAgent, + getProxy, + proxyCache: PROXY_CACHE, +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/agent/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/agent/package.json new file mode 100644 index 0000000000000000000000000000000000000000..67670a0c1c484e44df515160970e400bc627d231 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/agent/package.json @@ -0,0 +1,60 @@ +{ + "name": "@npmcli/agent", + "version": "4.0.0", + "description": "the http/https agent used by the npm cli", + "main": "lib/index.js", + "scripts": { + "gencerts": "bash scripts/create-cert.sh", + "test": "tap", + "lint": "npm run eslint", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run eslint -- --fix", + "snap": "tap", + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" + }, + "author": "GitHub Inc.", + "license": "ISC", + "bugs": { + "url": "https://github.com/npm/agent/issues" + }, + "homepage": "https://github.com/npm/agent#readme", + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^20.17.0 || >=22.9.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.25.0", + "publish": "true" + }, + "dependencies": { + "agent-base": "^7.1.0", + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.1", + "lru-cache": "^11.2.1", + "socks-proxy-agent": "^8.0.3" + }, + "devDependencies": { + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.25.0", + "minipass-fetch": "^4.0.1", + "nock": "^14.0.3", + "socksv5": "^0.0.6", + "tap": "^16.3.0" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/agent.git" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/LICENSE.md b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/LICENSE.md new file mode 100644 index 0000000000000000000000000000000000000000..5fc208ff122e08e2ca9777f80b0551617b30ba2a --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/LICENSE.md @@ -0,0 +1,20 @@ + + +ISC License + +Copyright npm, Inc. + +Permission to use, copy, modify, and/or distribute this +software for any purpose with or without fee is hereby +granted, provided that the above copyright notice and this +permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL +WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO +EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. 
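Taken together, the `@npmcli/agent` files above compose like this: `getAgent()` normalizes options, consults `getProxy()` (which honors `https_proxy`/`http_proxy`/`no_proxy` from the environment), and caches one `Agent` per distinct options key. A minimal sketch of that flow; the proxy address and timeout values here are hypothetical:

```js
const https = require('https')
const { getAgent } = require('@npmcli/agent')

const agent = getAgent('https://registry.npmjs.org', {
  proxy: 'http://proxy.internal:8080', // would normally come from https_proxy
  noProxy: 'registry.npmjs.org',       // matched segment-by-segment against the hostname
  timeouts: { connection: 5000, idle: 30000 },
})

// because noProxy matches the hostname, getProxy() returns null and a plain
// (non-proxied) Agent is constructed and cached under its options key
https.get('https://registry.npmjs.org/-/ping', { agent }, res => {
  console.log(res.statusCode)
})
```

Since the cache key is derived from the normalized options, repeated calls with the same settings reuse the same agent and its keep-alive sockets.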
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/README.md b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/README.md new file mode 100644 index 0000000000000000000000000000000000000000..bdffaf4041ab683c9b8568c310efc15694990b84 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/README.md @@ -0,0 +1,315 @@ +# @npmcli/arborist + +[![npm version](https://img.shields.io/npm/v/@npmcli/arborist.svg)](https://npm.im/@npmcli/arborist) +[![license](https://img.shields.io/npm/l/@npmcli/arborist.svg)](https://npm.im/@npmcli/arborist) +[![CI - @npmcli/arborist](https://github.com/npm/cli/actions/workflows/ci-npmcli-arborist.yml/badge.svg)](https://github.com/npm/cli/actions/workflows/ci-npmcli-arborist.yml) + +Inspect and manage `node_modules` trees. + +![a tree with the word ARBORIST superimposed on it](https://raw.githubusercontent.com/npm/cli/latest/workspaces/arborist/docs/logo.svg?sanitize=true) + +There's more documentation [in the docs +folder](https://github.com/npm/cli/tree/latest/workspaces/arborist/docs). + +## USAGE + +```js +const Arborist = require('@npmcli/arborist') + +const arb = new Arborist({ + // options object + + // where we're doing stuff. defaults to cwd. + path: '/path/to/package/root', + + // url to the default registry. defaults to npm's default registry + registry: 'https://registry.npmjs.org', + + // scopes can be mapped to a different registry + '@foo:registry': 'https://registry.foo.com/', + + // Auth can be provided in a couple of different ways. If none are + // provided, then requests are anonymous, and private packages will 404. + // Arborist doesn't do anything with these, it just passes them down + // the chain to pacote and npm-registry-fetch. + + // Safest: a bearer token provided by a registry: + // 1. an npm auth token, used with the default registry + token: 'deadbeefcafebad', + // 2. an alias for the same thing: + _authToken: 'deadbeefcafebad', + + // insecure options: + // 3. basic auth, username:password, base64 encoded + auth: 'aXNhYWNzOm5vdCBteSByZWFsIHBhc3N3b3Jk', + // 4. username and base64 encoded password + username: 'isaacs', + password: 'bm90IG15IHJlYWwgcGFzc3dvcmQ=', + + // auth configs can also be scoped to a given registry with this + // rather unusual pattern: + '//registry.foo.com:token': 'blahblahblah', + '//basic.auth.only.foo.com:_auth': 'aXNhYWNzOm5vdCBteSByZWFsIHBhc3N3b3Jk', + '//registry.foo.com:always-auth': true, +}) + +// READING + +// returns a promise. reads the actual contents of node_modules +arb.loadActual().then(tree => { + // tree is also stored at arb.actualTree +}) + +// read just what the package-lock.json/npm-shrinkwrap says +// This *also* loads the yarn.lock file, but that's only relevant +// when building the ideal tree. +arb.loadVirtual().then(tree => { + // tree is also stored at arb.virtualTree + // now arb.virtualTree is loaded + // this fails if there's no package-lock.json or package.json in the folder + // note that loading this way should only be done if there's no + // node_modules folder +}) + +// OPTIMIZING AND DESIGNING + +// build an ideal tree from the package.json and various lockfiles. +arb.buildIdealTree(options).then(() => { + // next step is to reify that ideal tree onto disk. + // options can be: + // rm: array of package names to remove at top level + // add: Array of package specifiers to add at the top level. 
Each of + // these will be resolved with pacote.manifest if the name can't be + // determined from the spec. (Eg, `github:foo/bar` vs `foo@somespec`.) + // The dep will be saved in the location where it already exists, + // (or pkg.dependencies) unless a different saveType is specified. + // saveType: Save added packages in a specific dependency set. + // - null (default) Wherever they exist already, or 'dependencies' + // - prod: definitely in 'dependencies' + // - optional: in 'optionalDependencies' + // - dev: devDependencies + // - peer: save in peerDependencies, and remove any optional flag from + // peerDependenciesMeta if one exists + // - peerOptional: save in peerDependencies, and add a + // peerDepsMeta[name].optional flag + // saveBundle: add newly added deps to the bundleDependencies list + // update: Either `true` to just go ahead and update everything, or an + // object with any or all of the following fields: + // - all: boolean. set to true to just update everything + // - names: names of packages update (like `npm update foo`) + // prune: boolean, default true. Prune extraneous nodes from the tree. + // preferDedupe: prefer to deduplicate packages if possible, rather than + // choosing a newer version of a dependency. Defaults to false, ie, + // always try to get the latest and greatest deps. + // legacyBundling: Nest every dep under the node requiring it, npm v2 style. + // No unnecessary deduplication. Default false. + + // At the end of this process, arb.idealTree is set. +}) + +// WRITING + +// Make the idealTree be the thing that's on disk +arb.reify({ + // write the lockfile(s) back to disk, and package.json with any updates + // defaults to 'true' + save: true, +}).then(() => { + // node modules has been written to match the idealTree +}) +``` + +## DATA STRUCTURES + +A `node_modules` tree is a logical graph of dependencies overlaid on a +physical tree of folders. + +A `Node` represents a package folder on disk, either at the root of the +package, or within a `node_modules` folder. The physical structure of the +folder tree is represented by the `node.parent` reference to the containing +folder, and `node.children` map of nodes within its `node_modules` +folder, where the key in the map is the name of the folder in +`node_modules`, and the value is the child node. + +A node without a parent is a top of tree. + +A `Link` represents a symbolic link to a package on disk. This can be a +symbolic link to a package folder within the current tree, or elsewhere on +disk. The `link.target` is a reference to the actual node. Links differ +from Nodes in that dependencies are resolved from the _target_ location, +rather than from the link location. + +An `Edge` represents a dependency relationship. Each node has an `edgesIn` +set, and an `edgesOut` map. Each edge has a `type` which specifies what +kind of dependency it represents: `'prod'` for regular dependencies, +`'peer'` for peerDependencies, `'dev'` for devDependencies, and +`'optional'` for optionalDependencies. `edge.from` is a reference to the +node that has the dependency, and `edge.to` is a reference to the node that +requires the dependency. + +As nodes are moved around in the tree, the graph edges are automatically +updated to point at the new module resolution targets. In other words, +`edge.from`, `edge.name`, and `edge.spec` are immutable; `edge.to` is +updated automatically when a node's parent changes. + +### class Node + +All arborist trees are `Node` objects. 
A `Node` refers +to a package folder, which may have children in `node_modules`. + +* `node.name` The name of this node's folder in `node_modules`. +* `node.parent` Physical parent node in the tree. The package in whose + `node_modules` folder this package lives. Null if node is top of tree. + + Setting `node.parent` will automatically update `node.location` and all + graph edges affected by the move. + +* `node.meta` A `Shrinkwrap` object which looks up `resolved` and + `integrity` values for all modules in this tree. Only relevant on `root` + nodes. + +* `node.children` Map of packages located in the node's `node_modules` + folder. +* `node.package` The contents of this node's `package.json` file. +* `node.path` File path to this package. If the node is a link, then this + is the path to the link, not to the link target. If the node is _not_ a + link, then this matches `node.realpath`. +* `node.realpath` The full real filepath on disk where this node lives. +* `node.location` A slash-normalized relative path from the root node to + this node's path. +* `node.isLink` Whether this represents a symlink. Always `false` for Node + objects, always `true` for Link objects. +* `node.isRoot` True if this node is a root node. (Ie, if `node.root === + node`.) +* `node.root` The root node where we are working. If not assigned to some + other value, resolves to the node itself. (Ie, the root node's `root` + property refers to itself.) +* `node.isTop` True if this node is the top of its tree (ie, has no + `parent`, false otherwise). +* `node.top` The top node in this node's tree. This will be equal to + `node.root` for simple trees, but link targets will frequently be outside + of (or nested somewhere within) a `node_modules` hierarchy, and so will + have a different `top`. +* `node.dev`, `node.optional`, `node.devOptional`, `node.peer` Indicators + as to whether this node is a dev, optional, and/or peer dependency. + These flags are relevant when pruning dependencies out of the tree or + deciding what to reify. See **Package Dependency Flags** below for + explanations. +* `node.edgesOut` Edges in the dependency graph indicating nodes that this + node depends on, which resolve its dependencies. +* `node.edgesIn` Edges in the dependency graph indicating nodes that depend + on this node. + +* `extraneous` True if this package is not required by any other for any + reason. False for top of tree. + +* `node.resolve(name)` Identify the node that will be returned when code + in this package runs `require(name)` + +* `node.errors` Array of errors encountered while parsing package.json or + version specifiers. + +### class Link + +Link objects represent a symbolic link within the `node_modules` folder. +They have most of the same properties and methods as `Node` objects, with a +few differences. + +* `link.target` A Node object representing the package that the link + references. If this is a Node already present within the tree, then it + will be the same object. If it's outside of the tree, then it will be + treated as the top of its own tree. +* `link.isLink` Always true. +* `link.children` This is always an empty map, since links don't have their + own children directly. + +### class Edge + +Edge objects represent a dependency relationship from a package node to the +point in the tree where the dependency will be loaded. As nodes are moved +within the tree, Edges automatically update to point to the appropriate +location.
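Before the field-by-field list below, here is a minimal sketch of how these structures are typically traversed end to end. Assumptions flagged: `./app` is a hypothetical project path, and `tree.inventory` (the flat index of all nodes, whose `.size` the bin scripts at the end of this diff also use) is assumed to expose a Map-like `values()` iterator:

```js
const Arborist = require('@npmcli/arborist')

const arb = new Arborist({ path: './app' })
arb.loadActual().then(tree => {
  // walk every node, and report any dependency edge that is not satisfied
  for (const node of tree.inventory.values()) {
    for (const edge of node.edgesOut.values()) {
      if (!edge.valid) {
        console.log(`${edge.from.location} -> ${edge.name}@${edge.spec}: ${edge.error}`)
      }
    }
  }
})
```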
+### class Edge
+
+Edge objects represent a dependency relationship from a package node to the
+point in the tree where the dependency will be loaded. As nodes are moved
+within the tree, Edges automatically update to point to the appropriate
+location.
+
+* `new Edge({ from, type, name, spec })` Creates a new edge with the
+  specified fields. After instantiation, none of the fields can be
+  changed directly.
+* `edge.from` The node that has the dependency.
+* `edge.type` The type of dependency. One of `'prod'`, `'dev'`, `'peer'`,
+  or `'optional'`.
+* `edge.name` The name of the dependency. Ie, the key in the
+  relevant `package.json` dependencies object.
+* `edge.spec` The specifier that is required. This can be a version,
+  range, tag name, git url, or tarball URL. Any specifier allowed by npm
+  is supported.
+* `edge.to` Automatically set to the node in the tree that matches the
+  `name` field.
+* `edge.valid` True if `edge.to` satisfies the specifier.
+* `edge.error` A string indicating the type of error if there is a problem,
+  or `null` if it's valid. Values, in order of precedence:
+  * `DETACHED` Indicates that the edge has been detached from its
+    `edge.from` node, typically because a new edge was created when a
+    dependency specifier was modified.
+  * `MISSING` Indicates that the dependency is unmet. Note that this is
+    _not_ set for unmet dependencies of the `optional` type.
+  * `PEER LOCAL` Indicates that a `peerDependency` is found in the
+    node's local `node_modules` folder, and the node is not the top of
+    the tree. This violates the `peerDependency` contract, because it
+    means that the dependency is not a peer.
+  * `INVALID` Indicates that the dependency does not satisfy `edge.spec`.
+* `edge.reload()` Re-resolve to find the appropriate value for `edge.to`.
+  Called automatically from the `Node` class when the tree is mutated.
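+
+A minimal sketch of using edges to report unmet or invalid dependencies
+(again assuming a loaded `tree`):
+
+```js
+const findProblems = tree => {
+  const problems = []
+  for (const node of tree.inventory.values()) {
+    for (const edge of node.edgesOut.values()) {
+      if (!edge.valid) {
+        // edge.error is DETACHED, MISSING, PEER LOCAL, or INVALID
+        problems.push(`${node.location}: ${edge.name}@${edge.spec} ${edge.error}`)
+      }
+    }
+  }
+  return problems
+}
+```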
+### Package Dependency Flags
+
+The dependency type of a node can be determined efficiently by looking at
+the `dev`, `optional`, and `devOptional` flags on the node object. These
+are updated by arborist when necessary whenever the tree is modified in
+such a way that the dependency graph can change, and are relevant when
+pruning nodes from the tree.
+
+| extraneous | peer | dev | optional | devOptional | meaning | prune? |
+|:----------:|:----:|:---:|:--------:|:----------------:|:-------------------------------------------------------------------------------------------------|:-------------------------------------------------------|
+| | | | | | production dep | never |
+| X | N/A | N/A | N/A | N/A | nothing depends on this, it is trash | always |
+| | | X | | X<br>not in lock | devDependency, or only depended<br>on by devDependencies | if pruning dev |
+| | | | X | X<br>not in lock | optionalDependency, or only depended<br>on by optionalDeps | if pruning optional |
+| | | X | X | X<br>not in lock | Optional dependency of dep(s) in the<br>dev hierarchy | if pruning EITHER<br>dev OR optional |
+| | | | | X<br>in lock | BOTH a non-optional dep within the<br>dev hierarchy, AND a dep within<br>the optional hierarchy | if pruning BOTH<br>dev AND optional |
+| | X | | | | peer dependency, or only depended<br>on by peer dependencies | if pruning peers |
+| | X | X | | X<br>not in lock | peer dependency of dev node hierarchy | if pruning peer OR<br>dev deps |
+| | X | | X | X<br>not in lock | peer dependency of optional nodes, or<br>peerOptional dep | if pruning peer OR<br>optional deps |
+| | X | X | X | X<br>not in lock | peer optional deps of the dev dep hierarchy | if pruning peer OR<br>optional OR dev |
+| | X | | | X<br>in lock | BOTH a non-optional peer dep within the<br>dev hierarchy, AND a peer optional dep | if pruning peer deps OR:<br>BOTH optional AND dev deps |
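+
+Read as code, the non-peer rows of the table amount to a pruning predicate
+roughly like the following sketch (ignoring the `peer` variants for
+brevity):
+
+```js
+const wouldPrune = (node, omit = new Set()) => {
+  if (node.extraneous) {
+    return true // trash is always pruned
+  }
+  if (node.dev && omit.has('dev')) {
+    return true // dev dep, or optional dep of the dev hierarchy
+  }
+  if (node.optional && omit.has('optional')) {
+    return true
+  }
+  // in BOTH the dev and optional hierarchies: both must be omitted
+  if (node.devOptional && omit.has('dev') && omit.has('optional')) {
+    return true
+  }
+  return false
+}
+```
+
+For example, `wouldPrune(node, new Set(['dev']))` matches exactly the rows
+marked "if pruning dev" above.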
+
+* If none of these flags are set, then the node is required by the
+  dependency and/or peerDependency hierarchy. It should not be pruned.
+* If _both_ `node.dev` and `node.optional` are set, then the node is an
+  optional dependency of one of the packages in the devDependency
+  hierarchy. It should be pruned if _either_ dev or optional deps are
+  being removed.
+* If `node.dev` is set, but `node.optional` is not, then the node is
+  required in the devDependency hierarchy. It should be pruned if dev
+  dependencies are being removed.
+* If `node.optional` is set, but `node.dev` is not, then the node is
+  required in the optionalDependency hierarchy. It should be pruned if
+  optional dependencies are being removed.
+* If `node.devOptional` is set, then the node is a (non-optional)
+  dependency within the devDependency hierarchy, _and_ a dependency
+  within the `optionalDependency` hierarchy. It should be pruned if
+  _both_ dev and optional dependencies are being removed.
+* If `node.peer` is set, then all the same semantics apply as above, except
+  that the dep is brought in by a peer dep at some point, rather than a
+  normal non-peer dependency.
+
+Note: `devOptional` is only set in the shrinkwrap/package-lock file if
+_neither_ `dev` nor `optional` are set, as it would be redundant.
+
+## BIN
+
+Arborist ships with a CLI that can be used to run arborist-specific
+commands outside of the context of the npm CLI. This script is currently
+not part of the public API and is subject to breaking changes outside of
+major version bumps.
+
+To see the usage, run:
+
+```
+npx @npmcli/arborist --help
+```
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/actual.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/actual.js
new file mode 100644
index 0000000000000000000000000000000000000000..866b2cd82fa4734cbc2d458c227caa14668170c5
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/actual.js
@@ -0,0 +1,19 @@
+const Arborist = require('../')
+
+const printTree = require('./lib/print-tree.js')
+
+module.exports = (options, time) => new Arborist(options)
+  .loadActual(options)
+  .then(time)
+  .then(async ({ timing, result: tree }) => {
+    printTree(tree)
+    if (options.save) {
+      await tree.meta.save()
+    }
+    if (options.saveHidden) {
+      tree.meta.hiddenLockfile = true
+      tree.meta.filename = options.path + '/node_modules/.package-lock.json'
+      await tree.meta.save()
+    }
+    return `read ${tree.inventory.size} deps in ${timing.ms}`
+  })
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/audit.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/audit.js
new file mode 100644
index 0000000000000000000000000000000000000000..0e32833d4aa3a1b364641c39fbc458438eef84ea
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/audit.js
@@ -0,0 +1,51 @@
+const Arborist = require('../')
+
+const printTree = require('./lib/print-tree.js')
+const log = require('./lib/logging.js')
+
+const Vuln = require('../lib/vuln.js')
+const printReport = report => {
+  for (const vuln of report.values()) {
+    log.info(printVuln(vuln))
+  }
+  if (report.topVulns.size) {
+    log.info('\n# top-level vulnerabilities')
+    for (const vuln of report.topVulns.values()) {
+      log.info(printVuln(vuln))
+    }
+  }
+}
+
+const 
printVuln = vuln => { + return { + __proto__: { constructor: Vuln }, + name: vuln.name, + issues: [...vuln.advisories].map(a => printAdvisory(a)), + range: vuln.simpleRange, + nodes: [...vuln.nodes].map(node => `${node.name} ${node.location || '#ROOT'}`), + ...(vuln.topNodes.size === 0 ? {} : { + topNodes: [...vuln.topNodes].map(node => `${node.location || '#ROOT'}`), + }), + } +} + +const printAdvisory = a => `${a.title}${a.url ? ' ' + a.url : ''}` + +module.exports = (options, time) => { + const arb = new Arborist(options) + return arb + .audit(options) + .then(time) + .then(async ({ timing, result: tree }) => { + if (options.fix) { + printTree(tree) + } + printReport(arb.auditReport) + if (tree.meta && options.save) { + await tree.meta.save() + } + return options.fix + ? `resolved ${tree.inventory.size} deps in ${timing.seconds}` + : `done in ${timing.seconds}` + }) +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/funding.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/funding.js new file mode 100644 index 0000000000000000000000000000000000000000..cf25976d94ca69a2d7c9d7042cc8d44b9b18cc39 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/funding.js @@ -0,0 +1,38 @@ +const Arborist = require('../') + +const log = require('./lib/logging.js') + +module.exports = (options, time) => { + const query = options._.shift() + const a = new Arborist(options) + return a + .loadVirtual() + .then(tree => { + // only load the actual tree if the virtual one doesn't have modern metadata + if (!tree.meta || !(tree.meta.originalLockfileVersion >= 2)) { + log.error('old metadata, load actual') + throw 'load actual' + } else { + log.error('meta ok, return virtual tree') + return tree + } + }) + .catch(() => a.loadActual()) + .then(time) + .then(({ timing, result: tree }) => { + if (!query) { + for (const node of tree.inventory.values()) { + if (node.package.funding) { + log.info(node.name, node.location, node.package.funding) + } + } + } else { + for (const node of tree.inventory.query('name', query)) { + if (node.package.funding) { + log.info(node.name, node.location, node.package.funding) + } + } + } + return `read ${tree.inventory.size} deps in ${timing.ms}` + }) +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/ideal.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/ideal.js new file mode 100644 index 0000000000000000000000000000000000000000..1dd206e81ff1bdcaf1f83ef481bec923eb1f1bd9 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/ideal.js @@ -0,0 +1,14 @@ +const Arborist = require('../') + +const printTree = require('./lib/print-tree.js') + +module.exports = (options, time) => new Arborist(options) + .buildIdealTree(options) + .then(time) + .then(async ({ timing, result: tree }) => { + printTree(tree) + if (tree.meta && options.save) { + await tree.meta.save() + } + return `resolved ${tree.inventory.size} deps in ${timing.seconds}` + }) diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/index.js new file mode 100644 index 0000000000000000000000000000000000000000..0b559687ac06affcf2ec86dee563946315a5c451 --- /dev/null 
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/index.js
@@ -0,0 +1,112 @@
+#!/usr/bin/env node
+
+const fs = require('node:fs')
+const path = require('node:path')
+const { time } = require('proc-log')
+
+const { bin, arb: options } = require('./lib/options')
+const version = require('../package.json').version
+
+const usage = (message = '') => `Arborist - the npm tree doctor
+
+Version: ${version}
+${message && '\n' + message + '\n'}
+# USAGE
+  arborist <cmd> [path] [options...]
+
+# COMMANDS
+
+  * reify: reify ideal tree to node_modules (install, update, rm, ...)
+  * prune: prune the ideal tree and reify (like npm prune)
+  * ideal: generate and print the ideal tree
+  * actual: read and print the actual tree in node_modules
+  * virtual: read and print the virtual tree in the local shrinkwrap file
+  * shrinkwrap: load a local shrinkwrap and print its data
+  * audit: perform a security audit on project dependencies
+  * funding: query funding information in the local package tree. A second
+    positional argument after the path name can limit to a package name.
+  * license: query license information in the local package tree. A second
+    positional argument after the path name can limit to a license type.
+  * help: print this text
+  * version: print the version
+
+# OPTIONS
+
+  Most npm options are supported, but in camelCase rather than css-case. For
+  example, instead of '--dry-run', use '--dryRun'.
+
+  Additionally:
+
+  * --loglevel=warn|--quiet will suppress the printing of package trees
+  * --logfile <file|bool> will output logs to a file
+  * --timing will show timing information
+  * Instead of 'npm install <pkg>', use 'arborist reify --add=<pkg>'.
+    The '--add=<pkg>' option can be specified multiple times.
+  * Instead of 'npm rm <pkg>', use 'arborist reify --rm=<pkg>'.
+    The '--rm=<pkg>' option can be specified multiple times.
+  * Instead of 'npm update', use 'arborist reify --update-all'.
+  * 'npm audit fix' is 'arborist audit --fix'
+`
+
+const commands = {
+  version: () => console.log(version),
+  help: () => console.log(usage()),
+  exit: () => {
+    process.exitCode = 1
+    console.error(
+      usage(`Error: command '${bin.command}' does not exist.`)
+    )
+  },
+}
+
+const commandFiles = fs.readdirSync(__dirname).filter((f) => path.extname(f) === '.js' && f !== __filename)
+
+for (const file of commandFiles) {
+  const command = require(`./${file}`)
+  const name = path.basename(file, '.js')
+  const totalTime = `bin:${name}:init`
+  const scriptTime = `bin:${name}:script`
+
+  commands[name] = () => {
+    const timers = require('./lib/timers')
+    const log = require('./lib/logging')
+
+    log.info(name, options)
+
+    const timeEnd = time.start(totalTime)
+    const scriptEnd = time.start(scriptTime)
+
+    return command(options, (result) => {
+      scriptEnd()
+      return {
+        result,
+        timing: {
+          seconds: `${timers.get(scriptTime) / 1e9}s`,
+          ms: `${timers.get(scriptTime) / 1e6}ms`,
+        },
+      }
+    })
+      .then((result) => {
+        log.info(result)
+        return result
+      })
+      .catch((err) => {
+        process.exitCode = 1
+        log.error(err)
+        return err
+      })
+      .then((r) => {
+        timeEnd()
+        if (bin.loglevel !== 'silent') {
+          console[process.exitCode ? 
'error' : 'log'](r) + } + return r + }) + } +} + +if (commands[bin.command]) { + commands[bin.command]() +} else { + commands.exit() +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/lib/logging.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/lib/logging.js new file mode 100644 index 0000000000000000000000000000000000000000..431eea3ec404aa4bf59b4453c30993ef79303003 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/lib/logging.js @@ -0,0 +1,77 @@ +const { log } = require('proc-log') +const fs = require('node:fs') +const { dirname } = require('node:path') +const os = require('node:os') +const { inspect, format } = require('node:util') + +const { bin: options } = require('./options.js') + +// add a meta method to proc-log for passing optional +// metadata through to log handlers +const META = Symbol('meta') +const parseArgs = (...args) => { + const { [META]: isMeta } = args[args.length - 1] || {} + return isMeta + ? [args[args.length - 1], ...args.slice(0, args.length - 1)] + : [{}, ...args] +} +log.meta = (meta = {}) => ({ [META]: true, ...meta }) + +const levels = new Map([ + 'silly', + 'verbose', + 'info', + 'http', + 'notice', + 'warn', + 'error', + 'silent', +].map((level, index) => [level, index])) + +const addLogListener = (write, { eol = os.EOL, loglevel = 'silly', colors = false } = {}) => { + const levelIndex = levels.get(loglevel) + + const magenta = m => colors ? `\x1B[35m${m}\x1B[39m` : m + const dim = m => colors ? `\x1B[2m${m}\x1B[22m` : m + const red = m => colors ? `\x1B[31m${m}\x1B[39m` : m + + const formatter = (level, ...args) => { + const depth = level === 'error' && args[0] && args[0].code === 'ERESOLVE' ? Infinity : 10 + + if (level === 'info' && args[0] === 'timeEnd') { + args[1] = dim(args[1]) + } else if (level === 'error' && args[0] === 'timeError') { + args[1] = red(args[1]) + } + + const messages = args.map(a => typeof a === 'string' ? 
a : inspect(a, { depth, colors })) + const pref = `${process.pid} ${magenta(level)} ` + + return pref + format(...messages).trim().split('\n').join(`${eol}${pref}`) + eol + } + + process.on('log', (...args) => { + const [meta, level, ...logArgs] = parseArgs(...args) + + if (levelIndex <= levels.get(level) || meta.force) { + write(formatter(level, ...logArgs)) + } + }) +} + +if (options.loglevel !== 'silent') { + addLogListener((v) => process.stderr.write(v), { + eol: '\n', + colors: options.colors, + loglevel: options.loglevel, + }) +} + +if (options.logfile) { + log.silly('logfile', options.logfile) + fs.mkdirSync(dirname(options.logfile), { recursive: true }) + const fd = fs.openSync(options.logfile, 'a') + addLogListener((str) => fs.writeSync(fd, str)) +} + +module.exports = log diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/lib/options.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/lib/options.js new file mode 100644 index 0000000000000000000000000000000000000000..419c81850071bed989315a4ec969729850a23ce9 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/lib/options.js @@ -0,0 +1,123 @@ +const nopt = require('nopt') +const path = require('node:path') + +const has = (o, k) => Object.prototype.hasOwnProperty.call(o, k) + +const cleanPath = (val) => { + const k = Symbol('key') + const data = {} + nopt.typeDefs.path.validate(data, k, val) + return data[k] +} + +const parse = (...noptArgs) => { + const binOnlyOpts = { + command: String, + loglevel: String, + colors: Boolean, + timing: ['always', Boolean], + logfile: String, + } + + const arbOpts = { + add: Array, + rm: Array, + omit: Array, + update: Array, + workspaces: Array, + global: Boolean, + force: Boolean, + 'global-style': Boolean, + 'prefer-dedupe': Boolean, + 'legacy-peer-deps': Boolean, + 'update-all': Boolean, + before: Date, + path: path, + cache: path, + ...binOnlyOpts, + } + + const short = { + quiet: ['--loglevel', 'warn'], + logs: ['--logfile', 'true'], + w: '--workspaces', + g: '--global', + f: '--force', + } + + const defaults = { + // key order is important for command and path + // since they shift positional args + // command is 1st, path is 2nd + command: (o) => o.argv.remain.shift(), + path: (o) => cleanPath(o.argv.remain.shift() || '.'), + colors: has(process.env, 'NO_COLOR') ? false : !!process.stderr.isTTY, + loglevel: 'silly', + timing: (o) => o.loglevel === 'silly', + cache: `${process.env.HOME}/.npm/_cacache`, + } + + const derived = [ + // making update either `all` or an array of names but not both + ({ updateAll: all, update: names, ...o }) => { + if (all || names) { + o.update = all != null ? 
{ all } : { names } + } + return o + }, + ({ logfile, ...o }) => { + // logfile is parsed as a string so if its true or set but empty + // then set the default logfile + if (logfile === 'true' || logfile === '') { + logfile = `arb-log-${new Date().toISOString().replace(/[.:]/g, '_')}.log` + } + // then parse it the same as nopt parses other paths + if (logfile) { + o.logfile = cleanPath(logfile) + } + return o + }, + ] + + const transforms = [ + // Camelcase all top level keys + (o) => { + const entries = Object.entries(o).map(([k, v]) => [ + k.replace(/-./g, s => s[1].toUpperCase()), + v, + ]) + return Object.fromEntries(entries) + }, + // Set defaults on unset keys + (o) => { + for (const [k, v] of Object.entries(defaults)) { + if (!has(o, k)) { + o[k] = typeof v === 'function' ? v(o) : v + } + } + return o + }, + // Set/unset derived values + ...derived.map((derive) => (o) => derive(o) || o), + // Separate bin and arborist options + ({ argv: { remain: _ }, ...o }) => { + const bin = { _ } + for (const k of Object.keys(binOnlyOpts)) { + if (has(o, k)) { + bin[k] = o[k] + delete o[k] + } + } + return { bin, arb: o } + }, + ] + + let options = nopt(arbOpts, short, ...noptArgs) + for (const t of transforms) { + options = t(options) + } + + return options +} + +module.exports = parse() diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/lib/print-tree.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/lib/print-tree.js new file mode 100644 index 0000000000000000000000000000000000000000..a110e6bcb8f030df44af8111717a5a977bada6ae --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/lib/print-tree.js @@ -0,0 +1,4 @@ +const { inspect } = require('node:util') +const log = require('./logging.js') + +module.exports = tree => log.info(inspect(tree.toJSON(), { depth: Infinity })) diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/lib/timers.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/lib/timers.js new file mode 100644 index 0000000000000000000000000000000000000000..a7ec534f5c5a79718e25593c7fe4c7caa103c436 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/lib/timers.js @@ -0,0 +1,33 @@ +const { bin: options } = require('./options.js') +const log = require('./logging.js') + +const timers = new Map() +const finished = new Map() + +process.on('time', (level, name) => { + if (level === 'start') { + if (timers.has(name)) { + throw new Error('conflicting timer! ' + name) + } + timers.set(name, process.hrtime.bigint()) + } else if (level === 'end') { + if (!timers.has(name)) { + throw new Error('timer not started! 
' + name) + } + const elapsed = Number(process.hrtime.bigint() - timers.get(name)) + timers.delete(name) + finished.set(name, elapsed) + if (options.timing) { + log.info('timeEnd', `${name} ${elapsed / 1e9}s`, log.meta({ force: options.timing === 'always' })) + } + } +}) + +process.on('exit', () => { + for (const name of timers.keys()) { + log.error('timeError', 'Dangling timer:', name) + process.exitCode = 1 + } +}) + +module.exports = finished diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/license.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/license.js new file mode 100644 index 0000000000000000000000000000000000000000..77d5796793a46189f990306a22fbde065735e9b8 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/license.js @@ -0,0 +1,48 @@ +const localeCompare = require('@isaacs/string-locale-compare')('en') +const Arborist = require('../') +const log = require('./lib/logging.js') + +module.exports = (options, time) => { + const query = options._.shift() + const a = new Arborist(options) + return a + .loadVirtual() + .then(tree => { + // only load the actual tree if the virtual one doesn't have modern metadata + if (!tree.meta || !(tree.meta.originalLockfileVersion >= 2)) { + throw 'load actual' + } else { + return tree + } + }).catch((er) => { + log.error('loading actual tree', er) + return a.loadActual() + }) + .then(time) + .then(({ result: tree }) => { + const output = [] + if (!query) { + const set = [] + for (const license of tree.inventory.query('license')) { + set.push([tree.inventory.query('license', license).size, license]) + } + + for (const [count, license] of set.sort((a, b) => + a[1] && b[1] ? b[0] - a[0] || localeCompare(a[1], b[1]) + : a[1] ? -1 + : b[1] ? 1 + : 0)) { + output.push(`${count} ${license}`) + log.info(count, license) + } + } else { + for (const node of tree.inventory.query('license', query === 'undefined' ? 
undefined : query)) { + const msg = `${node.name} ${node.location} ${node.package.description || ''}` + output.push(msg) + log.info(msg) + } + } + + return output.join('\n') + }) +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/prune.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/prune.js new file mode 100644 index 0000000000000000000000000000000000000000..3c52bc13af1e1b4f14bb1e2007a10f4f972300f1 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/prune.js @@ -0,0 +1,48 @@ +const Arborist = require('../') + +const printTree = require('./lib/print-tree.js') +const log = require('./lib/logging.js') + +const printDiff = diff => { + const { depth } = require('treeverse') + depth({ + tree: diff, + visit: d => { + if (d.location === '') { + return + } + switch (d.action) { + case 'REMOVE': + log.info('REMOVE', d.actual.location) + break + case 'ADD': + log.info('ADD', d.ideal.location, d.ideal.resolved) + break + case 'CHANGE': + log.info('CHANGE', d.actual.location, { + from: d.actual.resolved, + to: d.ideal.resolved, + }) + break + } + }, + getChildren: d => d.children, + }) +} + +module.exports = (options, time) => { + const arb = new Arborist(options) + return arb + .prune(options) + .then(time) + .then(async ({ timing, result: tree }) => { + printTree(tree) + if (options.dryRun) { + printDiff(arb.diff) + } + if (tree.meta && options.save) { + await tree.meta.save() + } + return `resolved ${tree.inventory.size} deps in ${timing.seconds}` + }) +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/reify.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/reify.js new file mode 100644 index 0000000000000000000000000000000000000000..3f3aafe8ab9bb63ffb4de675060103179b9c8dc6 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/reify.js @@ -0,0 +1,48 @@ +const Arborist = require('../') + +const printTree = require('./lib/print-tree.js') +const log = require('./lib/logging.js') + +const printDiff = diff => { + const { depth } = require('treeverse') + depth({ + tree: diff, + visit: d => { + if (d.location === '') { + return + } + switch (d.action) { + case 'REMOVE': + log.info('REMOVE', d.actual.location) + break + case 'ADD': + log.info('ADD', d.ideal.location, d.ideal.resolved) + break + case 'CHANGE': + log.info('CHANGE', d.actual.location, { + from: d.actual.resolved, + to: d.ideal.resolved, + }) + break + } + }, + getChildren: d => d.children, + }) +} + +module.exports = (options, time) => { + const arb = new Arborist(options) + return arb + .reify(options) + .then(time) + .then(async ({ timing, result: tree }) => { + printTree(tree) + if (options.dryRun) { + printDiff(arb.diff) + } + if (tree.meta && options.save) { + await tree.meta.save() + } + return `resolved ${tree.inventory.size} deps in ${timing.seconds}` + }) +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/shrinkwrap.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/shrinkwrap.js new file mode 100644 index 0000000000000000000000000000000000000000..56603224e99882229cd8364e6d301e1386234af9 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/shrinkwrap.js 
@@ -0,0 +1,7 @@ +const Shrinkwrap = require('../lib/shrinkwrap.js') + +module.exports = (options, time) => Shrinkwrap + .load(options) + .then((s) => s.commit()) + .then(time) + .then(({ result: s }) => JSON.stringify(s, 0, 2)) diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/virtual.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/virtual.js new file mode 100644 index 0000000000000000000000000000000000000000..95b1de282e6036207abc8b854dd1e50e876c9973 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/bin/virtual.js @@ -0,0 +1,14 @@ +const Arborist = require('../') + +const printTree = require('./lib/print-tree.js') + +module.exports = (options, time) => new Arborist(options) + .loadVirtual() + .then(time) + .then(async ({ timing, result: tree }) => { + printTree(tree) + if (options.save) { + await tree.meta.save() + } + return `read ${tree.inventory.size} deps in ${timing.ms}` + }) diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/add-rm-pkg-deps.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/add-rm-pkg-deps.js new file mode 100644 index 0000000000000000000000000000000000000000..2e30eb1de762645d367b3744ede062f2357ffa7a --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/add-rm-pkg-deps.js @@ -0,0 +1,143 @@ +// add and remove dependency specs to/from pkg manifest + +const { log } = require('proc-log') +const localeCompare = require('@isaacs/string-locale-compare')('en') + +const add = ({ pkg, add, saveBundle, saveType }) => { + for (const { name, rawSpec } of add) { + let addSaveType = saveType + // if the user does not give us a type, we infer which type(s) + // to keep based on the same order of priority we do when + // building the tree as defined in the _loadDeps method of + // the node class. 
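+    // for example: adding a name that already appears in devDependencies,
+    // with no explicit saveType, keeps it filed under devDependencies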
+ if (!addSaveType) { + addSaveType = inferSaveType(pkg, name) + } + + if (addSaveType === 'prod') { + // a production dependency can only exist as production (rpj ensures it + // doesn't coexist w/ optional) + deleteSubKey(pkg, 'devDependencies', name, 'dependencies') + deleteSubKey(pkg, 'peerDependencies', name, 'dependencies') + } else if (addSaveType === 'dev') { + // a dev dependency may co-exist as peer, or optional, but not production + deleteSubKey(pkg, 'dependencies', name, 'devDependencies') + } else if (addSaveType === 'optional') { + // an optional dependency may co-exist as dev (rpj ensures it doesn't + // coexist w/ prod) + deleteSubKey(pkg, 'peerDependencies', name, 'optionalDependencies') + } else { // peer or peerOptional is all that's left + // a peer dependency may coexist as dev + deleteSubKey(pkg, 'dependencies', name, 'peerDependencies') + deleteSubKey(pkg, 'optionalDependencies', name, 'peerDependencies') + } + + const depType = saveTypeMap.get(addSaveType) + + pkg[depType] = pkg[depType] || {} + if (rawSpec !== '*' || pkg[depType][name] === undefined) { + pkg[depType][name] = rawSpec + } + if (addSaveType === 'optional') { + // Affordance for previous npm versions that require this behaviour + pkg.dependencies = pkg.dependencies || {} + pkg.dependencies[name] = pkg.optionalDependencies[name] + } + + if (addSaveType === 'peer' || addSaveType === 'peerOptional') { + const pdm = pkg.peerDependenciesMeta || {} + if (addSaveType === 'peer' && pdm[name] && pdm[name].optional) { + pdm[name].optional = false + } else if (addSaveType === 'peerOptional') { + pdm[name] = pdm[name] || {} + pdm[name].optional = true + pkg.peerDependenciesMeta = pdm + } + // peerDeps are often also a devDep, so that they can be tested when + // using package managers that don't auto-install peer deps + if (pkg.devDependencies && pkg.devDependencies[name] !== undefined) { + pkg.devDependencies[name] = pkg.peerDependencies[name] + } + } + + if (saveBundle && addSaveType !== 'peer' && addSaveType !== 'peerOptional') { + // keep it sorted, keep it unique + const bd = new Set(pkg.bundleDependencies || []) + bd.add(name) + pkg.bundleDependencies = [...bd].sort(localeCompare) + } + } + + return pkg +} + +// Canonical source of both the map between saveType and where it correlates to +// in the package, and the names of all our dependencies attributes +const saveTypeMap = new Map([ + ['dev', 'devDependencies'], + ['optional', 'optionalDependencies'], + ['prod', 'dependencies'], + ['peerOptional', 'peerDependencies'], + ['peer', 'peerDependencies'], +]) + +// Finds where the package is already in the spec and infers saveType from that +const inferSaveType = (pkg, name) => { + for (const saveType of saveTypeMap.keys()) { + if (hasSubKey(pkg, saveTypeMap.get(saveType), name)) { + if ( + saveType === 'peerOptional' && + (!hasSubKey(pkg, 'peerDependenciesMeta', name) || + !pkg.peerDependenciesMeta[name].optional) + ) { + return 'peer' + } + return saveType + } + } + return 'prod' +} + +const hasSubKey = (pkg, depType, name) => { + return pkg[depType] && Object.prototype.hasOwnProperty.call(pkg[depType], name) +} + +// Removes a subkey and warns about it if it's being replaced +const deleteSubKey = (pkg, depType, name, replacedBy) => { + if (hasSubKey(pkg, depType, name)) { + if (replacedBy) { + log.warn('idealTree', `Removing ${depType}.${name} in favor of ${replacedBy}.${name}`) + } + delete pkg[depType][name] + + // clean up peerDepsMeta if we are removing something from peerDependencies + if (depType === 
'peerDependencies' && pkg.peerDependenciesMeta) { + delete pkg.peerDependenciesMeta[name] + if (!Object.keys(pkg.peerDependenciesMeta).length) { + delete pkg.peerDependenciesMeta + } + } + + if (!Object.keys(pkg[depType]).length) { + delete pkg[depType] + } + } +} + +const rm = (pkg, rm) => { + for (const depType of new Set(saveTypeMap.values())) { + for (const name of rm) { + deleteSubKey(pkg, depType, name) + } + } + if (pkg.bundleDependencies) { + pkg.bundleDependencies = pkg.bundleDependencies + .filter(name => !rm.includes(name)) + if (!pkg.bundleDependencies.length) { + delete pkg.bundleDependencies + } + } + return pkg +} + +module.exports = { add, rm, saveTypeMap, hasSubKey } diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js new file mode 100644 index 0000000000000000000000000000000000000000..3a066d9b6d336fdbe6ef279ca4d4712148c367af --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js @@ -0,0 +1,1607 @@ +// mixin implementing the buildIdealTree method +const localeCompare = require('@isaacs/string-locale-compare')('en') +const PackageJson = require('@npmcli/package-json') +const npa = require('npm-package-arg') +const pacote = require('pacote') +const cacache = require('cacache') +const { callLimit: promiseCallLimit } = require('promise-call-limit') +const realpath = require('../../lib/realpath.js') +const { resolve, dirname, sep } = require('node:path') +const treeCheck = require('../tree-check.js') +const { readdirScoped } = require('@npmcli/fs') +const { lstat, readlink } = require('node:fs/promises') +const { depth } = require('treeverse') +const { log, time } = require('proc-log') +const { redact } = require('@npmcli/redact') +const semver = require('semver') + +const { + OK, + REPLACE, + CONFLICT, +} = require('../can-place-dep.js') +const PlaceDep = require('../place-dep.js') + +const debug = require('../debug.js') +const fromPath = require('../from-path.js') +const calcDepFlags = require('../calc-dep-flags.js') +const Shrinkwrap = require('../shrinkwrap.js') +const { defaultLockfileVersion } = Shrinkwrap +const Node = require('../node.js') +const Link = require('../link.js') +const addRmPkgDeps = require('../add-rm-pkg-deps.js') +const optionalSet = require('../optional-set.js') +const { checkEngine, checkPlatform } = require('npm-install-checks') +const relpath = require('../relpath.js') +const resetDepFlags = require('../reset-dep-flags.js') + +// note: some of these symbols are shared so we can hit +// them with unit tests and reuse them across mixins +const _updateAll = Symbol.for('updateAll') +const _flagsSuspect = Symbol.for('flagsSuspect') +const _setWorkspaces = Symbol.for('setWorkspaces') +const _updateNames = Symbol.for('updateNames') +const _resolvedAdd = Symbol.for('resolvedAdd') +const _usePackageLock = Symbol.for('usePackageLock') +const _rpcache = Symbol.for('realpathCache') +const _stcache = Symbol.for('statCache') + +// used by Reify mixin +const _addNodeToTrashList = Symbol.for('addNodeToTrashList') + +// Push items in, pop them sorted by depth and then path +// Sorts physically shallower deps up to the front of the queue, because +// they'll affect things deeper in, then alphabetical for consistency between +// installs +class DepsQueue { + #deps = [] + #sorted = 
true + + get length () { + return this.#deps.length + } + + push (item) { + if (!this.#deps.includes(item)) { + this.#sorted = false + this.#deps.push(item) + } + } + + pop () { + if (!this.#sorted) { + this.#deps.sort((a, b) => (a.depth - b.depth) || localeCompare(a.path, b.path)) + this.#sorted = true + } + return this.#deps.shift() + } +} + +module.exports = cls => class IdealTreeBuilder extends cls { + #complete + #currentDep = null + #depsQueue = new DepsQueue() + #depsSeen = new Set() + #explicitRequests = new Set() + #follow + #installStrategy + #linkNodes = new Set() + #loadFailures = new Set() + #manifests = new Map() + #mutateTree = false + // a map of each module in a peer set to the thing that depended on + // that set of peers in the first place. Use a WeakMap so that we + // don't hold onto references for nodes that are garbage collected. + #peerSetSource = new WeakMap() + #preferDedupe = false + #prune + #strictPeerDeps + #virtualRoots = new Map() + + constructor (options) { + super(options) + + // normalize trailing slash + const registry = options.registry || 'https://registry.npmjs.org' + options.registry = this.registry = registry.replace(/\/+$/, '') + '/' + + const { + follow = false, + installStrategy = 'hoisted', + idealTree = null, + installLinks = false, + legacyPeerDeps = false, + packageLock = true, + strictPeerDeps = false, + workspaces, + global, + } = options + + this.#strictPeerDeps = !!strictPeerDeps + + this.idealTree = idealTree + this.installLinks = installLinks + this.legacyPeerDeps = legacyPeerDeps + + this[_usePackageLock] = packageLock + this.#installStrategy = global ? 'shallow' : installStrategy + this.#follow = !!follow + + if (workspaces?.length && global) { + throw new Error('Cannot operate on workspaces in global mode') + } + + this[_updateAll] = false + this[_updateNames] = [] + this[_resolvedAdd] = [] + } + + get explicitRequests () { + return new Set(this.#explicitRequests) + } + + // public method + async buildIdealTree (options = {}) { + if (this.idealTree) { + return this.idealTree + } + + // allow the user to set reify options on the ctor as well. + // XXX: deprecate separate reify() options object. + options = { ...this.options, ...options } + + // an empty array or any falsey value is the same as null + if (!options.add || options.add.length === 0) { + options.add = null + } + if (!options.rm || options.rm.length === 0) { + options.rm = null + } + + const timeEnd = time.start('idealTree') + + if (!options.add && !options.rm && !options.update && this.options.global) { + throw new Error('global requires add, rm, or update option') + } + + // first get the virtual tree, if possible. If there's a lockfile, then + // that defines the ideal tree, unless the root package.json is not + // satisfied by what the ideal tree provides. + // from there, we start adding nodes to it to satisfy the deps requested + // by the package.json in the root. 
+ + this.#parseSettings(options) + + // start tracker block + this.addTracker('idealTree') + + try { + await this.#initTree() + await this.#inflateAncientLockfile() + await this.#applyUserRequests(options) + await this.#buildDeps() + await this.#fixDepFlags() + await this.#pruneFailedOptional() + await this.#checkEngineAndPlatform() + } finally { + timeEnd() + this.finishTracker('idealTree') + } + + return treeCheck(this.idealTree) + } + + async #checkEngineAndPlatform () { + const { engineStrict, npmVersion, nodeVersion, omit = [], cpu, os, libc } = this.options + const omitSet = new Set(omit) + + for (const node of this.idealTree.inventory.values()) { + if (!node.optional && !node.shouldOmit(omitSet)) { + try { + // if devEngines is present in the root node we ignore the engines check + if (!(node.isRoot && node.package.devEngines)) { + checkEngine(node.package, npmVersion, nodeVersion, this.options.force) + } + } catch (err) { + if (engineStrict) { + throw err + } + log.warn(err.code, err.message, { + package: err.pkgid, + required: err.required, + current: err.current, + }) + } + checkPlatform(node.package, this.options.force) + } + if (node.optional && !node.inert) { + // Mark any optional packages we can't install as inert. + // We ignore the --force and --engine-strict flags. + try { + checkEngine(node.package, npmVersion, nodeVersion, false) + checkPlatform(node.package, false, { cpu, os, libc }) + } catch (error) { + const set = optionalSet(node) + for (const node of set) { + node.inert = true + } + } + } + } + } + + #parseSettings (options) { + const update = options.update === true ? { all: true } + : Array.isArray(options.update) ? { names: options.update } + : options.update || {} + + if (update.all || !Array.isArray(update.names)) { + update.names = [] + } + + this.#complete = !!options.complete + this.#preferDedupe = !!options.preferDedupe + + // validates list of update names, they must + // be dep names only, no semver ranges are supported + for (const name of update.names) { + const spec = npa(name) + const validationError = + new TypeError(`Update arguments must only contain package names, eg: + npm update ${spec.name}`) + validationError.code = 'EUPDATEARGS' + + // If they gave us anything other than a bare package name + if (spec.raw !== spec.name) { + throw validationError + } + } + this[_updateNames] = update.names + + this[_updateAll] = update.all + // we prune by default unless explicitly set to boolean false + this.#prune = options.prune !== false + + // set if we add anything, but also set here if we know we'll make + // changes and thus have to maybe prune later. + this.#mutateTree = !!( + options.add || + options.rm || + update.all || + update.names.length + ) + } + + // load the initial tree, either the virtualTree from a shrinkwrap, + // or just the root node from a package.json + async #initTree () { + const timeEnd = time.start('idealTree:init') + let root + if (this.options.global) { + root = await this.#globalRootNode() + } else { + try { + const { content: pkg } = await PackageJson.normalize(this.path) + root = await this.#rootNodeFromPackage(pkg) + } catch (err) { + if (err.code === 'EJSONPARSE') { + throw err + } + root = await this.#rootNodeFromPackage({}) + } + } + return this[_setWorkspaces](root) + // ok to not have a virtual tree. probably initial install. + // When updating all, we load the shrinkwrap, but don't bother + // to build out the full virtual tree from it, since we'll be + // reconstructing it anyway. 
+ .then(root => { + if (this.options.global) { + return root + } else if (!this[_usePackageLock] || this[_updateAll]) { + return Shrinkwrap.reset({ + path: this.path, + lockfileVersion: this.options.lockfileVersion, + resolveOptions: this.options, + }).then(meta => Object.assign(root, { meta })) + } else { + return this.loadVirtual({ root }) + .then(tree => { + this.#applyRootOverridesToWorkspaces(tree) + return tree + }) + } + }) + + // if we don't have a lockfile to go from, then start with the + // actual tree, so we only make the minimum required changes. + // don't do this for global installs or updates, because in those + // cases we don't use a lockfile anyway. + // Load on a new Arborist object, so the Nodes aren't the same, + // or else it'll get super confusing when we change them! + .then(async root => { + if ((!this[_updateAll] && !this.options.global && !root.meta.loadedFromDisk) || (this.options.global && this[_updateNames].length)) { + await new this.constructor(this.options).loadActual({ root }) + const tree = root.target + // even though we didn't load it from a package-lock.json FILE, + // we still loaded it "from disk", meaning we have to reset + // dep flags before assuming that any mutations were reflected. + if (tree.children.size) { + root.meta.loadedFromDisk = true + // set these so that we don't try to ancient lockfile reload it + root.meta.originalLockfileVersion = root.meta.lockfileVersion = this.options.lockfileVersion || defaultLockfileVersion + } + } + root.meta.inferFormattingOptions(root.package) + return root + }) + + .then(tree => { + // search the virtual tree for missing/invalid edges, if any are found add their source to + // the depsQueue so that we'll fix it later + depth({ + tree, + getChildren: (node) => { + const children = [] + for (const edge of node.edgesOut.values()) { + children.push(edge.to) + } + return children + }, + filter: node => node, + visit: node => { + for (const edge of node.edgesOut.values()) { + if (!edge.to || !edge.valid) { + this.#depsQueue.push(node) + break // no need to continue the loop after the first hit + } + } + }, + }) + // null the virtual tree, because we're about to hack away at it + // if you want another one, load another copy. + this.idealTree = tree + this.virtualTree = null + timeEnd() + return tree + }) + } + + async #globalRootNode () { + const root = await this.#rootNodeFromPackage({ dependencies: {} }) + // this is a gross kludge to handle the fact that we don't save + // metadata on the root node in global installs, because the "root" + // node is something like /usr/local/lib. + const meta = new Shrinkwrap({ + path: this.path, + lockfileVersion: this.options.lockfileVersion, + resolveOptions: this.options, + }) + meta.reset() + root.meta = meta + return root + } + + async #rootNodeFromPackage (pkg) { + // if the path doesn't exist, then we explode at this point. Note that + // this is not a problem for reify(), since it creates the root path + // before ever loading trees. + // TODO: make buildIdealTree() and loadActual handle a missing root path, + // or a symlink to a missing target, and let reify() create it as needed. + const real = await realpath(this.path, this[_rpcache], this[_stcache]) + const Cls = real === this.path ? 
Node : Link + const root = new Cls({ + path: this.path, + realpath: real, + pkg, + extraneous: false, + dev: false, + devOptional: false, + peer: false, + optional: false, + global: this.options.global, + installLinks: this.installLinks, + legacyPeerDeps: this.legacyPeerDeps, + loadOverrides: true, + }) + if (root.isLink) { + root.target = new Node({ + path: real, + realpath: real, + pkg, + extraneous: false, + dev: false, + devOptional: false, + peer: false, + optional: false, + global: this.options.global, + installLinks: this.installLinks, + legacyPeerDeps: this.legacyPeerDeps, + root, + }) + } + return root + } + + // process the add/rm requests by modifying the root node, and the + // update.names request by queueing nodes dependent on those named. + async #applyUserRequests (options) { + const timeEnd = time.start('idealTree:userRequests') + const tree = this.idealTree.target + + if (!this.options.workspaces.length) { + await this.#applyUserRequestsToNode(tree, options) + } else { + const nodes = this.workspaceNodes(tree, this.options.workspaces) + if (this.options.includeWorkspaceRoot) { + nodes.push(tree) + } + const appliedRequests = nodes.map( + node => this.#applyUserRequestsToNode(node, options) + ) + await Promise.all(appliedRequests) + } + + timeEnd() + } + + async #applyUserRequestsToNode (tree, options) { + // If we have a list of package names to update, and we know it's + // going to update them wherever they are, add any paths into those + // named nodes to the buildIdealTree queue. + if (!this.options.global && this[_updateNames].length) { + this.#queueNamedUpdates() + } + + // global updates only update the globalTop nodes, but we need to know + // that they're there, and not reinstall the world unnecessarily. + const globalExplicitUpdateNames = [] + if (this.options.global && (this[_updateAll] || this[_updateNames].length)) { + const nm = resolve(this.path, 'node_modules') + const paths = await readdirScoped(nm).catch(() => []) + for (const p of paths) { + const name = p.replace(/\\/g, '/') + const updateName = this[_updateNames].includes(name) + if (this[_updateAll] || updateName) { + if (updateName) { + globalExplicitUpdateNames.push(name) + } + const dir = resolve(nm, name) + const st = await lstat(dir) + .catch(/* istanbul ignore next */ () => null) + if (st && st.isSymbolicLink()) { + const target = await readlink(dir) + const real = resolve(dirname(dir), target) + tree.package.dependencies[name] = `file:${real}` + } else { + tree.package.dependencies[name] = '*' + } + } + } + } + + if (this.auditReport && this.auditReport.size > 0) { + await this.#queueVulnDependents(options) + } + + const { add, rm } = options + + if (rm && rm.length) { + addRmPkgDeps.rm(tree.package, rm) + for (const name of rm) { + this.#explicitRequests.add({ from: tree, name, action: 'DELETE' }) + } + } + + if (add && add.length) { + await this.#add(tree, options) + } + + // triggers a refresh of all edgesOut. this has to be done BEFORE + // adding the edges to explicitRequests, because the package setter + // resets all edgesOut. 
+ if (add && add.length || rm && rm.length || this.options.global) { + tree.package = tree.package + } + + for (const spec of this[_resolvedAdd]) { + if (spec.tree === tree) { + this.#explicitRequests.add(tree.edgesOut.get(spec.name)) + } + } + for (const name of globalExplicitUpdateNames) { + this.#explicitRequests.add(tree.edgesOut.get(name)) + } + + this.#depsQueue.push(tree) + } + + // This returns a promise because we might not have the name yet, and need to + // call pacote.manifest to find the name. + async #add (tree, { add, saveType = null, saveBundle = false }) { + // If we have a link it will need to be added relative to the target's path + const path = tree.target.path + + // get the name for each of the specs in the list. + // ie, doing `foo@bar` we just return foo but if it's a url or git, we + // don't know the name until we fetch it and look in its manifest. + await Promise.all(add.map(async rawSpec => { + // We do NOT provide the path to npa here, because user-additions need to + // be resolved relative to the tree being added to. + let spec = npa(rawSpec) + + // if it's just @'' then we reload whatever's there, or get latest + // if it's an explicit tag, we need to install that specific tag version + const isTag = spec.rawSpec && spec.type === 'tag' + + // look up the names of file/directory/git specs + if (!spec.name || isTag) { + const mani = await pacote.manifest(spec, { ...this.options }) + if (isTag) { + // translate tag to a version + spec = npa(`${mani.name}@${mani.version}`) + } + spec.name = mani.name + } + + const { name } = spec + if (spec.type === 'file') { + spec = npa(`file:${relpath(path, spec.fetchSpec)}`, path) + spec.name = name + } else if (spec.type === 'directory') { + try { + const real = await realpath(spec.fetchSpec, this[_rpcache], this[_stcache]) + spec = npa(`file:${relpath(path, real)}`, path) + spec.name = name + } catch { + // TODO: create synthetic test case to simulate realpath failure + } + } + spec.tree = tree + this[_resolvedAdd].push(spec) + })) + + // now this._resolvedAdd is a list of spec objects with names. + // find a home for each of them! + addRmPkgDeps.add({ + pkg: tree.package, + add: this[_resolvedAdd], + saveBundle, + saveType, + }) + } + + // TODO: provide a way to fix bundled deps by exposing metadata about + // what's in the bundle at each published manifest. Without that, we + // can't possibly fix bundled deps without breaking a ton of other stuff, + // and leaving the user subject to getting it overwritten later anyway. + async #queueVulnDependents (options) { + for (const vuln of this.auditReport.values()) { + for (const node of vuln.nodes) { + const bundler = node.getBundler() + + // XXX this belongs in the audit report itself, not here. + // We shouldn't even get these things here, and they shouldn't + // be printed by npm-audit-report as if they can be fixed, because + // they can't. + if (bundler) { + log.warn(`audit fix ${node.name}@${node.version}`, + `${node.location}\nis a bundled dependency of\n${ + bundler.name}@${bundler.version} at ${bundler.location}\n` + + 'It cannot be fixed automatically.\n' + + `Check for updates to the ${bundler.name} package.`) + continue + } + + for (const edge of node.edgesIn) { + this.addTracker('idealTree', edge.from.name, edge.from.location) + this.#depsQueue.push(edge.from) + } + } + } + + // note any that can't be fixed at the root level without --force + // if there's a fix, we use that. otherwise, the user has to remove it, + // find a different thing, fix the upstream, etc. 
+ // + // XXX: how to handle top nodes that aren't the root? Maybe the report + // just tells the user to cd into that directory and fix it? + if (this.options.force && this.auditReport && this.auditReport.topVulns.size) { + options.add = options.add || [] + options.rm = options.rm || [] + const nodesTouched = new Set() + for (const [name, topVuln] of this.auditReport.topVulns.entries()) { + const { + simpleRange, + topNodes, + fixAvailable, + } = topVuln + for (const node of topNodes) { + if (!node.isProjectRoot && !node.isWorkspace) { + // not something we're going to fix, sorry. have to cd into + // that directory and fix it yourself. + log.warn('audit', 'Manual fix required in linked project ' + + `at ./${node.location} for ${name}@${simpleRange}.\n` + + `'cd ./${node.location}' and run 'npm audit' for details.`) + continue + } + + if (!fixAvailable) { + log.warn('audit', `No fix available for ${name}@${simpleRange}`) + continue + } + + // name may be different if parent fixes the dep + // see Vuln fixAvailable setter + const { isSemVerMajor, version, name: fixName } = fixAvailable + const breakingMessage = isSemVerMajor + ? 'a SemVer major change' + : 'outside your stated dependency range' + log.warn('audit', `Updating ${fixName} to ${version}, ` + + `which is ${breakingMessage}.`) + + await this.#add(node, { add: [`${fixName}@${version}`] }) + nodesTouched.add(node) + } + } + for (const node of nodesTouched) { + node.package = node.package + } + } + } + + #avoidRange (name) { + if (!this.auditReport) { + return null + } + const vuln = this.auditReport.get(name) + if (!vuln) { + return null + } + return vuln.range + } + + #queueNamedUpdates () { + // ignore top nodes, since they are not loaded the same way, and + // probably have their own project associated with them. + + // for every node with one of the names on the list, we add its + // dependents to the queue to be evaluated. in buildDepStep, + // anything on the update names list will get refreshed, even if + // it isn't a problem. + + // XXX this could be faster by doing a series of inventory.query('name') + // calls rather than walking over everything in the tree. + for (const node of this.idealTree.inventory.values()) { + // XXX add any invalid edgesOut to the queue + if (this[_updateNames].includes(node.name) && + !node.isTop && !node.inDepBundle && !node.inShrinkwrap) { + for (const edge of node.edgesIn) { + this.addTracker('idealTree', edge.from.name, edge.from.location) + this.#depsQueue.push(edge.from) + } + } + } + } + + async #inflateAncientLockfile () { + const { meta, inventory } = this.idealTree + const ancient = meta.ancientLockfile + const old = meta.loadedFromDisk && !(meta.originalLockfileVersion >= 2) + + if (inventory.size === 0 || !ancient && !old) { + return + } + + // if the lockfile is from node v5 or earlier, then we'll have to reload + // all the manifests of everything we encounter. this is costly, but at + // least it's just a one-time hit. + const timeEnd = time.start('idealTree:inflate') + + // don't warn if we're not gonna actually write it back anyway. + const heading = ancient ? 'ancient lockfile' : 'old lockfile' + if (ancient || !this.options.lockfileVersion || + this.options.lockfileVersion >= defaultLockfileVersion) { + log.warn(heading, + ` +The ${meta.type} file was created with an old version of npm, +so supplemental metadata must be fetched from the registry. + +This is a one-time fix-up, please be patient... 
+`) + } + + this.addTracker('idealTree:inflate') + const queue = [] + for (const node of inventory.values()) { + if (node.isProjectRoot) { + continue + } + + // if the node's location isn't within node_modules then this is actually + // a link target, so skip it. the link node itself will be queued later. + if (!node.location.startsWith('node_modules')) { + continue + } + + queue.push(async () => { + log.silly('inflate', node.location) + const { resolved, version, path, name, location, integrity } = node + // don't try to hit the registry for linked deps + const useResolved = resolved && ( + !version || resolved.startsWith('file:') + ) + const id = useResolved ? resolved : version + const spec = npa.resolve(name, id, dirname(path)) + const t = `idealTree:inflate:${location}` + this.addTracker(t) + try { + const mani = await pacote.manifest(spec, { + ...this.options, + resolved: resolved, + integrity: integrity, + fullMetadata: false, + }) + node.package = { ...mani, _id: `${mani.name}@${mani.version}` } + } catch (er) { + const warning = `Could not fetch metadata for ${name}@${id}` + log.warn(heading, warning, er) + } + this.finishTracker(t) + }) + } + await promiseCallLimit(queue) + + // have to re-calc dep flags, because the nodes don't have edges + // until their packages get assigned, so everything looks extraneous + calcDepFlags(this.idealTree) + + // yes, yes, this isn't the "original" version, but now that it's been + // upgraded, we need to make sure we don't do the work to upgrade it + // again, since it's now as new as can be. + if (!this.options.lockfileVersion && !meta.hiddenLockfile) { + meta.originalLockfileVersion = defaultLockfileVersion + } + this.finishTracker('idealTree:inflate') + timeEnd() + } + + // at this point we have a virtual tree with the actual root node's + // package deps, which may be partly or entirely incomplete, invalid + // or extraneous. + #buildDeps () { + const timeEnd = time.start('idealTree:buildDeps') + const tree = this.idealTree.target + tree.assertRootOverrides() + this.#depsQueue.push(tree) + // XXX also push anything that depends on a node with a name + // in the override list + log.silly('idealTree', 'buildDeps') + this.addTracker('idealTree', tree.name, '') + return this.#buildDepStep().then(timeEnd) + } + + async #buildDepStep () { + // removes tracker of previous dependency in the queue + if (this.#currentDep) { + const { location, name } = this.#currentDep + time.end(`idealTree:${location || '#root'}`) + this.finishTracker('idealTree', name, location) + this.#currentDep = null + } + + if (!this.#depsQueue.length) { + return this.#resolveLinks() + } + + const node = this.#depsQueue.pop() + const bd = node.package.bundleDependencies + const hasBundle = bd && Array.isArray(bd) && bd.length + const { hasShrinkwrap } = node + + // if the node was already visited, or has since been removed from the + // tree, skip over it and process the rest of the queue. If a node has + // a shrinkwrap, also skip it, because it's going to get its deps + // satisfied by whatever's in that file anyway. + if (this.#depsSeen.has(node) || + node.root !== this.idealTree || + hasShrinkwrap && !this.#complete) { + return this.#buildDepStep() + } + + this.#depsSeen.add(node) + this.#currentDep = node + time.start(`idealTree:${node.location || '#root'}`) + + // if we're loading a _complete_ ideal tree, for a --package-lock-only + // installation for example, we have to crack open the tarball and + // look inside if it has bundle deps or shrinkwraps. 
note that this is + // not necessary during a reification, because we just update the + // ideal tree by reading bundles/shrinkwraps in place. + // Don't bother if the node is from the actual tree and hasn't + // been resolved, because we can't fetch it anyway, could be anything! + const crackOpen = this.#complete && + node !== this.idealTree && + node.resolved && + (hasBundle || hasShrinkwrap) && + !node.inert + if (crackOpen) { + const Arborist = this.constructor + const opt = { ...this.options } + await cacache.tmp.withTmp(this.cache, opt, async path => { + await pacote.extract(node.resolved, path, { + ...opt, + Arborist, + resolved: node.resolved, + integrity: node.integrity, + }) + + if (hasShrinkwrap) { + await new Arborist({ ...this.options, path }) + .loadVirtual({ root: node }) + } + + if (hasBundle) { + await new Arborist({ ...this.options, path }) + .loadActual({ root: node, ignoreMissing: true }) + } + }) + } + + // if any deps are missing or invalid, then we fetch the manifest for + // the thing we want, and build a new dep node from that. + // Then, find the ideal placement for that node. The ideal placement + // searches from the node's deps (or parent deps in the case of non-root + // peer deps), and walks up the tree until it finds the highest spot + // where it doesn't cause any conflicts. + // + // A conflict can be: + // - A node by that name already exists at that location. + // - The parent has a peer dep on that name + // - One of the node's peer deps conflicts at that location, unless the + // peer dep is met by a node at that location, which is fine. + // + // If we create a new node, then build its ideal deps as well. + // + // Note: this is the same "maximally naive" deduping tree-building + // algorithm that npm has used since v3. In a case like this: + // + // root -> (a@1, b@1||2) + // a -> (b@1) + // + // You'll end up with a tree like this: + // + // root + // +-- a@1 + // | +-- b@1 + // +-- b@2 + // + // rather than this, more deduped, but just as correct tree: + // + // root + // +-- a@1 + // +-- b@1 + // + // Another way to look at it is that this algorithm favors getting higher + // version deps at higher levels in the tree, even if that reduces + // potential deduplication. + // + // Set `preferDedupe: true` in the options to replace the shallower + // dep if allowed. + + const tasks = [] + const peerSource = this.#peerSetSource.get(node) || node + for (const edge of this.#problemEdges(node)) { + if (edge.peerConflicted) { + continue + } + + // peerSetSource is only relevant when we have a peerEntryEdge + // otherwise we're setting regular non-peer deps as if they have + // a virtual root of whatever brought in THIS node. + // so we VR the node itself if the edge is not a peer + const source = edge.peer ? peerSource : node + + const virtualRoot = this.#virtualRoot(source, true) + // reuse virtual root if we already have one, but don't + // try to do the override ahead of time, since we MAY be able + // to create a more correct tree than the virtual root could. + const vrEdge = virtualRoot && virtualRoot.edgesOut.get(edge.name) + const vrDep = vrEdge && vrEdge.valid && vrEdge.to + // only re-use the virtualRoot if it's a peer edge we're placing. + // otherwise, we end up in situations where we override peer deps that + // we could have otherwise found homes for. 
Eg: + // xy -> (x, y) + // x -> PEER(z@1) + // y -> PEER(z@2) + // If xy is a dependency, we can resolve this like: + // project + // +-- xy + // | +-- y + // | +-- z@2 + // +-- x + // +-- z@1 + // But if x and y are loaded in the same virtual root, then they will + // be forced to agree on a version of z. + const required = new Set([edge.from]) + const parent = edge.peer ? virtualRoot : null + const dep = vrDep && vrDep.satisfies(edge) ? vrDep + : await this.#nodeFromEdge(edge, parent, null, required) + + /* istanbul ignore next */ + debug(() => { + if (!dep) { + throw new Error('no dep??') + } + }) + + tasks.push({ edge, dep }) + } + + const placeDeps = tasks.sort((a, b) => localeCompare(a.edge.name, b.edge.name)) + + const promises = [] + for (const { edge, dep } of placeDeps) { + const pd = new PlaceDep({ + edge, + dep, + + auditReport: this.auditReport, + explicitRequest: this.#explicitRequests.has(edge), + force: this.options.force, + installLinks: this.installLinks, + installStrategy: this.#installStrategy, + legacyPeerDeps: this.legacyPeerDeps, + preferDedupe: this.#preferDedupe, + strictPeerDeps: this.#strictPeerDeps, + updateNames: this[_updateNames], + }) + // placing a dep is actually a tree of placing the dep itself + // and all of its peer group that aren't already met by the tree + depth({ + tree: pd, + getChildren: pd => pd.children, + visit: pd => { + const { placed, edge, canPlace: cpd } = pd + // if we didn't place anything, nothing to do here + if (!placed) { + return + } + + // we placed something, that means we changed the tree + if (placed.errors.length) { + this.#loadFailures.add(placed) + } + this.#mutateTree = true + if (cpd.canPlaceSelf === OK) { + for (const edgeIn of placed.edgesIn) { + if (edgeIn === edge) { + continue + } + const { from, valid, peerConflicted } = edgeIn + if (!peerConflicted && !valid && !this.#depsSeen.has(from)) { + this.addTracker('idealTree', from.name, from.location) + this.#depsQueue.push(edgeIn.from) + } + } + } else { + /* istanbul ignore else - should be only OK or REPLACE here */ + if (cpd.canPlaceSelf === REPLACE) { + // this may also create some invalid edges, for example if we're + // intentionally causing something to get nested which was + // previously placed in this location. + for (const edgeIn of placed.edgesIn) { + if (edgeIn === edge) { + continue + } + + const { valid, peerConflicted } = edgeIn + if (!valid && !peerConflicted) { + // if it's already been visited, we have to re-visit + // otherwise, just enqueue normally. + this.#depsSeen.delete(edgeIn.from) + this.#depsQueue.push(edgeIn.from) + } + } + } + } + + /* istanbul ignore if - should be impossible */ + if (cpd.canPlaceSelf === CONFLICT) { + debug(() => { + const er = new Error('placed with canPlaceSelf=CONFLICT') + throw Object.assign(er, { placeDep: pd }) + }) + return + } + + // lastly, also check for the missing deps of the node we placed, + // and any holes created by pruning out conflicted peer sets. + this.#depsQueue.push(placed) + for (const dep of pd.needEvaluation) { + this.#depsSeen.delete(dep) + this.#depsQueue.push(dep) + } + + // pre-fetch any problem edges, since we'll need these soon + // if it fails at this point, though, don't worry because it + // may well be an optional dep that has gone missing. it'll + // fail later anyway. + for (const e of this.#problemEdges(placed)) { + // XXX This is somehow load bearing. This makes tests that print + // the ideal tree of a tree with tarball dependencies fail. 
This + // can't be changed or removed till we figure out why + // The test is named "tarball deps with transitive tarball deps" + promises.push(() => + this.#fetchManifest(npa.resolve(e.name, e.spec, fromPath(placed, e))) + .catch(() => null) + ) + } + }, + }) + } + + for (const { to } of node.edgesOut.values()) { + if (to && to.isLink && to.target) { + this.#linkNodes.add(to) + } + } + + await promiseCallLimit(promises) + return this.#buildDepStep() + } + + // loads a node from an edge, and then loads its peer deps (and their + // peer deps, on down the line) into a virtual root parent. + async #nodeFromEdge (edge, parent_, secondEdge, required) { + // create a virtual root node with the same deps as the node that + // is requesting this one, so that we can get all the peer deps in + // a context where they're likely to be resolvable. + // Note that the virtual root will also have virtual copies of the + // targets of any child Links, so that they resolve appropriately. + const parent = parent_ || this.#virtualRoot(edge.from) + + const spec = npa.resolve(edge.name, edge.spec, edge.from.path) + const first = await this.#nodeFromSpec(edge.name, spec, parent, edge) + + // we might have a case where the parent has a peer dependency on + // `foo@*` which resolves to v2, but another dep in the set has a + // peerDependency on `foo@1`. In that case, if we force it to be v2, + // we're unnecessarily triggering an ERESOLVE. + // If we have a second edge to worry about, and it's not satisfied + // by the first node, try a second and see if that satisfies the + // original edge here. + const spec2 = secondEdge && npa.resolve( + edge.name, + secondEdge.spec, + secondEdge.from.path + ) + const second = secondEdge && !secondEdge.valid + ? await this.#nodeFromSpec(edge.name, spec2, parent, secondEdge) + : null + + // pick the second one if they're both happy with that; otherwise, first + const node = second && edge.valid ? second : first + // ensure the one we want is the one that's placed + node.parent = parent + + if (required.has(edge.from) && edge.type !== 'peerOptional' || + secondEdge && ( + required.has(secondEdge.from) && secondEdge.type !== 'peerOptional')) { + required.add(node) + } + + // keep track of the thing that caused this node to be included. + const src = parent.sourceReference + this.#peerSetSource.set(node, src) + + // do not load the peers along with the set if this is a global top pkg + // otherwise we'll be tempted to put peers as other top-level installed + // things, potentially clobbering what's there already, which is not + // what we want. the missing edges will be picked up on the next pass. + if (this.options.global && edge.from.isProjectRoot) { + return node + } + + // otherwise, we have to make sure that our peers can go along with us. 
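+ // e.g. (illustrative, with hypothetical packages): if this node is
+ // react-dom, its peerDependency on react gets resolved into the same
+ // virtual root here, so the whole peer set is placed, or rejected,
+ // as a unit.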
+
+ return this.#loadPeerSet(node, required)
+ }
+
+ #virtualRoot (node, reuse = false) {
+ if (reuse && this.#virtualRoots.has(node)) {
+ return this.#virtualRoots.get(node)
+ }
+
+ const vr = new Node({
+ path: node.realpath,
+ sourceReference: node,
+ installLinks: this.installLinks,
+ legacyPeerDeps: this.legacyPeerDeps,
+ overrides: node.overrides,
+ })
+
+ // also need to set up any targets from any link deps, so that
+ // they are properly reflected in the virtual environment
+ for (const child of node.children.values()) {
+ if (child.isLink) {
+ new Node({
+ path: child.realpath,
+ sourceReference: child.target,
+ root: vr,
+ })
+ }
+ }
+
+ this.#virtualRoots.set(node, vr)
+ return vr
+ }
+
+ #problemEdges (node) {
+ // skip over any bundled deps, they're not our problem.
+ // Note that this WILL fetch bundled meta-deps which are also dependencies
+ // but not listed as bundled deps. When reifying, we first unpack any
+ // nodes that have bundleDependencies, then do a loadActual on them, move
+ // the nodes into the ideal tree, and then prune. So, fetching those
+ // possibly-bundled meta-deps at this point doesn't cause any worse
+ // problems than a few unnecessary packument fetches.
+
+ // also skip over any nodes in the tree that failed to load, since those
+ // will crash the install later on anyway.
+ const bd = node.isProjectRoot || node.isWorkspace ? null
+ : node.package.bundleDependencies
+ const bundled = new Set(bd || [])
+
+ const problems = []
+ for (const edge of node.edgesOut.values()) {
+ // If it's included in a bundle, we take whatever is specified.
+ if (bundled.has(edge.name)) {
+ continue
+ }
+
+ // If it's already been logged as a load failure, skip it.
+ if (edge.to && this.#loadFailures.has(edge.to)) {
+ continue
+ }
+
+ // If it's shrinkwrapped, we use what the shrinkwrap wants.
+ if (edge.to && edge.to.inShrinkwrap) {
+ continue
+ }
+
+ // If the edge has no destination, that's a problem, unless
+ // it's peerOptional and not explicitly requested.
+ if (!edge.to) {
+ if (edge.type !== 'peerOptional' ||
+ this.#explicitRequests.has(edge)) {
+ problems.push(edge)
+ }
+ continue
+ }
+
+ // If the edge has an error, there's a problem.
+ if (!edge.valid) {
+ problems.push(edge)
+ continue
+ }
+
+ // If the edge is a workspace, and it's valid, leave it alone
+ if (edge.to.isWorkspace) {
+ continue
+ }
+
+ // user explicitly asked to update this package by name, problem
+ if (this[_updateNames].includes(edge.name)) {
+ problems.push(edge)
+ continue
+ }
+
+ // fixing a security vulnerability with this package, problem
+ if (this.auditReport && this.auditReport.isVulnerable(edge.to)) {
+ problems.push(edge)
+ continue
+ }
+
+ // user has explicitly asked to install this package, problem
+ if (this.#explicitRequests.has(edge)) {
+ problems.push(edge)
+ continue
+ }
+ }
+ return problems
+ }
+
+ async #fetchManifest (spec) {
+ const options = {
+ ...this.options,
+ avoid: this.#avoidRange(spec.name),
+ fullMetadata: true,
+ }
+ // get the intended spec and stored metadata from yarn.lock file,
+ // if available and valid.
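+ // a sketch of the effect, assuming a matching yarn.lock entry exists
+ // (hypothetical name): a spec like foo@^1.0.0 may come back pinned to
+ // the exact version and resolution recorded there, so the manifest
+ // fetched below lines up with what the yarn.lock already described.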
+ spec = this.idealTree.meta.checkYarnLock(spec, options) + + if (this.#manifests.has(spec.raw)) { + return this.#manifests.get(spec.raw) + } else { + log.silly('fetch manifest', spec.raw.replace(spec.rawSpec, redact(spec.rawSpec))) + const mani = await pacote.manifest(spec, options) + this.#manifests.set(spec.raw, mani) + return mani + } + } + + #nodeFromSpec (name, spec, parent, edge) { + // pacote will slap integrity on its options, so we have to clone + // the object so it doesn't get mutated. + // Don't bother to load the manifest for link deps, because the target + // might be within another package that doesn't exist yet. + const { installLinks, legacyPeerDeps } = this + const isWorkspace = this.idealTree.workspaces && this.idealTree.workspaces.has(spec.name) + + // spec is a directory, link it if: + // - it's a workspace, OR + // - it's a project-internal file: dependency (always linked), OR + // - it's external and installLinks is false + // TODO post arborist refactor, will need to check for installStrategy=linked + let isProjectInternalFileSpec = false + if (edge?.rawSpec.startsWith('file:../') || edge?.rawSpec.startsWith('file:./')) { + const targetPath = resolve(parent.realpath, edge.rawSpec.slice(5)) + const resolvedProjectRoot = resolve(this.idealTree.realpath) + // Check if the target is within the project root + isProjectInternalFileSpec = targetPath.startsWith(resolvedProjectRoot + sep) || targetPath === resolvedProjectRoot + } + + // When using --install-links, we need to handle transitive file dependencies specially + // If the parent was installed (not linked) due to --install-links, and this is a file: dep, we should also install it rather than link it + const parentWasInstalled = parent && !parent.isLink && parent.resolved?.startsWith('file:') + const isTransitiveFileDep = spec.type === 'directory' && parentWasInstalled && installLinks + + // Decide whether to link or copy the dependency + const shouldLink = (isWorkspace || isProjectInternalFileSpec || !installLinks) && !isTransitiveFileDep + if (spec.type === 'directory' && shouldLink) { + return this.#linkFromSpec(name, spec, parent, edge) + } + + // if the spec matches a workspace name, then see if the workspace node will satisfy the edge. if it does, we return the workspace node to make sure it takes priority. + if (isWorkspace) { + const existingNode = this.idealTree.edgesOut.get(spec.name).to + if (existingNode && existingNode.isWorkspace && existingNode.satisfies(edge)) { + return existingNode + } + } + + // For file: dependencies that we're installing (not linking), ensure proper resolution + if (isTransitiveFileDep && edge) { + // For transitive file deps, resolve relative to the parent's original source location + const parentOriginalPath = parent.resolved.slice(5) // Remove 'file:' prefix + const relativePath = edge.rawSpec.slice(5) // Remove 'file:' prefix + const absolutePath = resolve(parentOriginalPath, relativePath) + spec = npa.resolve(name, `file:${absolutePath}`) + } + + // spec isn't a directory, and either isn't a workspace or the workspace we have + // doesn't satisfy the edge. try to fetch a manifest and build a node from that. + return this.#fetchManifest(spec) + .then(pkg => new Node({ name, pkg, parent, installLinks, legacyPeerDeps }), error => { + error.requiredBy = edge.from.location || '.' + + // failed to load the spec, either because of enotarget or + // fetch failure of some other sort. save it so we can verify + // later that it's optional; otherwise, the error is fatal. 
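+ // e.g. a registry 404 for a package listed in optionalDependencies
+ // lands here: the error node carries the failure in its errors array
+ // and is recorded in #loadFailures, and #pruneFailedOptional later
+ // throws errors[0] only if the failed node turns out to be
+ // non-optional.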
+ const n = new Node({
+ name,
+ parent,
+ error,
+ installLinks,
+ legacyPeerDeps,
+ })
+ this.#loadFailures.add(n)
+ return n
+ })
+ }
+
+ async #linkFromSpec (name, spec, parent) {
+ const realpath = spec.fetchSpec
+ const { installLinks, legacyPeerDeps } = this
+ const { content: pkg } = await PackageJson.normalize(realpath).catch(() => {
+ return { content: {} }
+ })
+ const link = new Link({ name, parent, realpath, pkg, installLinks, legacyPeerDeps })
+ this.#linkNodes.add(link)
+ return link
+ }
+
+ // load all peer deps and meta-peer deps into the node's parent
+ // At the end of this, the node's peer-type outward edges are all
+ // resolved, and so are all of theirs, but other dep types are not.
+ // We prefer to get peer deps that meet the requiring node's dependency,
+ // if possible, since that almost certainly works (since that package was
+ // developed with this set of deps) and will typically be more restrictive.
+ // Note that the peers in the set can conflict either with each other,
+ // or with a direct dependency from the virtual root parent! In strict
+ // mode, this is always an error. In force mode, it never is, and we
+ // prefer the parent's non-peer dep over a peer dep, or the version that
+ // gets placed first. In non-strict mode, we behave strictly if the
+ // virtual root is based on the root project, and allow non-peer parent
+ // deps to override, but throw if no preference can be determined.
+ async #loadPeerSet (node, required) {
+ const peerEdges = [...node.edgesOut.values()]
+ // we typically only install non-optional peers, but we have to
+ // factor them into the peerSet so that we can avoid conflicts
+ .filter(e => e.peer && !(e.valid && e.to))
+ .sort(({ name: a }, { name: b }) => localeCompare(a, b))
+
+ for (const edge of peerEdges) {
+ // node.parent gets mutated during loop execution due to recursive #nodeFromEdge calls.
+ // When a compatible peer is found (e.g. a@1.1.0 replaces a@1.2.0), the original node loses its parent.
+ // if the node has been detached/removed from the tree like that and has
+ // no parent, there's no need to check its remaining edgesOut.
+ if (!node.parent) {
+ break
+ }
+ // already placed this one, and we're happy with it.
+ if (edge.valid && edge.to) {
+ continue
+ }
+
+ const parentEdge = node.parent.edgesOut.get(edge.name)
+ const { isProjectRoot, isWorkspace } = node.parent.sourceReference
+ const isMine = isProjectRoot || isWorkspace
+ const conflictOK = this.options.force || !isMine && !this.#strictPeerDeps
+
+ if (!edge.to) {
+ if (!parentEdge) {
+ // easy, just put the thing there
+ await this.#nodeFromEdge(edge, node.parent, null, required)
+ continue
+ } else {
+ // if the parent's edge is very broad like >=1, and the edge in
+ // question is something like 1.x, then we want to get a 1.x, not
+ // a 2.x. pass along the child edge as an advisory guideline.
+ // if the parent edge doesn't satisfy the child edge, and the
+ // child edge doesn't satisfy the parent edge, then we have
+ // a conflict. this is always a problem in strict mode, never
+ // in force mode, and a problem in non-strict mode if this isn't
+ // on behalf of our project. in all such cases, we warn at least.
+ const dep = await this.#nodeFromEdge(
+ parentEdge,
+ node.parent,
+ edge,
+ required
+ )
+
+ // hooray! that worked!
+ if (edge.valid) {
+ continue
+ }
+
+ // allow it. either we're overriding, or it's not something
+ // that will be installed by default anyway, and we'll fail when
+ // we get to the point where we need to, if we need to.
+ if (conflictOK || !required.has(dep)) {
+ edge.peerConflicted = true
+ continue
+ }
+
+ // problem
+ this.#failPeerConflict(edge, parentEdge)
+ }
+ }
+
+ // There is something present already, and we're not happy about it
+ // See if the thing we WOULD be happy with is also going to satisfy
+ // the other dependents on the current node.
+ const current = edge.to
+ const dep = await this.#nodeFromEdge(edge, null, null, required)
+ if (dep.canReplace(current)) {
+ await this.#nodeFromEdge(edge, node.parent, null, required)
+ continue
+ }
+
+ // at this point we know that there is a dep there, and
+ // we don't like it. always fail strictly, always allow forcibly or
+ // in non-strict mode if it's not our fault. don't warn here, because
+ // we are going to warn again when we place the deps, if we end up
+ // overriding for something else. If the thing that has this dep
+ // isn't also required, then there's a good chance we won't need it,
+ // so allow it for now and let it conflict if it turns out to actually
+ // be necessary for the installation.
+ if (conflictOK || !required.has(edge.from)) {
+ continue
+ }
+
+ // ok, it's the root, or we're in unforced strict mode, so this is bad
+ this.#failPeerConflict(edge, parentEdge)
+ }
+ return node
+ }
+
+ #failPeerConflict (edge, currentEdge) {
+ const expl = this.#explainPeerConflict(edge, currentEdge)
+ throw Object.assign(new Error('unable to resolve dependency tree'), expl)
+ }
+
+ #explainPeerConflict (edge, currentEdge) {
+ const node = edge.from
+ const curNode = node.resolve(edge.name)
+ const current = curNode.explain()
+ return {
+ code: 'ERESOLVE',
+ current,
+ // it SHOULD be impossible to get here without a current node in place,
+ // but this at least gives us something to report on when bugs creep
+ // into the tree handling logic.
+ currentEdge: currentEdge ? currentEdge.explain() : null,
+ edge: edge.explain(),
+ strictPeerDeps: this.#strictPeerDeps,
+ force: this.options.force,
+ }
+ }
+
+ // go through all the links in the this.#linkNodes set
+ // for each one:
+ // - if outside the root, ignore it, assume it's fine, it's not our problem
+ // - if a node in the tree already, assign the target to that node.
+ // - if a path under an existing node, then assign that as the fsParent,
+ // and add it to the _depsQueue
+ //
+ // call buildDepStep if anything was added to the queue; otherwise, we're done
+ #resolveLinks () {
+ for (const link of this.#linkNodes) {
+ this.#linkNodes.delete(link)
+
+ // link we never ended up placing, skip it
+ if (link.root !== this.idealTree) {
+ continue
+ }
+
+ const tree = this.idealTree.target
+ const external = !link.target.isDescendantOf(tree)
+
+ // outside the root, somebody else's problem, ignore it
+ if (external && !this.#follow) {
+ continue
+ }
+
+ // didn't find a parent for it or it has not been seen yet
+ // so go ahead and process it.
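+ // e.g. a link target that picked up a parent or fsParent during the
+ // walk but was never evaluated still needs a trip through the deps
+ // queue below.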
+ const unseenLink = (link.target.parent || link.target.fsParent) && + !this.#depsSeen.has(link.target) + + if (this.#follow && + !link.target.parent && + !link.target.fsParent || + unseenLink) { + this.addTracker('idealTree', link.target.name, link.target.location) + this.#depsQueue.push(link.target) + } + } + + if (this.#depsQueue.length) { + return this.#buildDepStep() + } + } + + #fixDepFlags () { + const timeEnd = time.start('idealTree:fixDepFlags') + const metaFromDisk = this.idealTree.meta.loadedFromDisk + const flagsSuspect = this[_flagsSuspect] + const mutateTree = this.#mutateTree + // if the options set prune:false, then we don't prune, but we still + // mark the extraneous items in the tree if we modified it at all. + // If we did no modifications, we just iterate over the extraneous nodes. + // if we started with an empty tree, then the dep flags are already + // all set to true, and there can be nothing extraneous, so there's + // nothing to prune, because we built it from scratch. if we didn't + // add or remove anything, then also nothing to do. + if (metaFromDisk && mutateTree) { + resetDepFlags(this.idealTree) + } + + // update all the dev/optional/etc flags in the tree + // either we started with a fresh tree, or we + // reset all the flags to find the extraneous nodes. + // + // if we started from a blank slate, or changed something, then + // the dep flags will be all set to true. + if (!metaFromDisk || mutateTree) { + calcDepFlags(this.idealTree) + } else { + // otherwise just unset all the flags on the root node + // since they will sometimes have the default value + this.idealTree.extraneous = false + this.idealTree.dev = false + this.idealTree.optional = false + this.idealTree.devOptional = false + this.idealTree.peer = false + } + + // at this point, any node marked as extraneous should be pruned. + // if we started from a shrinkwrap, and then added/removed something, + // then the tree is suspect. Prune what is marked as extraneous. + // otherwise, don't bother. 
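+ // a sketch of the cases: fresh tree -> nothing can be extraneous,
+ // skip; loaded from disk and untouched -> trust it, skip; loaded
+ // from disk and mutated (or flags suspect) -> prune below.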
+ const needPrune = metaFromDisk && (mutateTree || flagsSuspect) + if (this.#prune && needPrune) { + this.#idealTreePrune() + } + + timeEnd() + } + + #applyRootOverridesToWorkspaces (tree) { + const rootOverrides = tree.root.package.overrides || {} + + for (const node of tree.root.inventory.values()) { + if (!node.isWorkspace) { + continue + } + + for (const depName of Object.keys(rootOverrides)) { + const edge = node.edgesOut.get(depName) + const rootNode = tree.root.children.get(depName) + + // safely skip if either edge or rootNode doesn't exist yet + if (!edge || !rootNode) { + continue + } + + const resolvedRootVersion = rootNode.package.version + if (!semver.satisfies(resolvedRootVersion, edge.spec)) { + edge.detach() + node.children.delete(depName) + } + } + } + } + + #idealTreePrune () { + for (const node of this.idealTree.inventory.values()) { + // optional peer dependencies are meant to be added to the tree + // through an explicit required dependency (most commonly in the + // root package.json), at which point they won't be optional so + // any dependencies still marked as both optional and peer at + // this point can be pruned as a special kind of extraneous + if (node.extraneous || (node.peer && node.optional)) { + node.parent = null + } + } + } + + #pruneFailedOptional () { + for (const node of this.#loadFailures) { + if (!node.optional) { + throw node.errors[0] + } + + const set = optionalSet(node) + for (const node of set) { + node.inert = true + } + } + } + + async prune (options = {}) { + // allow the user to set options on the ctor as well. + // XXX: deprecate separate method options objects. + options = { ...this.options, ...options } + + await this.buildIdealTree(options) + + this.#idealTreePrune() + + if (!this.options.workspacesEnabled) { + const excludeNodes = this.excludeWorkspacesDependencySet(this.idealTree) + for (const node of this.idealTree.inventory.values()) { + if ( + node.parent !== null + && !node.isProjectRoot + && !excludeNodes.has(node) + && !node.inert + ) { + this[_addNodeToTrashList](node) + } + } + } + + return this.reify(options) + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/arborist/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/arborist/index.js new file mode 100644 index 0000000000000000000000000000000000000000..3622f957b7acde6a6090aef95eeb294b770c7f1e --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/arborist/index.js @@ -0,0 +1,280 @@ +// The arborist manages three trees: +// - actual +// - virtual +// - ideal +// +// The actual tree is what's present on disk in the node_modules tree +// and elsewhere that links may extend. +// +// The virtual tree is loaded from metadata (package.json and lock files). +// +// The ideal tree is what we WANT that actual tree to become. This starts +// with the virtual tree, and then applies the options requesting +// add/remove/update actions. +// +// To reify a tree, we calculate a diff between the ideal and actual trees, +// and then turn the actual tree into the ideal tree by taking the actions +// required. At the end of the reification process, the actualTree is +// updated to reflect the changes. +// +// Each tree has an Inventory at the root. Shrinkwrap is tracked by Arborist +// instance. It always refers to the actual tree, but is updated (and written +// to disk) on reification. 
+ +// Each of the mixin "classes" adds functionality, but are not dependent on +// constructor call order. So, we just load them in an array, and build up +// the base class, so that the overall voltron class is easier to test and +// cover, and separation of concerns can be maintained. + +const { resolve } = require('node:path') +const { homedir } = require('node:os') +const { depth } = require('treeverse') +const mapWorkspaces = require('@npmcli/map-workspaces') +const { log, time } = require('proc-log') +const { saveTypeMap } = require('../add-rm-pkg-deps.js') +const AuditReport = require('../audit-report.js') +const relpath = require('../relpath.js') +const PackumentCache = require('../packument-cache.js') + +const mixins = [ + require('../tracker.js'), + require('./build-ideal-tree.js'), + require('./load-actual.js'), + require('./load-virtual.js'), + require('./rebuild.js'), + require('./reify.js'), + require('./isolated-reifier.js'), +] + +const _setWorkspaces = Symbol.for('setWorkspaces') +const Base = mixins.reduce((a, b) => b(a), require('node:events')) + +// if it's 1, 2, or 3, set it explicitly that. +// if undefined or null, set it null +// otherwise, throw. +const lockfileVersion = lfv => { + if (lfv === 1 || lfv === 2 || lfv === 3) { + return lfv + } + + if (lfv === undefined || lfv === null) { + return null + } + + throw new TypeError('Invalid lockfileVersion config: ' + lfv) +} + +class Arborist extends Base { + constructor (options = {}) { + const timeEnd = time.start('arborist:ctor') + super(options) + this.options = { + nodeVersion: process.version, + ...options, + Arborist: this.constructor, + binLinks: 'binLinks' in options ? !!options.binLinks : true, + cache: options.cache || `${homedir()}/.npm/_cacache`, + dryRun: !!options.dryRun, + formatPackageLock: 'formatPackageLock' in options ? !!options.formatPackageLock : true, + force: !!options.force, + global: !!options.global, + ignoreScripts: !!options.ignoreScripts, + installStrategy: options.global ? 'shallow' : (options.installStrategy ? options.installStrategy : 'hoisted'), + lockfileVersion: lockfileVersion(options.lockfileVersion), + packageLockOnly: !!options.packageLockOnly, + packumentCache: options.packumentCache || new PackumentCache(), + path: options.path || '.', + rebuildBundle: 'rebuildBundle' in options ? !!options.rebuildBundle : true, + replaceRegistryHost: options.replaceRegistryHost, + savePrefix: 'savePrefix' in options ? options.savePrefix : '^', + scriptShell: options.scriptShell, + workspaces: options.workspaces || [], + workspacesEnabled: options.workspacesEnabled !== false, + } + // TODO we only ever look at this.options.replaceRegistryHost, not + // this.replaceRegistryHost. Defaulting needs to be written back to + // this.options to work properly + this.replaceRegistryHost = this.options.replaceRegistryHost = + (!this.options.replaceRegistryHost || this.options.replaceRegistryHost === 'npmjs') ? + 'registry.npmjs.org' : this.options.replaceRegistryHost + + if (options.saveType && !saveTypeMap.get(options.saveType)) { + throw new Error(`Invalid saveType ${options.saveType}`) + } + this.cache = resolve(this.options.cache) + this.diff = null + this.path = resolve(this.options.path) + timeEnd() + } + + // TODO: We should change these to static functions instead + // of methods for the next major version + + // Get the actual nodes corresponding to a root node's child workspaces, + // given a list of workspace names. 
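+ // e.g. (hypothetical names) workspaceNodes(tree, ['pkg-a', 'pkg-b'])
+ // resolves each name through tree.workspaces to its folder, then to
+ // the inventory node at that location, warning and skipping any name
+ // that isn't a configured workspace or has no folder on disk.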
+ workspaceNodes (tree, workspaces) { + const wsMap = tree.workspaces + if (!wsMap) { + log.warn('workspaces', 'filter set, but no workspaces present') + return [] + } + + const nodes = [] + for (const name of workspaces) { + const path = wsMap.get(name) + if (!path) { + log.warn('workspaces', `${name} in filter set, but not in workspaces`) + continue + } + + const loc = relpath(tree.realpath, path) + const node = tree.inventory.get(loc) + + if (!node) { + log.warn('workspaces', `${name} in filter set, but no workspace folder present`) + continue + } + + nodes.push(node) + } + + return nodes + } + + // returns a set of workspace nodes and all their deps + // TODO why is includeWorkspaceRoot a param? + // TODO why is workspaces a param? + workspaceDependencySet (tree, workspaces, includeWorkspaceRoot) { + const wsNodes = this.workspaceNodes(tree, workspaces) + if (includeWorkspaceRoot) { + for (const edge of tree.edgesOut.values()) { + if (edge.type !== 'workspace' && edge.to) { + wsNodes.push(edge.to) + } + } + } + const wsDepSet = new Set(wsNodes) + const extraneous = new Set() + for (const node of wsDepSet) { + for (const edge of node.edgesOut.values()) { + const dep = edge.to + if (dep) { + wsDepSet.add(dep) + if (dep.isLink) { + wsDepSet.add(dep.target) + } + } + } + for (const child of node.children.values()) { + if (child.extraneous) { + extraneous.add(child) + } + } + } + for (const extra of extraneous) { + wsDepSet.add(extra) + } + + return wsDepSet + } + + // returns a set of root dependencies, excluding dependencies that are + // exclusively workspace dependencies + excludeWorkspacesDependencySet (tree) { + const rootDepSet = new Set() + depth({ + tree, + visit: node => { + for (const { to } of node.edgesOut.values()) { + if (!to || to.isWorkspace) { + continue + } + for (const edgeIn of to.edgesIn.values()) { + if (edgeIn.from.isRoot || rootDepSet.has(edgeIn.from)) { + rootDepSet.add(to) + } + } + } + return node + }, + filter: node => node, + getChildren: (node, tree) => + [...tree.edgesOut.values()].map(edge => edge.to), + }) + return rootDepSet + } + + async [_setWorkspaces] (node) { + const workspaces = await mapWorkspaces({ + cwd: node.path, + pkg: node.package, + }) + + if (node && workspaces.size) { + node.workspaces = workspaces + } + + return node + } + + async audit (options = {}) { + this.addTracker('audit') + if (this.options.global) { + throw Object.assign( + new Error('`npm audit` does not support testing globals'), + { code: 'EAUDITGLOBAL' } + ) + } + + // allow the user to set options on the ctor as well. + // XXX: deprecate separate method options objects. + options = { ...this.options, ...options } + + const timeEnd = time.start('audit') + let tree + if (options.packageLock === false) { + // build ideal tree + await this.loadActual(options) + await this.buildIdealTree() + tree = this.idealTree + } else { + tree = await this.loadVirtual() + } + if (this.options.workspaces.length) { + options.filterSet = this.workspaceDependencySet( + tree, + this.options.workspaces, + this.options.includeWorkspaceRoot + ) + } + if (!options.workspacesEnabled) { + options.filterSet = + this.excludeWorkspacesDependencySet(tree) + } + this.auditReport = await AuditReport.load(tree, options) + const ret = options.fix ? this.reify(options) : this.auditReport + timeEnd() + this.finishTracker('audit') + return ret + } + + async dedupe (options = {}) { + // allow the user to set options on the ctor as well. + // XXX: deprecate separate method options objects. 
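+ // a sketch of typical use, assuming a project at `path`:
+ // await new Arborist({ path }).dedupe()
+ // loads the virtual (or actual) tree, collects every name that has
+ // more than one node in the inventory, and re-reifies with
+ // preferDedupe plus update.names set to that list.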
+ options = { ...this.options, ...options }
+ const tree = await this.loadVirtual().catch(() => this.loadActual())
+ const names = []
+ for (const name of tree.inventory.query('name')) {
+ if (tree.inventory.query('name', name).size > 1) {
+ names.push(name)
+ }
+ }
+ return this.reify({
+ ...options,
+ preferDedupe: true,
+ update: { names },
+ })
+ }
+}
+
+module.exports = Arborist
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/arborist/isolated-reifier.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/arborist/isolated-reifier.js
new file mode 100644
index 0000000000000000000000000000000000000000..16210296b5a141e1d42279703348c797a67d4c49
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/arborist/isolated-reifier.js
@@ -0,0 +1,453 @@
+const _makeIdealGraph = Symbol('makeIdealGraph')
+const _createIsolatedTree = Symbol.for('createIsolatedTree')
+const _createBundledTree = Symbol('createBundledTree')
+const { mkdirSync } = require('node:fs')
+const pacote = require('pacote')
+const { join } = require('node:path')
+const { depth } = require('treeverse')
+const crypto = require('node:crypto')
+
+// cache complicated function results
+const memoize = (fn) => {
+ const memo = new Map()
+ return async function (arg) {
+ const key = arg
+ if (memo.has(key)) {
+ return memo.get(key)
+ }
+ const result = {}
+ memo.set(key, result)
+ await fn(result, arg)
+ return result
+ }
+}
+
+module.exports = cls => class IsolatedReifier extends cls {
+ /**
+ * Create an ideal graph.
+ *
+ * An implementation of npm RFC-0042
+ * https://github.com/npm/rfcs/blob/main/accepted/0042-isolated-mode.md
+ *
+ * This entire file should be considered technical debt that will be resolved
+ * with an Arborist refactor or rewrite. Embedded logic in Nodes and Links,
+ * and the incremental state of building trees and reifying contains too many
+ * assumptions to do a linked mode properly.
+ *
+ * Instead, this approach takes a tree built from build-ideal-tree, and
+ * returns a new tree-like structure without the embedded logic of Node and
+ * Link classes.
+ *
+ * Since the RFC requires leaving the package-lock in place, this approach
+ * temporarily replaces the tree state for a couple of steps of reifying.
+ *
+ **/
+ async [_makeIdealGraph] (options) {
+ /* Make sure that the ideal tree is built, as the rest of
+ * the algorithm depends on it.
+ */ + const bitOpt = { + ...options, + complete: false, + } + await this.buildIdealTree(bitOpt) + const idealTree = this.idealTree + + this.rootNode = {} + const root = this.rootNode + this.counter = 0 + + // memoize to cache generating proxy Nodes + this.externalProxyMemo = memoize(this.externalProxy.bind(this)) + this.workspaceProxyMemo = memoize(this.workspaceProxy.bind(this)) + + root.external = [] + root.isProjectRoot = true + root.localLocation = idealTree.location + root.localPath = idealTree.path + root.workspaces = await Promise.all( + Array.from(idealTree.fsChildren.values(), this.workspaceProxyMemo)) + const processed = new Set() + const queue = [idealTree, ...idealTree.fsChildren] + while (queue.length !== 0) { + const next = queue.pop() + if (processed.has(next.location)) { + continue + } + processed.add(next.location) + next.edgesOut.forEach(e => { + if (!e.to || (next.package.bundleDependencies || next.package.bundledDependencies || []).includes(e.to.name)) { + return + } + queue.push(e.to) + }) + if (!next.isProjectRoot && !next.isWorkspace && !next.inert) { + root.external.push(await this.externalProxyMemo(next)) + } + } + + await this.assignCommonProperties(idealTree, root) + + this.idealGraph = root + } + + async workspaceProxy (result, node) { + result.localLocation = node.location + result.localPath = node.path + result.isWorkspace = true + result.resolved = node.resolved + await this.assignCommonProperties(node, result) + } + + async externalProxy (result, node) { + await this.assignCommonProperties(node, result) + if (node.hasShrinkwrap) { + const dir = join( + node.root.path, + 'node_modules', + '.store', + `${node.name}@${node.version}` + ) + mkdirSync(dir, { recursive: true }) + // TODO this approach feels wrong + // and shouldn't be necessary for shrinkwraps + await pacote.extract(node.resolved, dir, { + ...this.options, + resolved: node.resolved, + integrity: node.integrity, + }) + const Arborist = this.constructor + const arb = new Arborist({ ...this.options, path: dir }) + await arb[_makeIdealGraph]({ dev: false }) + this.rootNode.external.push(...arb.idealGraph.external) + arb.idealGraph.external.forEach(e => { + e.root = this.rootNode + e.id = `${node.id}=>${e.id}` + }) + result.localDependencies = [] + result.externalDependencies = arb.idealGraph.externalDependencies + result.externalOptionalDependencies = arb.idealGraph.externalOptionalDependencies + result.dependencies = [ + ...result.externalDependencies, + ...result.localDependencies, + ...result.externalOptionalDependencies, + ] + } + result.optional = node.optional + result.resolved = node.resolved + result.version = node.version + } + + async assignCommonProperties (node, result) { + function validEdgesOut (node) { + return [...node.edgesOut.values()].filter(e => e.to && e.to.target && !(node.package.bundledDependencies || node.package.bundleDependencies || []).includes(e.to.name)) + } + const edges = validEdgesOut(node) + const optionalDeps = edges.filter(e => e.optional).map(e => e.to.target) + const nonOptionalDeps = edges.filter(e => !e.optional).map(e => e.to.target) + + result.localDependencies = await Promise.all(nonOptionalDeps.filter(n => n.isWorkspace).map(this.workspaceProxyMemo)) + result.externalDependencies = await Promise.all(nonOptionalDeps.filter(n => !n.isWorkspace && !n.inert).map(this.externalProxyMemo)) + result.externalOptionalDependencies = await Promise.all(optionalDeps.filter(n => !n.inert).map(this.externalProxyMemo)) + result.dependencies = [ + 
...result.externalDependencies, + ...result.localDependencies, + ...result.externalOptionalDependencies, + ] + result.root = this.rootNode + result.id = this.counter++ + result.name = node.name + result.package = { ...node.package } + result.package.bundleDependencies = undefined + result.hasInstallScript = node.hasInstallScript + } + + async [_createBundledTree] () { + // TODO: make sure that idealTree object exists + const idealTree = this.idealTree + // TODO: test workspaces having bundled deps + const queue = [] + + for (const [, edge] of idealTree.edgesOut) { + if (edge.to && (idealTree.package.bundleDependencies || idealTree.package.bundledDependencies || []).includes(edge.to.name)) { + queue.push({ from: idealTree, to: edge.to }) + } + } + for (const child of idealTree.fsChildren) { + for (const [, edge] of child.edgesOut) { + if (edge.to && (child.package.bundleDependencies || child.package.bundledDependencies || []).includes(edge.to.name)) { + queue.push({ from: child, to: edge.to }) + } + } + } + + const processed = new Set() + const nodes = new Map() + const edges = [] + while (queue.length !== 0) { + const nextEdge = queue.pop() + const key = `${nextEdge.from.location}=>${nextEdge.to.location}` + // should be impossible, unless bundled is duped + /* istanbul ignore next */ + if (processed.has(key)) { + continue + } + processed.add(key) + const from = nextEdge.from + if (!from.isRoot && !from.isWorkspace) { + nodes.set(from.location, { location: from.location, resolved: from.resolved, name: from.name, optional: from.optional, pkg: { ...from.package, bundleDependencies: undefined } }) + } + const to = nextEdge.to + nodes.set(to.location, { location: to.location, resolved: to.resolved, name: to.name, optional: to.optional, pkg: { ...to.package, bundleDependencies: undefined } }) + edges.push({ from: from.isRoot ? 
'root' : from.location, to: to.location }) + + to.edgesOut.forEach(e => { + // an edge out should always have a to + /* istanbul ignore else */ + if (e.to) { + queue.push({ from: e.from, to: e.to }) + } + }) + } + return { edges, nodes } + } + + async [_createIsolatedTree] () { + await this[_makeIdealGraph](this.options) + + const proxiedIdealTree = this.idealGraph + + const bundledTree = await this[_createBundledTree]() + + const treeHash = (startNode) => { + // generate short hash based on the dependency tree + // starting at this node + const deps = [] + const branch = [] + depth({ + tree: startNode, + getChildren: node => node.dependencies, + filter: node => node, + visit: node => { + branch.push(`${node.name}@${node.version}`) + deps.push(`${branch.join('->')}::${node.resolved}`) + }, + leave: () => { + branch.pop() + }, + }) + deps.sort() + return crypto.createHash('shake256', { outputLength: 16 }) + .update(deps.join(',')) + .digest('base64') + // Node v14 doesn't support base64url + .replace(/\+/g, '-') + .replace(/\//g, '_') + .replace(/=+$/m, '') + } + + const getKey = (idealTreeNode) => { + return `${idealTreeNode.name}@${idealTreeNode.version}-${treeHash(idealTreeNode)}` + } + + const root = { + fsChildren: [], + integrity: null, + inventory: new Map(), + isLink: false, + isRoot: true, + binPaths: [], + edgesIn: new Set(), + edgesOut: new Map(), + hasShrinkwrap: false, + parent: null, + // TODO: we should probably not reference this.idealTree + resolved: this.idealTree.resolved, + isTop: true, + path: proxiedIdealTree.root.localPath, + realpath: proxiedIdealTree.root.localPath, + package: proxiedIdealTree.root.package, + meta: { loadedFromDisk: false }, + global: false, + isProjectRoot: true, + children: [], + } + // root.inventory.set('', t) + // root.meta = this.idealTree.meta + // TODO We should mock better the inventory object because it is used by audit-report.js ... 
maybe + root.inventory.query = () => { + return [] + } + const processed = new Set() + proxiedIdealTree.workspaces.forEach(c => { + const workspace = { + edgesIn: new Set(), + edgesOut: new Map(), + children: [], + hasInstallScript: c.hasInstallScript, + binPaths: [], + package: c.package, + location: c.localLocation, + path: c.localPath, + realpath: c.localPath, + resolved: c.resolved, + } + root.fsChildren.push(workspace) + root.inventory.set(workspace.location, workspace) + }) + const generateChild = (node, location, pkg, inStore) => { + const newChild = { + global: false, + globalTop: false, + isProjectRoot: false, + isTop: false, + location, + name: node.name, + optional: node.optional, + top: { path: proxiedIdealTree.root.localPath }, + children: [], + edgesIn: new Set(), + edgesOut: new Map(), + binPaths: [], + fsChildren: [], + /* istanbul ignore next -- emulate Node */ + getBundler () { + return null + }, + hasShrinkwrap: false, + inDepBundle: false, + integrity: null, + isLink: false, + isRoot: false, + isInStore: inStore, + path: join(proxiedIdealTree.root.localPath, location), + realpath: join(proxiedIdealTree.root.localPath, location), + resolved: node.resolved, + version: pkg.version, + package: pkg, + } + newChild.target = newChild + root.children.push(newChild) + root.inventory.set(newChild.location, newChild) + } + proxiedIdealTree.external.forEach(c => { + const key = getKey(c) + if (processed.has(key)) { + return + } + processed.add(key) + const location = join('node_modules', '.store', key, 'node_modules', c.name) + generateChild(c, location, c.package, true) + }) + bundledTree.nodes.forEach(node => { + generateChild(node, node.location, node.pkg, false) + }) + bundledTree.edges.forEach(e => { + const from = e.from === 'root' ? root : root.inventory.get(e.from) + const to = root.inventory.get(e.to) + // Maybe optional should be propagated from the original edge + const edge = { optional: false, from, to } + from.edgesOut.set(to.name, edge) + to.edgesIn.add(edge) + }) + const memo = new Set() + + function processEdges (node, externalEdge) { + externalEdge = !!externalEdge + const key = getKey(node) + if (memo.has(key)) { + return + } + memo.add(key) + + let from, nmFolder + if (externalEdge) { + const fromLocation = join('node_modules', '.store', key, 'node_modules', node.name) + from = root.children.find(c => c.location === fromLocation) + nmFolder = join('node_modules', '.store', key, 'node_modules') + } else { + from = node.isProjectRoot ? 
root : root.fsChildren.find(c => c.location === node.localLocation) + nmFolder = join(node.localLocation, 'node_modules') + } + + const processDeps = (dep, optional, external) => { + optional = !!optional + external = !!external + + const location = join(nmFolder, dep.name) + const binNames = dep.package.bin && Object.keys(dep.package.bin) || [] + const toKey = getKey(dep) + + let target + if (external) { + const toLocation = join('node_modules', '.store', toKey, 'node_modules', dep.name) + target = root.children.find(c => c.location === toLocation) + } else { + target = root.fsChildren.find(c => c.location === dep.localLocation) + } + // TODO: we should no-op is an edge has already been created with the same fromKey and toKey + + binNames.forEach(bn => { + target.binPaths.push(join(from.realpath, 'node_modules', '.bin', bn)) + }) + + const link = { + global: false, + globalTop: false, + isProjectRoot: false, + edgesIn: new Set(), + edgesOut: new Map(), + binPaths: [], + isTop: false, + optional, + location: location, + path: join(dep.root.localPath, nmFolder, dep.name), + realpath: target.path, + name: toKey, + resolved: dep.resolved, + top: { path: dep.root.localPath }, + children: [], + fsChildren: [], + isLink: true, + isStoreLink: true, + isRoot: false, + package: { _id: 'abc', bundleDependencies: undefined, deprecated: undefined, bin: target.package.bin, scripts: dep.package.scripts }, + target, + } + const newEdge1 = { optional, from, to: link } + from.edgesOut.set(dep.name, newEdge1) + link.edgesIn.add(newEdge1) + const newEdge2 = { optional: false, from: link, to: target } + link.edgesOut.set(dep.name, newEdge2) + target.edgesIn.add(newEdge2) + root.children.push(link) + } + + for (const dep of node.localDependencies) { + processEdges(dep, false) + // nonOptional, local + processDeps(dep, false, false) + } + for (const dep of node.externalDependencies) { + processEdges(dep, true) + // nonOptional, external + processDeps(dep, false, true) + } + for (const dep of node.externalOptionalDependencies) { + processEdges(dep, true) + // optional, external + processDeps(dep, true, true) + } + } + + processEdges(proxiedIdealTree, false) + for (const node of proxiedIdealTree.workspaces) { + processEdges(node, false) + } + root.children.forEach(c => c.parent = root) + root.children.forEach(c => c.root = root) + root.root = root + root.target = root + return root + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/arborist/load-actual.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/arborist/load-actual.js new file mode 100644 index 0000000000000000000000000000000000000000..3be44780e01aee22424f9bea34ce9e3545339b71 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/arborist/load-actual.js @@ -0,0 +1,442 @@ +// mix-in implementing the loadActual method + +const { dirname, join, normalize, relative, resolve } = require('node:path') + +const PackageJson = require('@npmcli/package-json') +const { readdirScoped } = require('@npmcli/fs') +const { walkUp } = require('walk-up-path') +const ancestorPath = require('common-ancestor-path') +const treeCheck = require('../tree-check.js') + +const Shrinkwrap = require('../shrinkwrap.js') +const calcDepFlags = require('../calc-dep-flags.js') +const Node = require('../node.js') +const Link = require('../link.js') +const realpath = require('../realpath.js') + +// public symbols +const _changePath 
= Symbol.for('_changePath')
+const _setWorkspaces = Symbol.for('setWorkspaces')
+const _rpcache = Symbol.for('realpathCache')
+const _stcache = Symbol.for('statCache')
+
+module.exports = cls => class ActualLoader extends cls {
+ #actualTree
+ // ensure when walking the tree that we don't call loadTree on the same
+ // actual node more than one time.
+ #actualTreeLoaded = new Set()
+ #actualTreePromise
+
+ // cache of nodes when loading the actualTree, so that we avoid loading the
+ // same node multiple times when symlinks attack.
+ #cache = new Map()
+ #filter
+
+ // cache of link targets for setting fsParent links
+ // We don't do fsParent as a magic getter/setter, because it'd be too costly
+ // to keep up to date along the walk.
+ // And, we know that it can ONLY be relevant when the node is a target of a
+ // link; otherwise, it'd be in a node_modules folder, so take advantage of
+ // that to limit the scans later.
+ #topNodes = new Set()
+ #transplantFilter
+
+ constructor (options) {
+ super(options)
+
+ // the tree of nodes on disk
+ this.actualTree = options.actualTree
+
+ // caches for cached realpath calls
+ const cwd = process.cwd()
+ // assume that the cwd is real enough for our purposes
+ this[_rpcache] = new Map([[cwd, cwd]])
+ this[_stcache] = new Map()
+ }
+
+ // public method
+ // TODO remove options param in next semver major
+ async loadActual (options = {}) {
+ // In the past this.actualTree was set as a promise that eventually
+ // resolved, and overwrote this.actualTree with the resolved value. This
+ // was a problem because virtually no other code expects this.actualTree to
+ // be a promise. Instead we only set it once resolved, and also return it
+ // from the promise so that it is what's returned from this function when
+ // awaited.
+ if (this.actualTree) {
+ return this.actualTree
+ }
+ if (!this.#actualTreePromise) {
+ // allow the user to set options on the ctor as well.
+ // XXX: deprecate separate method options objects.
+ options = { ...this.options, ...options }
+
+ this.#actualTreePromise = this.#loadActual(options)
+ .then(tree => {
+ // reset all deps to extraneous prior to recalc
+ if (!options.root) {
+ for (const node of tree.inventory.values()) {
+ node.extraneous = true
+ }
+ }
+
+ // only reset root flags if we're not re-rooting,
+ // otherwise leave as-is
+ calcDepFlags(tree, !options.root)
+ this.actualTree = treeCheck(tree)
+ return this.actualTree
+ })
+ }
+ return this.#actualTreePromise
+ }
+
+ // return the promise so that we don't ever have more than one going at the
+ // same time. This is so that buildIdealTree can default to the actualTree
+ // if no shrinkwrap present, but reify() can still call buildIdealTree and
+ // loadActual in parallel safely.
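+ // e.g. buildIdealTree() and reify() may both end up calling
+ // loadActual() concurrently; both settle on the same
+ // #actualTreePromise, so the disk walk runs at most once per
+ // Arborist instance.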
+ + async #loadActual (options) { + // mostly realpath to throw if the root doesn't exist + const { + global, + filter = () => true, + root = null, + transplantFilter = () => true, + ignoreMissing = false, + forceActual = false, + } = options + this.#filter = filter + this.#transplantFilter = transplantFilter + + if (global) { + const real = await realpath(this.path, this[_rpcache], this[_stcache]) + const params = { + path: this.path, + realpath: real, + pkg: {}, + global, + loadOverrides: true, + } + if (this.path === real) { + this.#actualTree = this.#newNode(params) + } else { + this.#actualTree = await this.#newLink(params) + } + } else { + // not in global mode, hidden lockfile is allowed, load root pkg too + this.#actualTree = await this.#loadFSNode({ + path: this.path, + real: await realpath(this.path, this[_rpcache], this[_stcache]), + loadOverrides: true, + }) + + this.#actualTree.assertRootOverrides() + + // if forceActual is set, don't even try the hidden lockfile + if (!forceActual) { + // Note: hidden lockfile will be rejected if it's not the latest thing + // in the folder, or if any of the entries in the hidden lockfile are + // missing. + const meta = await Shrinkwrap.load({ + path: this.#actualTree.path, + hiddenLockfile: true, + resolveOptions: this.options, + }) + + if (meta.loadedFromDisk) { + this.#actualTree.meta = meta + // have to load on a new Arborist object, so we don't assign + // the virtualTree on this one! Also, the weird reference is because + // we can't easily get a ref to Arborist in this module, without + // creating a circular reference, since this class is a mixin used + // to build up the Arborist class itself. + await new this.constructor({ ...this.options }).loadVirtual({ + root: this.#actualTree, + }) + await this[_setWorkspaces](this.#actualTree) + + this.#transplant(root) + return this.#actualTree + } + } + + const meta = await Shrinkwrap.load({ + path: this.#actualTree.path, + lockfileVersion: this.options.lockfileVersion, + resolveOptions: this.options, + }) + this.#actualTree.meta = meta + } + + await this.#loadFSTree(this.#actualTree) + await this[_setWorkspaces](this.#actualTree) + + // if there are workspace targets without Link nodes created, load + // the targets, so that we know what they are. + if (this.#actualTree.workspaces && this.#actualTree.workspaces.size) { + const promises = [] + for (const path of this.#actualTree.workspaces.values()) { + if (!this.#cache.has(path)) { + // workspace overrides use the root overrides + const p = this.#loadFSNode({ path, root: this.#actualTree, useRootOverrides: true }) + .then(node => this.#loadFSTree(node)) + promises.push(p) + } + } + await Promise.all(promises) + } + + if (!ignoreMissing) { + await this.#findMissingEdges() + } + + // try to find a node that is the parent in a fs tree sense, but not a + // node_modules tree sense, of any link targets. this allows us to + // resolve deps that node will find, but a legacy npm view of the + // world would not have noticed. + for (const path of this.#topNodes) { + const node = this.#cache.get(path) + if (node && !node.parent && !node.fsParent) { + for (const p of walkUp(dirname(path))) { + if (this.#cache.has(p)) { + node.fsParent = this.#cache.get(p) + break + } + } + } + } + + this.#transplant(root) + + if (global) { + // need to depend on the children, or else all of them + // will end up being flagged as extraneous, since the + // global root isn't a "real" project + const tree = this.#actualTree + const actualRoot = tree.isLink ? 
tree.target : tree
+      const { dependencies = {} } = actualRoot.package
+      for (const [name, kid] of actualRoot.children.entries()) {
+        const def = kid.isLink ? `file:${kid.realpath}` : '*'
+        dependencies[name] = dependencies[name] || def
+      }
+      actualRoot.package = { ...actualRoot.package, dependencies }
+    }
+    return this.#actualTree
+  }
+
+  #transplant (root) {
+    if (!root || root === this.#actualTree) {
+      return
+    }
+
+    this.#actualTree[_changePath](root.path)
+    for (const node of this.#actualTree.children.values()) {
+      if (!this.#transplantFilter(node)) {
+        node.root = null
+      }
+    }
+
+    root.replace(this.#actualTree)
+    for (const node of this.#actualTree.fsChildren) {
+      node.root = this.#transplantFilter(node) ? root : null
+    }
+
+    this.#actualTree = root
+  }
+
+  async #loadFSNode ({ path, parent, real, root, loadOverrides, useRootOverrides }) {
+    if (!real) {
+      try {
+        real = await realpath(path, this[_rpcache], this[_stcache])
+      } catch (error) {
+        // if realpath fails, just provide a dummy error node
+        return new Node({
+          error,
+          path,
+          realpath: path,
+          parent,
+          root,
+          loadOverrides,
+        })
+      }
+    }
+
+    const cached = this.#cache.get(path)
+    let node
+    // reuse a cached node unless it's a dummy; dummy nodes created for
+    // missing edges must be replaced by a real load. assign the parent
+    // and return it.
+    if (cached && !cached.dummy) {
+      cached.parent = parent
+      return cached
+    } else {
+      const params = {
+        installLinks: this.installLinks,
+        legacyPeerDeps: this.legacyPeerDeps,
+        path,
+        realpath: real,
+        parent,
+        root,
+        loadOverrides,
+      }
+
+      try {
+        const { content: pkg } = await PackageJson.normalize(real)
+        params.pkg = pkg
+        if (useRootOverrides && root.overrides) {
+          params.overrides = root.overrides.getNodeRule({ name: pkg.name, version: pkg.version })
+        }
+      } catch (err) {
+        if (err.code === 'EJSONPARSE') {
+          // TODO @npmcli/package-json should be doing this
+          err.path = join(real, 'package.json')
+        }
+        params.error = err
+      }
+
+      // soldier on if reading/normalizing package.json raises an error,
+      // passing it to the Node, which will attach it to its errors array
+      // (Link passes it along to its target node)
+      if (normalize(path) === real) {
+        node = this.#newNode(params)
+      } else {
+        node = await this.#newLink(params)
+      }
+    }
+    this.#cache.set(path, node)
+    return node
+  }
+
+  #newNode (options) {
+    // check it for an fsParent if it's a tree top. there's a decent chance
+    // it'll get parented later, making the fsParent scan a no-op, but better
+    // safe than sorry, since it's cheap.
+    const { parent, realpath } = options
+    if (!parent) {
+      this.#topNodes.add(realpath)
+    }
+    return new Node(options)
+  }
+
+  async #newLink (options) {
+    const { realpath } = options
+    this.#topNodes.add(realpath)
+    const target = this.#cache.get(realpath)
+    const link = new Link({ ...options, target })
+
+    if (!target) {
+      // Link set its target itself in this case
+      this.#cache.set(realpath, link.target)
+      // if a link target points at a node outside of the root tree's
+      // node_modules hierarchy, then load that node as well.
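+      // (e.g. a `file:../shared-lib` dependency: the Link itself lives in
+      // node_modules, but its target tree sits outside the project and has
+      // to be walked explicitly here)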
+ await this.#loadFSTree(link.target) + } + + return link + } + + async #loadFSTree (node) { + const did = this.#actualTreeLoaded + if (!node.isLink && !did.has(node.target.realpath)) { + did.add(node.target.realpath) + await this.#loadFSChildren(node.target) + return Promise.all( + [...node.target.children.entries()] + .filter(([, kid]) => !did.has(kid.realpath)) + .map(([, kid]) => this.#loadFSTree(kid)) + ) + } + } + + // create child nodes for all the entries in node_modules + // and attach them to the node as a parent + async #loadFSChildren (node) { + const nm = resolve(node.realpath, 'node_modules') + try { + const kids = await readdirScoped(nm).then(paths => paths.map(p => p.replace(/\\/g, '/'))) + return Promise.all( + // ignore . dirs and retired scoped package folders + kids.filter(kid => !/^(@[^/]+\/)?\./.test(kid)) + .filter(kid => this.#filter(node, kid)) + .map(kid => this.#loadFSNode({ + parent: node, + path: resolve(nm, kid), + }))) + } catch { + // error in the readdir is not fatal, just means no kids + } + } + + async #findMissingEdges () { + // try to resolve any missing edges by walking up the directory tree, + // checking for the package in each node_modules folder. stop at the + // root directory. + // The tricky move here is that we load a "dummy" node for the folder + // containing the node_modules folder, so that it can be assigned as + // the fsParent. It's a bad idea to *actually* load that full node, + // because people sometimes develop in ~/projects/node_modules/... + // so we'd end up loading a massive tree with lots of unrelated junk. + const nmContents = new Map() + const tree = this.#actualTree + for (const node of tree.inventory.values()) { + const ancestor = ancestorPath(node.realpath, this.path) + + const depPromises = [] + for (const [name, edge] of node.edgesOut.entries()) { + const notMissing = !edge.missing && + !(edge.to && (edge.to.dummy || edge.to.parent !== node)) + if (notMissing) { + continue + } + + // start the walk from the dirname, because we would have found + // the dep in the loadFSTree step already if it was local. + for (const p of walkUp(dirname(node.realpath))) { + // only walk as far as the nearest ancestor + // this keeps us from going into completely unrelated + // places when a project is just missing something, but + // allows for finding the transitive deps of link targets. + // ie, if it has to go up and back out to get to the path + // from the nearest common ancestor, we've gone too far. + if (ancestor && /^\.\.(?:[\\/]|$)/.test(relative(ancestor, p))) { + break + } + + let entries + if (!nmContents.has(p)) { + entries = await readdirScoped(p + '/node_modules') + .catch(() => []).then(paths => paths.map(p => p.replace(/\\/g, '/'))) + nmContents.set(p, entries) + } else { + entries = nmContents.get(p) + } + + if (!entries.includes(name)) { + continue + } + + let d + if (!this.#cache.has(p)) { + d = new Node({ path: p, root: node.root, dummy: true }) + this.#cache.set(p, d) + } else { + d = this.#cache.get(p) + } + if (d.dummy) { + // it's a placeholder, so likely would not have loaded this dep, + // unless another dep in the tree also needs it. 
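+          // in that case, load just this one child under the dummy
+          // parent rather than the dummy folder's entire tree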
+ const depPath = normalize(`${p}/node_modules/${name}`) + const cached = this.#cache.get(depPath) + if (!cached || cached.dummy) { + depPromises.push(this.#loadFSNode({ + path: depPath, + root: node.root, + parent: d, + }).then(node => this.#loadFSTree(node))) + } + } + break + } + } + await Promise.all(depPromises) + } + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js new file mode 100644 index 0000000000000000000000000000000000000000..ba1b7478bd510fabc154aa273810c5c536022461 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/arborist/load-virtual.js @@ -0,0 +1,305 @@ +const { resolve } = require('node:path') +// mixin providing the loadVirtual method +const mapWorkspaces = require('@npmcli/map-workspaces') +const PackageJson = require('@npmcli/package-json') +const nameFromFolder = require('@npmcli/name-from-folder') + +const consistentResolve = require('../consistent-resolve.js') +const Shrinkwrap = require('../shrinkwrap.js') +const Node = require('../node.js') +const Link = require('../link.js') +const relpath = require('../relpath.js') +const calcDepFlags = require('../calc-dep-flags.js') +const treeCheck = require('../tree-check.js') + +const flagsSuspect = Symbol.for('flagsSuspect') +const setWorkspaces = Symbol.for('setWorkspaces') + +module.exports = cls => class VirtualLoader extends cls { + #rootOptionProvided + + constructor (options) { + super(options) + + // the virtual tree we load from a shrinkwrap + this.virtualTree = options.virtualTree + this[flagsSuspect] = false + } + + // public method + async loadVirtual (options = {}) { + if (this.virtualTree) { + return this.virtualTree + } + + // allow the user to set reify options on the ctor as well. + // XXX: deprecate separate reify() options object. + options = { ...this.options, ...options } + + if (options.root && options.root.meta) { + await this.#loadFromShrinkwrap(options.root.meta, options.root) + return treeCheck(this.virtualTree) + } + + const s = await Shrinkwrap.load({ + path: this.path, + lockfileVersion: this.options.lockfileVersion, + resolveOptions: this.options, + }) + if (!s.loadedFromDisk && !options.root) { + const er = new Error('loadVirtual requires existing shrinkwrap file') + throw Object.assign(er, { code: 'ENOLOCK' }) + } + + // when building the ideal tree, we pass in a root node to this function + // otherwise, load it from the root package json or the lockfile + const pkg = await PackageJson.normalize(this.path).then(p => p.content).catch(() => s.data.packages[''] || {}) + // TODO clean this up + const { + root = await this[setWorkspaces](this.#loadNode('', pkg, true)), + } = options + this.#rootOptionProvided = options.root + + await this.#loadFromShrinkwrap(s, root) + root.assertRootOverrides() + return treeCheck(this.virtualTree) + } + + async #loadFromShrinkwrap (s, root) { + if (!this.#rootOptionProvided) { + // root is never any of these things, but might be a brand new + // baby Node object that never had its dep flags calculated. 
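+      // (new Node objects start with extraneous/dev/optional/devOptional/
+      // peer all set to true until calc-dep-flags visits them, hence the
+      // explicit zeroing of the root here)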
+ root.extraneous = false + root.dev = false + root.optional = false + root.devOptional = false + root.peer = false + } else { + this[flagsSuspect] = true + } + + this.#checkRootEdges(s, root) + root.meta = s + this.virtualTree = root + const { links, nodes } = this.#resolveNodes(s, root) + await this.#resolveLinks(links, nodes) + if (!(s.originalLockfileVersion >= 2)) { + this.#assignBundles(nodes) + } + if (this[flagsSuspect]) { + // reset all dep flags + // can't use inventory here, because virtualTree might not be root + for (const node of nodes.values()) { + if (node.isRoot || node === this.#rootOptionProvided) { + continue + } + node.extraneous = true + node.dev = true + node.optional = true + node.devOptional = true + node.peer = true + } + calcDepFlags(this.virtualTree, !this.#rootOptionProvided) + } + return root + } + + // check the lockfile deps, and see if they match. if they do not + // then we have to reset dep flags at the end. for example, if the + // user manually edits their package.json file, then we need to know + // that the idealTree is no longer entirely trustworthy. + #checkRootEdges (s, root) { + // loaded virtually from tree, no chance of being out of sync + // ancient lockfiles are critically damaged by this process, + // so we need to just hope for the best in those cases. + if (!s.loadedFromDisk || s.ancientLockfile) { + return + } + + const lock = s.get('') + const prod = lock.dependencies || {} + const dev = lock.devDependencies || {} + const optional = lock.optionalDependencies || {} + const peer = lock.peerDependencies || {} + const peerOptional = {} + + if (lock.peerDependenciesMeta) { + for (const [name, meta] of Object.entries(lock.peerDependenciesMeta)) { + if (meta.optional && peer[name] !== undefined) { + peerOptional[name] = peer[name] + delete peer[name] + } + } + } + + for (const name of Object.keys(optional)) { + delete prod[name] + } + + const lockWS = {} + const workspaces = mapWorkspaces.virtual({ + cwd: this.path, + lockfile: s.data, + }) + + for (const [name, path] of workspaces.entries()) { + lockWS[name] = `file:${path}` + } + + // Should rootNames exclude optional? 
+ const rootNames = new Set(root.edgesOut.keys()) + + const lockByType = ({ dev, optional, peer, peerOptional, prod, workspace: lockWS }) + + // Find anything in shrinkwrap deps that doesn't match root's type or spec + for (const type in lockByType) { + const deps = lockByType[type] + for (const name in deps) { + const edge = root.edgesOut.get(name) + if (!edge || edge.type !== type || edge.spec !== deps[name]) { + return this[flagsSuspect] = true + } + rootNames.delete(name) + } + } + // Something was in root that's not accounted for in shrinkwrap + if (rootNames.size) { + return this[flagsSuspect] = true + } + } + + // separate out link metadata, and create Node objects for nodes + #resolveNodes (s, root) { + const links = new Map() + const nodes = new Map([['', root]]) + for (const [location, meta] of Object.entries(s.data.packages)) { + // skip the root because we already got it + if (!location) { + continue + } + + if (meta.link) { + links.set(location, meta) + } else { + nodes.set(location, this.#loadNode(location, meta)) + } + } + return { links, nodes } + } + + // links is the set of metadata, and nodes is the map of non-Link nodes + // Set the targets to nodes in the set, if we have them (we might not) + async #resolveLinks (links, nodes) { + for (const [location, meta] of links.entries()) { + const targetPath = resolve(this.path, meta.resolved) + const targetLoc = relpath(this.path, targetPath) + const target = nodes.get(targetLoc) + + if (!target) { + const err = new Error( +`Missing target in lock file: "${targetLoc}" is referenced by "${location}" but does not exist. +To fix: +1. rm package-lock.json +2. npm install` + ) + err.code = 'EMISSINGTARGET' + throw err + } + + const link = this.#loadLink(location, targetLoc, target, meta) + nodes.set(location, link) + nodes.set(targetLoc, link.target) + + // we always need to read the package.json for link targets + // outside node_modules because they can be changed by the local user + if (!link.target.parent) { + await PackageJson.normalize(link.realpath).then(p => link.target.package = p.content).catch(() => null) + } + } + } + + #assignBundles (nodes) { + for (const [location, node] of nodes) { + // Skip assignment of parentage for the root package + if (!location || node.isLink && !node.target.location) { + continue + } + const { name, parent, package: { inBundle } } = node + + if (!parent) { + continue + } + + // read inBundle from package because 'package' here is + // actually a v2 lockfile metadata entry. + // If the *parent* is also bundled, though, or if the parent has + // no dependency on it, then we assume that it's being pulled in + // just by virtue of its parent or a transitive dep being bundled. + const { package: ppkg } = parent + const { inBundle: parentBundled } = ppkg + if (inBundle && !parentBundled && parent.edgesOut.has(node.name)) { + if (!ppkg.bundleDependencies) { + ppkg.bundleDependencies = [name] + } else { + ppkg.bundleDependencies.push(name) + } + } + } + } + + #loadNode (location, sw, loadOverrides) { + const p = this.virtualTree ? 
this.virtualTree.realpath : this.path + const path = resolve(p, location) + // shrinkwrap doesn't include package name unless necessary + if (!sw.name) { + sw.name = nameFromFolder(path) + } + + const dev = sw.dev + const optional = sw.optional + const devOptional = dev || optional || sw.devOptional + const peer = sw.peer + + const node = new Node({ + installLinks: this.installLinks, + legacyPeerDeps: this.legacyPeerDeps, + root: this.virtualTree, + path, + realpath: path, + integrity: sw.integrity, + resolved: consistentResolve(sw.resolved, this.path, path), + pkg: sw, + hasShrinkwrap: sw.hasShrinkwrap, + dev, + optional, + devOptional, + peer, + loadOverrides, + }) + // cast to boolean because they're undefined in the lock file when false + node.extraneous = !!sw.extraneous + node.devOptional = !!(sw.devOptional || sw.dev || sw.optional) + node.peer = !!sw.peer + node.optional = !!sw.optional + node.dev = !!sw.dev + return node + } + + #loadLink (location, targetLoc, target) { + const path = resolve(this.path, location) + const link = new Link({ + installLinks: this.installLinks, + legacyPeerDeps: this.legacyPeerDeps, + path, + realpath: resolve(this.path, targetLoc), + target, + pkg: target && target.package, + }) + link.extraneous = target.extraneous + link.devOptional = target.devOptional + link.peer = target.peer + link.optional = target.optional + link.dev = target.dev + return link + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/arborist/rebuild.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/arborist/rebuild.js new file mode 100644 index 0000000000000000000000000000000000000000..272d6a4122aef738c6e782f995fc7b25ac749b5b --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/arborist/rebuild.js @@ -0,0 +1,403 @@ +// Arborist.rebuild({path = this.path}) will do all the binlinks and +// bundle building needed. Called by reify, and by `npm rebuild`. + +const PackageJson = require('@npmcli/package-json') +const binLinks = require('bin-links') +const localeCompare = require('@isaacs/string-locale-compare')('en') +const promiseAllRejectLate = require('promise-all-reject-late') +const runScript = require('@npmcli/run-script') +const { callLimit: promiseCallLimit } = require('promise-call-limit') +const { depth: dfwalk } = require('treeverse') +const { isNodeGypPackage, defaultGypInstallScript } = require('@npmcli/node-gyp') +const { log, time } = require('proc-log') +const { resolve } = require('node:path') + +const boolEnv = b => b ? '1' : '' +const sortNodes = (a, b) => (a.depth - b.depth) || localeCompare(a.path, b.path) + +const _checkBins = Symbol.for('checkBins') + +// defined by reify mixin +const _handleOptionalFailure = Symbol.for('handleOptionalFailure') +const _trashList = Symbol.for('trashList') + +module.exports = cls => class Builder extends cls { + #doHandleOptionalFailure + #oldMeta = null + #queues + + constructor (options) { + super(options) + + this.scriptsRun = new Set() + this.#resetQueues() + } + + async rebuild ({ nodes, handleOptionalFailure = false } = {}) { + // nothing to do if we're not building anything! + if (this.options.ignoreScripts && !this.options.binLinks) { + return + } + + // when building for the first time, as part of reify, we ignore + // failures in optional nodes, and just delete them. 
however, when
+    // running JUST a rebuild, we treat optional failures as real fails
+    this.#doHandleOptionalFailure = handleOptionalFailure
+
+    if (!nodes) {
+      nodes = await this.#loadDefaultNodes()
+    }
+
+    // separate out link nodes so that we can run
+    // prepare scripts and link bins in the expected order
+    const timeEnd = time.start('build')
+
+    const {
+      depNodes,
+      linkNodes,
+    } = this.#retrieveNodesByType(nodes)
+
+    // build regular deps
+    await this.#build(depNodes, {})
+
+    // build link deps
+    if (linkNodes.size) {
+      this.#resetQueues()
+      await this.#build(linkNodes, { type: 'links' })
+    }
+
+    timeEnd()
+  }
+
+  // if we don't have a set of nodes, then just rebuild
+  // the actual tree on disk.
+  async #loadDefaultNodes () {
+    let nodes
+    const tree = await this.loadActual()
+    let filterSet
+    if (!this.options.workspacesEnabled) {
+      filterSet = this.excludeWorkspacesDependencySet(tree)
+      nodes = tree.inventory.filter(node =>
+        filterSet.has(node) || node.isProjectRoot
+      )
+    } else if (this.options.workspaces.length) {
+      filterSet = this.workspaceDependencySet(
+        tree,
+        this.options.workspaces,
+        this.options.includeWorkspaceRoot
+      )
+      nodes = tree.inventory.filter(node => filterSet.has(node))
+    } else {
+      nodes = tree.inventory.values()
+    }
+    return nodes
+  }
+
+  #retrieveNodesByType (nodes) {
+    const depNodes = new Set()
+    const linkNodes = new Set()
+    const storeNodes = new Set()
+
+    for (const node of nodes) {
+      if (node.isStoreLink) {
+        storeNodes.add(node)
+      } else if (node.isLink) {
+        linkNodes.add(node)
+      } else {
+        depNodes.add(node)
+      }
+    }
+    // Make sure that store linked nodes are processed last.
+    // We can't process store links separately or else lifecycle scripts on
+    // standard nodes might not have bin links yet.
+    for (const node of storeNodes) {
+      depNodes.add(node)
+    }
+
+    // deduplicate link nodes and their targets, to avoid
+    // calling lifecycle scripts twice when running `npm rebuild`
+    // ref: https://github.com/npm/cli/issues/2905
+    //
+    // we avoid doing so if global=true since `bin-links` relies
+    // on having the target nodes available in global mode.
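+    // (a hypothetical example: with a `file:../foo` dep, node_modules/foo
+    // is a Link whose target is ../foo; locally we drop the target from
+    // depNodes so its scripts only run once via the links pass, but in
+    // global mode the target must stay in depNodes for bin-links)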
+ if (!this.options.global) { + for (const node of linkNodes) { + depNodes.delete(node.target) + } + } + + return { + depNodes, + linkNodes, + } + } + + #resetQueues () { + this.#queues = { + preinstall: [], + install: [], + postinstall: [], + prepare: [], + bin: [], + } + } + + async #build (nodes, { type = 'deps' }) { + const timeEnd = time.start(`build:${type}`) + + await this.#buildQueues(nodes) + + if (!this.options.ignoreScripts) { + await this.#runScripts('preinstall') + } + + // links should run prepare scripts and only link bins after that + if (type === 'links') { + if (!this.options.ignoreScripts) { + await this.#runScripts('prepare') + } + } + if (this.options.binLinks) { + await this.#linkAllBins() + } + + if (!this.options.ignoreScripts) { + await this.#runScripts('install') + await this.#runScripts('postinstall') + } + + timeEnd() + } + + async #buildQueues (nodes) { + const timeEnd = time.start('build:queue') + const set = new Set() + + const promises = [] + for (const node of nodes) { + promises.push(this.#addToBuildSet(node, set)) + + // if it has bundle deps, add those too, if rebuildBundle + if (this.options.rebuildBundle !== false) { + const bd = node.package.bundleDependencies + if (bd && bd.length) { + dfwalk({ + tree: node, + leave: node => promises.push(this.#addToBuildSet(node, set)), + getChildren: node => [...node.children.values()], + filter: node => node.inBundle, + }) + } + } + } + await promiseAllRejectLate(promises) + + // now sort into the queues for the 4 things we have to do + // run in the same predictable order that buildIdealTree uses + // there's no particular reason for doing it in this order rather + // than another, but sorting *somehow* makes it consistent. + const queue = [...set].sort(sortNodes) + + for (const node of queue) { + const { package: { bin, scripts = {} } } = node.target + const { preinstall, install, postinstall, prepare } = scripts + const tests = { bin, preinstall, install, postinstall, prepare } + for (const [key, has] of Object.entries(tests)) { + if (has) { + this.#queues[key].push(node) + } + } + } + timeEnd() + } + + async [_checkBins] (node) { + // if the node is a global top, and we're not in force mode, then + // any existing bins need to either be missing, or a symlink into + // the node path. Otherwise a package can have a preinstall script + // that unlinks something, to allow them to silently overwrite system + // binaries, which is unsafe and insecure. + if (!node.globalTop || this.options.force) { + return + } + const { path, package: pkg } = node + await binLinks.checkBins({ pkg, path, top: true, global: true }) + } + + async #addToBuildSet (node, set, refreshed = false) { + if (set.has(node)) { + return + } + + if (this.#oldMeta === null) { + const { root: { meta } } = node + this.#oldMeta = meta && meta.loadedFromDisk && + !(meta.originalLockfileVersion >= 2) + } + + const { package: pkg, hasInstallScript } = node.target + const { gypfile, bin, scripts = {} } = pkg + + const { preinstall, install, postinstall, prepare } = scripts + const anyScript = preinstall || install || postinstall || prepare + if (!refreshed && !anyScript && (hasInstallScript || this.#oldMeta)) { + // we either have an old metadata (and thus might have scripts) + // or we have an indication that there's install scripts (but + // don't yet know what they are) so we have to load the package.json + // from disk to see what the deal is. Failure here just means + // no scripts to add, probably borked package.json. 
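+      // (this is also why #addToBuildSet takes a `refreshed` flag: after
+      // reading package.json from disk it recurses exactly once)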
+ // add to the set then remove while we're reading the pj, so we + // don't accidentally hit it multiple times. + set.add(node) + const { content: pkg } = await PackageJson.normalize(node.path).catch(() => { + return { content: {} } + }) + set.delete(node) + + const { scripts = {} } = pkg + node.package.scripts = scripts + return this.#addToBuildSet(node, set, true) + } + + // Rebuild node-gyp dependencies lacking an install or preinstall script + // note that 'scripts' might be missing entirely, and the package may + // set gypfile:false to avoid this automatic detection. + const isGyp = gypfile !== false && + !install && + !preinstall && + await isNodeGypPackage(node.path) + + if (bin || preinstall || install || postinstall || prepare || isGyp) { + if (bin) { + await this[_checkBins](node) + } + if (isGyp) { + scripts.install = defaultGypInstallScript + node.package.scripts = scripts + } + set.add(node) + } + } + + async #runScripts (event) { + const queue = this.#queues[event] + + if (!queue.length) { + return + } + + const timeEnd = time.start(`build:run:${event}`) + const stdio = this.options.foregroundScripts ? 'inherit' : 'pipe' + const limit = this.options.foregroundScripts ? 1 : undefined + await promiseCallLimit(queue.map(node => async () => { + const { + path, + integrity, + resolved, + optional, + peer, + dev, + devOptional, + package: pkg, + location, + isStoreLink, + } = node.target + + // skip any that we know we'll be deleting + // or storeLinks + if (this[_trashList].has(path) || isStoreLink) { + return + } + + const timeEndLocation = time.start(`build:run:${event}:${location}`) + log.info('run', pkg._id, event, location, pkg.scripts[event]) + const env = { + npm_package_resolved: resolved, + npm_package_integrity: integrity, + npm_package_json: resolve(path, 'package.json'), + npm_package_optional: boolEnv(optional), + npm_package_dev: boolEnv(dev), + npm_package_peer: boolEnv(peer), + npm_package_dev_optional: + boolEnv(devOptional && !dev && !optional), + } + const runOpts = { + event, + path, + pkg, + stdio, + env, + scriptShell: this.options.scriptShell, + } + const p = runScript(runOpts).catch(er => { + const { code, signal } = er + log.info('run', pkg._id, event, { code, signal }) + throw er + }).then(({ args, code, signal, stdout, stderr }) => { + this.scriptsRun.add({ + pkg, + path, + event, + // I do not know why this needs to be on THIS line but refactoring + // this function would be quite a process + // eslint-disable-next-line promise/always-return + cmd: args && args[args.length - 1], + env, + code, + signal, + stdout, + stderr, + }) + log.info('run', pkg._id, event, { code, signal }) + }) + + await (this.#doHandleOptionalFailure + ? this[_handleOptionalFailure](node, p) + : p) + + timeEndLocation() + }), { limit }) + timeEnd() + } + + async #linkAllBins () { + const queue = this.#queues.bin + if (!queue.length) { + return + } + + const timeEnd = time.start('build:link') + const promises = [] + // sort the queue by node path, so that the module-local collision + // detector in bin-links will always resolve the same way. 
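+    // (sortNodes orders by depth and then by path, per its definition at
+    // the top of this file, so the link order is stable across runs)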
+ for (const node of queue.sort(sortNodes)) { + // TODO these run before they're awaited + promises.push(this.#createBinLinks(node)) + } + + await promiseAllRejectLate(promises) + timeEnd() + } + + async #createBinLinks (node) { + if (this[_trashList].has(node.path)) { + return + } + + const timeEnd = time.start(`build:link:${node.location}`) + + const p = binLinks({ + pkg: node.package, + path: node.path, + top: !!(node.isTop || node.globalTop), + force: this.options.force, + global: !!node.globalTop, + }) + + await (this.#doHandleOptionalFailure + ? this[_handleOptionalFailure](node, p) + : p) + + timeEnd() + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js new file mode 100644 index 0000000000000000000000000000000000000000..70d4d9796d2e722594cd845f49469c46e73f7dcf --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js @@ -0,0 +1,1516 @@ +// mixin implementing the reify method +const PackageJson = require('@npmcli/package-json') +const hgi = require('hosted-git-info') +const npa = require('npm-package-arg') +const packageContents = require('@npmcli/installed-package-contents') +const pacote = require('pacote') +const promiseAllRejectLate = require('promise-all-reject-late') +const runScript = require('@npmcli/run-script') +const { callLimit: promiseCallLimit } = require('promise-call-limit') +const { depth: dfwalk } = require('treeverse') +const { dirname, resolve, relative, join } = require('node:path') +const { log, time } = require('proc-log') +const { lstat, mkdir, rm, symlink } = require('node:fs/promises') +const { moveFile } = require('@npmcli/fs') +const { subset, intersects } = require('semver') +const { walkUp } = require('walk-up-path') + +const AuditReport = require('../audit-report.js') +const Diff = require('../diff.js') +const calcDepFlags = require('../calc-dep-flags.js') +const debug = require('../debug.js') +const onExit = require('../signal-handling.js') +const optionalSet = require('../optional-set.js') +const relpath = require('../relpath.js') +const retirePath = require('../retire-path.js') +const treeCheck = require('../tree-check.js') +const { defaultLockfileVersion } = require('../shrinkwrap.js') +const { saveTypeMap, hasSubKey } = require('../add-rm-pkg-deps.js') + +// Part of steps (steps need refactoring before we can do anything about these) +const _retireShallowNodes = Symbol.for('retireShallowNodes') +const _loadBundlesAndUpdateTrees = Symbol.for('loadBundlesAndUpdateTrees') +const _submitQuickAudit = Symbol('submitQuickAudit') +const _unpackNewModules = Symbol.for('unpackNewModules') +const _build = Symbol.for('build') + +// shared by rebuild mixin +const _trashList = Symbol.for('trashList') +const _handleOptionalFailure = Symbol.for('handleOptionalFailure') +const _loadTrees = Symbol.for('loadTrees') +// defined by rebuild mixin +const _checkBins = Symbol.for('checkBins') + +// shared symbols for swapping out when testing +// TODO tests should not be this deep into internals +const _diffTrees = Symbol.for('diffTrees') +const _createSparseTree = Symbol.for('createSparseTree') +const _loadShrinkwrapsAndUpdateTrees = Symbol.for('loadShrinkwrapsAndUpdateTrees') +const _reifyNode = Symbol.for('reifyNode') +const _updateAll = Symbol.for('updateAll') +const _updateNames = Symbol.for('updateNames') 
+const _moveContents = Symbol.for('moveContents') +const _moveBackRetiredUnchanged = Symbol.for('moveBackRetiredUnchanged') +const _removeTrash = Symbol.for('removeTrash') +const _renamePath = Symbol.for('renamePath') +const _rollbackRetireShallowNodes = Symbol.for('rollbackRetireShallowNodes') +const _rollbackCreateSparseTree = Symbol.for('rollbackCreateSparseTree') +const _rollbackMoveBackRetiredUnchanged = Symbol.for('rollbackMoveBackRetiredUnchanged') +const _saveIdealTree = Symbol.for('saveIdealTree') +const _reifyPackages = Symbol.for('reifyPackages') + +// defined by build-ideal-tree mixin +const _resolvedAdd = Symbol.for('resolvedAdd') +const _usePackageLock = Symbol.for('usePackageLock') +// used by build-ideal-tree mixin +const _addNodeToTrashList = Symbol.for('addNodeToTrashList') + +const _createIsolatedTree = Symbol.for('createIsolatedTree') + +module.exports = cls => class Reifier extends cls { + #bundleMissing = new Set() // child nodes we'd EXPECT to be included in a bundle, but aren't + #bundleUnpacked = new Set() // the nodes we unpack to read their bundles + #dryRun + #nmValidated = new Set() + #omit + #retiredPaths = {} + #retiredUnchanged = {} + #savePrefix + #shrinkwrapInflated = new Set() + #sparseTreeDirs = new Set() + #sparseTreeRoots = new Set() + + constructor (options) { + super(options) + + this[_trashList] = new Set() + } + + // public method + async reify (options = {}) { + const linked = (options.installStrategy || this.options.installStrategy) === 'linked' + + if (this.options.packageLockOnly && this.options.global) { + const er = new Error('cannot generate lockfile for global packages') + er.code = 'ESHRINKWRAPGLOBAL' + throw er + } + + this.#omit = new Set(options.omit) + + // start tracker block + this.addTracker('reify') + const timeEnd = time.start('reify') + // don't create missing dirs on dry runs + if (!this.options.packageLockOnly && !this.options.dryRun) { + // we do NOT want to set ownership on this folder, especially + // recursively, because it can have other side effects to do that + // in a project directory. We just want to make it if it's missing. + await mkdir(resolve(this.path), { recursive: true }) + + // do not allow the top-level node_modules to be a symlink + await this.#validateNodeModules(resolve(this.path, 'node_modules')) + } + await this[_loadTrees](options) + + const oldTree = this.idealTree + if (linked) { + // swap out the tree with the isolated tree + // this is currently technical debt which will be resolved in a refactor + // of Node/Link trees + log.warn('reify', 'The "linked" install strategy is EXPERIMENTAL and may contain bugs.') + this.idealTree = await this[_createIsolatedTree]() + } + await this[_diffTrees]() + await this[_reifyPackages]() + if (linked) { + // swap back in the idealTree + // so that the lockfile is preserved + this.idealTree = oldTree + } + await this[_saveIdealTree](options) + // clean inert + for (const node of this.idealTree.inventory.values()) { + if (node.inert) { + node.parent = null + } + } + // clean up any trash that is still in the tree + for (const path of this[_trashList]) { + const loc = relpath(this.idealTree.realpath, path) + const node = this.idealTree.inventory.get(loc) + if (node && node.root === this.idealTree) { + node.parent = null + } + } + + // if we filtered to only certain nodes, then anything ELSE needs + // to be untouched in the resulting actual tree, even if it differs + // in the idealTree. 
Copy over anything that was in the actual and + // was not changed, delete anything in the ideal and not actual. + // Then we move the entire idealTree over to this.actualTree, and + // save the hidden lockfile. + if (this.diff && this.diff.filterSet.size) { + const reroot = new Set() + + const { filterSet } = this.diff + const seen = new Set() + for (const [loc, ideal] of this.idealTree.inventory.entries()) { + seen.add(loc) + + // if it's an ideal node from the filter set, then skip it + // because we already made whatever changes were necessary + if (filterSet.has(ideal)) { + continue + } + + // otherwise, if it's not in the actualTree, then it's not a thing + // that we actually added. And if it IS in the actualTree, then + // it's something that we left untouched, so we need to record + // that. + const actual = this.actualTree.inventory.get(loc) + if (!actual) { + ideal.root = null + } else { + if ([...actual.linksIn].some(link => filterSet.has(link))) { + seen.add(actual.location) + continue + } + const { realpath, isLink } = actual + if (isLink && ideal.isLink && ideal.realpath === realpath) { + continue + } else { + reroot.add(actual) + } + } + } + + // now find any actual nodes that may not be present in the ideal + // tree, but were left behind by virtue of not being in the filter + for (const [loc, actual] of this.actualTree.inventory.entries()) { + if (seen.has(loc)) { + continue + } + seen.add(loc) + + // we know that this is something that ISN'T in the idealTree, + // or else we will have addressed it in the previous loop. + // If it's in the filterSet, that means we intentionally removed + // it, so nothing to do here. + if (filterSet.has(actual)) { + continue + } + + reroot.add(actual) + } + + // go through the rerooted actual nodes, and move them over. + for (const actual of reroot) { + actual.root = this.idealTree + } + + // prune out any tops that lack a linkIn, they are no longer relevant. + for (const top of this.idealTree.tops) { + if (top.linksIn.size === 0) { + top.root = null + } + } + + // need to calculate dep flags, since nodes may have been marked + // as extraneous or otherwise incorrect during transit. + calcDepFlags(this.idealTree) + } + + // save the ideal's meta as a hidden lockfile after we actualize it + this.idealTree.meta.filename = + this.idealTree.realpath + '/node_modules/.package-lock.json' + this.idealTree.meta.hiddenLockfile = true + this.idealTree.meta.lockfileVersion = defaultLockfileVersion + + this.actualTree = this.idealTree + this.idealTree = null + + if (!this.options.global) { + await this.actualTree.meta.save() + const ignoreScripts = !!this.options.ignoreScripts + // if we aren't doing a dry run or ignoring scripts and we actually made changes to the dep + // tree, then run the dependencies scripts + if (!this.options.dryRun && !ignoreScripts && this.diff && this.diff.children.length) { + const { path, package: pkg } = this.actualTree.target + const stdio = this.options.foregroundScripts ? 
'inherit' : 'pipe' + const { scripts = {} } = pkg + for (const event of ['predependencies', 'dependencies', 'postdependencies']) { + if (Object.prototype.hasOwnProperty.call(scripts, event)) { + log.info('run', pkg._id, event, scripts[event]) + await time.start(`reify:run:${event}`, () => runScript({ + event, + path, + pkg, + stdio, + scriptShell: this.options.scriptShell, + })) + } + } + } + } + // This is a very bad pattern and I can't wait to stop doing it + this.auditReport = await this.auditReport + + this.finishTracker('reify') + timeEnd() + return treeCheck(this.actualTree) + } + + async [_reifyPackages] () { + // we don't submit the audit report or write to disk on dry runs + if (this.options.dryRun) { + return + } + + if (this.options.packageLockOnly) { + // we already have the complete tree, so just audit it now, + // and that's all we have to do here. + return this[_submitQuickAudit]() + } + + // ok, we're about to start touching the fs. need to roll back + // if we get an early termination. + let reifyTerminated = null + const removeHandler = onExit(({ signal }) => { + // only call once. if signal hits twice, we just terminate + removeHandler() + reifyTerminated = Object.assign(new Error('process terminated'), { + signal, + }) + return false + }) + + // [rollbackfn, [...actions]] + // after each step, if the process was terminated, execute the rollback + // note that each rollback *also* calls the previous one when it's + // finished, and then the first one throws the error, so we only need + // a new rollback step when we have a new thing that must be done to + // revert the install. + const steps = [ + [_rollbackRetireShallowNodes, [ + _retireShallowNodes, + ]], + [_rollbackCreateSparseTree, [ + _createSparseTree, + _loadShrinkwrapsAndUpdateTrees, + _loadBundlesAndUpdateTrees, + _submitQuickAudit, + _unpackNewModules, + ]], + [_rollbackMoveBackRetiredUnchanged, [ + _moveBackRetiredUnchanged, + _build, + ]], + ] + for (const [rollback, actions] of steps) { + for (const action of actions) { + try { + await this[action]() + if (reifyTerminated) { + throw reifyTerminated + } + } catch (er) { + // TODO rollbacks shouldn't be relied on to throw err + await this[rollback](er) + /* istanbul ignore next - rollback throws, should never hit this */ + throw er + } + } + } + + // no rollback for this one, just exit with the error, since the + // install completed and can't be safely recovered at this point. + await this[_removeTrash]() + if (reifyTerminated) { + throw reifyTerminated + } + + // done modifying the file system, no need to keep listening for sigs + removeHandler() + } + + // when doing a local install, we load everything and figure it all out. + // when doing a global install, we *only* care about the explicit requests. + [_loadTrees] (options) { + const timeEnd = time.start('reify:loadTrees') + const bitOpt = { + ...options, + complete: this.options.packageLockOnly || this.options.dryRun, + } + + // if we're only writing a package lock, then it doesn't matter what's here + if (this.options.packageLockOnly) { + return this.buildIdealTree(bitOpt).then(timeEnd) + } + + const actualOpt = this.options.global ? 
{ + ignoreMissing: true, + global: true, + filter: (node, kid) => { + // if it's not the project root, and we have no explicit requests, + // then we're already into a nested dep, so we keep it + if (this.explicitRequests.size === 0 || !node.isProjectRoot) { + return true + } + + // if we added it as an edgeOut, then we want it + if (this.idealTree.edgesOut.has(kid)) { + return true + } + + // if it's an explicit request, then we want it + const hasExplicit = [...this.explicitRequests] + .some(edge => edge.name === kid) + if (hasExplicit) { + return true + } + + // ignore the rest of the global install folder + return false + }, + } : { ignoreMissing: true } + + if (!this.options.global) { + return Promise.all([ + this.loadActual(actualOpt), + this.buildIdealTree(bitOpt), + ]).then(timeEnd) + } + + // the global install space tends to have a lot of stuff in it. don't + // load all of it, just what we care about. we won't be saving a + // hidden lockfile in there anyway. Note that we have to load ideal + // BEFORE loading actual, so that the actualOpt can use the + // explicitRequests which is set during buildIdealTree + return this.buildIdealTree(bitOpt) + .then(() => this.loadActual(actualOpt)) + .then(timeEnd) + } + + [_diffTrees] () { + if (this.options.packageLockOnly) { + return + } + + const timeEnd = time.start('reify:diffTrees') + // XXX if we have an existing diff already, there should be a way + // to just invalidate the parts that changed, but avoid walking the + // whole tree again. + + const includeWorkspaces = this.options.workspacesEnabled + const includeRootDeps = !includeWorkspaces + || this.options.includeWorkspaceRoot && this.options.workspaces.length > 0 + + const filterNodes = [] + if (this.options.global && this.explicitRequests.size) { + const idealTree = this.idealTree.target + const actualTree = this.actualTree.target + // we ONLY are allowed to make changes in the global top-level + // children where there's an explicit request. + for (const { name } of this.explicitRequests) { + const ideal = idealTree.children.get(name) + if (ideal) { + filterNodes.push(ideal) + } + const actual = actualTree.children.get(name) + if (actual) { + filterNodes.push(actual) + } + } + } else { + if (includeWorkspaces) { + // add all ws nodes to filterNodes + for (const ws of this.options.workspaces) { + const ideal = this.idealTree.children.get(ws) + if (ideal) { + filterNodes.push(ideal) + } + const actual = this.actualTree.children.get(ws) + if (actual) { + filterNodes.push(actual) + } + } + } + if (includeRootDeps) { + // add all non-workspace nodes to filterNodes + for (const tree of [this.idealTree, this.actualTree]) { + for (const { type, to } of tree.edgesOut.values()) { + if (type !== 'workspace' && to) { + filterNodes.push(to) + } + } + } + } + } + + // find all the nodes that need to change between the actual + // and ideal trees. + this.diff = Diff.calculate({ + omit: this.#omit, + shrinkwrapInflated: this.#shrinkwrapInflated, + filterNodes, + actual: this.actualTree, + ideal: this.idealTree, + }) + + // we don't have to add 'removed' folders to the trashlist, because + // they'll be moved aside to a retirement folder, and then the retired + // folder will be deleted at the end. This is important when we have + // a folder like FOO being "removed" in favor of a folder like "foo", + // because if we remove node_modules/FOO on case-insensitive systems, + // it will remove the dep that we *want* at node_modules/foo. 
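+    // (illustration: on a case-insensitive filesystem such as the macOS
+    // default, node_modules/FOO and node_modules/foo are the same directory
+    // entry, so moving FOO aside to a retirement folder first is the only
+    // safe ordering)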
+ + timeEnd() + } + + // add the node and all its bins to the list of things to be + // removed later on in the process. optionally, also mark them + // as a retired paths, so that we move them out of the way and + // replace them when rolling back on failure. + [_addNodeToTrashList] (node, retire = false) { + const paths = [node.path, ...node.binPaths] + const moves = this.#retiredPaths + log.silly('reify', 'mark', retire ? 'retired' : 'deleted', paths) + for (const path of paths) { + if (retire) { + const retired = retirePath(path) + moves[path] = retired + this[_trashList].add(retired) + } else { + this[_trashList].add(path) + } + } + } + + // move aside the shallowest nodes in the tree that have to be + // changed or removed, so that we can rollback if necessary. + [_retireShallowNodes] () { + const timeEnd = time.start('reify:retireShallow') + const moves = this.#retiredPaths = {} + for (const diff of this.diff.children) { + if (diff.action === 'CHANGE' || diff.action === 'REMOVE') { + // we'll have to clean these up at the end, so add them to the list + this[_addNodeToTrashList](diff.actual, true) + } + } + log.silly('reify', 'moves', moves) + const movePromises = Object.entries(moves) + .map(([from, to]) => this[_renamePath](from, to)) + return promiseAllRejectLate(movePromises).then(timeEnd) + } + + [_renamePath] (from, to, didMkdirp = false) { + return moveFile(from, to) + .catch(er => { + // Occasionally an expected bin file might not exist in the package, + // or a shim/symlink might have been moved aside. If we've already + // handled the most common cause of ENOENT (dir doesn't exist yet), + // then just ignore any ENOENT. + if (er.code === 'ENOENT') { + return didMkdirp ? null : mkdir(dirname(to), { recursive: true }).then(() => + this[_renamePath](from, to, true)) + } else if (er.code === 'EEXIST') { + return rm(to, { recursive: true, force: true }).then(() => moveFile(from, to)) + } else { + throw er + } + }) + } + + [_rollbackRetireShallowNodes] (er) { + const timeEnd = time.start('reify:rollback:retireShallow') + const moves = this.#retiredPaths + const movePromises = Object.entries(moves) + .map(([from, to]) => this[_renamePath](to, from)) + return promiseAllRejectLate(movePromises) + // ignore subsequent rollback errors + .catch(() => {}) + .then(timeEnd) + .then(() => { + throw er + }) + } + + [_createSparseTree] () { + const timeEnd = time.start('reify:createSparse') + // if we call this fn again, we look for the previous list + // so that we can avoid making the same directory multiple times + const leaves = this.diff.leaves + .filter(diff => { + return (diff.action === 'ADD' || diff.action === 'CHANGE') && + !this.#sparseTreeDirs.has(diff.ideal.path) && + !diff.ideal.isLink + }) + .map(diff => diff.ideal) + + // we check this in parallel, so guard against multiple attempts to + // retire the same path at the same time. + const dirsChecked = new Set() + return promiseAllRejectLate(leaves.map(async node => { + for (const d of walkUp(node.path)) { + if (d === node.top.path) { + break + } + if (dirsChecked.has(d)) { + continue + } + dirsChecked.add(d) + const st = await lstat(d).catch(() => null) + // this can happen if we have a link to a package with a name + // that the filesystem treats as if it is the same thing. + // would be nice to have conditional istanbul ignores here... 
+ /* istanbul ignore next - defense in depth */ + if (st && !st.isDirectory()) { + const retired = retirePath(d) + this.#retiredPaths[d] = retired + this[_trashList].add(retired) + await this[_renamePath](d, retired) + } + } + this.#sparseTreeDirs.add(node.path) + const made = await mkdir(node.path, { recursive: true }) + // if the directory already exists, made will be undefined. if that's the case + // we don't want to remove it because we aren't the ones who created it so we + // omit it from the #sparseTreeRoots + if (made) { + this.#sparseTreeRoots.add(made) + } + })).then(timeEnd) + } + + [_rollbackCreateSparseTree] (er) { + const timeEnd = time.start('reify:rollback:createSparse') + // cut the roots of the sparse tree that were created, not the leaves + const roots = this.#sparseTreeRoots + // also delete the moves that we retired, so that we can move them back + const failures = [] + const targets = [...roots, ...Object.keys(this.#retiredPaths)] + const unlinks = targets + .map(path => rm(path, { recursive: true, force: true }).catch(er => failures.push([path, er]))) + return promiseAllRejectLate(unlinks).then(() => { + // eslint-disable-next-line promise/always-return + if (failures.length) { + log.warn('cleanup', 'Failed to remove some directories', failures) + } + }) + .then(timeEnd) + .then(() => this[_rollbackRetireShallowNodes](er)) + } + + // shrinkwrap nodes define their dependency branches with a file, so + // we need to unpack them, read that shrinkwrap file, and then update + // the tree by calling loadVirtual with the node as the root. + [_loadShrinkwrapsAndUpdateTrees] () { + const seen = this.#shrinkwrapInflated + const shrinkwraps = this.diff.leaves + .filter(d => (d.action === 'CHANGE' || d.action === 'ADD' || !d.action) && + d.ideal.hasShrinkwrap && !seen.has(d.ideal) && + !this[_trashList].has(d.ideal.path)) + + if (!shrinkwraps.length) { + return + } + + const timeEnd = time.start('reify:loadShrinkwraps') + + const Arborist = this.constructor + return promiseAllRejectLate(shrinkwraps.map(diff => { + const node = diff.ideal + seen.add(node) + return diff.action ? this[_reifyNode](node) : node + })) + .then(nodes => promiseAllRejectLate(nodes.map(node => new Arborist({ + ...this.options, + path: node.path, + }).loadVirtual({ root: node })))) + // reload the diff and sparse tree because the ideal tree changed + .then(() => this[_diffTrees]()) + .then(() => this[_createSparseTree]()) + .then(() => this[_loadShrinkwrapsAndUpdateTrees]()) + .then(timeEnd) + } + + // create a symlink for Links, extract for Nodes + // return the node object, since we usually want that + // handle optional dep failures here + // If reifying fails, and the node is optional, add it and its optionalSet + // to the trash list + // Always return the node. + [_reifyNode] (node) { + const timeEnd = time.start(`reifyNode:${node.location}`) + this.addTracker('reify', node.name, node.location) + + const p = Promise.resolve().then(async () => { + await this[_checkBins](node) + await this.#extractOrLink(node) + const { _id, deprecated } = node.package + // The .catch is in _handleOptionalFailure. Not ideal, this should be cleaned up. 
+ // eslint-disable-next-line promise/always-return + if (deprecated) { + log.warn('deprecated', `${_id}: ${deprecated}`) + } + }) + + return this[_handleOptionalFailure](node, p) + .then(() => { + this.finishTracker('reify', node.name, node.location) + timeEnd() + return node + }) + } + + // do not allow node_modules to be a symlink + async #validateNodeModules (nm) { + if (this.options.force || this.#nmValidated.has(nm)) { + return + } + const st = await lstat(nm).catch(() => null) + if (!st || st.isDirectory()) { + this.#nmValidated.add(nm) + return + } + log.warn('reify', 'Removing non-directory', nm) + await rm(nm, { recursive: true, force: true }) + } + + async #extractOrLink (node) { + const nm = resolve(node.parent.path, 'node_modules') + await this.#validateNodeModules(nm) + + if (!node.isLink) { + // in normal cases, node.resolved should *always* be set by now. + // however, it is possible when a lockfile is damaged, or very old, + // or in some other race condition bugs in npm v6, that a previously + // bundled dependency will have just a version, but no resolved value, + // and no 'bundled: true' setting. + // Do the best with what we have, or else remove it from the tree + // entirely, since we can't possibly reify it. + let res = null + if (node.resolved) { + const registryResolved = this.#registryResolved(node.resolved) + if (registryResolved) { + res = `${node.name}@${registryResolved}` + } + } else if (node.package.name && node.version) { + res = `${node.package.name}@${node.version}` + } + + // no idea what this thing is. remove it from the tree. + if (!res) { + const warning = 'invalid or damaged lockfile detected\n' + + 'please re-try this operation once it completes\n' + + 'so that the damage can be corrected, or perform\n' + + 'a fresh install with no lockfile if the problem persists.' + log.warn('reify', warning) + log.verbose('reify', 'unrecognized node in tree', node.path) + node.parent = null + node.fsParent = null + this[_addNodeToTrashList](node) + return + } + await debug(async () => { + const st = await lstat(node.path).catch(() => null) + if (st && !st.isDirectory()) { + debug.log('unpacking into a non-directory', node) + throw Object.assign(new Error('ENOTDIR: not a directory'), { + code: 'ENOTDIR', + path: node.path, + }) + } + }) + await pacote.extract(res, node.path, { + ...this.options, + resolved: node.resolved, + integrity: node.integrity, + }) + // store nodes don't use Node class so node.package doesn't get updated + if (node.isInStore) { + const { content: pkg } = await PackageJson.normalize(node.path) + node.package.scripts = pkg.scripts + } + return + } + + // node.isLink + await rm(node.path, { recursive: true, force: true }) + + // symlink + const dir = dirname(node.path) + const target = node.realpath + + let rel + if (node.resolved?.startsWith('file:')) { + rel = this.#calculateRelativePath(node, dir, target, nm) + } else { + rel = relative(dir, target) + } + + await mkdir(dir, { recursive: true }) + return symlink(rel, node.path, 'junction') + } + + // if the node is optional, then the failure of the promise is nonfatal + // just add it and its optional set to the trash list. + [_handleOptionalFailure] (node, p) { + return (node.optional ? 
p.catch(() => { + const set = optionalSet(node) + for (const node of set) { + log.verbose('reify', 'failed optional dependency', node.path) + node.inert = true + this[_addNodeToTrashList](node) + } + }) : p).then(() => node) + } + + #calculateRelativePath (node, dir, target) { + // Check if the node is affected by a root override + let hasRootOverride = [...node.edgesIn].some(edge => edge.from.isRoot && edge.overrides) + // If not set via edges, see if the root package.json explicitly lists an override + if (!hasRootOverride && node.root) { + const rootPackage = node.root.target + hasRootOverride = !!(rootPackage && + rootPackage.package.overrides && + rootPackage.package.overrides[node.name]) + } + if (!hasRootOverride) { + return relative(dir, target) + } + // If an override is detected, attempt to retrieve the override spec from the root package.json + const overrideSpec = node.root?.target?.package?.overrides?.[node.name] + if (typeof overrideSpec === 'string' && overrideSpec.startsWith('file:')) { + const overridePath = overrideSpec.replace(/^file:/, '') + const rootDir = node.root.target.path + return relative(dir, resolve(rootDir, overridePath)) + } + + // Fallback: derive the file path from node.resolved in a platform-agnostic way + const filePath = node.resolved.replace(/^file:/, '') + return join(filePath) + } + + #registryResolved (resolved) { + // the default registry url is a magic value meaning "the currently + // configured registry". + // `resolved` must never be falsey. + // + // XXX: use a magic string that isn't also a valid value, like + // ${REGISTRY} or something. This has to be threaded through the + // Shrinkwrap and Node classes carefully, so for now, just treat + // the default reg as the magical animal that it has been. + try { + const resolvedURL = hgi.parseUrl(resolved) + + if ((this.options.replaceRegistryHost === resolvedURL.hostname) || + this.options.replaceRegistryHost === 'always') { + const registryURL = new URL(this.registry) + + // Replace the host with the registry host while keeping the path intact + resolvedURL.hostname = registryURL.hostname + resolvedURL.port = registryURL.port + resolvedURL.protocol = registryURL.protocol + + // Make sure we don't double-include the path if it's already there + const registryPath = registryURL.pathname.replace(/\/$/, '') + + if (registryPath && registryPath !== '/' && !resolvedURL.pathname.startsWith(registryPath)) { + // Since hostname is changed, we need to ensure the registry path is included + resolvedURL.pathname = registryPath + resolvedURL.pathname + } + + return resolvedURL.toString() + } + return resolved + } catch (e) { + // if we could not parse the url at all then returning nothing + // here means it will get removed from the tree in the next step + return undefined + } + } + + // bundles are *sort of* like shrinkwraps, in that the branch is defined + // by the contents of the package. however, in their case, rather than + // shipping a virtual tree that must be reified, they ship an entire + // reified actual tree that must be unpacked and not modified. 
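+  // A minimal sketch of a package that takes this path (hypothetical
+  // manifest, for illustration only):
+  //
+  //   {
+  //     "name": "has-bundle",
+  //     "version": "1.0.0",
+  //     "dependencies": { "inner": "1.0.0" },
+  //     "bundleDependencies": ["inner"]
+  //   }
+  //
+  // the published tarball ships inner under has-bundle/node_modules/inner,
+  // so after extracting has-bundle we loadActual() its folder and graft the
+  // result into the ideal tree instead of resolving inner ourselves.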
+ [_loadBundlesAndUpdateTrees] (depth = 0, bundlesByDepth) { + let maxBundleDepth + if (!bundlesByDepth) { + bundlesByDepth = new Map() + maxBundleDepth = -1 + dfwalk({ + tree: this.diff, + visit: diff => { + const node = diff.ideal + if (!node) { + return + } + if (node.isProjectRoot) { + return + } + + const { bundleDependencies } = node.package + if (bundleDependencies && bundleDependencies.length) { + maxBundleDepth = Math.max(maxBundleDepth, node.depth) + if (!bundlesByDepth.has(node.depth)) { + bundlesByDepth.set(node.depth, [node]) + } else { + bundlesByDepth.get(node.depth).push(node) + } + } + }, + getChildren: diff => diff.children, + }) + + bundlesByDepth.set('maxBundleDepth', maxBundleDepth) + } else { + maxBundleDepth = bundlesByDepth.get('maxBundleDepth') + } + + if (depth === 0) { + time.start('reify:loadBundles') + } + + if (depth > maxBundleDepth) { + // if we did something, then prune the tree and update the diffs + if (maxBundleDepth !== -1) { + this.#pruneBundledMetadeps(bundlesByDepth) + this[_diffTrees]() + } + time.end('reify:loadBundles') + return + } + + // skip any that have since been removed from the tree, eg by a + // shallower bundle overwriting them with a bundled meta-dep. + const set = (bundlesByDepth.get(depth) || []) + .filter(node => node.root === this.idealTree && + node.target !== node.root && + !this[_trashList].has(node.path)) + + if (!set.length) { + return this[_loadBundlesAndUpdateTrees](depth + 1, bundlesByDepth) + } + + // extract all the nodes with bundles + return promiseCallLimit(set.map(node => { + return () => { + this.#bundleUnpacked.add(node) + return this[_reifyNode](node) + } + }), { rejectLate: true }) + // then load their unpacked children and move into the ideal tree + .then(nodes => + promiseAllRejectLate(nodes.map(async node => { + const arb = new this.constructor({ + ...this.options, + path: node.path, + }) + const notTransplanted = new Set(node.children.keys()) + await arb.loadActual({ + root: node, + // don't transplant any sparse folders we created + // loadActual will set node.package to {} for empty directories + // if by chance there are some empty folders in the node_modules + // tree for some other reason, then ok, ignore those too. + transplantFilter: node => { + if (node.package._id) { + // it's actually in the bundle if it gets transplanted + notTransplanted.delete(node.name) + return true + } else { + return false + } + }, + }) + for (const name of notTransplanted) { + this.#bundleMissing.add(node.children.get(name)) + } + }))) + // move onto the next level of bundled items + .then(() => this[_loadBundlesAndUpdateTrees](depth + 1, bundlesByDepth)) + } + + // https://github.com/npm/cli/issues/1597#issuecomment-667639545 + #pruneBundledMetadeps (bundlesByDepth) { + const bundleShadowed = new Set() + + // Example dep graph: + // root -> (a, c) + // a -> BUNDLE(b) + // b -> c + // c -> b + // + // package tree: + // root + // +-- a + // | +-- b(1) + // | +-- c(1) + // +-- b(2) + // +-- c(2) + // 1. mark everything that's shadowed by anything in the bundle. This + // marks b(2) and c(2). + // 2. anything with edgesIn from outside the set, mark not-extraneous, + // remove from set. This unmarks c(2). + // 3. continue until no change + // 4. remove everything in the set from the tree. 
b(2) is pruned + + // create the list of nodes shadowed by children of bundlers + for (const bundles of bundlesByDepth.values()) { + // skip the 'maxBundleDepth' item + if (!Array.isArray(bundles)) { + continue + } + for (const node of bundles) { + for (const name of node.children.keys()) { + const shadow = node.parent.resolve(name) + if (!shadow) { + continue + } + bundleShadowed.add(shadow) + shadow.extraneous = true + } + } + } + + // lib -> (a@1.x) BUNDLE(a@1.2.3 (b@1.2.3)) + // a@1.2.3 -> (b@1.2.3) + // a@1.3.0 -> (b@2) + // b@1.2.3 -> () + // b@2 -> (c@2) + // + // root + // +-- lib + // | +-- a@1.2.3 + // | +-- b@1.2.3 + // +-- b@2 <-- shadowed, now extraneous + // +-- c@2 <-- also shadowed, because only dependent is shadowed + for (const shadow of bundleShadowed) { + for (const shadDep of shadow.edgesOut.values()) { + /* istanbul ignore else - pretty unusual situation, just being + * defensive here. Would mean that a bundled dep has a dependency + * that is unmet. which, weird, but if you bundle it, we take + * whatever you put there and assume the publisher knows best. */ + if (shadDep.to) { + bundleShadowed.add(shadDep.to) + shadDep.to.extraneous = true + } + } + } + + let changed + do { + changed = false + for (const shadow of bundleShadowed) { + for (const edge of shadow.edgesIn) { + if (!bundleShadowed.has(edge.from)) { + shadow.extraneous = false + bundleShadowed.delete(shadow) + changed = true + break + } + } + } + } while (changed) + + for (const shadow of bundleShadowed) { + this[_addNodeToTrashList](shadow) + shadow.root = null + } + } + + async [_submitQuickAudit] () { + if (this.options.audit === false) { + this.auditReport = null + return + } + + // we submit the quick audit at this point in the process, as soon as + // we have all the deps resolved, so that it can overlap with the other + // actions as much as possible. Stash the promise, which we resolve + // before finishing the reify() and returning the tree. Thus, we do + // NOT return the promise, as the intent is for this to run in parallel + // with the reification, and be resolved at a later time. + const timeEnd = time.start('reify:audit') + const options = { ...this.options } + const tree = this.idealTree + + // if we're operating on a workspace, only audit the workspace deps + if (this.options.workspaces.length) { + options.filterSet = this.workspaceDependencySet( + tree, + this.options.workspaces, + this.options.includeWorkspaceRoot + ) + } + + this.auditReport = AuditReport.load(tree, options).then(res => { + timeEnd() + return res + }) + } + + // ok! actually unpack stuff into their target locations! + // The sparse tree has already been created, so we walk the diff + // kicking off each unpack job. If any fail, we rm the sparse + // tree entirely and try to put everything back where it was. + [_unpackNewModules] () { + const timeEnd = time.start('reify:unpack') + const unpacks = [] + dfwalk({ + tree: this.diff, + visit: diff => { + // no unpacking if we don't want to change this thing + if (diff.action !== 'CHANGE' && diff.action !== 'ADD') { + return + } + + const node = diff.ideal + const bd = this.#bundleUnpacked.has(node) + const sw = this.#shrinkwrapInflated.has(node) + const bundleMissing = this.#bundleMissing.has(node) + + // check whether we still need to unpack this one. + // test the inDepBundle last, since that's potentially a tree walk. + const doUnpack = node && // can't unpack if removed! 
+ // root node already exists + !node.isRoot && + // already unpacked to read bundle + !bd && + // already unpacked to read sw + !sw && + // already unpacked by another dep's bundle + (bundleMissing || !node.inDepBundle) + + if (doUnpack) { + unpacks.push(this[_reifyNode](node)) + } + }, + getChildren: diff => diff.children, + }) + return promiseAllRejectLate(unpacks).then(timeEnd) + } + + // This is the part where we move back the unchanging nodes that were + // the children of a node that did change. If this fails, the rollback + // is a three-step process. First, we try to move the retired unchanged + // nodes BACK to their retirement folders, then delete the sparse tree, + // then move everything out of retirement. + [_moveBackRetiredUnchanged] () { + // get a list of all unchanging children of any shallow retired nodes + // if they are not the ancestor of any node in the diff set, then the + // directory won't already exist, so just rename it over. + // This is sort of an inverse diff tree, of all the nodes where + // the actualTree and idealTree _don't_ differ, starting from the + // shallowest nodes that we moved aside in the first place. + const timeEnd = time.start('reify:unretire') + const moves = this.#retiredPaths + this.#retiredUnchanged = {} + return promiseAllRejectLate(this.diff.children.map(diff => { + // skip if nothing was retired + if (diff.action !== 'CHANGE' && diff.action !== 'REMOVE') { + return + } + + const { path: realFolder } = diff.actual + const retireFolder = moves[realFolder] + /* istanbul ignore next - should be impossible */ + debug(() => { + if (!retireFolder) { + const er = new Error('trying to un-retire but not retired') + throw Object.assign(er, { + realFolder, + retireFolder, + actual: diff.actual, + ideal: diff.ideal, + action: diff.action, + }) + } + }) + + this.#retiredUnchanged[retireFolder] = [] + return promiseAllRejectLate(diff.unchanged.map(node => { + // no need to roll back links, since we'll just delete them anyway + if (node.isLink) { + return mkdir(dirname(node.path), { recursive: true, force: true }) + .then(() => this[_reifyNode](node)) + } + + // will have been moved/unpacked along with bundler + if (node.inDepBundle && !this.#bundleMissing.has(node)) { + return + } + + this.#retiredUnchanged[retireFolder].push(node) + + const rel = relative(realFolder, node.path) + const fromPath = resolve(retireFolder, rel) + // if it has bundleDependencies, then make node_modules. otherwise + // skip it. + const bd = node.package.bundleDependencies + const dir = bd && bd.length ? 
node.path + '/node_modules' : node.path
+ return mkdir(dir, { recursive: true }).then(() => this[_moveContents](node, fromPath))
+ }))
+ })).then(timeEnd)
+ }
+
+ // move the contents from the fromPath to the node.path
+ [_moveContents] (node, fromPath) {
+ return packageContents({
+ path: fromPath,
+ depth: 1,
+ packageJsonCache: new Map([[fromPath + '/package.json', node.package]]),
+ }).then(res => promiseAllRejectLate(res.map(path => {
+ const rel = relative(fromPath, path)
+ const to = resolve(node.path, rel)
+ return this[_renamePath](path, to)
+ })))
+ }
+
+ [_rollbackMoveBackRetiredUnchanged] (er) {
+ const moves = this.#retiredPaths
+ // flip the mapping around to go back
+ const realFolders = new Map(Object.entries(moves).map(([k, v]) => [v, k]))
+ const promises = Object.entries(this.#retiredUnchanged)
+ .map(([retireFolder, nodes]) => promiseAllRejectLate(nodes.map(node => {
+ const realFolder = realFolders.get(retireFolder)
+ const rel = relative(realFolder, node.path)
+ const fromPath = resolve(retireFolder, rel)
+ return this[_moveContents]({ ...node, path: fromPath }, node.path)
+ })))
+ return promiseAllRejectLate(promises)
+ .then(() => this[_rollbackCreateSparseTree](er))
+ }
+
+ [_build] () {
+ const timeEnd = time.start('reify:build')
+
+ // for all the things being installed, run their appropriate scripts
+ // run in tip->root order, so as to be more likely to build a node's
+ // deps before attempting to build it itself
+ const nodes = []
+ dfwalk({
+ tree: this.diff,
+ leave: diff => {
+ if (!diff.ideal.isProjectRoot) {
+ nodes.push(diff.ideal)
+ }
+ },
+ // process adds before changes, ignore removals
+ getChildren: diff => diff && diff.children,
+ filter: diff => diff.action === 'ADD' || diff.action === 'CHANGE',
+ })
+
+ // pick up link nodes from the unchanged list, since we want to run
+ // their scripts on every install regardless of their diff status
+ for (const node of this.diff.unchanged) {
+ const tree = node.root.target
+
+ // skip links that only live within node_modules, as they are most
+ // likely managed by packages we installed; we only want to rebuild
+ // unchanged links we directly manage
+ const linkedFromRoot = (node.parent === tree && !node.inert) || node.target.fsTop === tree
+ if (node.isLink && linkedFromRoot) {
+ nodes.push(node)
+ }
+ }
+
+ return this.rebuild({ nodes, handleOptionalFailure: true }).then(timeEnd)
+ }
+
+ // the tree is pretty much built now, so it's cleanup time.
+ // remove the retired folders, and any deleted nodes
+ // If this fails, there isn't much we can do but tell the user about it.
+ // Thankfully, it's pretty unlikely that it'll fail, since rm is a node builtin.
+ async [_removeTrash] () {
+ const timeEnd = time.start('reify:trash')
+ const promises = []
+ const failures = []
+ const _rm = path => rm(path, { recursive: true, force: true }).catch(er => failures.push([path, er]))
+
+ for (const path of this[_trashList]) {
+ promises.push(_rm(path))
+ }
+
+ await promiseAllRejectLate(promises)
+ if (failures.length) {
+ log.warn('cleanup', 'Failed to remove some directories', failures)
+ }
+
+ timeEnd()
+ }
+
+ // last but not least, we save the ideal tree metadata to the package-lock
+ // or shrinkwrap file, and any additions or removals to package.json
+ async [_saveIdealTree] (options) {
+ // the ideal tree is actualized now, hooray!
+ // it still contains all the references to optional nodes that were removed
+ // for install failures.
Those still end up in the shrinkwrap, so we
+ save it first, then prune out the optional trash, and then return it.
+
+ const save = !(options.save === false)
+
+ // we check for updates in order to make sure we run save ideal tree
+ // even though save=false since we want `npm update` to be able to
+ // write to package-lock files by default
+ const hasUpdates = this[_updateAll] || this[_updateNames].length
+
+ // we're going to completely skip save ideal tree in case of a global or
+ // dry-run install and also if the save option is set to false, EXCEPT for
+ // update, since the expected behavior for npm7+ is for update to
+ // NOT save to package.json; we make that exception since we still want
+ // saveIdealTree to be able to write the lockfile by default.
+ const saveIdealTree = !(
+ (!save && !hasUpdates)
+ || this.options.global
+ || this.options.dryRun
+ )
+
+ if (!saveIdealTree) {
+ return false
+ }
+
+ const timeEnd = time.start('reify:save')
+
+ const updatedTrees = new Set()
+ const updateNodes = nodes => {
+ for (const { name, tree: addTree } of nodes) {
+ // addTree is either the root, or a workspace
+ const edge = addTree.edgesOut.get(name)
+ const pkg = addTree.package
+ const req = npa.resolve(name, edge.spec, addTree.realpath)
+ const { rawSpec, subSpec } = req
+
+ const spec = subSpec ? subSpec.rawSpec : rawSpec
+ const child = edge.to
+
+ // if we tried to install an optional dep, but it was a version
+ // that we couldn't resolve, this MAY be missing. if we haven't
+ // blown up by now, it's because it was not a problem, though, so
+ // just move on.
+ if (!child || !addTree.isTop) {
+ continue
+ }
+
+ let newSpec
+ // True if the dependency is getting installed from a local file path
+ // In this case it is not possible to do the normal version comparisons
+ // as the new version will be a file path
+ const isLocalDep = req.type === 'directory' || req.type === 'file'
+ if (req.registry) {
+ const version = child.version
+ const prefixRange = version ? this.options.savePrefix + version : '*'
+ // if we installed a range, then we save the range specified
+ // if it is not a subset of the ^x.y.z. e.g., installing a range
+ // of `1.x <1.2.3` will not be saved as `^1.2.0`, because that
+ // would allow versions outside the requested range. Tags and
+ // specific versions save with the save-prefix.
+ const isRange = (subSpec || req).type === 'range'
+
+ let range = spec
+ if (
+ !isRange ||
+ spec === '*' ||
+ subset(prefixRange, spec, { loose: true })
+ ) {
+ range = prefixRange
+ }
+
+ const pname = child.packageName
+ const alias = name !== pname
+ newSpec = alias ? `npm:${pname}@${range}` : range
+ } else if (req.hosted) {
+ // save the git+https url if it has auth; otherwise, shortcut
+ const h = req.hosted
+ const opt = { noCommittish: false }
+ if (h.https && h.auth) {
+ newSpec = `git+${h.https(opt)}`
+ } else {
+ newSpec = h.shortcut(opt)
+ }
+ } else if (isLocalDep) {
+ // when finding workspace nodes, make sure that
+ // we save them using their version instead of
+ // using their relative path
+ if (edge.type === 'workspace') {
+ const { version } = edge.to.target
+ const prefixRange = version ? this.options.savePrefix + version : '*'
+ newSpec = prefixRange
+ } else {
+ // save the relative path in package.json
+ // Normally saveSpec is updated with the proper relative
+ // path already, but it's possible to specify a full absolute
+ // path initially, in which case we can end up with the wrong
+ // thing, so just get the ultimate fetchSpec and relativize it.
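+ // (illustrative, editorial note) e.g. a dep living at
+ // /home/user/pkgs/foo, added from a project at /home/user/proj,
+ // should be saved as file:../pkgs/foo, not as the absolute path.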
+ const p = req.fetchSpec.replace(/^file:/, '')
+ const rel = relpath(addTree.realpath, p)
+ newSpec = `file:${rel}`
+ }
+ } else {
+ newSpec = req.saveSpec
+ }
+
+ if (options.saveType) {
+ const depType = saveTypeMap.get(options.saveType)
+ pkg[depType][name] = newSpec
+ // PackageJson.normalize will have moved it here if it was in both; if it is empty it will be deleted later
+ if (options.saveType === 'prod' && pkg.optionalDependencies) {
+ delete pkg.optionalDependencies[name]
+ }
+ } else {
+ if (hasSubKey(pkg, 'dependencies', name)) {
+ pkg.dependencies[name] = newSpec
+ }
+
+ if (hasSubKey(pkg, 'devDependencies', name)) {
+ pkg.devDependencies[name] = newSpec
+ // don't update peer or optional if we don't have to
+ if (hasSubKey(pkg, 'peerDependencies', name) && (isLocalDep || !intersects(newSpec, pkg.peerDependencies[name]))) {
+ pkg.peerDependencies[name] = newSpec
+ }
+
+ if (hasSubKey(pkg, 'optionalDependencies', name) && (isLocalDep || !intersects(newSpec, pkg.optionalDependencies[name]))) {
+ pkg.optionalDependencies[name] = newSpec
+ }
+ } else {
+ if (hasSubKey(pkg, 'peerDependencies', name)) {
+ pkg.peerDependencies[name] = newSpec
+ }
+
+ if (hasSubKey(pkg, 'optionalDependencies', name)) {
+ pkg.optionalDependencies[name] = newSpec
+ }
+ }
+ }
+
+ updatedTrees.add(addTree)
+ }
+ }
+
+ // Returns true only if none of the edges into this node has a semver
+ // range that is an exact match to the installed version
+ // e.g.: for an installed version 1.0.0, returns false if some range
+ // is either =1.0.0 or 1.0.0, so those exact pins are skipped on update
+ const exactVersion = node => {
+ for (const edge of node.edgesIn) {
+ try {
+ if (subset(edge.spec, node.version)) {
+ return false
+ }
+ } catch {
+ // ignore errors
+ }
+ }
+ return true
+ }
+
+ // helper that retrieves an array of nodes that were
+ // potentially updated during the reify process, in order
+ // to limit the number of nodes to check and update, only
+ // select nodes from the inventory that are direct deps
+ // of a given package.json (project root or a workspace)
+ // and in case of using a list of `names`, restrict nodes
+ // to only names that are found in this list
+ const retrieveUpdatedNodes = names => {
+ const filterDirectDependencies = node =>
+ !node.isRoot && node.resolveParent && node.resolveParent.isRoot
+ && (!names || names.includes(node.name))
+ && exactVersion(node) // skip update for exact ranges
+
+ const directDeps = this.idealTree.inventory
+ .filter(filterDirectDependencies)
+
+ // traverses the list of direct dependencies and collects all nodes
+ // to be updated, since any of them might have changed during reify
+ const nodes = []
+ for (const node of directDeps) {
+ for (const edgeIn of node.edgesIn) {
+ nodes.push({
+ name: node.name,
+ tree: edgeIn.from.target,
+ })
+ }
+ }
+ return nodes
+ }
+
+ if (save) {
+ // when using update all alongside save, we'll make
+ // sure to refresh every dependency of the root idealTree
+ if (this[_updateAll]) {
+ const nodes = retrieveUpdatedNodes()
+ updateNodes(nodes)
+ } else {
+ // resolvedAdd is the list of user add requests, but with names added
+ // to things like git repos and tarball file/urls. However, if the
+ // user requested 'foo@', and we have a foo@file:../foo, then we should
+ // end up saving the spec we actually used, not whatever they gave us.
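+ // (editorial note) each _resolvedAdd entry carries a { name, tree }
+ // pair, matching the shape destructured by updateNodes above.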
+ if (this[_resolvedAdd].length) { + updateNodes(this[_resolvedAdd]) + } + + // if updating given dependencies by name, restrict the list of + // nodes to check to only those currently in _updateNames + if (this[_updateNames].length) { + const nodes = retrieveUpdatedNodes(this[_updateNames]) + updateNodes(nodes) + } + + // grab any from explicitRequests that had deps removed + for (const { from: tree } of this.explicitRequests) { + updatedTrees.add(tree) + } + } + } + + if (save) { + for (const tree of updatedTrees) { + // refresh the edges so they have the correct specs + tree.package = tree.package + const pkgJson = await PackageJson.load(tree.path, { create: true }) + const { + dependencies = {}, + devDependencies = {}, + optionalDependencies = {}, + peerDependencies = {}, + // bundleDependencies is not required by PackageJson like the other + // fields here PackageJson also doesn't omit an empty array for this + // field so defaulting this to an empty array would add that field to + // every package.json file. + bundleDependencies, + } = tree.package + + pkgJson.update({ + dependencies, + devDependencies, + optionalDependencies, + peerDependencies, + bundleDependencies, + }) + await pkgJson.save() + } + } + + // before now edge specs could be changing, affecting the `requires` field + // in the package lock, so we hold off saving to the very last action + if (this[_usePackageLock]) { + // preserve indentation, if possible + let format = this.idealTree.package[Symbol.for('indent')] + if (format === undefined) { + format = ' ' + } + + // TODO this ignores options.save + await this.idealTree.meta.save({ + format: (this.options.formatPackageLock && format) ? format + : this.options.formatPackageLock, + }) + } + + timeEnd() + return true + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/audit-report.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/audit-report.js new file mode 100644 index 0000000000000000000000000000000000000000..ce274635d3b7ca1e7135b5201e0771e97671841c --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/audit-report.js @@ -0,0 +1,328 @@ +// an object representing the set of vulnerabilities in a tree + +const localeCompare = require('@isaacs/string-locale-compare')('en') +const npa = require('npm-package-arg') +const pickManifest = require('npm-pick-manifest') + +const Vuln = require('./vuln.js') +const Calculator = require('@npmcli/metavuln-calculator') + +const { log, time } = require('proc-log') + +const npmFetch = require('npm-registry-fetch') + +class AuditReport extends Map { + #omit + error = null + topVulns = new Map() + + static load (tree, opts) { + return new AuditReport(tree, opts).run() + } + + get auditReportVersion () { + return 2 + } + + toJSON () { + const obj = { + auditReportVersion: this.auditReportVersion, + vulnerabilities: {}, + metadata: { + vulnerabilities: { + info: 0, + low: 0, + moderate: 0, + high: 0, + critical: 0, + total: this.size, + }, + dependencies: { + prod: 0, + dev: 0, + optional: 0, + peer: 0, + peerOptional: 0, + total: this.tree.inventory.size - 1, + }, + }, + } + + for (const node of this.tree.inventory.values()) { + const { dependencies } = obj.metadata + let prod = true + for (const type of [ + 'dev', + 'optional', + 'peer', + 'peerOptional', + ]) { + if (node[type]) { + dependencies[type]++ + prod = false + } + } + if (prod) { + dependencies.prod++ + } + } + + 
// if it doesn't have any topVulns, then it's fixable with audit fix + // for each topVuln, figure out if it's fixable with audit fix --force, + // or if we have to just delete the thing, and if the fix --force will + // require a semver major update. + const vulnerabilities = [] + for (const [name, vuln] of this.entries()) { + vulnerabilities.push([name, vuln.toJSON()]) + obj.metadata.vulnerabilities[vuln.severity]++ + } + + obj.vulnerabilities = vulnerabilities + .sort(([a], [b]) => localeCompare(a, b)) + .reduce((set, [name, vuln]) => { + set[name] = vuln + return set + }, {}) + + return obj + } + + constructor (tree, opts = {}) { + super() + this.#omit = new Set(opts.omit || []) + this.calculator = new Calculator(opts) + this.options = opts + this.tree = tree + this.filterSet = opts.filterSet + } + + async run () { + this.report = await this.#getReport() + log.silly('audit report', this.report) + if (this.report) { + await this.#init() + } + return this + } + + isVulnerable (node) { + const vuln = this.get(node.packageName) + return !!(vuln && vuln.isVulnerable(node)) + } + + async #init () { + const timeEnd = time.start('auditReport:init') + + const promises = [] + for (const [name, advisories] of Object.entries(this.report)) { + for (const advisory of advisories) { + promises.push(this.calculator.calculate(name, advisory)) + } + } + + // now the advisories are calculated with a set of versions + // and the packument. turn them into our style of vuln objects + // which also have the affected nodes, and also create entries + // for all the metavulns that we find from dependents. + const advisories = new Set(await Promise.all(promises)) + const seen = new Set() + for (const advisory of advisories) { + const { name, range } = advisory + const k = `${name}@${range}` + + const vuln = this.get(name) || new Vuln({ name, advisory }) + if (this.has(name)) { + vuln.addAdvisory(advisory) + } + super.set(name, vuln) + + // don't flag the exact same name/range more than once + // adding multiple advisories with the same range is fine, but no + // need to search for nodes we already would have added. + if (!seen.has(k)) { + const p = [] + for (const node of this.tree.inventory.query('packageName', name)) { + if (!this.shouldAudit(node)) { + continue + } + + // if not vulnerable by this advisory, keep searching + if (!advisory.testVersion(node.version)) { + continue + } + + // we will have loaded the source already if this is a metavuln + if (advisory.type === 'metavuln') { + vuln.addVia(this.get(advisory.dependency)) + } + + // already marked this one, no need to do it again + if (vuln.nodes.has(node)) { + continue + } + + // haven't marked this one yet. get its dependents. + vuln.nodes.add(node) + for (const { from: dep, spec } of node.edgesIn) { + if (dep.isTop && !vuln.topNodes.has(dep)) { + vuln.fixAvailable = this.#fixAvailable(vuln, spec) + if (vuln.fixAvailable !== true) { + // now we know the top node is vulnerable, and cannot be + // upgraded out of the bad place without --force. But, there's + // no need to add it to the actual vulns list, because nothing + // depends on root. 
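+ // (editorial note) fixAvailable ends up as `true`, `false`, or an
+ // object like { name, version, isSemVerMajor }, per #fixAvailable
+ // below.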
+ this.topVulns.set(vuln.name, vuln)
+ vuln.topNodes.add(dep)
+ }
+ } else {
+ // calculate a metavuln, if necessary
+ const calc = this.calculator.calculate(dep.packageName, advisory)
+ // eslint-disable-next-line promise/always-return
+ p.push(calc.then(meta => {
+ // eslint-disable-next-line promise/always-return
+ if (meta.testVersion(dep.version, spec)) {
+ advisories.add(meta)
+ }
+ }))
+ }
+ }
+ }
+ await Promise.all(p)
+ seen.add(k)
+ }
+
+ // make sure we actually got something. if not, remove it
+ // this can happen if you are loading from a lockfile created by
+ // npm v5, since it lists the current version of all deps,
+ // rather than the range that is actually depended upon,
+ // or if using --omit with the older audit endpoint.
+ if (this.get(name).nodes.size === 0) {
+ this.delete(name)
+ continue
+ }
+
+ // if the vuln is valid, but THIS advisory doesn't apply to any of
+ // the nodes it references, then remove it from the advisory list.
+ // happens when using omit with old audit endpoint.
+ for (const advisory of vuln.advisories) {
+ const relevant = [...vuln.nodes]
+ .some(n => advisory.testVersion(n.version))
+ if (!relevant) {
+ vuln.deleteAdvisory(advisory)
+ }
+ }
+ }
+
+ timeEnd()
+ }
+
+ // given the spec, see if there is a fix available at all, and note
+ // whether it's a semver-major fix (i.e. one that will need --force)
+ #fixAvailable (vuln, spec) {
+ // TODO we return true, false, OR an object here. this is probably a bad pattern.
+ if (!vuln.testSpec(spec)) {
+ return true
+ }
+
+ // even if we HAVE a packument, if we're looking for it somewhere other than the registry and we have something vulnerable then we're stuck with it.
+ const specObj = npa(spec)
+ if (!specObj.registry) {
+ return false
+ }
+
+ if (specObj.subSpec) {
+ spec = specObj.subSpec.rawSpec
+ }
+
+ // we don't provide fixes for top nodes other than root, but we still check to see if the node is fixable with a different version, and note if that is a semver major bump.
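+ // (illustrative, editorial note) pickManifest with avoid/avoidStrict
+ // either yields a manifest outside the vulnerable range, e.g.
+ // { name: 'foo', version: '2.0.1', _isSemVerMajor: true }, or throws,
+ // which we translate into `false` (no fix available) below.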
+ try {
+ const {
+ _isSemVerMajor: isSemVerMajor,
+ version,
+ name,
+ } = pickManifest(vuln.packument, spec, {
+ ...this.options,
+ before: null,
+ avoid: vuln.range,
+ avoidStrict: true,
+ })
+ return { name, version, isSemVerMajor }
+ } catch (er) {
+ return false
+ }
+ }
+
+ set () {
+ throw new Error('do not call AuditReport.set() directly')
+ }
+
+ async #getReport () {
+ // if we're not auditing, just return null
+ if (this.options.audit === false || this.options.offline === true || this.tree.inventory.size === 1) {
+ return null
+ }
+
+ const timeEnd = time.start('auditReport:getReport')
+ try {
+ const body = this.prepareBulkData()
+ log.silly('audit', 'bulk request', body)
+
+ // no sense asking if we don't have anything to audit,
+ // we know it'll be empty
+ if (!Object.keys(body).length) {
+ return null
+ }
+
+ const res = await npmFetch('/-/npm/v1/security/advisories/bulk', {
+ ...this.options,
+ registry: this.options.auditRegistry || this.options.registry,
+ method: 'POST',
+ gzip: true,
+ body,
+ })
+
+ return await res.json()
+ } catch (er) {
+ log.verbose('audit error', er)
+ log.silly('audit error', String(er.body))
+ this.error = er
+ return null
+ } finally {
+ timeEnd()
+ }
+ }
+
+ // return true if we should audit this one
+ shouldAudit (node) {
+ if (
+ !node.version ||
+ node.isRoot ||
+ (this.filterSet && this.filterSet?.size !== 0 && !this.filterSet?.has(node))
+ ) {
+ return false
+ }
+ if (this.#omit.size === 0) {
+ return true
+ }
+ return !node.shouldOmit(this.#omit)
+ }
+
+ prepareBulkData () {
+ const payload = {}
+ for (const name of this.tree.inventory.query('packageName')) {
+ const set = new Set()
+ for (const node of this.tree.inventory.query('packageName', name)) {
+ if (!this.shouldAudit(node)) {
+ continue
+ }
+
+ set.add(node.version)
+ }
+ if (set.size) {
+ payload[name] = [...set]
+ }
+ }
+ return payload
+ }
+}
+
+module.exports = AuditReport
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/calc-dep-flags.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/calc-dep-flags.js
new file mode 100644
index 0000000000000000000000000000000000000000..76de452ed3d80fe984ff4d0ec9f8d2a7002fdde1
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/calc-dep-flags.js
@@ -0,0 +1,145 @@
+const { depth } = require('treeverse')
+
+const calcDepFlags = (tree, resetRoot = true) => {
+ if (resetRoot) {
+ tree.dev = false
+ tree.optional = false
+ tree.devOptional = false
+ tree.peer = false
+ }
+ const ret = depth({
+ tree,
+ visit: node => calcDepFlagsStep(node),
+ filter: node => node,
+ getChildren: (node, tree) =>
+ [...tree.edgesOut.values()].map(edge => edge.to),
+ })
+ return ret
+}
+
+const calcDepFlagsStep = (node) => {
+ // This rewalk is necessary to handle cases where devDep and optional
+ // or normal dependency graphs overlap deep in the dep graph.
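+ // For example (illustrative): root -> a (dev) and root -> b (prod),
+ // with both a -> c and b -> c. The dev walk may reach c first and mark
+ // it dev, but the prod edge from b means that flag must be unset.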
+ // Since we're only walking through deps that are not already flagged + // as non-dev/non-optional, it's typically a very shallow traversal + + node.extraneous = false + resetParents(node, 'extraneous') + resetParents(node, 'dev') + resetParents(node, 'peer') + resetParents(node, 'devOptional') + resetParents(node, 'optional') + + // for links, map their hierarchy appropriately + if (node.isLink) { + // node.target can be null, we check to ensure it's not null before proceeding + if (node.target == null) { + return node + } + node.target.dev = node.dev + node.target.optional = node.optional + node.target.devOptional = node.devOptional + node.target.peer = node.peer + return calcDepFlagsStep(node.target) + } + + node.edgesOut.forEach(({ peer, optional, dev, to }) => { + // if the dep is missing, then its flags are already maximally unset + if (!to) { + return + } + // everything with any kind of edge into it is not extraneous + to.extraneous = false + + // If this is a peer edge, mark the target as peer + if (peer) { + to.peer = true + } else if (to.peer && !hasIncomingPeerEdge(to)) { + unsetFlag(to, 'peer') + } + + // devOptional is the *overlap* of the dev and optional tree. + // however, for convenience and to save an extra rewalk, we leave + // it set when we are in *either* tree, and then omit it from the + // package-lock if either dev or optional are set. + const unsetDevOpt = !node.devOptional && !node.dev && !node.optional && !dev && !optional + + // if we are not in the devOpt tree, then we're also not in + // either the dev or opt trees + const unsetDev = unsetDevOpt || !node.dev && !dev + const unsetOpt = unsetDevOpt || !node.optional && !optional + + if (unsetDevOpt) { + unsetFlag(to, 'devOptional') + } + + if (unsetDev) { + unsetFlag(to, 'dev') + } + + if (unsetOpt) { + unsetFlag(to, 'optional') + } + }) + + return node +} + +const hasIncomingPeerEdge = (node) => { + const target = node.isLink && node.target ? node.target : node + for (const edge of target.edgesIn) { + if (edge.type === 'peer') { + return true + } + } + return false +} + +const resetParents = (node, flag) => { + if (node[flag]) { + return + } + + for (let p = node; p && (p === node || p[flag]); p = p.resolveParent) { + p[flag] = false + } +} + +// typically a short walk, since it only traverses deps that have the flag set. +const unsetFlag = (node, flag) => { + if (node[flag]) { + node[flag] = false + depth({ + tree: node, + visit: node => { + node.extraneous = node[flag] = false + if (node.isLink && node.target) { + node.target.extraneous = node.target[flag] = false + } + }, + getChildren: node => { + const children = [] + const targetNode = node.isLink && node.target ? 
node.target : node + for (const edge of targetNode.edgesOut.values()) { + if (edge.to?.[flag]) { + // For the peer flag, only follow peer edges to unset the flag + // Don't propagate peer flag through prod/dev/optional edges + if (flag === 'peer') { + if (edge.type === 'peer') { + children.push(edge.to) + } + } else { + // For other flags, follow prod edges (and peer edges for non-peer flags) + if (edge.type === 'prod' || edge.type === 'peer') { + children.push(edge.to) + } + } + } + } + return children + }, + }) + } +} + +module.exports = calcDepFlags diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/can-place-dep.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/can-place-dep.js new file mode 100644 index 0000000000000000000000000000000000000000..1a3ccff66922777a3d3d777a81c7657d71e8d5b2 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/can-place-dep.js @@ -0,0 +1,436 @@ +// Internal methods used by buildIdealTree. +// Answer the question: "can I put this dep here?" +// +// IMPORTANT: *nothing* in this class should *ever* modify or mutate the tree +// at all. The contract here is strictly limited to read operations. We call +// this in the process of walking through the ideal tree checking many +// different potential placement targets for a given node. If a change is made +// to the tree along the way, that can cause serious problems! +// +// In order to enforce this restriction, in debug mode, canPlaceDep() will +// snapshot the tree at the start of the process, and then at the end, will +// verify that it still matches the snapshot, and throw an error if any changes +// occurred. +// +// The algorithm is roughly like this: +// - check the node itself: +// - if there is no version present, and no conflicting edges from target, +// OK, provided all peers can be placed at or above the target. +// - if the current version matches, KEEP +// - if there is an older version present, which can be replaced, then +// - if satisfying and preferDedupe? KEEP +// - else: REPLACE +// - if there is a newer version present, and preferDedupe, REPLACE +// - if the version present satisfies the edge, KEEP +// - else: CONFLICT +// - if the node is not in conflict, check each of its peers: +// - if the peer can be placed in the target, continue +// - else if the peer can be placed in a parent, and there is no other +// conflicting version shadowing it, continue +// - else CONFLICT +// - If the peers are not in conflict, return the original node's value +// +// An exception to this logic is that if the target is the deepest location +// that a node can be placed, and the conflicting node can be placed deeper, +// then we will return REPLACE rather than CONFLICT, and Arborist will queue +// the replaced node for resolution elsewhere. + +const localeCompare = require('@isaacs/string-locale-compare')('en') +const semver = require('semver') +const debug = require('./debug.js') +const peerEntrySets = require('./peer-entry-sets.js') +const deepestNestingTarget = require('./deepest-nesting-target.js') + +const CONFLICT = Symbol('CONFLICT') +const OK = Symbol('OK') +const REPLACE = Symbol('REPLACE') +const KEEP = Symbol('KEEP') + +class CanPlaceDep { + // dep is a dep that we're trying to place. it should already live in + // a virtual tree where its peer set is loaded as children of the root. 
+ // target is the actual place where we're trying to place this dep + // in a node_modules folder. + // edge is the edge that we're trying to satisfy with this placement. + // parent is the CanPlaceDep object of the entry node when placing a peer. + constructor (options) { + const { + dep, + target, + edge, + preferDedupe, + parent = null, + peerPath = [], + explicitRequest = false, + } = options + + debug(() => { + if (!dep) { + throw new Error('no dep provided to CanPlaceDep') + } + + if (!target) { + throw new Error('no target provided to CanPlaceDep') + } + + if (!edge) { + throw new Error('no edge provided to CanPlaceDep') + } + + this._treeSnapshot = JSON.stringify([...target.root.inventory.entries()] + .map(([loc, { packageName, version, resolved }]) => { + return [loc, packageName, version, resolved] + }).sort(([a], [b]) => localeCompare(a, b))) + }) + + // the result of whether we can place it or not + this.canPlace = null + // if peers conflict, but this one doesn't, then that is useful info + this.canPlaceSelf = null + + this.dep = dep + this.target = target + this.edge = edge + this.explicitRequest = explicitRequest + + // preventing cycles when we check peer sets + this.peerPath = peerPath + // we always prefer to dedupe peers, because they are trying + // a bit harder to be singletons. + this.preferDedupe = !!preferDedupe || edge.peer + this.parent = parent + this.children = [] + + this.isSource = target === this.peerSetSource + this.name = edge.name + this.current = target.children.get(this.name) + this.targetEdge = target.edgesOut.get(this.name) + this.conflicts = new Map() + + // check if this dep was already subject to a peerDep override while + // building the peerSet. + this.edgeOverride = !dep.satisfies(edge) + + this.canPlace = this.checkCanPlace() + if (!this.canPlaceSelf) { + this.canPlaceSelf = this.canPlace + } + + debug(() => { + const treeSnapshot = JSON.stringify([...target.root.inventory.entries()] + .map(([loc, { packageName, version, resolved }]) => { + return [loc, packageName, version, resolved] + }).sort(([a], [b]) => localeCompare(a, b))) + /* istanbul ignore if */ + if (this._treeSnapshot !== treeSnapshot) { + throw Object.assign(new Error('tree changed in CanPlaceDep'), { + expect: this._treeSnapshot, + actual: treeSnapshot, + }) + } + }) + } + + checkCanPlace () { + const { target, targetEdge, current, dep } = this + + // if the dep failed to load, we're going to fail the build or + // prune it out anyway, so just move forward placing/replacing it. + if (dep.errors.length) { + return current ? REPLACE : OK + } + + // cannot place peers inside their dependents, except for tops + if (targetEdge && targetEdge.peer && !target.isTop) { + return CONFLICT + } + + // skip this test if there's a current node, because we might be able + // to dedupe against it anyway + if (!current && + targetEdge && + !dep.satisfies(targetEdge) && + targetEdge !== this.edge) { + return CONFLICT + } + + return current ? this.checkCanPlaceCurrent() : this.checkCanPlaceNoCurrent() + } + + // we know that the target has a dep by this name in its node_modules + // already. Can return KEEP, REPLACE, or CONFLICT. + checkCanPlaceCurrent () { + const { preferDedupe, explicitRequest, current, target, edge, dep } = this + + if (dep.matches(current)) { + if (current.satisfies(edge) || this.edgeOverride) { + return explicitRequest ? 
REPLACE : KEEP + } + } + + const { version: curVer } = current + const { version: newVer } = dep + const tryReplace = curVer && newVer && semver.gte(newVer, curVer) + if (tryReplace && dep.canReplace(current)) { + // It's extremely rare that a replaceable node would be a conflict, if + // the current one wasn't a conflict, but it is theoretically possible + // if peer deps are pinned. In that case we treat it like any other + // conflict, and keep trying. + const cpp = this.canPlacePeers(REPLACE) + if (cpp !== CONFLICT) { + return cpp + } + } + + // ok, can't replace the current with new one, but maybe current is ok? + if (current.satisfies(edge) && (!explicitRequest || preferDedupe)) { + return KEEP + } + + // if we prefer deduping, then try replacing newer with older + if (preferDedupe && !tryReplace && dep.canReplace(current)) { + const cpp = this.canPlacePeers(REPLACE) + if (cpp !== CONFLICT) { + return cpp + } + } + + // Check for interesting cases! + // First, is this the deepest place that this thing can go, and NOT the + // deepest place where the conflicting dep can go? If so, replace it, + // and let it re-resolve deeper in the tree. + const myDeepest = this.deepestNestingTarget + + // ok, i COULD be placed deeper, so leave the current one alone. + if (target !== myDeepest) { + return CONFLICT + } + + // if we are not checking a peerDep, then we MUST place it here, in the + // target that has a non-peer dep on it. + if (!edge.peer && target === edge.from) { + return this.canPlacePeers(REPLACE) + } + + // if we aren't placing a peer in a set, then we're done here. + // This is ignored because it SHOULD be redundant, as far as I can tell, + // with the deepest target and target===edge.from tests. But until we + // can prove that isn't possible, this condition is here for safety. + /* istanbul ignore if - allegedly impossible */ + if (!this.parent && !edge.peer) { + return CONFLICT + } + + // check the deps in the peer group for each edge into that peer group + // if ALL of them can be pushed deeper, or if it's ok to replace its + // members with the contents of the new peer group, then we're good. + let canReplace = true + for (const [entryEdge, currentPeers] of peerEntrySets(current)) { + if (entryEdge === this.edge || entryEdge === this.peerEntryEdge) { + continue + } + + // First, see if it's ok to just replace the peerSet entirely. + // we do this by walking out from the entryEdge, because in a case like + // this: + // + // v -> PEER(a@1||2) + // a@1 -> PEER(b@1) + // a@2 -> PEER(b@2) + // b@1 -> PEER(a@1) + // b@2 -> PEER(a@2) + // + // root + // +-- v + // +-- a@2 + // +-- b@2 + // + // Trying to place a peer group of (a@1, b@1) would fail to note that + // they can be replaced, if we did it by looping 1 by 1. If we are + // replacing something, we don't have to check its peer deps, because + // the peerDeps in the placed peerSet will presumably satisfy. 
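+ // (editorial note) peerReplacementWalk below is a Set that grows while
+ // being iterated, so the for-of visits every peer reachable from
+ // entryNode, not just its direct peer edges.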
+ const entryNode = entryEdge.to + const entryRep = dep.parent.children.get(entryNode.name) + if (entryRep) { + if (entryRep.canReplace(entryNode, dep.parent.children.keys())) { + continue + } + } + + let canClobber = !entryRep + if (!entryRep) { + const peerReplacementWalk = new Set([entryNode]) + OUTER: for (const currentPeer of peerReplacementWalk) { + for (const edge of currentPeer.edgesOut.values()) { + if (!edge.peer || !edge.valid) { + continue + } + const rep = dep.parent.children.get(edge.name) + if (!rep) { + if (edge.to) { + peerReplacementWalk.add(edge.to) + } + continue + } + if (!rep.satisfies(edge)) { + canClobber = false + break OUTER + } + } + } + } + if (canClobber) { + continue + } + + // ok, we can't replace, but maybe we can nest the current set deeper? + let canNestCurrent = true + for (const currentPeer of currentPeers) { + if (!canNestCurrent) { + break + } + + // still possible to nest this peerSet + const curDeep = deepestNestingTarget(entryEdge.from, currentPeer.name) + if (curDeep === target || target.isDescendantOf(curDeep)) { + canNestCurrent = false + canReplace = false + } + if (canNestCurrent) { + continue + } + } + } + + // if we can nest or replace all the current peer groups, we can replace. + if (canReplace) { + return this.canPlacePeers(REPLACE) + } + + return CONFLICT + } + + checkCanPlaceNoCurrent () { + const { target, peerEntryEdge, dep, name } = this + + // check to see what that name resolves to here, and who may depend on + // being able to reach it by crawling up past the parent. we know + // that it's not the target's direct child node, and if it was a direct + // dep of the target, we would have conflicted earlier. + const current = target !== peerEntryEdge.from && target.resolve(name) + if (current) { + for (const edge of current.edgesIn.values()) { + if (edge.from.isDescendantOf(target) && edge.valid) { + if (!dep.satisfies(edge)) { + return CONFLICT + } + } + } + } + + // no objections, so this is fine as long as peers are ok here. + return this.canPlacePeers(OK) + } + + get deepestNestingTarget () { + const start = this.parent ? this.parent.deepestNestingTarget + : this.edge.from + return deepestNestingTarget(start, this.name) + } + + get conflictChildren () { + return this.allChildren.filter(c => c.canPlace === CONFLICT) + } + + get allChildren () { + const set = new Set(this.children) + for (const child of set) { + for (const grandchild of child.children) { + set.add(grandchild) + } + } + return [...set] + } + + get top () { + return this.parent ? this.parent.top : this + } + + // check if peers can go here. returns state or CONFLICT + canPlacePeers (state) { + this.canPlaceSelf = state + if (this._canPlacePeers) { + return this._canPlacePeers + } + + // TODO: represent peerPath in ERESOLVE error somehow? + const peerPath = [...this.peerPath, this.dep] + let sawConflict = false + for (const peerEdge of this.dep.edgesOut.values()) { + if (!peerEdge.peer || !peerEdge.to || peerPath.includes(peerEdge.to)) { + continue + } + const peer = peerEdge.to + // it may be the case that the *initial* dep can be nested, but a peer + // of that dep needs to be placed shallower, because the target has + // a peer dep on the peer as well. 
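+ // e.g. (illustrative): dep `a` has PEER(b); `a` itself may nest in
+ // target/node_modules, but if target also has its own peer edge on b,
+ // deepestNestingTarget walks up so b lands where both can resolve it.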
+ const target = deepestNestingTarget(this.target, peer.name) + const cpp = new CanPlaceDep({ + dep: peer, + target, + parent: this, + edge: peerEdge, + peerPath, + // always place peers in preferDedupe mode + preferDedupe: true, + }) + /* istanbul ignore next */ + debug(() => { + if (this.children.some(c => c.dep === cpp.dep)) { + throw new Error('checking same dep repeatedly') + } + }) + this.children.push(cpp) + + if (cpp.canPlace === CONFLICT) { + sawConflict = true + } + } + + this._canPlacePeers = sawConflict ? CONFLICT : state + return this._canPlacePeers + } + + // what is the node that is causing this peerSet to be placed? + get peerSetSource () { + return this.parent ? this.parent.peerSetSource : this.edge.from + } + + get peerEntryEdge () { + return this.top.edge + } + + static get CONFLICT () { + return CONFLICT + } + + static get OK () { + return OK + } + + static get REPLACE () { + return REPLACE + } + + static get KEEP () { + return KEEP + } + + get description () { + const { canPlace } = this + return canPlace && canPlace.description || + /* istanbul ignore next - old node affordance */ canPlace + } +} + +module.exports = CanPlaceDep diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/case-insensitive-map.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/case-insensitive-map.js new file mode 100644 index 0000000000000000000000000000000000000000..afc6afbe0f98ad684566c528d512db404c4a74e4 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/case-insensitive-map.js @@ -0,0 +1,50 @@ +// package children are represented with a Map object, but many file systems +// are case-insensitive and unicode-normalizing, so we need to treat +// node.children.get('FOO') and node.children.get('foo') as the same thing. + +module.exports = class CIMap extends Map { + #keys = new Map() + + constructor (items = []) { + super() + for (const [key, val] of items) { + this.set(key, val) + } + } + + #normKey (key) { + if (typeof key !== 'string') { + return key + } + return key.normalize('NFKD').toLowerCase() + } + + get (key) { + const normKey = this.#normKey(key) + return this.#keys.has(normKey) ? super.get(this.#keys.get(normKey)) + : undefined + } + + set (key, val) { + const normKey = this.#normKey(key) + if (this.#keys.has(normKey)) { + super.delete(this.#keys.get(normKey)) + } + this.#keys.set(normKey, key) + return super.set(key, val) + } + + delete (key) { + const normKey = this.#normKey(key) + if (this.#keys.has(normKey)) { + const prevKey = this.#keys.get(normKey) + this.#keys.delete(normKey) + return super.delete(prevKey) + } + } + + has (key) { + const normKey = this.#normKey(key) + return this.#keys.has(normKey) && super.has(this.#keys.get(normKey)) + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/consistent-resolve.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/consistent-resolve.js new file mode 100644 index 0000000000000000000000000000000000000000..890caa32f10722b399c29da8f7ea9311eb4ad7f7 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/consistent-resolve.js @@ -0,0 +1,44 @@ +// take a path and a resolved value, and turn it into a resolution from +// the given new path. 
This is used with converting a package.json's +// relative file: path into one suitable for a lockfile, or between +// lockfiles, and for converting hosted git repos to a consistent url type. +const npa = require('npm-package-arg') +const relpath = require('./relpath.js') +const consistentResolve = (resolved, fromPath, toPath, relPaths = false) => { + if (!resolved) { + return null + } + + try { + const hostedOpt = { noCommittish: false } + const { + fetchSpec, + saveSpec, + type, + hosted, + rawSpec, + raw, + } = npa(resolved, fromPath) + if (type === 'file' || type === 'directory') { + if (relPaths && toPath) { + return `file:${relpath(toPath, fetchSpec)}` + } + return `file:${fetchSpec}` + } + if (hosted) { + return `git+${hosted.auth ? hosted.https(hostedOpt) : hosted.sshurl(hostedOpt)}` + } + if (type === 'git') { + return saveSpec + } + if (rawSpec === '*') { + return raw + } + return rawSpec + } catch (_) { + // whatever we passed in was not acceptable to npa. + // leave it 100% untouched. + return resolved + } +} +module.exports = consistentResolve diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/debug.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/debug.js new file mode 100644 index 0000000000000000000000000000000000000000..d5197b46ef09a985c681bb6df709b727ce0d71d6 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/debug.js @@ -0,0 +1,32 @@ +// certain assertions we should do only when testing arborist itself, because +// they are too expensive or aggressive and would break user programs if we +// miss a situation where they are actually valid. +// +// call like this: +// +// /* istanbul ignore next - debug check */ +// debug(() => { +// if (someExpensiveCheck) +// throw new Error('expensive check should have returned false') +// }) + +// run in debug mode if explicitly requested, running arborist tests, +// or working in the arborist project directory. + +const debug = process.env.ARBORIST_DEBUG !== '0' && ( + process.env.ARBORIST_DEBUG === '1' || + /\barborist\b/.test(process.env.NODE_DEBUG || '') || + process.env.npm_package_name === '@npmcli/arborist' && + ['test', 'snap'].includes(process.env.npm_lifecycle_event) || + process.cwd() === require('node:path').resolve(__dirname, '..') +) + +module.exports = debug ? fn => fn() : () => {} +const red = process.stderr.isTTY ? msg => `\x1B[31m${msg}\x1B[39m` : m => m +module.exports.log = (...msg) => module.exports(() => { + const { format } = require('node:util') + const prefix = `\n${process.pid} ${red(format(msg.shift()))} ` + msg = (prefix + format(...msg).trim().split('\n').join(prefix)).trim() + /* eslint-disable-next-line no-console */ + console.error(msg) +}) diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/deepest-nesting-target.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/deepest-nesting-target.js new file mode 100644 index 0000000000000000000000000000000000000000..2c6647f5db7badd1a51d42a41dfaa5872ab9964c --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/deepest-nesting-target.js @@ -0,0 +1,18 @@ +// given a starting node, what is the *deepest* target where name could go? 
+// This is not on the Node class for the simple reason that we sometimes +// need to check the deepest *potential* target for a Node that is not yet +// added to the tree where we are checking. +const deepestNestingTarget = (start, name) => { + for (const target of start.ancestry()) { + // note: this will skip past the first target if edge is peer + if (target.isProjectRoot || !target.resolveParent || target.globalTop) { + return target + } + const targetEdge = target.edgesOut.get(name) + if (!targetEdge || !targetEdge.peer) { + return target + } + } +} + +module.exports = deepestNestingTarget diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/dep-valid.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/dep-valid.js new file mode 100644 index 0000000000000000000000000000000000000000..6571c0b5fae6c95882507a0979f29f3334d9a61a --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/dep-valid.js @@ -0,0 +1,150 @@ +// Do not rely on package._fields, so that we don't throw +// false failures if a tree is generated by other clients. +// Only relies on child.resolved, which MAY come from +// client-specific package.json meta _fields, but most of +// the time will be pulled out of a lockfile + +const semver = require('semver') +const npa = require('npm-package-arg') +const { relative } = require('node:path') +const fromPath = require('./from-path.js') + +const depValid = (child, requested, requestor) => { + // NB: we don't do much to verify 'tag' type requests. + // Just verify that we got a remote resolution. Presumably, it + // came from a registry and was tagged at some point. + + if (typeof requested === 'string') { + try { + // tarball/dir must have resolved to the same tgz on disk, but for + // file: deps that depend on other files/dirs, we must resolve the + // location based on the *requestor* file/dir, not where it ends up. + // '' is equivalent to '*' + requested = npa.resolve(child.name, requested || '*', fromPath(requestor, requestor.edgesOut.get(child.name))) + } catch (er) { + // Not invalid because the child doesn't match, but because + // the spec itself is not supported. Nothing would match, + // so the edge is definitely not valid and never can be. + er.dependency = child.name + er.requested = requested + requestor.errors.push(er) + return false + } + } + + // if the lockfile is super old, or hand-modified, + // then it's possible to hit this state. 
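+ // (illustrative, editorial note) e.g. a hand-edited lockfile entry with
+ // an empty specifier leaves `requested` falsy here; we record an error
+ // on the requestor and report the edge as invalid instead of throwing.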
+ if (!requested) { + const er = new Error('Invalid dependency specifier') + er.dependency = child.name + er.requested = requested + requestor.errors.push(er) + return false + } + + switch (requested.type) { + case 'range': + if (requested.fetchSpec === '*') { + return true + } + // fallthrough + case 'version': + // if it's a version or a range other than '*', semver it + return semver.satisfies(child.version, requested.fetchSpec, true) + + case 'directory': + return linkValid(child, requested, requestor) + + case 'file': + return tarballValid(child, requested, requestor) + + case 'alias': + // check that the alias target is valid + return depValid(child, requested.subSpec, requestor) + + case 'tag': + // if it's a tag, we just verify that it has a tarball resolution + // presumably, it came from the registry and was tagged at some point + return child.resolved && npa(child.resolved).type === 'remote' + + case 'remote': + // verify that we got it from the desired location + return child.resolved === requested.fetchSpec + + case 'git': { + // if it's a git type, verify that they're the same repo + // + // if it specifies a definite commit, then it must have the + // same commit to be considered the same repo + // + // if it has a #semver: specifier, verify that the + // version in the package is in the semver range + const resRepo = npa(child.resolved || '') + const resHost = resRepo.hosted + const reqHost = requested.hosted + const reqCommit = /^[a-fA-F0-9]{40}$/.test(requested.gitCommittish || '') + const nc = { noCommittish: !reqCommit } + if (!resHost) { + if (resRepo.fetchSpec !== requested.fetchSpec) { + return false + } + } else { + if (reqHost?.ssh(nc) !== resHost.ssh(nc)) { + return false + } + } + if (!requested.gitRange) { + return true + } + return semver.satisfies(child.package.version, requested.gitRange, { + loose: true, + }) + } + + default: // impossible, just being cautious + break + } + + const er = new Error('Unsupported dependency type') + er.dependency = child.name + er.requested = requested + requestor.errors.push(er) + return false +} + +const linkValid = (child, requested, requestor) => { + const isLink = !!child.isLink + // if we're installing links and the node is a link, then it's invalid because we want + // a real node to be there. Except for workspaces. They are always links. + if (requestor.installLinks && !child.isWorkspace) { + return !isLink + } + + // directory must be a link to the specified folder + return isLink && relative(child.realpath, requested.fetchSpec) === '' +} + +const tarballValid = (child, requested) => { + if (child.isLink) { + return false + } + + if (child.resolved) { + return child.resolved.replace(/\\/g, '/') === `file:${requested.fetchSpec.replace(/\\/g, '/')}` + } + + // if we have a legacy mutated package.json file. we can't be 100% + // sure that it resolved to the same file, but if it was the same + // request, that's a pretty good indicator of sameness. + if (child.package._requested) { + return child.package._requested.saveSpec === requested.saveSpec + } + + // ok, we're probably dealing with some legacy cruft here, not much + // we can do at this point unfortunately. + return false +} + +module.exports = (child, requested, accept, requestor) => + depValid(child, requested, requestor) || + (typeof accept === 'string' ? 
depValid(child, accept, requestor) : false) diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/diff.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/diff.js new file mode 100644 index 0000000000000000000000000000000000000000..465657cc62422273b27b2029a843d177704cca32 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/diff.js @@ -0,0 +1,324 @@ +// a tree representing the difference between two trees +// A Diff node's parent is not necessarily the parent of +// the node location it refers to, but rather the highest level +// node that needs to be either changed or removed. +// Thus, the root Diff node is the shallowest change required +// for a given branch of the tree being mutated. + +const { depth } = require('treeverse') +const { existsSync } = require('node:fs') + +const ssri = require('ssri') + +class Diff { + constructor ({ actual, ideal, filterSet, shrinkwrapInflated, omit }) { + this.omit = omit + this.filterSet = filterSet + this.shrinkwrapInflated = shrinkwrapInflated + this.children = [] + this.actual = actual + this.ideal = ideal + if (this.ideal) { + this.resolved = this.ideal.resolved + this.integrity = this.ideal.integrity + } + this.action = getAction(this) + this.parent = null + // the set of leaf nodes that we rake up to the top level + this.leaves = [] + // the set of nodes that don't change in this branch of the tree + this.unchanged = [] + // the set of nodes that will be removed in this branch of the tree + this.removed = [] + } + + static calculate ({ + actual, + ideal, + filterNodes = [], + shrinkwrapInflated = new Set(), + omit = new Set(), + }) { + // if there's a filterNode, then: + // - get the path from the root to the filterNode. The root or + // root.target should have an edge either to the filterNode or + // a link to the filterNode. If not, abort. Add the path to the + // filterSet. + // - Add set of Nodes depended on by the filterNode to filterSet. + // - Anything outside of that set should be ignored by getChildren + const filterSet = new Set() + const extraneous = new Set() + for (const filterNode of filterNodes) { + const { root } = filterNode + if (root !== ideal && root !== actual) { + throw new Error('invalid filterNode: outside idealTree/actualTree') + } + const rootTarget = root.target + const edge = [...rootTarget.edgesOut.values()].filter(e => { + return e.to && (e.to === filterNode || e.to.target === filterNode) + })[0] + filterSet.add(root) + filterSet.add(rootTarget) + filterSet.add(ideal) + filterSet.add(actual) + if (edge && edge.to) { + filterSet.add(edge.to) + filterSet.add(edge.to.target) + } + filterSet.add(filterNode) + + depth({ + tree: filterNode, + visit: node => filterSet.add(node), + getChildren: node => { + node = node.target + const loc = node.location + const idealNode = ideal.inventory.get(loc) + const ideals = !idealNode ? [] + : [...idealNode.edgesOut.values()].filter(e => e.to).map(e => e.to) + const actualNode = actual.inventory.get(loc) + const actuals = !actualNode ? 
[] + : [...actualNode.edgesOut.values()].filter(e => e.to).map(e => e.to) + if (actualNode) { + for (const child of actualNode.children.values()) { + if (child.extraneous) { + extraneous.add(child) + } + } + } + + return ideals.concat(actuals) + }, + }) + } + for (const extra of extraneous) { + filterSet.add(extra) + } + + return depth({ + tree: new Diff({ actual, ideal, filterSet, shrinkwrapInflated, omit }), + getChildren, + leave, + }) + } +} + +const getAction = ({ actual, ideal }) => { + if (!ideal) { + return 'REMOVE' + } + + // bundled meta-deps are copied over to the ideal tree when we visit it, + // so they'll appear to be missing here. There's no need to handle them + // in the diff, though, because they'll be replaced at reify time anyway + // Otherwise, add the missing node. + if (!actual) { + return ideal.inDepBundle ? null : 'ADD' + } + + // always ignore the root node + if (ideal.isRoot && actual.isRoot) { + return null + } + + // if the versions don't match, it's a change no matter what + if (ideal.version !== actual.version) { + return 'CHANGE' + } + + const binsExist = ideal.binPaths.every((path) => existsSync(path)) + + // top nodes, links, and git deps won't have integrity, but do have resolved + // if neither node has integrity, the bins exist, and either (a) neither + // node has a resolved value or (b) they both do and match, then we can + // leave this one alone since we already know the versions match due to + // the condition above. The "neither has resolved" case (a) cannot be + // treated as a 'mark CHANGE and refetch', because shrinkwraps, bundles, + // and link deps may lack this information, and we don't want to try to + // go to the registry for something that isn't there. + const noIntegrity = !ideal.integrity && !actual.integrity + const noResolved = !ideal.resolved && !actual.resolved + const resolvedMatch = ideal.resolved && ideal.resolved === actual.resolved + if (noIntegrity && binsExist && (resolvedMatch || noResolved)) { + return null + } + + // otherwise, verify that it's the same bits + // note that if ideal has integrity, and resolved doesn't, we treat + // that as a 'change', so that it gets re-fetched and locked down. + const integrityMismatch = !ideal.integrity || !actual.integrity || + !ssri.parse(ideal.integrity).match(actual.integrity) + if (integrityMismatch || !binsExist) { + return 'CHANGE' + } + + return null +} + +const allChildren = node => { + if (!node) { + return new Map() + } + + // if the node is root, and also a link, then what we really + // want is to traverse the target's children + if (node.isRoot && node.isLink) { + return allChildren(node.target) + } + + const kids = new Map() + for (const n of [node, ...node.fsChildren]) { + for (const kid of n.children.values()) { + kids.set(kid.path, kid) + } + } + return kids +} + +// functions for the walk options when we traverse the trees +// to create the diff tree +const getChildren = diff => { + const children = [] + const { + actual, + ideal, + unchanged, + removed, + filterSet, + shrinkwrapInflated, + omit, + } = diff + + // Note: we DON'T diff fsChildren themselves, because they are either + // included in the package contents, or part of some other project, and + // will never appear in legacy shrinkwraps anyway. but we _do_ include the + // child nodes of fsChildren, because those are nodes that we are typically + // responsible for installing. 
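+  //
+  // (Illustrative walk-through with assumed trees rather than real data: if
+  //  actual has children at node_modules/a and node_modules/b while ideal
+  //  has children at node_modules/b and node_modules/c, the union of paths
+  //  computed below produces diffNode() calls of (a, undefined) for the
+  //  removal, (b, b) for the comparison, and (undefined, c) for the
+  //  addition.)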
+ const actualKids = allChildren(actual) + const idealKids = allChildren(ideal) + + if (ideal && ideal.hasShrinkwrap && !shrinkwrapInflated.has(ideal)) { + // Guaranteed to get a diff.leaves here, because we always + // be called with a proper Diff object when ideal has a shrinkwrap + // that has not been inflated. + diff.leaves.push(diff) + return children + } + + const paths = new Set([...actualKids.keys(), ...idealKids.keys()]) + for (const path of paths) { + const actual = actualKids.get(path) + const ideal = idealKids.get(path) + diffNode({ + actual, + ideal, + children, + unchanged, + removed, + filterSet, + shrinkwrapInflated, + omit, + }) + } + + if (diff.leaves && !children.length) { + diff.leaves.push(diff) + } + + return children +} + +const diffNode = ({ + actual, + ideal, + children, + unchanged, + removed, + filterSet, + shrinkwrapInflated, + omit, +}) => { + if (filterSet.size && !(filterSet.has(ideal) || filterSet.has(actual))) { + return + } + + if (ideal?.shouldOmit?.(omit)) { + ideal.inert = true + } + + // Treat inert nodes as undefined for the purposes of diffing. + if (ideal?.inert) { + ideal = undefined + } + if (!actual && !ideal) { + return + } + + const action = getAction({ actual, ideal }) + + // if it's a match, then get its children + // otherwise, this is the child diff node + if (action || (!shrinkwrapInflated.has(ideal) && ideal.hasShrinkwrap)) { + if (action === 'REMOVE') { + removed.push(actual) + } + children.push(new Diff({ actual, ideal, filterSet, shrinkwrapInflated, omit })) + } else { + unchanged.push(ideal) + // !*! Weird dirty hack warning !*! + // + // Bundled deps aren't loaded in the ideal tree, because we don't know + // what they are going to be without unpacking. Swap them over now if + // the bundling node isn't changing, so we don't prune them later. + // + // It's a little bit dirty to be doing this here, since it means that + // diffing trees can mutate them, but otherwise we have to walk over + // all unchanging bundlers and correct the diff later, so it's more + // efficient to just fix it while we're passing through already. + // + // Note that moving over a bundled dep will break the links to other + // deps under this parent, which may have been transitively bundled. + // Breaking those links means that we'll no longer see the transitive + // dependency, meaning that it won't appear as bundled any longer! + // In order to not end up dropping transitively bundled deps, we have + // to get the list of nodes to move, then move them all at once, rather + // than moving them one at a time in the first loop. + const bd = ideal.package.bundleDependencies + if (actual && bd && bd.length) { + const bundledChildren = [] + for (const node of actual.children.values()) { + if (node.inBundle) { + bundledChildren.push(node) + } + } + for (const node of bundledChildren) { + node.parent = ideal + } + } + children.push(...getChildren({ + actual, + ideal, + unchanged, + removed, + filterSet, + shrinkwrapInflated, + omit, + })) + } +} + +// set the parentage in the leave step so that we aren't attaching +// child nodes only to remove them later. also bubble up the unchanged +// nodes so that we can move them out of staging in the reification step. 
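+// For example (illustrative, property names taken from the Diff class
+// above), a consumer of Diff.calculate() ends up reading:
+//   diff.children   // shallowest changed branches of the tree
+//   diff.leaves     // leaf diffs raked up to the top level
+//   diff.unchanged  // nodes that can be kept in place as-is
+//   diff.removed    // nodes scheduled for removal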
+const leave = (diff, children) => { + children.forEach(kid => { + kid.parent = diff + diff.leaves.push(...kid.leaves) + diff.unchanged.push(...kid.unchanged) + diff.removed.push(...kid.removed) + }) + diff.children = children + return diff +} + +module.exports = Diff diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/edge.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/edge.js new file mode 100644 index 0000000000000000000000000000000000000000..242d2669ae4ca3312871316efa29c561988efdbc --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/edge.js @@ -0,0 +1,358 @@ +// An edge in the dependency graph +// Represents a dependency relationship of some kind + +const util = require('node:util') +const npa = require('npm-package-arg') +const depValid = require('./dep-valid.js') +const OverrideSet = require('./override-set.js') + +class ArboristEdge { + constructor (edge) { + this.name = edge.name + this.spec = edge.spec + this.type = edge.type + + const edgeFrom = edge.from?.location + const edgeTo = edge.to?.location + const override = edge.overrides?.value + + if (edgeFrom != null) { + this.from = edgeFrom + } + if (edgeTo) { + this.to = edgeTo + } + if (edge.error) { + this.error = edge.error + } + if (edge.peerConflicted) { + this.peerConflicted = true + } + if (override) { + this.overridden = override + } + } +} + +class Edge { + #accept + #error + #explanation + #from + #name + #spec + #to + #type + + static types = Object.freeze([ + 'prod', + 'dev', + 'optional', + 'peer', + 'peerOptional', + 'workspace', + ]) + + // XXX where is this used? + static errors = Object.freeze([ + 'DETACHED', + 'MISSING', + 'PEER LOCAL', + 'INVALID', + ]) + + constructor (options) { + const { type, name, spec, accept, from, overrides } = options + + // XXX are all of these error states even possible? + if (typeof spec !== 'string') { + throw new TypeError('must provide string spec') + } + if (!Edge.types.includes(type)) { + throw new TypeError(`invalid type: ${type}\n(valid types are: ${Edge.types.join(', ')})`) + } + if (type === 'workspace' && npa(spec).type !== 'directory') { + throw new TypeError('workspace edges must be a symlink') + } + if (typeof name !== 'string') { + throw new TypeError('must provide dependency name') + } + if (!from) { + throw new TypeError('must provide "from" node') + } + if (accept !== undefined) { + if (typeof accept !== 'string') { + throw new TypeError('accept field must be a string if provided') + } + this.#accept = accept || '*' + } + if (overrides !== undefined) { + this.overrides = overrides + } + + this.#name = name + this.#type = type + this.#spec = spec + this.#explanation = null + this.#from = from + + from.edgesOut.get(this.#name)?.detach() + from.addEdgeOut(this) + + this.reload(true) + this.peerConflicted = false + } + + satisfiedBy (node) { + if (node.name !== this.#name || !this.#from) { + return false + } + + // NOTE: this condition means we explicitly do not support overriding + // bundled or shrinkwrapped dependencies + if (node.hasShrinkwrap || node.inShrinkwrap || node.inBundle) { + return depValid(node, this.rawSpec, this.#accept, this.#from) + } + + // If there's no override we just use the spec. + if (!this.overrides?.keySpec) { + return depValid(node, this.spec, this.#accept, this.#from) + } + // There's some override. If the target node satisfies the overriding spec + // then it's okay. 
+ if (depValid(node, this.spec, this.#accept, this.#from)) { + return true + } + // If it doesn't, then it should at least satisfy the original spec. + if (!depValid(node, this.rawSpec, this.#accept, this.#from)) { + return false + } + // It satisfies the original spec, not the overriding spec. We need to make + // sure it doesn't use the overridden spec. + // For example: + // we might have an ^8.0.0 rawSpec, and an override that makes + // keySpec=8.23.0 and the override value spec=9.0.0. + // If the node is 9.0.0, then it's okay because it's consistent with spec. + // If the node is 8.24.0, then it's okay because it's consistent with the rawSpec. + // If the node is 8.23.0, then it's not okay because even though it's consistent + // with the rawSpec, it's also consistent with the keySpec. + // So we're looking for ^8.0.0 or 9.0.0 and not 8.23.0. + return !depValid(node, this.overrides.keySpec, this.#accept, this.#from) + } + + // return the edge data, and an explanation of how that edge came to be here + explain (seen = []) { + if (!this.#explanation) { + const explanation = { + type: this.#type, + name: this.#name, + spec: this.spec, + } + if (this.rawSpec !== this.spec) { + explanation.rawSpec = this.rawSpec + explanation.overridden = true + } + if (this.bundled) { + explanation.bundled = this.bundled + } + if (this.error) { + explanation.error = this.error + } + if (this.#from) { + explanation.from = this.#from.explain(null, seen) + } + this.#explanation = explanation + } + return this.#explanation + } + + get bundled () { + return !!this.#from?.package?.bundleDependencies?.includes(this.#name) + } + + get workspace () { + return this.#type === 'workspace' + } + + get prod () { + return this.#type === 'prod' + } + + get dev () { + return this.#type === 'dev' + } + + get optional () { + return this.#type === 'optional' || this.#type === 'peerOptional' + } + + get peer () { + return this.#type === 'peer' || this.#type === 'peerOptional' + } + + get type () { + return this.#type + } + + get name () { + return this.#name + } + + get rawSpec () { + return this.#spec + } + + get spec () { + if (this.overrides?.value && this.overrides.value !== '*' && this.overrides.name === this.#name) { + if (this.overrides.value.startsWith('$')) { + const ref = this.overrides.value.slice(1) + let pkg = this.#from?.sourceReference + ? this.#from?.sourceReference.root.package + : this.#from?.root?.package + + let specValue = this.#calculateReferentialOverrideSpec(ref, pkg) + + // If the package isn't found in the root package, fall back to the local package. 
+ if (!specValue) { + pkg = this.#from?.package + specValue = this.#calculateReferentialOverrideSpec(ref, pkg) + } + + if (specValue) { + return specValue + } + throw new Error(`Unable to resolve reference ${this.overrides.value}`) + } + return this.overrides.value + } + return this.#spec + } + + #calculateReferentialOverrideSpec (ref, pkg) { + if (pkg.devDependencies?.[ref]) { + return pkg.devDependencies[ref] + } + if (pkg.optionalDependencies?.[ref]) { + return pkg.optionalDependencies[ref] + } + if (pkg.dependencies?.[ref]) { + return pkg.dependencies[ref] + } + if (pkg.peerDependencies?.[ref]) { + return pkg.peerDependencies[ref] + } + } + + get accept () { + return this.#accept + } + + get valid () { + return !this.error + } + + get missing () { + return this.error === 'MISSING' + } + + get invalid () { + return this.error === 'INVALID' + } + + get peerLocal () { + return this.error === 'PEER LOCAL' + } + + get error () { + if (!this.#error) { + if (!this.#to) { + if (this.optional) { + this.#error = null + } else { + this.#error = 'MISSING' + } + } else if (this.peer && this.#from === this.#to.parent && !this.#from?.isTop) { + this.#error = 'PEER LOCAL' + } else if (!this.satisfiedBy(this.#to)) { + this.#error = 'INVALID' + } else if (this.overrides && this.#to.edgesOut.size && OverrideSet.doOverrideSetsConflict(this.overrides, this.#to.overrides)) { + // Any inconsistency between the edge's override set and the target's override set is potentially problematic. + // But we only say the edge is in error if the override sets are plainly conflicting. + // Note that if the target doesn't have any dependencies of their own, then this inconsistency is irrelevant. + this.#error = 'INVALID' + } else { + this.#error = 'OK' + } + } + if (this.#error === 'OK') { + return null + } + return this.#error + } + + reload (hard = false) { + this.#explanation = null + + let needToUpdateOverrideSet = false + let newOverrideSet + let oldOverrideSet + if (this.#from?.overrides) { + newOverrideSet = this.#from.overrides.getEdgeRule(this) + if (newOverrideSet && !newOverrideSet.isEqual(this.overrides)) { + // If there's a new different override set we need to propagate it to the nodes. + // If we're deleting the override set then there's no point propagating it right now since it will be filled with another value later. + needToUpdateOverrideSet = true + oldOverrideSet = this.overrides + this.overrides = newOverrideSet + } + } else { + delete this.overrides + } + const newTo = this.#from?.resolve(this.#name) + if (newTo !== this.#to) { + if (this.#to) { + this.#to.deleteEdgeIn(this) + } + this.#to = newTo + this.#error = null + if (this.#to) { + this.#to.addEdgeIn(this) + } + } else if (hard) { + this.#error = null + } else if (needToUpdateOverrideSet && this.#to) { + // Propagate the new override set to the target node. 
+ this.#to.updateOverridesEdgeInRemoved(oldOverrideSet) + this.#to.updateOverridesEdgeInAdded(newOverrideSet) + } + } + + detach () { + this.#explanation = null + if (this.#to) { + this.#to.deleteEdgeIn(this) + } + this.#from?.edgesOut.delete(this.#name) + this.#to = null + this.#error = 'DETACHED' + this.#from = null + } + + get from () { + return this.#from + } + + get to () { + return this.#to + } + + toJSON () { + return new ArboristEdge(this) + } + + [util.inspect.custom] () { + return this.toJSON() + } +} + +module.exports = Edge diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/from-path.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/from-path.js new file mode 100644 index 0000000000000000000000000000000000000000..f7e447495d14465b248f77219e5a141ef387a9f5 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/from-path.js @@ -0,0 +1,30 @@ +// file dependencies need their dependencies resolved based on the location +// where the tarball was found, not the location where they end up getting +// installed. directory (ie, symlink) deps also need to be resolved based on +// their targets, but that's what realpath is + +const { dirname } = require('node:path') +const npa = require('npm-package-arg') + +const fromPath = (node, edge) => { + if (edge && edge.overrides && edge.overrides.name === edge.name && edge.overrides.value) { + // fromPath could be called with a node that has a virtual root, if that + // happens we want to make sure we get the real root node when overrides + // are in use. this is to allow things like overriding a dependency with a + // tarball file that's a relative path from the project root + if (node.sourceReference) { + return node.sourceReference.root.realpath + } + return node.root.realpath + } + + if (node.resolved) { + const spec = npa(node.resolved) + if (spec?.type === 'file') { + return dirname(spec.fetchSpec) + } + } + return node.realpath +} + +module.exports = fromPath diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/gather-dep-set.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/gather-dep-set.js new file mode 100644 index 0000000000000000000000000000000000000000..39180de38db4a997bf2c49083d7a14ee1046fcad --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/gather-dep-set.js @@ -0,0 +1,43 @@ +// Given a set of nodes in a tree, and a filter function to test +// incoming edges to the dep set that should be ignored otherwise. +// +// find the set of deps that are only depended upon by nodes in the set, or +// their dependencies, or edges that are ignored. +// +// Used when figuring out what to prune when replacing a node with a newer +// version, or when an optional dep fails to install. + +const gatherDepSet = (set, edgeFilter) => { + const deps = new Set(set) + + // add the full set of dependencies. note that this loop will continue + // as the deps set increases in size. + for (const node of deps) { + for (const edge of node.edgesOut.values()) { + if (edge.to && edgeFilter(edge)) { + deps.add(edge.to) + } + } + } + + // now remove all nodes in the set that have a dependent outside the set + // if any change is made, then re-check + // continue until no changes made, or deps set evaporates fully. 
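+  // (Illustrative: with edges root->a and a->b, a starting set of {a}, and
+  //  a filter that ignores edges into a, as a caller replacing a would
+  //  pass, the loop above grows the set to {a, b}; nothing outside the set
+  //  depends on b, so both survive and may be pruned together. If root->b
+  //  also existed, the loop below would delete b, leaving only {a}.)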
+ let changed = true + while (changed === true && deps.size > 0) { + changed = false + for (const dep of deps) { + for (const edge of dep.edgesIn) { + if (!deps.has(edge.from) && edgeFilter(edge)) { + changed = true + deps.delete(dep) + break + } + } + } + } + + return deps +} + +module.exports = gatherDepSet diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/index.js new file mode 100644 index 0000000000000000000000000000000000000000..5baaee6ee7c932ae26806b1f770651eb7d276d30 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/index.js @@ -0,0 +1,6 @@ +module.exports = require('./arborist/index.js') +module.exports.Arborist = module.exports +module.exports.Node = require('./node.js') +module.exports.Link = require('./link.js') +module.exports.Edge = require('./edge.js') +module.exports.Shrinkwrap = require('./shrinkwrap.js') diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/inventory.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/inventory.js new file mode 100644 index 0000000000000000000000000000000000000000..7b3f294fdab2c3f07578df41207f8592ed5ffa10 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/inventory.js @@ -0,0 +1,138 @@ +// a class to manage an inventory and set of indexes of a set of objects based +// on specific fields. +const { hasOwnProperty } = Object.prototype +const debug = require('./debug.js') + +const keys = ['name', 'license', 'funding', 'realpath', 'packageName'] +class Inventory extends Map { + #index + + constructor () { + super() + this.#index = new Map() + for (const key of keys) { + this.#index.set(key, new Map()) + } + } + + // XXX where is this used? + get primaryKey () { + return 'location' + } + + // XXX where is this used? 
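+  // (Illustrative usage of these indexes: inventory.query('name', 'foo')
+  //  yields the Set of nodes named "foo", and inventory.query('license')
+  //  with no value yields the distinct license values seen; see query()
+  //  below.)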
+ get indexes () { + return [...keys] + } + + * filter (fn) { + for (const node of this.values()) { + if (fn(node)) { + yield node + } + } + } + + add (node) { + const root = super.get('') + if (root && node.root !== root && node.root !== root.root) { + debug(() => { + throw Object.assign(new Error('adding external node to inventory'), { + root: root.path, + node: node.path, + nodeRoot: node.root.path, + }) + }) + return + } + + const current = super.get(node.location) + if (current) { + if (current === node) { + return + } + this.delete(current) + } + super.set(node.location, node) + for (const [key, map] of this.#index.entries()) { + let val + if (hasOwnProperty.call(node, key)) { + // if the node has the value, use it even if it's false + val = node[key] + } else if (key === 'license' && node.package) { + // handling for the outdated "licenses" array, just pick the first one + // also support the alternative spelling "licence" + if (node.package.license) { + val = node.package.license + } else if (node.package.licence) { + val = node.package.licence + } else if (Array.isArray(node.package.licenses)) { + val = node.package.licenses[0] + } else if (Array.isArray(node.package.licences)) { + val = node.package.licences[0] + } + } else if (node[key]) { + val = node[key] + } else { + val = node.package?.[key] + } + if (val && typeof val === 'object') { + // We currently only use license and funding + /* istanbul ignore next - not used */ + if (key === 'license') { + val = val.type + } else if (key === 'funding') { + val = val.url + } + } + if (!map.has(val)) { + map.set(val, new Set()) + } + map.get(val).add(node) + } + } + + delete (node) { + if (!this.has(node)) { + return + } + + super.delete(node.location) + for (const [key, map] of this.#index.entries()) { + let val + if (node[key] !== undefined) { + val = node[key] + } else { + val = node.package?.[key] + } + const set = map.get(val) + if (set) { + set.delete(node) + if (set.size === 0) { + map.delete(node[key]) + } + } + } + } + + query (key, val) { + const map = this.#index.get(key) + if (arguments.length === 2) { + if (map.has(val)) { + return map.get(val) + } + return new Set() + } + return map.keys() + } + + has (node) { + return super.get(node.location) === node + } + + set () { + throw new Error('direct set() not supported, use inventory.add(node)') + } +} + +module.exports = Inventory diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/link.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/link.js new file mode 100644 index 0000000000000000000000000000000000000000..42bc1faf488609a1d08968296d685aec06eea317 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/link.js @@ -0,0 +1,126 @@ +const relpath = require('./relpath.js') +const Node = require('./node.js') +const _loadDeps = Symbol.for('Arborist.Node._loadDeps') +const _target = Symbol.for('_target') +const { dirname } = require('node:path') +// defined by Node class +const _delistFromMeta = Symbol.for('_delistFromMeta') +const _refreshLocation = Symbol.for('_refreshLocation') +class Link extends Node { + constructor (options) { + const { root, realpath, target, parent, fsParent, isStoreLink } = options + + if (!realpath && !(target && target.path)) { + throw new TypeError('must provide realpath for Link node') + } + + super({ + ...options, + realpath: realpath || target.path, + root: root || (parent ? 
parent.root + : fsParent ? fsParent.root + : target ? target.root + : null), + }) + + this.isStoreLink = isStoreLink || false + + if (target) { + this.target = target + } else if (this.realpath === this.root.path) { + this.target = this.root + } else { + this.target = new Node({ + ...options, + path: realpath, + parent: null, + fsParent: null, + root: this.root, + }) + } + } + + get version () { + return this.target ? this.target.version : this.package.version || '' + } + + get target () { + return this[_target] + } + + set target (target) { + const current = this[_target] + if (target === current) { + return + } + + if (!target) { + if (current && current.linksIn) { + current.linksIn.delete(this) + } + if (this.path) { + this[_delistFromMeta]() + this[_target] = null + this.package = {} + this[_refreshLocation]() + } else { + this[_target] = null + } + return + } + + if (!this.path) { + // temp node pending assignment to a tree + // we know it's not in the inventory yet, because no path. + if (target.path) { + this.realpath = target.path + } else { + target.path = target.realpath = this.realpath + } + target.root = this.root + this[_target] = target + target.linksIn.add(this) + this.package = target.package + return + } + + // have to refresh metadata, because either realpath or package + // is very likely changing. + this[_delistFromMeta]() + this.package = target.package + this.realpath = target.path + this[_refreshLocation]() + + target.root = this.root + } + + // a link always resolves to the relative path to its target + get resolved () { + // the path/realpath guard is there for the benefit of setting + // these things in the "wrong" order + return this.path && this.realpath + ? `file:${relpath(dirname(this.path), this.realpath)}` + : null + } + + set resolved (r) {} + + // deps are resolved on the target, not the Link + // so this is a no-op + [_loadDeps] () {} + + // links can't have children, only their targets can + // fix it to an empty list so that we can still call + // things that iterate over them, just as a no-op + get children () { + return new Map() + } + + set children (c) {} + + get isLink () { + return true + } +} + +module.exports = Link diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/node.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/node.js new file mode 100644 index 0000000000000000000000000000000000000000..41871756c221cc10872f6f28456781bba958abb6 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/node.js @@ -0,0 +1,1618 @@ +// inventory, path, realpath, root, and parent +// +// node.root is a reference to the root module in the tree (ie, typically the +// cwd project folder) +// +// node.location is the /-delimited path from the root module to the node. In +// the case of link targets that may be outside of the root's package tree, +// this can include some number of /../ path segments. The location of the +// root module is always '.'. node.location thus never contains drive letters +// or absolute paths, and is portable within a given project, suitable for +// inclusion in lockfiles and metadata. +// +// node.path is the path to the place where this node lives on disk. It is +// system-specific and absolute. +// +// node.realpath is the path to where the module actually resides on disk. In +// the case of non-link nodes, node.realpath is equivalent to node.path. 
In +// the case of link nodes, it is equivalent to node.target.path. +// +// Setting node.parent will set the node's root to the parent's root, as well +// as updating edgesIn and edgesOut to reload dependency resolutions as needed, +// and setting node.path to parent.path/node_modules/name. +// +// node.inventory is a Map of name to a Set() of all the nodes under a given +// root by that name. It's empty for non-root nodes, and changing the root +// reference will remove it from the old root's inventory and add it to the new +// one. This map is useful for cases like `npm update foo` or `npm ls foo` +// where we need to quickly find all instances of a given package name within a +// tree. + +const PackageJson = require('@npmcli/package-json') +const nameFromFolder = require('@npmcli/name-from-folder') +const npa = require('npm-package-arg') +const semver = require('semver') +const util = require('node:util') +const { getPaths: getBinPaths } = require('bin-links') +const { log } = require('proc-log') +const { resolve, relative, dirname, basename } = require('node:path') +const { walkUp } = require('walk-up-path') + +const CaseInsensitiveMap = require('./case-insensitive-map.js') +const Edge = require('./edge.js') +const Inventory = require('./inventory.js') +const OverrideSet = require('./override-set.js') +const consistentResolve = require('./consistent-resolve.js') +const debug = require('./debug.js') +const gatherDepSet = require('./gather-dep-set.js') +const printableTree = require('./printable.js') +const querySelectorAll = require('./query-selector-all.js') +const relpath = require('./relpath.js') +const treeCheck = require('./tree-check.js') + +const _package = Symbol('_package') +const _parent = Symbol('_parent') +const _target = Symbol.for('_target') +const _fsParent = Symbol('_fsParent') +const _reloadNamedEdges = Symbol('_reloadNamedEdges') +// overridden by Link class +const _loadDeps = Symbol.for('Arborist.Node._loadDeps') +const _refreshLocation = Symbol.for('_refreshLocation') +const _changePath = Symbol.for('_changePath') +// used by Link class as well +const _delistFromMeta = Symbol.for('_delistFromMeta') +const _explain = Symbol('_explain') +const _explanation = Symbol('_explanation') + +class Node { + #global + #meta + #root + #workspaces + + constructor (options) { + // NB: path can be null if it's a link target + const { + root, + path, + realpath, + parent, + error, + meta, + fsParent, + resolved, + integrity, + // allow setting name explicitly when we haven't set a path yet + name, + children, + fsChildren, + installLinks = false, + legacyPeerDeps = false, + linksIn, + isInStore = false, + hasShrinkwrap, + overrides, + loadOverrides = false, + extraneous = true, + dev = true, + optional = true, + devOptional = true, + peer = true, + global = false, + dummy = false, + sourceReference = null, + inert = false, + } = options + // this object gives querySelectorAll somewhere to stash context about a node + // while processing a query + this.queryContext = {} + + // true if part of a global install + this.#global = global + + this.#workspaces = null + + this.errors = error ? [error] : [] + this.isInStore = isInStore + + // this will usually be null, except when modeling a + // package's dependencies in a virtual root. 
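+    // (Illustrative sketch of that case: when Arborist resolves a package's
+    //  dependencies in a throwaway virtual root before placing it in the
+    //  ideal tree, each virtual node keeps a sourceReference back to the
+    //  node it models; _explain() below surfaces such roots as
+    //  "whileInstalling".)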
+ this.sourceReference = sourceReference + + // have to set the internal package ref before assigning the parent, because this.package is read when adding to inventory + if (sourceReference) { + this[_package] = sourceReference.package + } else { + // TODO if this came from pacote.manifest we don't have to do this, we can be told to skip this step + const pkg = new PackageJson() + let content = {} + // TODO this is overly guarded. If pkg is not an object we should not allow it at all. + if (options.pkg && typeof options.pkg === 'object') { + content = options.pkg + } + pkg.fromContent(content) + pkg.syncNormalize() + this[_package] = pkg.content + } + + this.name = name || + nameFromFolder(path || this.package.name || realpath) || + this.package.name || + null + + // should be equal if not a link + this.path = path ? resolve(path) : null + + if (!this.name && (!this.path || this.path !== dirname(this.path))) { + throw new TypeError('could not detect node name from path or package') + } + + this.realpath = !this.isLink ? this.path : resolve(realpath) + + this.resolved = resolved || null + if (!this.resolved) { + // note: this *only* works for non-file: deps, so we avoid even + // trying here. + // file: deps are tracked in package.json will _resolved set to the + // full path to the tarball or link target. However, if the package + // is checked into git or moved to another location, that's 100% not + // portable at all! The _where and _location don't provide much help, + // since _location is just where the module ended up in the tree, + // and _where can be different than the actual root if it's a + // meta-dep deeper in the dependency graph. + // + // If we don't have the other oldest indicators of legacy npm, then it's + // probably what we're getting from pacote, which IS trustworthy. + // + // Otherwise, hopefully a shrinkwrap will help us out. + const resolved = consistentResolve(this.package._resolved) + if (resolved && !(/^file:/.test(resolved) && this.package._where)) { + this.resolved = resolved + } + } + this.integrity = integrity || this.package._integrity || null + this.hasShrinkwrap = hasShrinkwrap || this.package._hasShrinkwrap || false + this.installLinks = installLinks + this.legacyPeerDeps = legacyPeerDeps + + this.children = new CaseInsensitiveMap() + this.fsChildren = new Set() + this.inventory = new Inventory() + this.tops = new Set() + this.linksIn = new Set(linksIn || []) + + // these three are set by an Arborist taking a catalog + // after the tree is built. We don't get this along the way, + // because they have a tendency to change as new children are + // added, especially when they're deduped. Eg, a dev dep may be + // a 3-levels-deep dependency of a non-dev dep. If we calc the + // flags along the way, then they'll tend to be invalid by the + // time we need to look at them. + if (!dummy) { + this.dev = dev + this.optional = optional + this.devOptional = devOptional + this.peer = peer + this.extraneous = extraneous + this.dummy = false + } else { + // true if this is a placeholder for the purpose of serving as a + // fsParent to link targets that get their deps resolved outside + // the root tree folder. 
+ this.dummy = true + this.dev = false + this.optional = false + this.devOptional = false + this.peer = false + this.extraneous = false + } + + this.inert = inert + + this.edgesIn = new Set() + this.edgesOut = new CaseInsensitiveMap() + + if (overrides) { + this.overrides = overrides + } else if (loadOverrides) { + const overrides = this.package.overrides || {} + if (Object.keys(overrides).length > 0) { + this.overrides = new OverrideSet({ + overrides: this.package.overrides, + }) + } + } + + // only relevant for the root and top nodes + this.meta = meta + + // Note: this is _slightly_ less efficient for the initial tree + // building than it could be, but in exchange, it's a much simpler + // algorithm. + // If this node has a bunch of children, and those children satisfy + // its various deps, then we're going to _first_ create all the + // edges, and _then_ assign the children into place, re-resolving + // them all in _reloadNamedEdges. + // A more efficient, but more complicated, approach would be to + // flag this node as being a part of a tree build, so it could + // hold off on resolving its deps until its children are in place. + + // call the parent setter + // Must be set prior to calling _loadDeps, because top-ness is relevant + + // will also assign root if present on the parent + this[_parent] = null + this.parent = parent || null + + this[_fsParent] = null + this.fsParent = fsParent || null + + // see parent/root setters below. + // root is set to parent's root if we have a parent; otherwise, if it's + // null, then it's set to the node itself. + if (!parent && !fsParent) { + this.root = root || null + } + + // mostly a convenience for testing, but also a way to create + // trees in a more declarative way than setting parent on each + if (children) { + for (const c of children) { + new Node({ ...c, parent: this }) + } + } + if (fsChildren) { + for (const c of fsChildren) { + new Node({ ...c, fsParent: this }) + } + } + + // now load all the dep edges + this[_loadDeps]() + } + + get meta () { + return this.#meta + } + + set meta (meta) { + this.#meta = meta + if (meta) { + meta.add(this) + } + } + + get global () { + if (this.#root === this) { + return this.#global + } + return this.#root.global + } + + // true for packages installed directly in the global node_modules folder + get globalTop () { + return this.global && this.parent && this.parent.isProjectRoot + } + + get workspaces () { + return this.#workspaces + } + + set workspaces (workspaces) { + // deletes edges if they already exists + if (this.#workspaces) { + for (const name of this.#workspaces.keys()) { + if (!workspaces.has(name)) { + this.edgesOut.get(name).detach() + } + } + } + + this.#workspaces = workspaces + this.#loadWorkspaces() + this[_loadDeps]() + } + + get binPaths () { + if (!this.parent) { + return [] + } + + return getBinPaths({ + pkg: this.package, + path: this.path, + global: this.global, + top: this.globalTop, + }) + } + + get hasInstallScript () { + const { hasInstallScript, scripts } = this.package + const { install, preinstall, postinstall } = scripts || {} + return !!(hasInstallScript || install || preinstall || postinstall) + } + + get version () { + return this.package.version || '' + } + + get packageName () { + return this.package.name || null + } + + get pkgid () { + const { name = '', version = '' } = this.package + // root package will prefer package name over folder name, + // and never be called an alias. + const { isProjectRoot } = this + const myname = isProjectRoot ? 
name || this.name + : this.name + const alias = !isProjectRoot && name && myname !== name ? `npm:${name}@` + : '' + return `${myname}@${alias}${version}` + } + + get overridden () { + if (!this.overrides) { + return false + } + if (!this.overrides.value) { + return false + } + if (this.overrides.name !== this.name) { + return false + } + + // The overrides rule is for a package with this name, but some override rules only apply to specific + // versions. To make sure this package was actually overridden, we check whether any edge going in + // had the rule applied to it, in which case its overrides set is different than its source node. + for (const edge of this.edgesIn) { + if (edge.overrides && edge.overrides.name === this.name && edge.overrides.value === this.version) { + if (!edge.overrides.isEqual(edge.from.overrides)) { + return true + } + } + } + + return false + } + + get package () { + return this[_package] + } + + set package (pkg) { + // just detach them all. we could make this _slightly_ more efficient + // by only detaching the ones that changed, but we'd still have to walk + // them all, and the comparison logic gets a bit tricky. we generally + // only do this more than once at the root level, so the resolve() calls + // are only one level deep, and there's not much to be saved, anyway. + // simpler to just toss them all out. + for (const edge of this.edgesOut.values()) { + edge.detach() + } + + this[_explanation] = null + /* istanbul ignore next - should be impossible */ + if (!pkg || typeof pkg !== 'object') { + debug(() => { + throw new Error('setting Node.package to non-object') + }) + pkg = {} + } + this[_package] = pkg + this.#loadWorkspaces() + this[_loadDeps]() + // do a hard reload, since the dependents may now be valid or invalid + // as a result of the package change. + this.edgesIn.forEach(edge => edge.reload(true)) + } + + // node.explain(nodes seen already, edge we're trying to satisfy + // if edge is not specified, it lists every edge into the node. + explain (edge = null, seen = []) { + if (this[_explanation]) { + return this[_explanation] + } + + return this[_explanation] = this[_explain](edge, seen) + } + + [_explain] (edge, seen) { + if (this.isProjectRoot && !this.sourceReference) { + return { + location: this.path, + } + } + + const why = { + name: this.isProjectRoot || this.isTop ? this.packageName : this.name, + version: this.package.version, + } + if (this.errors.length || !this.packageName || !this.package.version) { + why.errors = this.errors.length ? this.errors : [ + new Error('invalid package: lacks name and/or version'), + ] + why.package = this.package + } + + if (this.root.sourceReference) { + const { name, version } = this.root.package + why.whileInstalling = { + name, + version, + path: this.root.sourceReference.path, + } + } + + if (this.sourceReference) { + return this.sourceReference.explain(edge, seen) + } + + if (seen.includes(this)) { + return why + } + + why.location = this.location + why.isWorkspace = this.isWorkspace + + // make a new list each time. we can revisit, but not loop. + seen = seen.concat(this) + + why.dependents = [] + if (edge) { + why.dependents.push(edge.explain(seen)) + } else { + // ignore invalid edges, since those aren't satisfied by this thing, + // and are not keeping it held in this spot anyway. 
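+      // (Illustrative result shape, as consumed by `npm explain`:
+      //  { name, version, location, isWorkspace,
+      //    dependents: [{ type, name, spec, from: { ... } }] }.)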
+ const edges = [] + for (const edge of this.edgesIn) { + if (!edge.valid && !edge.from.isProjectRoot) { + continue + } + + edges.push(edge) + } + for (const edge of edges) { + why.dependents.push(edge.explain(seen)) + } + } + + if (this.linksIn.size) { + why.linksIn = [...this.linksIn].map(link => link[_explain](edge, seen)) + } + + return why + } + + isDescendantOf (node) { + for (let p = this; p; p = p.resolveParent) { + if (p === node) { + return true + } + } + return false + } + + shouldOmit (omitSet) { + if (!omitSet.size) { + return false + } + + const { top } = this + + // if the top is not the root or workspace then we do not want to omit it + if (!top.isProjectRoot && !top.isWorkspace) { + return false + } + + // omit node if the dep type matches any omit flags that were set + return ( + this.peer && omitSet.has('peer') || + this.dev && omitSet.has('dev') || + this.optional && omitSet.has('optional') || + this.devOptional && omitSet.has('optional') && omitSet.has('dev') + ) + } + + getBundler (path = []) { + // made a cycle, definitely not bundled! + if (path.includes(this)) { + return null + } + + path.push(this) + + const parent = this[_parent] + if (!parent) { + return null + } + + const pBundler = parent.getBundler(path) + if (pBundler) { + return pBundler + } + + const ppkg = parent.package + const bd = ppkg && ppkg.bundleDependencies + // explicit bundling + if (Array.isArray(bd) && bd.includes(this.name)) { + return parent + } + + // deps that are deduped up to the bundling level are bundled. + // however, if they get their dep met further up than that, + // then they are not bundled. Ie, installing a package with + // unmet bundled deps will not cause your deps to be bundled. + for (const edge of this.edgesIn) { + const eBundler = edge.from.getBundler(path) + if (!eBundler) { + continue + } + + if (eBundler === parent) { + return eBundler + } + } + + return null + } + + get inBundle () { + return !!this.getBundler() + } + + // when reifying, if a package is technically in a bundleDependencies list, + // but that list is the root project, we still have to install it. This + // getter returns true if it's in a dependency's bundle list, not the root's. + get inDepBundle () { + const bundler = this.getBundler() + return !!bundler && bundler !== this.root + } + + get isWorkspace () { + if (this.isProjectRoot) { + return false + } + const { root } = this + const { type, to } = root.edgesOut.get(this.packageName) || {} + return type === 'workspace' && to && (to.target === this || to === this) + } + + get isRoot () { + return this === this.root + } + + get isProjectRoot () { + // only treat as project root if it's the actual link that is the root, + // or the target of the root link, but NOT if it's another link to the + // same root that happens to be somewhere else. 
+ return this === this.root || this === this.root.target + } + + get isRegistryDependency () { + if (this.edgesIn.size === 0) { + return false + } + for (const edge of this.edgesIn) { + if (!npa(edge.spec).registry) { + return false + } + } + return true + } + + * ancestry () { + for (let anc = this; anc; anc = anc.resolveParent) { + yield anc + } + } + + set root (root) { + // setting to null means this is the new root + // should only ever be one step + while (root && root.root !== root) { + root = root.root + } + + root = root || this + + // delete from current root inventory + this[_delistFromMeta]() + + // can't set the root (yet) if there's no way to determine location + // this allows us to do new Node({...}) and then set the root later. + // just make the assignment so we don't lose it, and move on. + if (!this.path || !root.realpath || !root.path) { + this.#root = root + return + } + + // temporarily become a root node + this.#root = this + + // break all linksIn, we're going to re-set them if needed later + for (const link of this.linksIn) { + link[_target] = null + this.linksIn.delete(link) + } + + // temporarily break this link as well, we'll re-set if possible later + const { target } = this + if (this.isLink) { + if (target) { + target.linksIn.delete(this) + if (target.root === this) { + target[_delistFromMeta]() + } + } + this[_target] = null + } + + // if this is part of a cascading root set, then don't do this bit + // but if the parent/fsParent is in a different set, we have to break + // that reference before proceeding + if (this.parent && this.parent.root !== root) { + this.parent.children.delete(this.name) + this[_parent] = null + } + if (this.fsParent && this.fsParent.root !== root) { + this.fsParent.fsChildren.delete(this) + this[_fsParent] = null + } + + if (root === this) { + this[_refreshLocation]() + } else { + // setting to some different node. + const loc = relpath(root.realpath, this.path) + const current = root.inventory.get(loc) + + // clobber whatever is there now + if (current) { + current.root = null + } + + this.#root = root + // set this.location and add to inventory + this[_refreshLocation]() + + // try to find our parent/fsParent in the new root inventory + for (const p of walkUp(dirname(this.path))) { + if (p === this.path) { + continue + } + const ploc = relpath(root.realpath, p) + const parent = root.inventory.get(ploc) + if (parent) { + /* istanbul ignore next - impossible */ + if (parent.isLink) { + debug(() => { + throw Object.assign(new Error('assigning parentage to link'), { + path: this.path, + parent: parent.path, + parentReal: parent.realpath, + }) + }) + continue + } + const childLoc = `${ploc}${ploc ? '/' : ''}node_modules/${this.name}` + const isParent = this.location === childLoc + if (isParent) { + const oldChild = parent.children.get(this.name) + if (oldChild && oldChild !== this) { + oldChild.root = null + } + if (this.parent) { + this.parent.children.delete(this.name) + this.parent[_reloadNamedEdges](this.name) + } + parent.children.set(this.name, this) + this[_parent] = parent + // don't do it for links, because they don't have a target yet + // we'll hit them up a bit later on. + if (!this.isLink) { + parent[_reloadNamedEdges](this.name) + } + } else { + /* istanbul ignore if - should be impossible, since we break + * all fsParent/child relationships when moving? 
*/ + if (this.fsParent) { + this.fsParent.fsChildren.delete(this) + } + parent.fsChildren.add(this) + this[_fsParent] = parent + } + break + } + } + + // if it doesn't have a parent, it's a top node + if (!this.parent) { + root.tops.add(this) + } else { + root.tops.delete(this) + } + + // assign parentage for any nodes that need to have this as a parent + // this can happen when we have a node at nm/a/nm/b added *before* + // the node at nm/a, which might have the root node as a fsParent. + // we can't rely on the public setter here, because it calls into + // this function to set up these references! + // check dirname so that /foo isn't treated as the fsparent of /foo-bar + const nmloc = `${this.location}${this.location ? '/' : ''}node_modules/` + // only walk top nodes, since anything else already has a parent. + for (const child of root.tops) { + const isChild = child.location === nmloc + child.name + const isFsChild = + dirname(child.path).startsWith(this.path) && + child !== this && + !child.parent && + ( + !child.fsParent || + child.fsParent === this || + dirname(this.path).startsWith(child.fsParent.path) + ) + + if (!isChild && !isFsChild) { + continue + } + + // set up the internal parentage links + if (this.isLink) { + child.root = null + } else { + // can't possibly have a parent, because it's in tops + if (child.fsParent) { + child.fsParent.fsChildren.delete(child) + } + child[_fsParent] = null + if (isChild) { + this.children.set(child.name, child) + child[_parent] = this + root.tops.delete(child) + } else { + this.fsChildren.add(child) + child[_fsParent] = this + } + } + } + + // look for any nodes with the same realpath. either they're links + // to that realpath, or a thing at that realpath if we're adding a link + // (if we're adding a regular node, we already deleted the old one) + for (const node of root.inventory.query('realpath', this.realpath)) { + if (node === this) { + continue + } + + /* istanbul ignore next - should be impossible */ + debug(() => { + if (node.root !== root) { + throw new Error('inventory contains node from other root') + } + }) + + if (this.isLink) { + const target = node.target + this[_target] = target + this[_package] = target.package + target.linksIn.add(this) + // reload edges here, because now we have a target + if (this.parent) { + this.parent[_reloadNamedEdges](this.name) + } + break + } else { + /* istanbul ignore else - should be impossible */ + if (node.isLink) { + node[_target] = this + node[_package] = this.package + this.linksIn.add(node) + if (node.parent) { + node.parent[_reloadNamedEdges](node.name) + } + } else { + debug(() => { + throw Object.assign(new Error('duplicate node in root setter'), { + path: this.path, + realpath: this.realpath, + root: root.realpath, + }) + }) + } + } + } + } + + // reload all edgesIn where the root doesn't match, so we don't have + // cross-tree dependency graphs + for (const edge of this.edgesIn) { + if (edge.from.root !== root) { + edge.reload() + } + } + // reload all edgesOut where root doesn't match, or is missing, since + // it might not be missing in the new tree + for (const edge of this.edgesOut.values()) { + if (!edge.to || edge.to.root !== root) { + edge.reload() + } + } + + // now make sure our family comes along for the ride! 
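+    // (Sketched in two passes: the loops below first detach every child,
+    //  fsChild, inventory entry, and incoming link still pointing at the
+    //  old root, and only then re-root the survivors, so nothing gets
+    //  re-linked through a half-updated tree mid-walk.)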
+ const family = new Set([ + ...this.fsChildren, + ...this.children.values(), + ...this.inventory.values(), + ].filter(n => n !== this)) + + for (const child of family) { + if (child.root !== root) { + child[_delistFromMeta]() + child[_parent] = null + this.children.delete(child.name) + child[_fsParent] = null + this.fsChildren.delete(child) + for (const l of child.linksIn) { + l[_target] = null + child.linksIn.delete(l) + } + } + } + for (const child of family) { + if (child.root !== root) { + child.root = root + } + } + + // if we had a target, and didn't find one in the new root, then bring + // it over as well, but only if we're setting the link into a new root, + // as we don't want to lose the target any time we remove a link. + if (this.isLink && target && !this.target && root !== this) { + target.root = root + } + + // tree should always be valid upon root setter completion. + treeCheck(this) + if (this !== root) { + treeCheck(root) + } + } + + get root () { + return this.#root || this + } + + #loadWorkspaces () { + if (!this.#workspaces) { + return + } + + for (const [name, path] of this.#workspaces.entries()) { + new Edge({ from: this, name, spec: `file:${path}`, type: 'workspace' }) + } + } + + [_loadDeps] () { + // Caveat! Order is relevant! + // Packages in optionalDependencies are optional. + // Packages in both deps and devDeps are required. + // Note the subtle breaking change from v6: it is no longer possible + // to have a different spec for a devDep than production dep. + + // Linked targets that are disconnected from the tree are tops, + // but don't have a 'path' field, only a 'realpath', because we + // don't know their canonical location. We don't need their devDeps. + const pd = this.package.peerDependencies + const ad = this.package.acceptDependencies || {} + if (pd && typeof pd === 'object' && !this.legacyPeerDeps) { + const pm = this.package.peerDependenciesMeta || {} + const peerDependencies = {} + const peerOptional = {} + for (const [name, dep] of Object.entries(pd)) { + if (pm[name]?.optional) { + peerOptional[name] = dep + } else { + peerDependencies[name] = dep + } + } + this.#loadDepType(peerDependencies, 'peer', ad) + this.#loadDepType(peerOptional, 'peerOptional', ad) + } + + this.#loadDepType(this.package.dependencies, 'prod', ad) + this.#loadDepType(this.package.optionalDependencies, 'optional', ad) + + const { globalTop, isTop, path, sourceReference } = this + const { + globalTop: srcGlobalTop, + isTop: srcTop, + path: srcPath, + } = sourceReference || {} + const thisDev = isTop && !globalTop && path + const srcDev = !sourceReference || srcTop && !srcGlobalTop && srcPath + if (thisDev && srcDev) { + this.#loadDepType(this.package.devDependencies, 'dev', ad) + } + } + + #loadDepType (deps, type, ad) { + // Because of the order in which _loadDeps runs, we always want to + // prioritize a new edge over an existing one + for (const [name, spec] of Object.entries(deps || {})) { + const current = this.edgesOut.get(name) + if (!current || current.type !== 'workspace') { + new Edge({ from: this, name, spec, accept: ad[name], type }) + } + } + } + + get fsParent () { + // in debug setter prevents fsParent from being this + return this[_fsParent] + } + + set fsParent (fsParent) { + if (!fsParent) { + if (this[_fsParent]) { + this.root = null + } + return + } + + debug(() => { + if (fsParent === this) { + throw new Error('setting node to its own fsParent') + } + + if (fsParent.realpath === this.realpath) { + throw new Error('setting fsParent to same path') + } + 
+ // the initial set MUST be an actual walk-up from the realpath + // subsequent sets will re-root on the new fsParent's path. + if (!this[_fsParent] && this.realpath.indexOf(fsParent.realpath) !== 0) { + throw Object.assign(new Error('setting fsParent improperly'), { + path: this.path, + realpath: this.realpath, + fsParent: { + path: fsParent.path, + realpath: fsParent.realpath, + }, + }) + } + }) + + if (fsParent.isLink) { + fsParent = fsParent.target + } + + // setting a thing to its own fsParent is not normal, but no-op for safety + if (this === fsParent || fsParent.realpath === this.realpath) { + return + } + + // nothing to do + if (this[_fsParent] === fsParent) { + return + } + + const oldFsParent = this[_fsParent] + const newPath = !oldFsParent ? this.path + : resolve(fsParent.path, relative(oldFsParent.path, this.path)) + const nmPath = resolve(fsParent.path, 'node_modules', this.name) + + // this is actually the parent, set that instead + if (newPath === nmPath) { + this.parent = fsParent + return + } + + const pathChange = newPath !== this.path + + // remove from old parent/fsParent + const oldParent = this.parent + const oldName = this.name + if (this.parent) { + this.parent.children.delete(this.name) + this[_parent] = null + } + if (this.fsParent) { + this.fsParent.fsChildren.delete(this) + this[_fsParent] = null + } + + // update this.path/realpath for this and all children/fsChildren + if (pathChange) { + this[_changePath](newPath) + } + + if (oldParent) { + oldParent[_reloadNamedEdges](oldName) + } + + // clobbers anything at that path, resets all appropriate references + this.root = fsParent.root + } + + // is it safe to replace one node with another? check the edges to + // make sure no one will get upset. Note that the node might end up + // having its own unmet dependencies, if the new node has new deps. + // Note that there are cases where Arborist will opt to insert a node + // into the tree even though this function returns false! This is + // necessary when a root dependency is added or updated, or when a + // root dependency brings peer deps along with it. In that case, we + // will go ahead and create the invalid state, and then try to resolve + // it with more tree construction, because it's a user request. + canReplaceWith (node, ignorePeers) { + if (node.name !== this.name) { + return false + } + + if (node.packageName !== this.packageName) { + return false + } + + // If this node has no dependencies, then it's irrelevant to check the override + // rules of the replacement node. + if (this.edgesOut.size) { + // XXX need to check for two root nodes? + if (node.overrides) { + if (!node.overrides.isEqual(this.overrides)) { + return false + } + } else { + if (this.overrides) { + return false + } + } + } + + ignorePeers = new Set(ignorePeers) + + // gather up all the deps of this node and that are only depended + // upon by deps of this node. those ones don't count, since + // they'll be replaced if this node is replaced anyway. + const depSet = gatherDepSet([this], e => e.to !== this && e.valid) + + for (const edge of this.edgesIn) { + // when replacing peer sets, we need to be able to replace the entire + // peer group, which means we ignore incoming edges from other peers + // within the replacement set. 
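+      // (Illustrative: when swapping out a peer set such as
+      //  {react, react-dom}, react-dom's peer edge into react must not veto
+      //  the replacement of react, so peer edges from siblings named in
+      //  ignorePeers are skipped here.)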
+ if (!this.isTop && + edge.from.parent === this.parent && + edge.peer && + ignorePeers.has(edge.from.name)) { + continue + } + + // only care about edges that don't originate from this node + if (!depSet.has(edge.from) && !edge.satisfiedBy(node)) { + return false + } + } + + return true + } + + canReplace (node, ignorePeers) { + return node.canReplaceWith(this, ignorePeers) + } + + // return true if it's safe to remove this node, because anything that + // is depending on it would be fine with the thing that they would resolve + // to if it was removed, or nothing is depending on it in the first place. + canDedupe (preferDedupe = false, explicitRequest = false) { + // not allowed to mess with shrinkwraps or bundles + if (this.inDepBundle || this.inShrinkwrap) { + return false + } + + // it's a top level pkg, or a dep of one + if (!this.resolveParent || !this.resolveParent.resolveParent) { + return false + } + + // no one wants it, remove it + if (this.edgesIn.size === 0) { + return true + } + + const other = this.resolveParent.resolveParent.resolve(this.name) + + // nothing else, need this one + if (!other) { + return false + } + + // if it's the same thing, then always fine to remove + if (other.matches(this)) { + return true + } + + // if the other thing can't replace this, then skip it + if (!other.canReplace(this)) { + return false + } + + // if we prefer dedupe, or if the version is equal, take the other + if (preferDedupe || semver.eq(other.version, this.version)) { + return true + } + + // if our current version isn't the result of an override, then prefer to take the greater version + if (!this.overridden && semver.gt(other.version, this.version)) { + return true + } + + // if the other version was an explicit request, then prefer to take the other version + if (explicitRequest) { + return true + } + + return false + } + + satisfies (requested) { + if (requested instanceof Edge) { + return this.name === requested.name && requested.satisfiedBy(this) + } + + const parsed = npa(requested) + const { name = this.name, rawSpec: spec } = parsed + return this.name === name && this.satisfies(new Edge({ + from: new Node({ path: this.root.realpath }), + type: 'prod', + name, + spec, + })) + } + + matches (node) { + // if the nodes are literally the same object, obviously a match. + if (node === this) { + return true + } + + // if the names don't match, they're different things, even if + // the package contents are identical. + if (node.name !== this.name) { + return false + } + + // if they're links, they match if the targets match + if (this.isLink) { + return node.isLink && this.target.matches(node.target) + } + + // if they're two project root nodes, they're different if the paths differ + if (this.isProjectRoot && node.isProjectRoot) { + return this.path === node.path + } + + // if the integrity matches, then they're the same. + if (this.integrity && node.integrity) { + return this.integrity === node.integrity + } + + // if no integrity, check resolved + if (this.resolved && node.resolved) { + return this.resolved === node.resolved + } + + // if no resolved, check both package name and version + // otherwise, conclude that they are different things + return this.packageName && node.packageName && + this.packageName === node.packageName && + this.version && node.version && + this.version === node.version + } + + // replace this node with the supplied argument + // Useful when mutating an ideal tree, so we can avoid having to call + // the parent/root setters more than necessary. 
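+ // (Usage note, not in the original: `old.replaceWith(replacement)` is
+ // just `replacement.replace(old)`; both delist the old node and splice
+ // the replacement into its place.)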
+ replaceWith (node) {
+ node.replace(this)
+ }
+
+ replace (node) {
+ this[_delistFromMeta]()
+
+ // if the name matches, but is not identical, we are intending to clobber
+ // something case-insensitively, so merely setting name and path won't
+ // have the desired effect. just set the path so it'll collide in the
+ // parent's children map, and leave it at that.
+ if (node.parent?.children.get(this.name) === node) {
+ this.path = resolve(node.parent.path, 'node_modules', this.name)
+ } else {
+ this.path = node.path
+ this.name = node.name
+ }
+
+ if (!this.isLink) {
+ this.realpath = this.path
+ }
+ this[_refreshLocation]()
+
+ // keep children when a node replaces another
+ if (!this.isLink) {
+ for (const kid of node.children.values()) {
+ kid.parent = this
+ }
+ if (node.isLink && node.target) {
+ node.target.root = null
+ }
+ }
+
+ if (!node.isRoot) {
+ this.root = node.root
+ }
+
+ treeCheck(this)
+ }
+
+ get inShrinkwrap () {
+ return this.parent &&
+ (this.parent.hasShrinkwrap || this.parent.inShrinkwrap)
+ }
+
+ get parent () {
+ // setter prevents _parent from being this
+ return this[_parent]
+ }
+
+ // This setter keeps everything in order when we move a node from
+ // one point in a logical tree to another. Edges get reloaded,
+ // metadata updated, etc. It's also called when we *replace* a node
+ // with another by the same name (eg, to update or dedupe).
+ // This does a couple of walks out on the node_modules tree, recursing
+ // into child nodes. However, as setting the parent is typically done
+ // with nodes that don't have many children, and (deduped) package
+ // trees tend to be broad rather than deep, it's not that bad.
+ // The only walk that starts from the parent rather than this node is
+ // limited by edge name.
+ set parent (parent) {
+ // when setting to null, just remove it from the tree entirely
+ if (!parent) {
+ // but only delete it if we actually had a parent in the first place
+ // otherwise it's just setting to null when it's already null
+ if (this[_parent]) {
+ this.root = null
+ }
+ return
+ }
+
+ if (parent.isLink) {
+ parent = parent.target
+ }
+
+ // setting a thing to its own parent is not normal, but no-op for safety
+ if (this === parent) {
+ return
+ }
+
+ const oldParent = this[_parent]
+
+ // nothing to do
+ if (oldParent === parent) {
+ return
+ }
+
+ // ok now we know something is actually changing, and parent is not a link
+ const newPath = resolve(parent.path, 'node_modules', this.name)
+ const pathChange = newPath !== this.path
+
+ // remove from old parent/fsParent
+ if (oldParent) {
+ oldParent.children.delete(this.name)
+ this[_parent] = null
+ }
+ if (this.fsParent) {
+ this.fsParent.fsChildren.delete(this)
+ this[_fsParent] = null
+ }
+
+ // update this.path/realpath for this and all children/fsChildren
+ if (pathChange) {
+ this[_changePath](newPath)
+ }
+
+ // clobbers anything at that path, resets all appropriate references
+ this.root = parent.root
+ }
+
+ // Call this before changing path or updating the _root reference.
+ // Removes the node from its root's metadata and inventory.
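+ // (Illustrative ordering note, mirroring what _changePath does below:
+ //   this[_delistFromMeta]()   // drop the old root bookkeeping first
+ //   this.path = newPath       // then mutate the path
+ //   this[_refreshLocation]()  // then re-index under the new location
+ // )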
+ [_delistFromMeta] () { + const root = this.root + if (!root.realpath || !this.path) { + return + } + root.inventory.delete(this) + root.tops.delete(this) + if (root.meta) { + root.meta.delete(this.path) + } + /* istanbul ignore next - should be impossible */ + debug(() => { + if ([...root.inventory.values()].includes(this)) { + throw new Error('failed to delist') + } + }) + } + + // update this.path/realpath and the paths of all children/fsChildren + [_changePath] (newPath) { + // have to de-list before changing paths + this[_delistFromMeta]() + const oldPath = this.path + this.path = newPath + const namePattern = /(?:^|\/|\\)node_modules[\\/](@[^/\\]+[\\/][^\\/]+|[^\\/]+)$/ + const nameChange = newPath.match(namePattern) + if (nameChange && this.name !== nameChange[1]) { + this.name = nameChange[1].replace(/\\/g, '/') + } + + // if we move a link target, update link realpaths + if (!this.isLink) { + this.realpath = newPath + for (const link of this.linksIn) { + link[_delistFromMeta]() + link.realpath = newPath + link[_refreshLocation]() + } + } + // if we move /x to /y, then a module at /x/a/b becomes /y/a/b + for (const child of this.fsChildren) { + child[_changePath](resolve(newPath, relative(oldPath, child.path))) + } + for (const [name, child] of this.children.entries()) { + child[_changePath](resolve(newPath, 'node_modules', name)) + } + + this[_refreshLocation]() + } + + // Called whenever the root/parent is changed. + // NB: need to remove from former root's meta/inventory and then update + // this.path BEFORE calling this method! + [_refreshLocation] () { + const root = this.root + const loc = relpath(root.realpath, this.path) + + this.location = loc + + root.inventory.add(this) + if (root.meta) { + root.meta.add(this) + } + } + + assertRootOverrides () { + if (!this.isProjectRoot || !this.overrides) { + return + } + + for (const edge of this.edgesOut.values()) { + // if these differ an override has been applied, those are not allowed + // for top level dependencies so throw an error + if (edge.spec !== edge.rawSpec && !edge.spec.startsWith('$')) { + throw Object.assign(new Error(`Override for ${edge.name}@${edge.rawSpec} conflicts with direct dependency`), { code: 'EOVERRIDE' }) + } + } + } + + addEdgeOut (edge) { + if (this.overrides) { + edge.overrides = this.overrides.getEdgeRule(edge) + } + + this.edgesOut.set(edge.name, edge) + } + + recalculateOutEdgesOverrides () { + // For each edge out propagate the new overrides through. + for (const edge of this.edgesOut.values()) { + edge.reload(true) + if (edge.to) { + edge.to.updateOverridesEdgeInAdded(edge.overrides) + } + } + } + + updateOverridesEdgeInRemoved (otherOverrideSet) { + // If this edge's overrides isn't equal to this node's overrides, then removing it won't change newOverrideSet later. + if (!this.overrides || !this.overrides.isEqual(otherOverrideSet)) { + return false + } + let newOverrideSet + for (const edge of this.edgesIn) { + if (newOverrideSet && edge.overrides) { + newOverrideSet = OverrideSet.findSpecificOverrideSet(edge.overrides, newOverrideSet) + } else { + newOverrideSet = edge.overrides + } + } + if (this.overrides.isEqual(newOverrideSet)) { + return false + } + this.overrides = newOverrideSet + if (this.overrides) { + // Optimization: if there's any override set at all, then no non-extraneous node has an empty override set. So if we temporarily have no + // override set (for example, we removed all the edges in), there's no use updating all the edges out right now. 
Let's just wait until + // we have an actual override set later. + this.recalculateOutEdgesOverrides() + } + return true + } + + // This logic isn't perfect either. When we have two edges in that have different override sets, then we have to decide which set is correct. + // This function assumes the more specific override set is applicable, so if we have dependencies A->B->C and A->C + // and an override set that specifies what happens for C under A->B, this will work even if the new A->C edge comes along and tries to change + // the override set. + // The strictly correct logic is not to allow two edges with different overrides to point to the same node, because even if this node can satisfy + // both, one of its dependencies might need to be different depending on the edge leading to it. + // However, this might cause a lot of duplication, because the conflict in the dependencies might never actually happen. + updateOverridesEdgeInAdded (otherOverrideSet) { + if (!otherOverrideSet) { + // Assuming there are any overrides at all, the overrides field is never undefined for any node at the end state of the tree. + // So if the new edge's overrides is undefined it will be updated later. So we can wait with updating the node's overrides field. + return false + } + if (!this.overrides) { + this.overrides = otherOverrideSet + this.recalculateOutEdgesOverrides() + return true + } + if (this.overrides.isEqual(otherOverrideSet)) { + return false + } + const newOverrideSet = OverrideSet.findSpecificOverrideSet(this.overrides, otherOverrideSet) + if (newOverrideSet) { + if (!this.overrides.isEqual(newOverrideSet)) { + this.overrides = newOverrideSet + this.recalculateOutEdgesOverrides() + return true + } + return false + } + // This is an error condition. We can only get here if the new override set is in conflict with the existing. + log.silly('Conflicting override sets', this.name) + } + + deleteEdgeIn (edge) { + this.edgesIn.delete(edge) + if (edge.overrides) { + this.updateOverridesEdgeInRemoved(edge.overrides) + } + } + + addEdgeIn (edge) { + // We need to handle the case where the new edge in has an overrides field which is different from the current value. 
+ if (!this.overrides || !this.overrides.isEqual(edge.overrides)) { + this.updateOverridesEdgeInAdded(edge.overrides) + } + + this.edgesIn.add(edge) + + // try to get metadata from the yarn.lock file + if (this.root.meta) { + this.root.meta.addEdge(edge) + } + } + + [_reloadNamedEdges] (name, rootLoc = this.location) { + const edge = this.edgesOut.get(name) + // if we don't have an edge, do nothing, but keep descending + const rootLocResolved = edge && edge.to && + edge.to.location === `${rootLoc}/node_modules/${edge.name}` + const sameResolved = edge && this.resolve(name) === edge.to + const recheck = rootLocResolved || !sameResolved + if (edge && recheck) { + edge.reload(true) + } + for (const c of this.children.values()) { + c[_reloadNamedEdges](name, rootLoc) + } + + for (const c of this.fsChildren) { + c[_reloadNamedEdges](name, rootLoc) + } + } + + get isLink () { + return false + } + + get target () { + return this + } + + set target (n) { + debug(() => { + throw Object.assign(new Error('cannot set target on non-Link Nodes'), { + path: this.path, + }) + }) + } + + get depth () { + if (this.isTop) { + return 0 + } + return this.parent.depth + 1 + } + + get isTop () { + return !this.parent || this.globalTop + } + + get top () { + if (this.isTop) { + return this + } + return this.parent.top + } + + get isFsTop () { + return !this.fsParent + } + + get fsTop () { + if (this.isFsTop) { + return this + } + return this.fsParent.fsTop + } + + get resolveParent () { + return this.parent || this.fsParent + } + + resolve (name) { + /* istanbul ignore next - should be impossible, + * but I keep doing this mistake in tests */ + debug(() => { + if (typeof name !== 'string' || !name) { + throw new Error('non-string passed to Node.resolve') + } + }) + const mine = this.children.get(name) + if (mine) { + return mine + } + const resolveParent = this.resolveParent + if (resolveParent) { + return resolveParent.resolve(name) + } + return null + } + + inNodeModules () { + const rp = this.realpath + const name = this.name + const scoped = name.charAt(0) === '@' + const d = dirname(rp) + const nm = scoped ? dirname(d) : d + const dir = dirname(nm) + const base = scoped ? `${basename(d)}/${basename(rp)}` : basename(rp) + return base === name && basename(nm) === 'node_modules' ? dir : false + } + + // maybe accept both string value or array of strings + // seems to be what dom API does + querySelectorAll (query, opts) { + return querySelectorAll(this, query, opts) + } + + toJSON () { + return printableTree(this) + } + + [util.inspect.custom] () { + return this.toJSON() + } +} + +module.exports = Node diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/optional-set.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/optional-set.js new file mode 100644 index 0000000000000000000000000000000000000000..76d557c0e52c55b579e4a4c520b8b36096a0533e --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/optional-set.js @@ -0,0 +1,36 @@ +// when an optional dep fails to install, we need to remove the branch of the +// graph up to the first optionalDependencies, as well as any nodes that are +// only required by other nodes in the set. +// +// This function finds the set of nodes that will need to be removed in that +// case. +// +// Note that this is *only* going to work with trees where calcDepFlags +// has been called, because we rely on the node.optional flag. 
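+//
+// Hypothetical example (not part of the original comment): given
+//   root -> OPTIONAL(a), a -> (b), b -> (c)
+// with no other dependents, optionalSet(c) returns Set { c, b, a }:
+// the walk stops at the optional edge root -> a, and gatherDepSet
+// keeps everything that is only reachable through the failed branch.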
+ +const gatherDepSet = require('./gather-dep-set.js') +const optionalSet = node => { + if (!node.optional) { + return new Set() + } + + // start with the node, then walk up the dependency graph until we + // get to the boundaries that define the optional set. since the + // node is optional, we know that all paths INTO this area of the + // graph are optional, but there may be non-optional dependencies + // WITHIN the area. + const set = new Set([node]) + for (const node of set) { + for (const edge of node.edgesIn) { + if (!edge.optional) { + set.add(edge.from) + } + } + } + + // now that we've hit the boundary, gather the rest of the nodes in + // the optional section that don't have dependents outside the set. + return gatherDepSet(set, edge => !set.has(edge.to)) +} + +module.exports = optionalSet diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/override-resolves.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/override-resolves.js new file mode 100644 index 0000000000000000000000000000000000000000..c061cbce1867863ea0581973f5b0f76574ba8875 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/override-resolves.js @@ -0,0 +1,11 @@ +function overrideResolves (resolved, opts) { + const { omitLockfileRegistryResolved = false } = opts + + if (omitLockfileRegistryResolved) { + return undefined + } + + return resolved +} + +module.exports = { overrideResolves } diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/override-set.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/override-set.js new file mode 100644 index 0000000000000000000000000000000000000000..3f05609bfacc1ff97d1c4f325650286cc185673f --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/override-set.js @@ -0,0 +1,209 @@ +const npa = require('npm-package-arg') +const semver = require('semver') +const { log } = require('proc-log') + +class OverrideSet { + constructor ({ overrides, key, parent }) { + this.parent = parent + this.children = new Map() + + if (typeof overrides === 'string') { + overrides = { '.': overrides } + } + + // change a literal empty string to * so we can use truthiness checks on + // the value property later + if (overrides['.'] === '') { + overrides['.'] = '*' + } + + if (parent) { + const spec = npa(key) + if (!spec.name) { + throw new Error(`Override without name: ${key}`) + } + + this.name = spec.name + spec.name = '' + this.key = key + this.keySpec = spec.toString() + this.value = overrides['.'] || this.keySpec + } + + for (const [key, childOverrides] of Object.entries(overrides)) { + if (key === '.') { + continue + } + + const child = new OverrideSet({ + parent: this, + key, + overrides: childOverrides, + }) + + this.children.set(child.key, child) + } + } + + childrenAreEqual (other) { + if (this.children.size !== other.children.size) { + return false + } + for (const [key] of this.children) { + if (!other.children.has(key)) { + return false + } + if (this.children.get(key).value !== other.children.get(key).value) { + return false + } + if (!this.children.get(key).childrenAreEqual(other.children.get(key))) { + return false + } + } + return true + } + + isEqual (other) { + if (this === other) { + return true + } + if (!other) { + return false + } + if (this.key !== other.key || this.value !== 
other.value) { + return false + } + if (!this.childrenAreEqual(other)) { + return false + } + if (!this.parent) { + return !other.parent + } + return this.parent.isEqual(other.parent) + } + + getEdgeRule (edge) { + for (const rule of this.ruleset.values()) { + if (rule.name !== edge.name) { + continue + } + + // if keySpec is * we found our override + if (rule.keySpec === '*') { + return rule + } + + // We need to use the rawSpec here, because the spec has the overrides applied to it already. + // rawSpec can be undefined, so we need to use the fallback value of spec if it is. + let spec = npa(`${edge.name}@${edge.rawSpec || edge.spec}`) + if (spec.type === 'alias') { + spec = spec.subSpec + } + + if (spec.type === 'git') { + if (spec.gitRange && semver.intersects(spec.gitRange, rule.keySpec)) { + return rule + } + + continue + } + + if (spec.type === 'range' || spec.type === 'version') { + if (semver.intersects(spec.fetchSpec, rule.keySpec)) { + return rule + } + + continue + } + + // if we got this far, the spec type is one of tag, directory or file + // which means we have no real way to make version comparisons, so we + // just accept the override + return rule + } + + return this + } + + getNodeRule (node) { + for (const rule of this.ruleset.values()) { + if (rule.name !== node.name) { + continue + } + + if (semver.satisfies(node.version, rule.keySpec) || + semver.satisfies(node.version, rule.value)) { + return rule + } + } + + return this + } + + getMatchingRule (node) { + for (const rule of this.ruleset.values()) { + if (rule.name !== node.name) { + continue + } + + if (semver.satisfies(node.version, rule.keySpec) || + semver.satisfies(node.version, rule.value)) { + return rule + } + } + + return null + } + + * ancestry () { + for (let ancestor = this; ancestor; ancestor = ancestor.parent) { + yield ancestor + } + } + + get isRoot () { + return !this.parent + } + + get ruleset () { + const ruleset = new Map() + + for (const override of this.ancestry()) { + for (const kid of override.children.values()) { + if (!ruleset.has(kid.key)) { + ruleset.set(kid.key, kid) + } + } + + if (!override.isRoot && !ruleset.has(override.key)) { + ruleset.set(override.key, override) + } + } + + return ruleset + } + + static findSpecificOverrideSet (first, second) { + for (let overrideSet = second; overrideSet; overrideSet = overrideSet.parent) { + if (overrideSet.isEqual(first)) { + return second + } + } + for (let overrideSet = first; overrideSet; overrideSet = overrideSet.parent) { + if (overrideSet.isEqual(second)) { + return first + } + } + + // The override sets are incomparable. Neither one contains the other. + log.silly('Conflicting override sets', first, second) + } + + static doOverrideSetsConflict (first, second) { + // If override sets contain one another then we can try to use the more specific one. + // If neither one is more specific, then we consider them to be in conflict. 
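+ // (Illustration: if `first` sits anywhere in `second`'s parent chain,
+ // findSpecificOverrideSet returns `second` and this yields false; two
+ // unrelated sets produce undefined, so this yields true.)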
+ return (this.findSpecificOverrideSet(first, second) === undefined) + } +} + +module.exports = OverrideSet diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/packument-cache.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/packument-cache.js new file mode 100644 index 0000000000000000000000000000000000000000..d8e163ba23ba1e6f496f44b105bb7c1be652afc6 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/packument-cache.js @@ -0,0 +1,77 @@ +const { LRUCache } = require('lru-cache') +const { getHeapStatistics } = require('node:v8') +const { log } = require('proc-log') + +// This is an in-memory cache that Pacote uses for packuments. +// Packuments are usually cached on disk. This allows for rapid re-requests +// of the same packument to bypass disk reads. The tradeoff here is memory +// usage for disk reads. +class PackumentCache extends LRUCache { + static #heapLimit = Math.floor(getHeapStatistics().heap_size_limit) + + #sizeKey + #disposed = new Set() + + #log (...args) { + log.silly('packumentCache', ...args) + } + + constructor ({ + // How much of this.#heapLimit to take up + heapFactor = 0.25, + // How much of this.#maxSize we allow any one packument to take up + // Anything over this is not cached + maxEntryFactor = 0.5, + sizeKey = '_contentLength', + } = {}) { + const maxSize = Math.floor(PackumentCache.#heapLimit * heapFactor) + const maxEntrySize = Math.floor(maxSize * maxEntryFactor) + super({ + maxSize, + maxEntrySize, + sizeCalculation: (p) => { + // Don't cache if we don't know the size + // Some versions of pacote set this to `0`, newer versions set it to `null` + if (!p[sizeKey]) { + return maxEntrySize + 1 + } + if (p[sizeKey] < 10_000) { + return p[sizeKey] * 2 + } + if (p[sizeKey] < 1_000_000) { + return Math.floor(p[sizeKey] * 1.5) + } + // It is less beneficial to store a small amount of super large things + // at the cost of all other packuments. + return maxEntrySize + 1 + }, + dispose: (v, k) => { + this.#disposed.add(k) + this.#log(k, 'dispose') + }, + }) + this.#sizeKey = sizeKey + this.#log(`heap:${PackumentCache.#heapLimit} maxSize:${maxSize} maxEntrySize:${maxEntrySize}`) + } + + set (k, v, ...args) { + // we use disposed only for a logging signal if we are setting packuments that + // have already been evicted from the cache previously. logging here could help + // us tune this in the future. + const disposed = this.#disposed.has(k) + /* istanbul ignore next - this doesnt happen consistently so hard to test without resorting to unit tests */ + if (disposed) { + this.#disposed.delete(k) + } + this.#log(k, 'set', `size:${v[this.#sizeKey]} disposed:${disposed}`) + return super.set(k, v, ...args) + } + + has (k, ...args) { + const has = super.has(k, ...args) + this.#log(k, `cache-${has ? 
'hit' : 'miss'}`) + return has + } +} + +module.exports = PackumentCache diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/peer-entry-sets.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/peer-entry-sets.js new file mode 100644 index 0000000000000000000000000000000000000000..a2da8c8cb6fcd314fbadae5bb7caec2c8f48ae35 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/peer-entry-sets.js @@ -0,0 +1,77 @@ +// Given a node in a tree, return all of the peer dependency sets that +// it is a part of, with the entry (top or non-peer) edges into the sets +// identified. +// +// With this information, we can determine whether it is appropriate to +// replace the entire peer set with another (and remove the old one), +// push the set deeper into the tree, and so on. +// +// Returns a Map of { edge => Set(peerNodes) }, + +const peerEntrySets = node => { + // this is the union of all peer groups that the node is a part of + // later, we identify all of the entry edges, and create a set of + // 1 or more overlapping sets that this node is a part of. + const unionSet = new Set([node]) + for (const node of unionSet) { + for (const edge of node.edgesOut.values()) { + if (edge.valid && edge.peer && edge.to) { + unionSet.add(edge.to) + } + } + for (const edge of node.edgesIn) { + if (edge.valid && edge.peer) { + unionSet.add(edge.from) + } + } + } + const entrySets = new Map() + for (const peer of unionSet) { + for (const edge of peer.edgesIn) { + // if not valid, it doesn't matter anyway. either it's been previously + // peerConflicted, or it's the thing we're interested in replacing. + if (!edge.valid) { + continue + } + // this is the entry point into the peer set + if (!edge.peer || edge.from.isTop) { + // get the subset of peer brought in by this peer entry edge + const sub = new Set([peer]) + for (const peer of sub) { + for (const edge of peer.edgesOut.values()) { + if (edge.valid && edge.peer && edge.to) { + sub.add(edge.to) + } + } + } + // if this subset does not include the node we are focused on, + // then it is not relevant for our purposes. Example: + // + // a -> (b, c, d) + // b -> PEER(d) b -> d -> e -> f <-> g + // c -> PEER(f, h) c -> (f <-> g, h -> g) + // d -> PEER(e) d -> e -> f <-> g + // e -> PEER(f) + // f -> PEER(g) + // g -> PEER(f) + // h -> PEER(g) + // + // The unionSet(e) will include c, but we don't actually care about + // it. 
We only expanded to the edge of the peer nodes in order to + // find the entry edges that caused the inclusion of peer sets + // including (e), so we want: + // Map{ + // Edge(a->b) => Set(b, d, e, f, g) + // Edge(a->d) => Set(d, e, f, g) + // } + if (sub.has(node)) { + entrySets.set(edge, sub) + } + } + } + } + + return entrySets +} + +module.exports = peerEntrySets diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/place-dep.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/place-dep.js new file mode 100644 index 0000000000000000000000000000000000000000..c7b3e10d408d0b8ea4ca56227f8de7ae98b5abbd --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/place-dep.js @@ -0,0 +1,569 @@ +// Given a dep, a node that depends on it, and the edge representing that +// dependency, place the dep somewhere in the node's tree, and all of its +// peer dependencies. +// +// Handles all of the tree updating needed to place the dep, including +// removing replaced nodes, pruning now-extraneous or invalidated nodes, +// and saves a set of what was placed and what needs re-evaluation as +// a result. + +const localeCompare = require('@isaacs/string-locale-compare')('en') +const { log } = require('proc-log') +const { redact } = require('@npmcli/redact') +const deepestNestingTarget = require('./deepest-nesting-target.js') +const CanPlaceDep = require('./can-place-dep.js') +const { + KEEP, + CONFLICT, +} = CanPlaceDep +const debug = require('./debug.js') + +const Link = require('./link.js') +const gatherDepSet = require('./gather-dep-set.js') +const peerEntrySets = require('./peer-entry-sets.js') + +class PlaceDep { + constructor (options) { + this.auditReport = options.auditReport + this.dep = options.dep + this.edge = options.edge + this.explicitRequest = options.explicitRequest + this.force = options.force + this.installLinks = options.installLinks + this.installStrategy = options.installStrategy + this.legacyPeerDeps = options.legacyPeerDeps + this.parent = options.parent || null + this.preferDedupe = options.preferDedupe + this.strictPeerDeps = options.strictPeerDeps + this.updateNames = options.updateNames + + this.canPlace = null + this.canPlaceSelf = null + // XXX this only appears to be used by tests + this.checks = new Map() + this.children = [] + this.needEvaluation = new Set() + this.peerConflict = null + this.placed = null + this.target = null + + this.current = this.edge.to + this.name = this.edge.name + this.top = this.parent?.top || this + + // nothing to do if the edge is fine as it is + if (this.edge.to && + !this.edge.error && + !this.explicitRequest && + !this.updateNames.includes(this.edge.name) && + !this.auditReport?.isVulnerable(this.edge.to)) { + return + } + + // walk up the tree until we hit either a top/root node, or a place + // where the dep is not a peer dep. + const start = this.getStartNode() + + for (const target of start.ancestry()) { + // if the current location has a peerDep on it, then we can't place here + // this is pretty rare to hit, since we always prefer deduping peers, + // and the getStartNode will start us out above any peers from the + // thing that depends on it. but we could hit it with something like: + // + // a -> (b@1, c@1) + // +-- c@1 + // +-- b -> PEEROPTIONAL(v) (c@2) + // +-- c@2 -> (v) + // + // So we check if we can place v under c@2, that's fine. 
+ // Then we check under b, and can't, because of the optional peer dep. + // but we CAN place it under a, so the correct thing to do is keep + // walking up the tree. + const targetEdge = target.edgesOut.get(this.edge.name) + if (!target.isTop && targetEdge && targetEdge.peer) { + continue + } + + const cpd = new CanPlaceDep({ + dep: this.dep, + edge: this.edge, + // note: this sets the parent's canPlace as the parent of this + // canPlace, but it does NOT add this canPlace to the parent's + // children. This way, we can know that it's a peer dep, and + // get the top edge easily, while still maintaining the + // tree of checks that factored into the original decision. + parent: this.parent && this.parent.canPlace, + target, + preferDedupe: this.preferDedupe, + explicitRequest: this.explicitRequest, + }) + this.checks.set(target, cpd) + + // It's possible that a "conflict" is a conflict among the *peers* of + // a given node we're trying to place, but there actually is no current + // node. Eg, + // root -> (a, b) + // a -> PEER(c) + // b -> PEER(d) + // d -> PEER(c@2) + // We place (a), and get a peer of (c) along with it. + // then we try to place (b), and get CONFLICT in the check, because + // of the conflicting peer from (b)->(d)->(c@2). In that case, we + // should treat (b) and (d) as OK, and place them in the last place + // where they did not themselves conflict, and skip c@2 if conflict + // is ok by virtue of being forced or not ours and not strict. + if (cpd.canPlaceSelf !== CONFLICT) { + this.canPlaceSelf = cpd + } + + // we found a place this can go, along with all its peer friends. + // we break when we get the first conflict + if (cpd.canPlace !== CONFLICT) { + this.canPlace = cpd + } else { + break + } + + // if it's a load failure, just plop it in the first place attempted, + // since we're going to crash the build or prune it out anyway. + // but, this will frequently NOT be a successful canPlace, because + // it'll have no version or other information. + if (this.dep.errors.length) { + break + } + + // nest packages like npm v1 and v2 + // very disk-inefficient + if (this.installStrategy === 'nested') { + break + } + + // when installing globally, or just in global style, we never place + // deps above the first level. + if (this.installStrategy === 'shallow') { + const rp = target.resolveParent + if (rp && rp.isProjectRoot) { + break + } + } + } + + // if we can't find a target, that means that the last place checked, + // and all the places before it, had a conflict. + if (!this.canPlace) { + // if not forced, and it's our dep, or strictPeerDeps is set, then + // this is an ERESOLVE error. + if (!this.force && (this.isMine || this.strictPeerDeps)) { + return this.failPeerConflict() + } + + // ok! we're gonna allow the conflict, but we should still warn + // if we have a current, then we treat CONFLICT as a KEEP. + // otherwise, we just skip it. Only warn on the one that actually + // could not be placed somewhere. 
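+ // (Hypothetical example: root -> (a, b), a -> PEER(c@1), b -> PEER(c@2);
+ // placing c@2 can conflict at every level. When forced, we either warn
+ // and bail below (no canPlaceSelf) or fall back to the last target where
+ // the dep itself could be placed.)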
+ if (!this.canPlaceSelf) { + this.warnPeerConflict() + return + } + + this.canPlace = this.canPlaceSelf + } + + // now we have a target, a tree of CanPlaceDep results for the peer group, + // and we are ready to go + + /* istanbul ignore next */ + if (!this.canPlace) { + debug(() => { + throw new Error('canPlace not set, but trying to place in tree') + }) + return + } + + const { target } = this.canPlace + + log.silly( + 'placeDep', + target.location || 'ROOT', + `${this.dep.name}@${this.dep.version}`, + this.canPlace.description, + `for: ${this.edge.from.package._id || this.edge.from.location}`, + `want: ${redact(this.edge.spec || '*')}` + ) + + const placementType = this.canPlace.canPlace === CONFLICT + ? this.canPlace.canPlaceSelf + : this.canPlace.canPlace + + // if we're placing in the tree with --force, we can get here even though + // it's a conflict. Treat it as a KEEP, but warn and move on. + if (placementType === KEEP) { + // this was a peerConflicted peer dep + if (this.edge.peer && !this.edge.valid) { + this.warnPeerConflict() + } + + // if we get a KEEP in an update scenario, then we MAY have something + // already duplicating this unnecessarily! For example: + // ``` + // root (dep: y@1) + // +-- x (dep: y@1.1) + // | +-- y@1.1.0 (replacing with 1.1.2, got KEEP at the root) + // +-- y@1.1.2 (updated already from 1.0.0) + // ``` + // Now say we do `reify({update:['y']})`, and the latest version is + // 1.1.2, which we now have in the root. We'll try to place y@1.1.2 + // first in x, then in the root, ending with KEEP, because we already + // have it. In that case, we ought to REMOVE the nm/x/nm/y node, because + // it is an unnecessary duplicate. + this.pruneDedupable(target) + return + } + + // we were told to place it here in the target, so either it does not + // already exist in the tree, OR it's shadowed. + // handle otherwise unresolvable dependency nesting loops by + // creating a symbolic link + // a1 -> b1 -> a2 -> b2 -> a1 -> ... + // instead of nesting forever, when the loop occurs, create + // a symbolic link to the earlier instance + for (let p = target; p; p = p.resolveParent) { + if (p.matches(this.dep) && !p.isTop) { + this.placed = new Link({ parent: target, target: p }) + return + } + } + + // XXX if we are replacing SOME of a peer entry group, we will need to + // remove any that are not being replaced and will now be invalid, and + // re-evaluate them deeper into the tree. + + const virtualRoot = this.dep.parent + this.placed = new this.dep.constructor({ + name: this.dep.name, + pkg: this.dep.package, + resolved: this.dep.resolved, + integrity: this.dep.integrity, + installLinks: this.installLinks, + legacyPeerDeps: this.legacyPeerDeps, + error: this.dep.errors[0], + ...(this.dep.overrides ? { overrides: this.dep.overrides } : {}), + ...(this.dep.isLink ? { target: this.dep.target, realpath: this.dep.realpath } : {}), + }) + + this.oldDep = target.children.get(this.name) + if (this.oldDep) { + this.replaceOldDep() + } else { + this.placed.parent = target + } + + // if it's a peerConflicted peer dep, warn about it + if (this.edge.peer && !this.placed.satisfies(this.edge)) { + this.warnPeerConflict() + } + + // If the edge is not an error, then we're updating something, and + // MAY end up putting a better/identical node further up the tree in + // a way that causes an unnecessary duplication. If so, remove the + // now-unnecessary node. 
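+ // (e.g., hypothetically: after updating, y@2 at the root may satisfy the
+ // same edge as a nested nm/x/nm/y@2; the now-redundant old target is
+ // checked for dedupability here.)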
+ if (this.edge.valid && this.edge.to && this.edge.to !== this.placed) {
+ this.pruneDedupable(this.edge.to, false)
+ }
+
+ // in case we just made some duplicates that can be removed,
+ // prune anything deeper in the tree that can be replaced by this
+ for (const node of target.root.inventory.query('name', this.name)) {
+ if (node.isDescendantOf(target) && !node.isTop) {
+ this.pruneDedupable(node, false)
+ // only walk the direct children of the ones we kept
+ if (node.root === target.root) {
+ for (const kid of node.children.values()) {
+ this.pruneDedupable(kid, false)
+ }
+ }
+ }
+ }
+
+ // also place its unmet or invalid peer deps at this location
+ // loop through any peer deps from the thing we just placed, and place
+ // those ones as well. it's safe to do this with the virtual nodes,
+ // because we're copying rather than moving them out of the virtual root,
+ // otherwise they'd be gone and the peer set would change throughout
+ // this loop.
+ for (const peerEdge of this.placed.edgesOut.values()) {
+ if (peerEdge.valid || !peerEdge.peer || peerEdge.peerConflicted) {
+ continue
+ }
+
+ const peer = virtualRoot.children.get(peerEdge.name)
+
+ // Note: if the virtualRoot *doesn't* have the peer, then that means
+ // it's an optional peer dep. If it's not being properly met (ie,
+ // peerEdge.valid is false), then this is likely heading for an
+ // ERESOLVE error, unless it can walk further up the tree.
+ if (!peer) {
+ continue
+ }
+
+ // peerConflicted peerEdge, just accept what's there already
+ if (!peer.satisfies(peerEdge)) {
+ continue
+ }
+
+ this.children.push(new PlaceDep({
+ auditReport: this.auditReport,
+ explicitRequest: this.explicitRequest,
+ force: this.force,
+ installLinks: this.installLinks,
+ installStrategy: this.installStrategy,
+ legacyPeerDeps: this.legacyPeerDeps,
+ preferDedupe: this.preferDedupe,
+ strictPeerDeps: this.strictPeerDeps,
+ updateNames: this.updateNames,
+ parent: this,
+ dep: peer,
+ node: this.placed,
+ edge: peerEdge,
+ }))
+ }
+ }
+
+ replaceOldDep () {
+ const target = this.oldDep.parent
+
+ // XXX handle replacing an entire peer group?
+ // what about cases where we need to push some other peer groups deeper
+ // into the tree? all the tree updating should be done here, and track
+ // all the things that we add and remove, so that we can know what
+ // to re-evaluate.
+
+ // if we're replacing, we should also remove any nodes for edges that
+ // are now invalid, and where this (or its deps) is the only dependent,
+ // and also recurse on that pruning. Otherwise leaving that dep node
+ // around can result in spurious conflicts pushing nodes deeper into
+ // the tree than needed in the case of cycles that will be removed
+ // later anyway.
+ const oldDeps = []
+ for (const [name, edge] of this.oldDep.edgesOut.entries()) {
+ if (!this.placed.edgesOut.has(name) && edge.to) {
+ oldDeps.push(...gatherDepSet([edge.to], e => e.to !== edge.to))
+ }
+ }
+
+ // gather all peer edgesIn which are at this level, and will not be
+ // satisfied by the new dependency. Those are the peer sets that need
+ // to be either warned about (if they cannot go deeper), or removed and
+ // re-placed (if they can).
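+ // (Illustrative: if the old node anchored a peer group via an edge
+ // a -> c@1 that the replacement will not satisfy, the group is gathered
+ // below and either re-placed deeper in the tree or marked peerConflicted
+ // and warned about.)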
+ const prunePeerSets = [] + for (const edge of this.oldDep.edgesIn) { + if (this.placed.satisfies(edge) || + !edge.peer || + edge.from.parent !== target || + edge.peerConflicted) { + // not a peer dep, not invalid, or not from this level, so it's fine + // to just let it re-evaluate as a problemEdge later, or let it be + // satisfied by the new dep being placed. + continue + } + for (const entryEdge of peerEntrySets(edge.from).keys()) { + // either this one needs to be pruned and re-evaluated, or marked + // as peerConflicted and warned about. If the entryEdge comes in from + // the root or a workspace, then we have to leave it alone, and in that + // case, it will have already warned or crashed by getting to this point + const entryNode = entryEdge.to + const deepestTarget = deepestNestingTarget(entryNode) + if (deepestTarget !== target && + !(entryEdge.from.isProjectRoot || entryEdge.from.isWorkspace)) { + prunePeerSets.push(...gatherDepSet([entryNode], e => { + return e.to !== entryNode && !e.peerConflicted + })) + } else { + this.warnPeerConflict(edge, this.dep) + } + } + } + + this.placed.replace(this.oldDep) + this.pruneForReplacement(this.placed, oldDeps) + for (const dep of prunePeerSets) { + for (const edge of dep.edgesIn) { + this.needEvaluation.add(edge.from) + } + dep.root = null + } + } + + pruneForReplacement (node, oldDeps) { + // gather up all the now-invalid/extraneous edgesOut, as long as they are + // only depended upon by the old node/deps + const invalidDeps = new Set([...node.edgesOut.values()] + .filter(e => e.to && !e.valid).map(e => e.to)) + for (const dep of oldDeps) { + const set = gatherDepSet([dep], e => e.to !== dep && e.valid) + for (const dep of set) { + invalidDeps.add(dep) + } + } + + // ignore dependency edges from the node being replaced, but + // otherwise filter the set down to just the set with no + // dependencies from outside the set, except the node in question. + const deps = gatherDepSet(invalidDeps, edge => + edge.from !== node && edge.to !== node && edge.valid) + + // now just delete whatever's left, because it's junk + for (const dep of deps) { + dep.root = null + } + } + + // prune all the nodes in a branch of the tree that can be safely removed + // This is only the most basic duplication detection; it finds if there + // is another satisfying node further up the tree, and if so, dedupes. + // Even if installStrategy is nested, we do this amount of deduplication. + pruneDedupable (node, descend = true) { + if (node.canDedupe(this.preferDedupe, this.explicitRequest)) { + // gather up all deps that have no valid edges in from outside + // the dep set, except for this node we're deduping, so that we + // also prune deps that would be made extraneous. + const deps = gatherDepSet([node], e => e.to !== node && e.valid) + for (const node of deps) { + node.root = null + } + return + } + if (descend) { + // sort these so that they're deterministically ordered + // otherwise, resulting tree shape is dependent on the order + // in which they happened to be resolved. 
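+ // (Note, not in the original: sorting by location makes pruning
+ // deterministic, e.g. node_modules/a is always visited before
+ // node_modules/b no matter which one resolved first.)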
+ const nodeSort = (a, b) => localeCompare(a.location, b.location) + + const children = [...node.children.values()].sort(nodeSort) + for (const child of children) { + this.pruneDedupable(child) + } + const fsChildren = [...node.fsChildren].sort(nodeSort) + for (const topNode of fsChildren) { + const children = [...topNode.children.values()].sort(nodeSort) + for (const child of children) { + this.pruneDedupable(child) + } + } + } + } + + get isMine () { + const { edge } = this.top + const { from: node } = edge + + if (node.isWorkspace || node.isProjectRoot) { + return true + } + + if (!edge.peer) { + return false + } + + // re-entry case. check if any non-peer edges come from the project, + // or any entryEdges on peer groups are from the root. + let hasPeerEdges = false + for (const edge of node.edgesIn) { + if (edge.peer) { + hasPeerEdges = true + continue + } + if (edge.from.isWorkspace || edge.from.isProjectRoot) { + return true + } + } + if (hasPeerEdges) { + for (const edge of peerEntrySets(node).keys()) { + if (edge.from.isWorkspace || edge.from.isProjectRoot) { + return true + } + } + } + + return false + } + + warnPeerConflict (edge, dep) { + edge = edge || this.edge + dep = dep || this.dep + edge.peerConflicted = true + const expl = this.explainPeerConflict(edge, dep) + log.warn('ERESOLVE', 'overriding peer dependency', expl) + } + + failPeerConflict (edge, dep) { + edge = edge || this.top.edge + dep = dep || this.top.dep + const expl = this.explainPeerConflict(edge, dep) + throw Object.assign(new Error('could not resolve'), expl) + } + + explainPeerConflict (edge, dep) { + const { from: node } = edge + const curNode = node.resolve(edge.name) + + // XXX decorate more with this.canPlace and this.canPlaceSelf, + // this.checks, this.children, walk over conflicted peers, etc. 
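+ // (Shape sketch, for orientation only: the object assembled below looks
+ // like { code: 'ERESOLVE', edge, dep, current, peerConflict, force,
+ // isMine, strictPeerDeps }, where edge/dep/current/peerConflict hold
+ // explain() output.)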
+ const expl = { + code: 'ERESOLVE', + edge: edge.explain(), + dep: dep.explain(edge), + force: this.force, + isMine: this.isMine, + strictPeerDeps: this.strictPeerDeps, + } + + if (this.parent) { + // this is the conflicted peer + expl.current = curNode && curNode.explain(edge) + expl.peerConflict = this.current && this.current.explain(this.edge) + } else { + expl.current = curNode && curNode.explain() + if (this.canPlaceSelf && this.canPlaceSelf.canPlaceSelf !== CONFLICT) { + // failed while checking for a child dep + const cps = this.canPlaceSelf + for (const peer of cps.conflictChildren) { + if (peer.current) { + expl.peerConflict = { + current: peer.current.explain(), + peer: peer.dep.explain(peer.edge), + } + break + } + } + } else { + expl.peerConflict = { + current: this.current && this.current.explain(), + peer: this.dep.explain(this.edge), + } + } + } + + return expl + } + + getStartNode () { + // if we are a peer, then we MUST be at least as shallow as the peer + // dependent + const from = this.parent?.getStartNode() || this.edge.from + return deepestNestingTarget(from, this.name) + } + + // XXX this only appears to be used by tests + get allChildren () { + const set = new Set(this.children) + for (const child of set) { + for (const grandchild of child.children) { + set.add(grandchild) + } + } + return [...set] + } +} + +module.exports = PlaceDep diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/printable.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/printable.js new file mode 100644 index 0000000000000000000000000000000000000000..53c3f7a5756dfc2ca58db4e67a175cf60f399631 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/printable.js @@ -0,0 +1,198 @@ +// helper function to output a clearer visualization +// of the current node and its descendents +const localeCompare = require('@isaacs/string-locale-compare')('en') +const util = require('node:util') +const relpath = require('./relpath.js') + +class ArboristNode { + constructor (tree, path) { + this.name = tree.name + if (tree.packageName && tree.packageName !== this.name) { + this.packageName = tree.packageName + } + if (tree.version) { + this.version = tree.version + } + this.location = tree.location + this.path = tree.path + if (tree.realpath !== this.path) { + this.realpath = tree.realpath + } + if (tree.resolved !== null) { + this.resolved = tree.resolved + } + if (tree.extraneous) { + this.extraneous = true + } + if (tree.dev) { + this.dev = true + } + if (tree.optional) { + this.optional = true + } + if (tree.devOptional && !tree.dev && !tree.optional) { + this.devOptional = true + } + if (tree.peer) { + this.peer = true + } + if (tree.inBundle) { + this.bundled = true + } + if (tree.inDepBundle) { + this.bundler = tree.getBundler().location + } + if (tree.isProjectRoot) { + this.isProjectRoot = true + } + if (tree.isWorkspace) { + this.isWorkspace = true + } + const bd = tree.package && tree.package.bundleDependencies + if (bd && bd.length) { + this.bundleDependencies = bd + } + if (tree.inShrinkwrap) { + this.inShrinkwrap = true + } else if (tree.hasShrinkwrap) { + this.hasShrinkwrap = true + } + if (tree.error) { + this.error = treeError(tree.error) + } + if (tree.errors && tree.errors.length) { + this.errors = tree.errors.map(treeError) + } + + if (tree.overrides) { + this.overrides = new Map([...tree.overrides.ruleset.values()] + .map((override) => 
[override.key, override.value])) + } + + // edgesOut sorted by name + if (tree.edgesOut.size) { + this.edgesOut = new Map([...tree.edgesOut.entries()] + .sort(([a], [b]) => localeCompare(a, b)) + .map(([name, edge]) => [name, new EdgeOut(edge)])) + } + + // edgesIn sorted by location + if (tree.edgesIn.size) { + this.edgesIn = new Set([...tree.edgesIn] + .sort((a, b) => localeCompare(a.from.location, b.from.location)) + .map(edge => new EdgeIn(edge))) + } + + if (tree.workspaces && tree.workspaces.size) { + this.workspaces = new Map([...tree.workspaces.entries()] + .map(([name, path]) => [name, relpath(tree.root.realpath, path)])) + } + + // fsChildren sorted by path + if (tree.fsChildren.size) { + this.fsChildren = new Set([...tree.fsChildren] + .sort(({ path: a }, { path: b }) => localeCompare(a, b)) + .map(tree => printableTree(tree, path))) + } + + // children sorted by name + if (tree.children.size) { + this.children = new Map([...tree.children.entries()] + .sort(([a], [b]) => localeCompare(a, b)) + .map(([name, tree]) => [name, printableTree(tree, path)])) + } + } +} + +class ArboristVirtualNode extends ArboristNode { + constructor (tree, path) { + super(tree, path) + this.sourceReference = printableTree(tree.sourceReference, path) + } +} + +class ArboristLink extends ArboristNode { + constructor (tree, path) { + super(tree, path) + this.target = printableTree(tree.target, path) + } +} + +const treeError = ({ code, path }) => ({ + code, + ...(path ? { path } : {}), +}) + +// print out edges without dumping the full node all over again +// this base class will toJSON as a plain old object, but the +// util.inspect() output will be a bit cleaner +class Edge { + constructor (edge) { + this.type = edge.type + this.name = edge.name + this.spec = edge.rawSpec || '*' + if (edge.rawSpec !== edge.spec) { + this.override = edge.spec + } + if (edge.error) { + this.error = edge.error + } + if (edge.peerConflicted) { + this.peerConflicted = edge.peerConflicted + } + } +} + +// don't care about 'from' for edges out +class EdgeOut extends Edge { + constructor (edge) { + super(edge) + this.to = edge.to && edge.to.location + } + + [util.inspect.custom] () { + return `{ ${this.type} ${this.name}@${this.spec}${ + this.override ? ` overridden:${this.override}` : '' + }${ + this.to ? ' -> ' + this.to : '' + }${ + this.error ? ' ' + this.error : '' + }${ + this.peerConflicted ? ' peerConflicted' : '' + } }` + } +} + +// don't care about 'to' for edges in +class EdgeIn extends Edge { + constructor (edge) { + super(edge) + this.from = edge.from && edge.from.location + } + + [util.inspect.custom] () { + return `{ ${this.from || '""'} ${this.type} ${this.name}@${this.spec}${ + this.error ? ' ' + this.error : '' + }${ + this.peerConflicted ? ' peerConflicted' : '' + } }` + } +} + +const printableTree = (tree, path = []) => { + if (!tree) { + return tree + } + + const Cls = tree.isLink ? ArboristLink + : tree.sourceReference ? 
ArboristVirtualNode
+ : ArboristNode
+ if (path.includes(tree)) {
+ const obj = Object.create(Cls.prototype)
+ return Object.assign(obj, { location: tree.location })
+ }
+ path.push(tree)
+ return new Cls(tree, path)
+}
+
+module.exports = printableTree
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/query-selector-all.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/query-selector-all.js
new file mode 100644
index 0000000000000000000000000000000000000000..db0d8ea2edb11346f57a5204afc871cfb565c067
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/query-selector-all.js
@@ -0,0 +1,945 @@
+'use strict'
+
+const { resolve } = require('node:path')
+const { parser, arrayDelimiter } = require('@npmcli/query')
+const localeCompare = require('@isaacs/string-locale-compare')('en')
+const { log } = require('proc-log')
+const { minimatch } = require('minimatch')
+const npa = require('npm-package-arg')
+const pacote = require('pacote')
+const semver = require('semver')
+const npmFetch = require('npm-registry-fetch')
+
+// handle results for parsed query asts, results are stored in a map that has a
+// key that points to each ast selector node and stores the resulting array of
+// arborist nodes as its value, that is essential to how we handle multiple
+// query selectors, e.g.: `#a, #b, #c` <- 3 diff ast selector nodes
+class Results {
+ #currentAstSelector
+ #initialItems
+ #inventory
+ #outdatedCache = new Map()
+ #vulnCache
+ #pendingCombinator
+ #results = new Map()
+ #targetNode
+
+ constructor (opts) {
+ this.#currentAstSelector = opts.rootAstNode.nodes[0]
+ this.#inventory = opts.inventory
+ this.#initialItems = opts.initialItems
+ this.#vulnCache = opts.vulnCache
+ this.#targetNode = opts.targetNode
+
+ this.currentResults = this.#initialItems
+
+ // We get this when first called and need to pass it to pacote
+ this.flatOptions = opts.flatOptions || {}
+
+ // reset by rootAstNode walker
+ this.currentAstNode = opts.rootAstNode
+ }
+
+ get currentResults () {
+ return this.#results.get(this.#currentAstSelector)
+ }
+
+ set currentResults (value) {
+ this.#results.set(this.#currentAstSelector, value)
+ }
+
+ // retrieves the initial items from which to start the filtering / matching
+ // for most of the different types of recognized ast nodes, e.g.: class (aka
+ // depType), id, *, etc in different contexts we need to start with the
+ // current list of filtered results, for example a query for `.workspace`
+ // actually means the same as `*.workspace` so we want to start with the full
+ // inventory if that's the first ast node we're reading but if it appears in
+ // the middle of a query it should respect the previous filtered results,
+ // combinators are a special case in which we always want to have the
+ // complete inventory list in order to use the left-hand side ast node as a
+ // filter combined with the element on its right-hand side
+ get initialItems () {
+ const firstParsed =
+ (this.currentAstNode.parent.nodes[0] === this.currentAstNode) &&
+ (this.currentAstNode.parent.parent.type === 'root')
+
+ if (firstParsed) {
+ return this.#initialItems
+ }
+
+ if (this.currentAstNode.prev().type === 'combinator') {
+ return this.#inventory
+ }
+ return this.currentResults
+ }
+
+ // combinators need information about previously filtered items along
+ // with info of the items parsed / retrieved from the selector right
+ // past the combinator, for this reason combinators are stored and
+ // only run as the last part of each selector logic
+ processPendingCombinator (nextResults) {
+ if (this.#pendingCombinator) {
+ const res = this.#pendingCombinator(this.currentResults, nextResults)
+ this.#pendingCombinator = null
+ this.currentResults = res
+ } else {
+ this.currentResults = nextResults
+ }
+ }
+
+ // when collecting results to a root astNode, we traverse the list of child
+ // selector nodes and collect all of their resulting arborist nodes into a
+ // single/flat Set of items, this ensures we also deduplicate items
+ collect (rootAstNode) {
+ return new Set(rootAstNode.nodes.flatMap(n => this.#results.get(n)))
+ }
+
+ // selector types map to the '.type' property of the ast nodes via `${astNode.type}Type`
+ //
+ // attribute selector [name=value], etc
+ attributeType () {
+ const nextResults = this.initialItems.filter(node =>
+ attributeMatch(this.currentAstNode, node.package)
+ )
+ this.processPendingCombinator(nextResults)
+ }
+
+ // dependency type selector (i.e. .prod, .dev, etc)
+ // css calls this class, we interpret it as a dependency type
+ classType () {
+ const depTypeFn = depTypes[String(this.currentAstNode)]
+ if (!depTypeFn) {
+ throw Object.assign(
+ new Error(`\`${String(this.currentAstNode)}\` is not a supported dependency type.`),
+ { code: 'EQUERYNODEPTYPE' }
+ )
+ }
+ const nextResults = depTypeFn(this.initialItems)
+ this.processPendingCombinator(nextResults)
+ }
+
+ // combinators (i.e. '>', ' ', '~')
+ combinatorType () {
+ this.#pendingCombinator = combinators[String(this.currentAstNode)]
+ }
+
+ // name selectors (i.e. #foo)
+ // css calls this id, we interpret it as name
+ idType () {
+ const name = this.currentAstNode.value
+ const nextResults = this.initialItems.filter(node =>
+ (name === node.name) || (name === node.package.name)
+ )
+ this.processPendingCombinator(nextResults)
+ }
+
+ // pseudo selectors (prefixed with :)
+ async pseudoType () {
+ const pseudoFn = `${this.currentAstNode.value.slice(1)}Pseudo`
+ if (!this[pseudoFn]) {
+ throw Object.assign(
+ new Error(`\`${this.currentAstNode.value
+ }\` is not a supported pseudo selector.`),
+ { code: 'EQUERYNOPSEUDO' }
+ )
+ }
+ const nextResults = await this[pseudoFn]()
+ this.processPendingCombinator(nextResults)
+ }
+
+ selectorType () {
+ this.#currentAstSelector = this.currentAstNode
+ // starts a new array in which resulting items
+ // can be stored for each given ast selector
+ if (!this.currentResults) {
+ this.currentResults = []
+ }
+ }
+
+ universalType () {
+ this.processPendingCombinator(this.initialItems)
+ }
+
+ // pseudo selectors map to the 'value' property of the pseudo selectors in the ast nodes
+ // via `${value.slice(1)}Pseudo`
+ attrPseudo () {
+ const { lookupProperties, attributeMatcher } = this.currentAstNode
+
+ return this.initialItems.filter(node => {
+ let objs = [node.package]
+ for (const prop of lookupProperties) {
+ // if an isArray symbol is found that means we'll need to iterate
+ // over the previously found array to basically make sure we traverse
+ // all its indexes testing for possible objects that may eventually
+ // hold more keys specified in a selector
+ if (prop === arrayDelimiter) {
+ objs = objs.flat()
+ continue
+ }
+
+ // otherwise just maps all currently found objs
+ // to the next prop from the lookup properties list,
+ // filters out any empty key lookup
+ objs = objs.flatMap(obj => obj[prop] || [])
+
+ // in case there's no property found in the lookup
+ // just filters
that item out + const noAttr = objs.every(obj => !obj) + if (noAttr) { + return false + } + } + + // if any of the potential object matches + // that item should be in the final result + return objs.some(obj => attributeMatch(attributeMatcher, obj)) + }) + } + + emptyPseudo () { + return this.initialItems.filter(node => node.edgesOut.size === 0) + } + + extraneousPseudo () { + return this.initialItems.filter(node => node.extraneous) + } + + async hasPseudo () { + const found = [] + for (const item of this.initialItems) { + // This is the one time initialItems differs from inventory + const res = await retrieveNodesFromParsedAst({ + flatOptions: this.flatOptions, + initialItems: [item], + inventory: this.#inventory, + rootAstNode: this.currentAstNode.nestedNode, + targetNode: item, + vulnCache: this.#vulnCache, + }) + if (res.size > 0) { + found.push(item) + } + } + return found + } + + invalidPseudo () { + const found = [] + for (const node of this.initialItems) { + for (const edge of node.edgesIn) { + if (edge.invalid) { + found.push(node) + break + } + } + } + return found + } + + async isPseudo () { + const res = await retrieveNodesFromParsedAst({ + flatOptions: this.flatOptions, + initialItems: this.initialItems, + inventory: this.#inventory, + rootAstNode: this.currentAstNode.nestedNode, + targetNode: this.currentAstNode, + vulnCache: this.#vulnCache, + }) + return [...res] + } + + linkPseudo () { + return this.initialItems.filter(node => node.isLink || (node.isTop && !node.isRoot)) + } + + missingPseudo () { + return this.#inventory.reduce((res, node) => { + for (const edge of node.edgesOut.values()) { + if (edge.missing) { + const pkg = { name: edge.name, version: edge.spec } + const item = new this.#targetNode.constructor({ pkg }) + item.queryContext = { + missing: true, + } + item.edgesIn = new Set([edge]) + res.push(item) + } + } + return res + }, []) + } + + async notPseudo () { + const res = await retrieveNodesFromParsedAst({ + flatOptions: this.flatOptions, + initialItems: this.initialItems, + inventory: this.#inventory, + rootAstNode: this.currentAstNode.nestedNode, + targetNode: this.currentAstNode, + vulnCache: this.#vulnCache, + }) + const internalSelector = new Set(res) + return this.initialItems.filter(node => + !internalSelector.has(node)) + } + + overriddenPseudo () { + return this.initialItems.filter(node => node.overridden) + } + + pathPseudo () { + return this.initialItems.filter(node => { + if (!this.currentAstNode.pathValue) { + return true + } + return minimatch( + node.realpath.replace(/\\+/g, '/'), + resolve(node.root.realpath, this.currentAstNode.pathValue).replace(/\\+/g, '/') + ) + }) + } + + privatePseudo () { + return this.initialItems.filter(node => node.package.private) + } + + rootPseudo () { + return this.initialItems.filter(node => node === this.#targetNode.root) + } + + scopePseudo () { + return this.initialItems.filter(node => node === this.#targetNode) + } + + semverPseudo () { + const { + attributeMatcher, + lookupProperties, + semverFunc = 'infer', + semverValue, + } = this.currentAstNode + const { qualifiedAttribute } = attributeMatcher + + if (!semverValue) { + // DEPRECATED: remove this warning and throw an error as part of @npmcli/arborist@6 + log.warn('query', 'usage of :semver() with no parameters is deprecated') + return this.initialItems + } + + if (!semver.valid(semverValue) && !semver.validRange(semverValue)) { + throw Object.assign( + new Error(`\`${semverValue}\` is not a valid semver version or range`), + { code: 
'EQUERYINVALIDSEMVER' }) + } + + const valueIsVersion = !!semver.valid(semverValue) + + const nodeMatches = (node, obj) => { + // if we already have an operator, the user provided some test as part of the selector + // we evaluate that first because if it fails we don't want this node anyway + if (attributeMatcher.operator) { + if (!attributeMatch(attributeMatcher, obj)) { + // if the initial operator doesn't match, we're done + return false + } + } + + const attrValue = obj[qualifiedAttribute] + // both valid and validRange return null for undefined, so this will skip both nodes that + // do not have the attribute defined as well as those where the attribute value is invalid + // and those where the value from the package.json is not a string + if ((!semver.valid(attrValue) && !semver.validRange(attrValue)) || + typeof attrValue !== 'string') { + return false + } + + const attrIsVersion = !!semver.valid(attrValue) + + let actualFunc = semverFunc + + // if we're asked to infer, we examine outputs to make a best guess + if (actualFunc === 'infer') { + if (valueIsVersion && attrIsVersion) { + // two versions -> semver.eq + actualFunc = 'eq' + } else if (!valueIsVersion && !attrIsVersion) { + // two ranges -> semver.intersects + actualFunc = 'intersects' + } else { + // anything else -> semver.satisfies + actualFunc = 'satisfies' + } + } + + if (['eq', 'neq', 'gt', 'gte', 'lt', 'lte'].includes(actualFunc)) { + // both sides must be versions, but one is not + if (!valueIsVersion || !attrIsVersion) { + return false + } + + return semver[actualFunc](attrValue, semverValue) + } else if (['gtr', 'ltr', 'satisfies'].includes(actualFunc)) { + // at least one side must be a version, but neither is + if (!valueIsVersion && !attrIsVersion) { + return false + } + + return valueIsVersion + ? 
semver[actualFunc](semverValue, attrValue) + : semver[actualFunc](attrValue, semverValue) + } else if (['intersects', 'subset'].includes(actualFunc)) { + // these accept two ranges and since a version is also a range, anything goes + return semver[actualFunc](attrValue, semverValue) + } else { + // user provided a function we don't know about, throw an error + throw Object.assign(new Error(`\`semver.${actualFunc}\` is not a supported operator.`), + { code: 'EQUERYINVALIDOPERATOR' }) + } + } + + return this.initialItems.filter((node) => { + // no lookupProperties just means its a top level property, see if it matches + if (!lookupProperties.length) { + return nodeMatches(node, node.package) + } + + // this code is mostly duplicated from attrPseudo to traverse into the package until we get + // to our deepest requested object + let objs = [node.package] + for (const prop of lookupProperties) { + if (prop === arrayDelimiter) { + objs = objs.flat() + continue + } + + objs = objs.flatMap(obj => obj[prop] || []) + const noAttr = objs.every(obj => !obj) + if (noAttr) { + return false + } + + return objs.some(obj => nodeMatches(node, obj)) + } + }) + } + + typePseudo () { + if (!this.currentAstNode.typeValue) { + return this.initialItems + } + return this.initialItems + .flatMap(node => { + const found = [] + for (const edge of node.edgesIn) { + if (npa(`${edge.name}@${edge.spec}`).type === this.currentAstNode.typeValue) { + found.push(edge.to) + } + } + return found + }) + } + + dedupedPseudo () { + return this.initialItems.filter(node => node.target.edgesIn.size > 1) + } + + async vulnPseudo () { + if (!this.initialItems.length) { + return this.initialItems + } + if (!this.#vulnCache) { + const packages = {} + // We have to map the items twice, once to get the request, and a second time to filter out the results of that request + this.initialItems.map((node) => { + if (node.isProjectRoot || node.package.private) { + return + } + if (!packages[node.name]) { + packages[node.name] = [] + } + if (!packages[node.name].includes(node.version)) { + packages[node.name].push(node.version) + } + }) + const res = await npmFetch('/-/npm/v1/security/advisories/bulk', { + ...this.flatOptions, + registry: this.flatOptions.auditRegistry || this.flatOptions.registry, + method: 'POST', + gzip: true, + body: packages, + }) + this.#vulnCache = await res.json() + } + const advisories = this.#vulnCache + const { vulns } = this.currentAstNode + return this.initialItems.filter(item => { + const vulnerable = advisories[item.name]?.filter(advisory => { + // This could be for another version of this package elsewhere in the tree + if (!semver.intersects(advisory.vulnerable_versions, item.version)) { + return false + } + if (!vulns) { + return true + } + // vulns are OR with each other, if any one matches we're done + for (const vuln of vulns) { + if (vuln.severity && !vuln.severity.includes('*')) { + if (!vuln.severity.includes(advisory.severity)) { + continue + } + } + + if (vuln?.cwe) { + // * is special, it means "has a cwe" + if (vuln.cwe.includes('*')) { + if (!advisory.cwe.length) { + continue + } + } else if (!vuln.cwe.every(cwe => advisory.cwe.includes(`CWE-${cwe}`))) { + continue + } + } + return true + } + }) + if (vulnerable?.length) { + item.queryContext = { + advisories: vulnerable, + } + return true + } + return false + }) + } + + async outdatedPseudo () { + const { outdatedKind = 'any' } = this.currentAstNode + + // filter the initialItems + // NOTE: this uses a Promise.all around a map without in-line 
concurrency handling + // since the only async action taken is retrieving the packument, which is limited + // based on the max-sockets config in make-fetch-happen + const initialResults = await Promise.all(this.initialItems.map(async (node) => { + // the root can't be outdated, skip it + if (node.isProjectRoot) { + return false + } + + // private packages can't be published, skip them + if (node.package.private) { + return false + } + + // we cache the promise representing the full versions list, this helps reduce the + // number of requests we send by keeping population of the cache in a single tick + // making it less likely that multiple requests for the same package will be inflight + if (!this.#outdatedCache.has(node.name)) { + this.#outdatedCache.set(node.name, getPackageVersions(node.name, this.flatOptions)) + } + const availableVersions = await this.#outdatedCache.get(node.name) + + // we attach _all_ versions to the queryContext to allow consumers to do their own + // filtering and comparisons + node.queryContext.versions = availableVersions + + // next we further reduce the set to versions that are greater than the current one + const greaterVersions = availableVersions.filter((available) => { + return semver.gt(available, node.version) + }) + + // no newer versions than the current one, drop this node from the result set + if (!greaterVersions.length) { + return false + } + + // if we got here, we know that newer versions exist, if the kind is 'any' we're done + if (outdatedKind === 'any') { + return node + } + + // look for newer versions that differ from current by a specific part of the semver version + if (['major', 'minor', 'patch'].includes(outdatedKind)) { + // filter the versions greater than our current one based on semver.diff + const filteredVersions = greaterVersions.filter((version) => { + return semver.diff(node.version, version) === outdatedKind + }) + + // no available versions are of the correct diff type + if (!filteredVersions.length) { + return false + } + + return node + } + + // look for newer versions that satisfy at least one edgeIn to this node + if (outdatedKind === 'in-range') { + const inRangeContext = [] + for (const edge of node.edgesIn) { + const inRangeVersions = greaterVersions.filter((version) => { + return semver.satisfies(version, edge.spec) + }) + + // this edge has no in-range candidates, just move on + if (!inRangeVersions.length) { + continue + } + + inRangeContext.push({ + from: edge.from.location, + versions: inRangeVersions, + }) + } + + // if we didn't find at least one match, drop this node + if (!inRangeContext.length) { + return false + } + + // now add to the context each version that is in-range for each edgeIn + node.queryContext.outdated = { + ...node.queryContext.outdated, + inRange: inRangeContext, + } + + return node + } + + // look for newer versions that _do not_ satisfy at least one edgeIn + if (outdatedKind === 'out-of-range') { + const outOfRangeContext = [] + for (const edge of node.edgesIn) { + const outOfRangeVersions = greaterVersions.filter((version) => { + return !semver.satisfies(version, edge.spec) + }) + + // this edge has no out-of-range candidates, skip it + if (!outOfRangeVersions.length) { + continue + } + + outOfRangeContext.push({ + from: edge.from.location, + versions: outOfRangeVersions, + }) + } + + // if we didn't add at least one thing to the context, this node is not a match + if (!outOfRangeContext.length) { + return false + } + + // attach the out-of-range context to the node + 
node.queryContext.outdated = { + ...node.queryContext.outdated, + outOfRange: outOfRangeContext, + } + + return node + } + + // any other outdatedKind is unknown and will never match + return false + })) + + // return an array with the holes for non-matching nodes removed + return initialResults.filter(Boolean) + } +} + +// operators for attribute selectors +const attributeOperators = { + // attribute value is equivalent + '=' ({ attr, value }) { + return attr === value + }, + // attribute value contains word + '~=' ({ attr, value }) { + return (attr.match(/\w+/g) || []).includes(value) + }, + // attribute value contains string + '*=' ({ attr, value }) { + return attr.includes(value) + }, + // attribute value is equal or starts with + '|=' ({ attr, value }) { + return attr.startsWith(`${value}-`) + }, + // attribute value starts with + '^=' ({ attr, value }) { + return attr.startsWith(value) + }, + // attribute value ends with + '$=' ({ attr, value }) { + return attr.endsWith(value) + }, +} + +const attributeOperator = ({ attr, value, insensitive, operator }) => { + if (typeof attr === 'number') { + attr = String(attr) + } + if (typeof attr !== 'string') { + // It's an object or an array, bail + return false + } + if (insensitive) { + attr = attr.toLowerCase() + } + + return attributeOperators[operator]({ + attr, + insensitive, + value, + }) +} + +const attributeMatch = (matcher, obj) => { + const insensitive = !!matcher.insensitive + const operator = matcher.operator || '' + const attribute = matcher.qualifiedAttribute + let value = matcher.value || '' + // return early if checking existence + if (operator === '') { + return Boolean(obj[attribute]) + } + if (insensitive) { + value = value.toLowerCase() + } + // in case the current object is an array + // then we try to match every item in the array + if (Array.isArray(obj[attribute])) { + return obj[attribute].find((i, index) => { + const attr = obj[attribute][index] || '' + return attributeOperator({ attr, value, insensitive, operator }) + }) + } else { + const attr = obj[attribute] || '' + return attributeOperator({ attr, value, insensitive, operator }) + } +} + +const edgeIsType = (node, type, seen = new Set()) => { + for (const edgeIn of node.edgesIn) { + // TODO Need a test with an infinite loop + if (seen.has(edgeIn)) { + continue + } + seen.add(edgeIn) + if (edgeIn.type === type || edgeIn.from[type] || edgeIsType(edgeIn.from, type, seen)) { + return true + } + } + return false +} + +const filterByType = (nodes, type) => { + const found = [] + for (const node of nodes) { + if (node[type] || edgeIsType(node, type)) { + found.push(node) + } + } + return found +} + +const depTypes = { + // dependency + '.prod' (prevResults) { + const found = [] + for (const node of prevResults) { + if (!node.dev) { + found.push(node) + } + } + return found + }, + // devDependency + '.dev' (prevResults) { + return filterByType(prevResults, 'dev') + }, + // optionalDependency + '.optional' (prevResults) { + return filterByType(prevResults, 'optional') + }, + // peerDependency + '.peer' (prevResults) { + return filterByType(prevResults, 'peer') + }, + // workspace + '.workspace' (prevResults) { + return prevResults.filter(node => node.isWorkspace) + }, + // bundledDependency + '.bundled' (prevResults) { + return prevResults.filter(node => node.inBundle) + }, +} + +// checks if a given node has a direct parent in any of the nodes provided in +// the compare nodes array +const hasParent = (node, compareNodes) => { + // All it takes is one so we loop and 
return on the first hit + for (let compareNode of compareNodes) { + if (compareNode.isLink) { + compareNode = compareNode.target + } + + // follows logical parent for link ancestors + if (node.isTop && (node.resolveParent === compareNode)) { + return true + } + // follows edges-in to check if they match a possible parent + for (const edge of node.edgesIn) { + if (edge && edge.from === compareNode) { + return true + } + } + } + return false +} + +// checks if a given node is a descendant of any of the nodes provided in the +// compareNodes array +const hasAscendant = (node, compareNodes, seen = new Set()) => { + // TODO (future) loop over ancestry property + if (hasParent(node, compareNodes)) { + return true + } + + if (node.isTop && node.resolveParent) { + /* istanbul ignore if - investigate if linksIn check obviates need for this */ + if (hasAscendant(node.resolveParent, compareNodes)) { + return true + } + } + for (const edge of node.edgesIn) { + // TODO Need a test with an infinite loop + if (seen.has(edge)) { + continue + } + seen.add(edge) + if (edge && edge.from && hasAscendant(edge.from, compareNodes, seen)) { + return true + } + } + for (const linkNode of node.linksIn) { + if (hasAscendant(linkNode, compareNodes, seen)) { + return true + } + } + return false +} + +const combinators = { + // direct descendant + '>' (prevResults, nextResults) { + return nextResults.filter(node => hasParent(node, prevResults)) + }, + // any descendant + ' ' (prevResults, nextResults) { + return nextResults.filter(node => hasAscendant(node, prevResults)) + }, + // sibling + '~' (prevResults, nextResults) { + // Return any node in nextResults that is a sibling of (aka shares a + // parent with) a node in prevResults + const parentNodes = new Set() // Parents of everything in prevResults + for (const node of prevResults) { + for (const edge of node.edgesIn) { + // edge.from always exists cause it's from another node's edgesIn + parentNodes.add(edge.from) + } + } + return nextResults.filter(node => + !prevResults.includes(node) && hasParent(node, [...parentNodes]) + ) + }, +} + +// get a list of available versions of a package filtered to respect --before +// NOTE: this runs over each node and should not throw +const getPackageVersions = async (name, opts) => { + let packument + try { + packument = await pacote.packument(name, { + ...opts, + fullMetadata: false, // we only need the corgi + }) + } catch (err) { + // if the fetch fails, log a warning and pretend there are no versions + log.warn('query', `could not retrieve packument for ${name}: ${err.message}`) + return [] + } + + // start with a sorted list of all versions (lowest first) + let candidates = Object.keys(packument.versions).sort(semver.compare) + + // if the packument has a time property, and the user passed a before flag, then + // we filter this list down to only those versions that existed before the specified date + if (packument.time && opts.before) { + candidates = candidates.filter((version) => { + // this version isn't found in the times at all, drop it + if (!packument.time[version]) { + return false + } + + return Date.parse(packument.time[version]) <= opts.before + }) + } + + return candidates +} + +const retrieveNodesFromParsedAst = async (opts) => { + // when we first call this it's the parsed query. 
all other times it's
+  // results.currentNode.nestedNode
+  const rootAstNode = opts.rootAstNode
+
+  if (!rootAstNode.nodes) {
+    return new Set()
+  }
+
+  const results = new Results(opts)
+
+  const astNodeQueue = new Set()
+  // walk is sync, so we have to build up our async functions and then await them later
+  rootAstNode.walk((nextAstNode) => {
+    astNodeQueue.add(nextAstNode)
+  })
+
+  for (const nextAstNode of astNodeQueue) {
+    // This is the only place we reset currentAstNode
+    results.currentAstNode = nextAstNode
+    const updateFn = `${results.currentAstNode.type}Type`
+    if (typeof results[updateFn] !== 'function') {
+      throw Object.assign(
+        new Error(`\`${results.currentAstNode.type}\` is not a supported selector.`),
+        { code: 'EQUERYNOSELECTOR' }
+      )
+    }
+    await results[updateFn]()
+  }
+
+  return results.collect(rootAstNode)
+}
+
+const querySelectorAll = async (targetNode, query, flatOptions) => {
+  // This never changes; we just pass it around. But we can't scope it to
+  // this whole file if we ever want to support concurrent calls to this
+  // function.
+  const inventory = [...targetNode.root.inventory.values()]
+  // res is a Set of items returned for each parsed css ast selector
+  const res = await retrieveNodesFromParsedAst({
+    initialItems: inventory,
+    inventory,
+    flatOptions,
+    rootAstNode: parser(query),
+    targetNode,
+  })
+
+  // returns nodes sorted by location, for a deterministic order
+  return [...res].sort((a, b) => localeCompare(a.location, b.location))
+}
+
+module.exports = querySelectorAll
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/realpath.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/realpath.js
new file mode 100644
index 0000000000000000000000000000000000000000..6e5ad9b77ba5a5c5c9af627618e7a38605d62e5d
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/realpath.js
@@ -0,0 +1,95 @@
+// look up the realpath, but cache stats to minimize overhead
+// If the parent folder is in the realpath cache, then we just
+// lstat the child, since there's no need to do a full realpath
+// This is not a general-purpose realpath replacement: it is much simpler
+// than Node's built-in fs.realpath, because we only care about symbolic
+// links, so we can handle many fewer edge cases.
+
+const { lstat, readlink } = require('node:fs/promises')
+const { resolve, basename, dirname } = require('node:path')
+
+const realpathCached = (path, rpcache, stcache, depth) => {
+  // just a safety against extremely deep eloops
+  /* istanbul ignore next */
+  if (depth > 2000) {
+    throw eloop(path)
+  }
+
+  path = resolve(path)
+  if (rpcache.has(path)) {
+    return Promise.resolve(rpcache.get(path))
+  }
+
+  const dir = dirname(path)
+  const base = basename(path)
+
+  if (base && rpcache.has(dir)) {
+    return realpathChild(dir, base, rpcache, stcache, depth)
+  }
+
+  // if it's the root, then we know it's real
+  if (!base) {
+    rpcache.set(dir, dir)
+    return Promise.resolve(dir)
+  }
+
+  // the parent, what is that?
+  // find out, and then come back.
+ return realpathCached(dir, rpcache, stcache, depth + 1).then(() => + realpathCached(path, rpcache, stcache, depth + 1)) +} + +const lstatCached = (path, stcache) => { + if (stcache.has(path)) { + return Promise.resolve(stcache.get(path)) + } + + const p = lstat(path).then(st => { + stcache.set(path, st) + return st + }) + stcache.set(path, p) + return p +} + +// This is a slight fib, as it doesn't actually occur during a stat syscall. +// But file systems are giant piles of lies, so whatever. +const eloop = path => + Object.assign(new Error( + `ELOOP: too many symbolic links encountered, stat '${path}'`), { + errno: -62, + syscall: 'stat', + code: 'ELOOP', + path: path, + }) + +const realpathChild = (dir, base, rpcache, stcache, depth) => { + const realdir = rpcache.get(dir) + // that unpossible + /* istanbul ignore next */ + if (typeof realdir === 'undefined') { + throw new Error('in realpathChild without parent being in realpath cache') + } + + const realish = resolve(realdir, base) + return lstatCached(realish, stcache).then(st => { + if (!st.isSymbolicLink()) { + rpcache.set(resolve(dir, base), realish) + return realish + } + + return readlink(realish).then(target => { + const resolved = resolve(realdir, target) + if (realish === resolved) { + throw eloop(realish) + } + + return realpathCached(resolved, rpcache, stcache, depth + 1) + }).then(real => { + rpcache.set(resolve(dir, base), real) + return real + }) + }) +} + +module.exports = realpathCached diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/relpath.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/relpath.js new file mode 100644 index 0000000000000000000000000000000000000000..a4187b5f6095fb3ef9d0ff80278d7d26da2f7d9d --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/relpath.js @@ -0,0 +1,3 @@ +const { relative } = require('node:path') +const relpath = (from, to) => relative(from, to).replace(/\\/g, '/') +module.exports = relpath diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/reset-dep-flags.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/reset-dep-flags.js new file mode 100644 index 0000000000000000000000000000000000000000..e259e901a56254a87ef6c0574f9ee4878de93986 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/reset-dep-flags.js @@ -0,0 +1,15 @@ +// Sometimes we need to actually do a walk from the root, because you can +// have a cycle of deps that all depend on each other, but no path from root. +// Also, since the ideal tree is loaded from the shrinkwrap, it had extraneous +// flags set false that might now be actually extraneous, and dev/optional +// flags that are also now incorrect. This method sets all flags to true, so +// we can find the set that is actually extraneous. 
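+// A minimal usage sketch (illustrative only; `tree` is a loaded arborist
+// tree, and calc-dep-flags.js is the sibling module that re-derives the
+// real flag values after this blanket reset):
+//
+//   const resetDepFlags = require('./reset-dep-flags.js')
+//   const calcDepFlags = require('./calc-dep-flags.js')
+//   resetDepFlags(tree) // every node now flagged extraneous/dev/optional/peer
+//   calcDepFlags(tree)  // walks from the root, clearing flags on reachable deps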
+module.exports = tree => { + for (const node of tree.inventory.values()) { + node.extraneous = true + node.dev = true + node.devOptional = true + node.peer = true + node.optional = true + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/retire-path.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/retire-path.js new file mode 100644 index 0000000000000000000000000000000000000000..5bff79a15a1654820d9dbf67b74984c4a11eb5cf --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/retire-path.js @@ -0,0 +1,19 @@ +const crypto = require('node:crypto') +const { dirname, basename, resolve } = require('node:path') + +// use sha1 because it's faster, and collisions extremely unlikely anyway +const pathSafeHash = s => + crypto.createHash('sha1') + .update(s) + .digest('base64') + .replace(/[^a-zA-Z0-9]+/g, '') + .slice(0, 8) + +const retirePath = from => { + const d = dirname(from) + const b = basename(from) + const hash = pathSafeHash(from) + return resolve(d, `.${b}-${hash}`) +} + +module.exports = retirePath diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/shrinkwrap.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/shrinkwrap.js new file mode 100644 index 0000000000000000000000000000000000000000..8313e05d61c376b3f9ed7e76e9109aa3da3990aa --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/shrinkwrap.js @@ -0,0 +1,1181 @@ +// a module that manages a shrinkwrap file (npm-shrinkwrap.json or +// package-lock.json). + +// Increment whenever the lockfile version updates +// v1 - npm <=6 +// v2 - arborist v1, npm v7, backwards compatible with v1, add 'packages' +// v3 will drop the 'dependencies' field, backwards comp with v2, not v1 +// +// We cannot bump to v3 until npm v6 is out of common usage, and +// definitely not before npm v8. + +const localeCompare = require('@isaacs/string-locale-compare')('en') +const defaultLockfileVersion = 3 + +// for comparing nodes to yarn.lock entries +const mismatch = (a, b) => a && b && a !== b + +// this.tree => the root node for the tree (ie, same path as this) +// - Set the first time we do `this.add(node)` for a path matching this.path +// +// this.add(node) => +// - decorate the node with the metadata we have, if we have it, and it matches +// - add to the map of nodes needing to be committed, so that subsequent +// changes are captured when we commit that location's metadata. +// +// this.commit() => +// - commit all nodes awaiting update to their metadata entries +// - re-generate this.data and this.yarnLock based on this.tree +// +// Note that between this.add() and this.commit(), `this.data` will be out of +// date! Always call `commit()` before relying on it. +// +// After calling this.commit(), any nodes not present in the tree will have +// been removed from the shrinkwrap data as well. 
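+//
+// A rough sketch of that lifecycle (hypothetical caller; the real ones live
+// in arborist's tree-building and reify code):
+//
+//   const meta = await Shrinkwrap.load({ path })
+//   meta.add(node)    // decorate node, queue its metadata for update
+//   // ...mutate the tree...
+//   meta.commit()     // regenerate meta.data from the tree
+//   await meta.save() // write package-lock.json / npm-shrinkwrap.json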
+ +const { log } = require('proc-log') +const YarnLock = require('./yarn-lock.js') +const { + readFile, + readdir, + readlink, + rm, + stat, + writeFile, +} = require('node:fs/promises') + +const { resolve, basename, relative } = require('node:path') +const specFromLock = require('./spec-from-lock.js') +const versionFromTgz = require('./version-from-tgz.js') +const npa = require('npm-package-arg') +const pkgJson = require('@npmcli/package-json') +const parseJSON = require('parse-conflict-json') +const nameFromFolder = require('@npmcli/name-from-folder') + +const stringify = require('json-stringify-nice') +const swKeyOrder = [ + 'name', + 'version', + 'lockfileVersion', + 'resolved', + 'integrity', + 'requires', + 'packages', + 'dependencies', +] + +// used to rewrite from yarn registry to npm registry +const yarnRegRe = /^https?:\/\/registry\.yarnpkg\.com\// +const npmRegRe = /^https?:\/\/registry\.npmjs\.org\// + +// sometimes resolved: is weird or broken, or something npa can't handle +const specFromResolved = resolved => { + try { + return npa(resolved) + } catch (er) { + return {} + } +} + +const relpath = require('./relpath.js') + +const consistentResolve = require('./consistent-resolve.js') +const { overrideResolves } = require('./override-resolves.js') + +const pkgMetaKeys = [ + // note: name is included if necessary, for alias packages + 'version', + 'dependencies', + 'peerDependencies', + 'peerDependenciesMeta', + 'optionalDependencies', + 'bundleDependencies', + 'acceptDependencies', + 'funding', + 'engines', + 'os', + 'cpu', + '_integrity', + 'license', + '_hasShrinkwrap', + 'hasInstallScript', + 'bin', + 'deprecated', + 'workspaces', +] + +const nodeMetaKeys = [ + 'integrity', + 'inBundle', + 'hasShrinkwrap', + 'hasInstallScript', +] + +const metaFieldFromPkg = (pkg, key) => { + const val = pkg[key] + if (val) { + // get only the license type, not the full object + if (key === 'license' && typeof val === 'object' && val.type) { + return val.type + } + // skip empty objects and falsey values + if (typeof val !== 'object' || Object.keys(val).length) { + return val + } + } + return null +} + +// check to make sure that there are no packages newer than or missing from the hidden lockfile +const assertNoNewer = async (path, data, lockTime, dir, seen) => { + const base = basename(dir) + const isNM = dir !== path && base === 'node_modules' + const isScope = dir !== path && base.startsWith('@') + const isParent = (dir === path) || isNM || isScope + + const parent = isParent ? 
dir : resolve(dir, 'node_modules')
+  const rel = relpath(path, dir)
+  seen.add(rel)
+  let entries
+  if (dir === path) {
+    entries = [{ name: 'node_modules', isDirectory: () => true }]
+  } else {
+    const { mtime: dirTime } = await stat(dir)
+    if (dirTime > lockTime) {
+      throw new Error(`out of date, updated: ${rel}`)
+    }
+    if (!isScope && !isNM && !data.packages[rel]) {
+      throw new Error(`missing from lockfile: ${rel}`)
+    }
+    entries = await readdir(parent, { withFileTypes: true }).catch(() => [])
+  }
+
+  // TODO limit concurrency here, this is recursive
+  await Promise.all(entries.map(async dirent => {
+    const child = resolve(parent, dirent.name)
+    if (dirent.isDirectory() && !dirent.name.startsWith('.')) {
+      await assertNoNewer(path, data, lockTime, child, seen)
+    } else if (dirent.isSymbolicLink()) {
+      const target = resolve(parent, await readlink(child))
+      const tstat = await stat(target).catch(
+        /* istanbul ignore next - windows */ () => null)
+      seen.add(relpath(path, child))
+      /* istanbul ignore next - windows cannot do this */
+      if (tstat?.isDirectory() && !seen.has(relpath(path, target))) {
+        await assertNoNewer(path, data, lockTime, target, seen)
+      }
+    }
+  }))
+
+  if (dir !== path) {
+    return
+  }
+
+  // assert that all the entries in the lockfile were seen
+  for (const loc in data.packages) {
+    if (!seen.has(loc)) {
+      throw new Error(`missing from node_modules: ${loc}`)
+    }
+  }
+}
+
+class Shrinkwrap {
+  static get defaultLockfileVersion () {
+    return defaultLockfileVersion
+  }
+
+  static load (options) {
+    return new Shrinkwrap(options).load()
+  }
+
+  static get keyOrder () {
+    return swKeyOrder
+  }
+
+  static async reset (options) {
+    // still need to know if it was loaded from the disk, but don't
+    // bother reading it if we're gonna just throw it away.
+    const s = new Shrinkwrap(options)
+    s.reset()
+
+    const [sw, lock] = await s.resetFiles
+
+    // XXX this is duplicated in this.load(), but using loadFiles instead of resetFiles
+    if (s.hiddenLockfile) {
+      s.filename = resolve(s.path, 'node_modules/.package-lock.json')
+    } else if (s.shrinkwrapOnly || sw) {
+      s.filename = resolve(s.path, 'npm-shrinkwrap.json')
+    } else {
+      s.filename = resolve(s.path, 'package-lock.json')
+    }
+    s.loadedFromDisk = !!(sw || lock)
+    // TODO what uses this?
+    s.type = basename(s.filename)
+
+    return s
+  }
+
+  static metaFromNode (node, path, options = {}) {
+    if (node.isLink) {
+      return {
+        resolved: relpath(path, node.realpath),
+        link: true,
+      }
+    }
+
+    const meta = {}
+    for (const key of pkgMetaKeys) {
+      const val = metaFieldFromPkg(node.package, key)
+      if (val) {
+        meta[key.replace(/^_/, '')] = val
+      }
+    }
+    // we only include name if different from the node path name, and for the
+    // root to help prevent churn based on the name of the directory the
+    // project is in
+    const pname = node.packageName
+    // we also include the name when it can't be inferred from the node's
+    // realpath folder, so that link targets keep their real package name
+    if (pname && (node === node.root || pname !== node.name || nameFromFolder(node.realpath) !== pname)) {
+      meta.name = pname
+    }
+
+    if (node.isTop && node.package.devDependencies) {
+      meta.devDependencies = node.package.devDependencies
+    }
+
+    for (const key of nodeMetaKeys) {
+      if (node[key]) {
+        meta[key] = node[key]
+      }
+    }
+
+    const resolved = consistentResolve(node.resolved, node.path, path, true)
+    // hide resolved from registry dependencies.
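+    // (an assumption based on the option name: overrideResolves() checks the
+    // omitLockfileRegistryResolved option and, when it is set, drops the
+    // resolved URL for registry deps -- see override-resolves.js)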
+ if (!resolved) { + // no-op + } else if (node.isRegistryDependency) { + meta.resolved = overrideResolves(resolved, options) + } else { + meta.resolved = resolved + } + + if (node.extraneous) { + meta.extraneous = true + } else { + if (node.peer) { + meta.peer = true + } + if (node.dev) { + meta.dev = true + } + if (node.optional) { + meta.optional = true + } + if (node.devOptional && !node.dev && !node.optional) { + meta.devOptional = true + } + } + return meta + } + + #awaitingUpdate = new Map() + + constructor (options = {}) { + const { + path, + indent = 2, + newline = '\n', + shrinkwrapOnly = false, + hiddenLockfile = false, + lockfileVersion, + resolveOptions = {}, + } = options + + if (hiddenLockfile) { + this.lockfileVersion = 3 + } else if (lockfileVersion) { + this.lockfileVersion = parseInt(lockfileVersion, 10) + } else { + this.lockfileVersion = null + } + + this.tree = null + this.path = resolve(path || '.') + this.filename = null + this.data = null + this.indent = indent + this.newline = newline + this.loadedFromDisk = false + this.type = null + this.yarnLock = null + this.hiddenLockfile = hiddenLockfile + this.loadingError = null + this.resolveOptions = resolveOptions + // only load npm-shrinkwrap.json in dep trees, not package-lock + this.shrinkwrapOnly = shrinkwrapOnly + } + + // check to see if a spec is present in the yarn.lock file, and if so, + // if we should use it, and what it should resolve to. This is only + // done when we did not load a shrinkwrap from disk. Also, decorate + // the options object if provided with the resolved and integrity that + // we expect. + checkYarnLock (spec, options = {}) { + spec = npa(spec) + const { yarnLock, loadedFromDisk } = this + const useYarnLock = yarnLock && !loadedFromDisk + const fromYarn = useYarnLock && yarnLock.entries.get(spec.raw) + if (fromYarn && fromYarn.version) { + // if it's the yarn or npm default registry, use the version as + // our effective spec. if it's any other kind of thing, use that. + const { resolved, version, integrity } = fromYarn + const isYarnReg = spec.registry && yarnRegRe.test(resolved) + const isnpmReg = spec.registry && !isYarnReg && npmRegRe.test(resolved) + const isReg = isnpmReg || isYarnReg + // don't use the simple version if the "registry" url is + // something else entirely! + const tgz = isReg && versionFromTgz(spec.name, resolved) || {} + let yspec = resolved + if (tgz.name === spec.name && tgz.version === version) { + yspec = version + } else if (isReg && tgz.name && tgz.version) { + yspec = `npm:${tgz.name}@${tgz.version}` + } + if (yspec) { + options.resolved = resolved.replace(yarnRegRe, 'https://registry.npmjs.org/') + options.integrity = integrity + return npa(`${spec.name}@${yspec}`) + } + } + return spec + } + + // throw away the shrinkwrap data so we can start fresh + // still worth doing a load() first so we know which files to write. 
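+  // e.g. (illustrative only):
+  //   await sw.load()  // learn filename/formatting, even if data is discarded
+  //   sw.reset()       // sw.data is now an empty lockfile skeleton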
+ reset () { + this.tree = null + this.#awaitingUpdate = new Map() + const lockfileVersion = this.lockfileVersion || defaultLockfileVersion + this.originalLockfileVersion = lockfileVersion + + this.data = { + lockfileVersion, + requires: true, + packages: {}, + dependencies: {}, + } + } + + // files to potentially read from and write to, in order of priority + get #filenameSet () { + if (this.shrinkwrapOnly) { + return [`${this.path}/npm-shrinkwrap.json`] + } + if (this.hiddenLockfile) { + return [`${this.path}/node_modules/.package-lock.json`] + } + return [ + `${this.path}/npm-shrinkwrap.json`, + `${this.path}/package-lock.json`, + `${this.path}/yarn.lock`, + ] + } + + get loadFiles () { + return Promise.all( + this.#filenameSet.map(file => file && readFile(file, 'utf8').then(d => d, er => { + /* istanbul ignore else - can't test without breaking module itself */ + if (er.code === 'ENOENT') { + return '' + } else { + throw er + } + })) + ) + } + + get resetFiles () { + // slice out yarn, we only care about lock or shrinkwrap when checking + // this way, since we're not actually loading the full lock metadata + return Promise.all(this.#filenameSet.slice(0, 2) + .map(file => file && stat(file).then(st => st.isFile(), er => { + /* istanbul ignore else - can't test without breaking module itself */ + if (er.code === 'ENOENT') { + return null + } else { + throw er + } + }) + ) + ) + } + + inferFormattingOptions (packageJSONData) { + const { + [Symbol.for('indent')]: indent, + [Symbol.for('newline')]: newline, + } = packageJSONData + if (indent !== undefined) { + this.indent = indent + } + if (newline !== undefined) { + this.newline = newline + } + } + + async load () { + // we don't need to load package-lock.json except for top of tree nodes, + // only npm-shrinkwrap.json. + let data + try { + const [sw, lock, yarn] = await this.loadFiles + data = sw || lock || '{}' + + // use shrinkwrap only for deps; otherwise, prefer package-lock + // and ignore npm-shrinkwrap if both are present. + // TODO: emit a warning here or something if both are present. + if (this.hiddenLockfile) { + this.filename = resolve(this.path, 'node_modules/.package-lock.json') + } else if (this.shrinkwrapOnly || sw) { + this.filename = resolve(this.path, 'npm-shrinkwrap.json') + } else { + this.filename = resolve(this.path, 'package-lock.json') + } + this.type = basename(this.filename) + this.loadedFromDisk = Boolean(sw || lock) + + if (yarn) { + this.yarnLock = new YarnLock() + // ignore invalid yarn data. we'll likely clobber it later anyway. + try { + this.yarnLock.parse(yarn) + } catch { + // ignore errors + } + } + + data = parseJSON(data) + this.inferFormattingOptions(data) + + if (this.hiddenLockfile && data.packages) { + // add a few ms just to account for jitter + const lockTime = +(await stat(this.filename)).mtime + 10 + await assertNoNewer(this.path, data, lockTime, this.path, new Set()) + } + + // all good! hidden lockfile is the newest thing in here. 
+ } catch (er) { + /* istanbul ignore else */ + if (typeof this.filename === 'string') { + const rel = relpath(this.path, this.filename) + log.verbose('shrinkwrap', `failed to load ${rel}`, er.message) + } else { + log.verbose('shrinkwrap', `failed to load ${this.path}`, er.message) + } + this.loadingError = er + this.loadedFromDisk = false + this.ancientLockfile = false + data = {} + } + // auto convert v1 lockfiles to v3 + // leave v2 in place unless configured + // v3 by default + let lockfileVersion = defaultLockfileVersion + if (this.lockfileVersion) { + lockfileVersion = this.lockfileVersion + } else if (data.lockfileVersion && data.lockfileVersion !== 1) { + lockfileVersion = data.lockfileVersion + } + + this.data = { + ...data, + lockfileVersion, + requires: true, + packages: data.packages || {}, + dependencies: data.dependencies || {}, + } + + this.originalLockfileVersion = data.lockfileVersion + + // use default if it wasn't explicitly set, and the current file is + // less than our default. otherwise, keep whatever is in the file, + // unless we had an explicit setting already. + if (!this.lockfileVersion) { + this.lockfileVersion = this.data.lockfileVersion = lockfileVersion + } + this.ancientLockfile = this.loadedFromDisk && + !(data.lockfileVersion >= 2) && !data.requires + + // load old lockfile deps into the packages listing + if (data.dependencies && !data.packages) { + let pkg + try { + pkg = await pkgJson.normalize(this.path) + pkg = pkg.content + } catch { + pkg = {} + } + this.#loadAll('', null, this.data) + this.#fixDependencies(pkg) + } + return this + } + + #loadAll (location, name, lock) { + // migrate a v1 package lock to the new format. + const meta = this.#metaFromLock(location, name, lock) + // dependencies nested under a link are actually under the link target + if (meta.link) { + location = meta.resolved + } + if (lock.dependencies) { + for (const name in lock.dependencies) { + const loc = location + (location ? '/' : '') + 'node_modules/' + name + this.#loadAll(loc, name, lock.dependencies[name]) + } + } + } + + // v1 lockfiles track the optional/dev flags, but they don't tell us + // which thing had what kind of dep on what other thing, so we need + // to correct that now, or every link will be considered prod + #fixDependencies (pkg) { + // we need the root package.json because legacy shrinkwraps just + // have requires:true at the root level, which is even less useful + // than merging all dep types into one object. + const root = this.data.packages[''] + for (const key of pkgMetaKeys) { + const val = metaFieldFromPkg(pkg, key) + if (val) { + root[key.replace(/^_/, '')] = val + } + } + + for (const loc in this.data.packages) { + const meta = this.data.packages[loc] + if (!meta.requires || !loc) { + continue + } + + // resolve each require to a meta entry + // if this node isn't optional, but the dep is, then it's an optionalDep + // likewise for dev deps. + // This isn't perfect, but it's a pretty good approximation, and at + // least gets us out of having all 'prod' edges, which throws off the + // buildIdealTree process + for (const name in meta.requires) { + const dep = this.#resolveMetaNode(loc, name) + // this overwrites the false value set above + // default to dependencies if the dep just isn't in the tree, which + // maybe should be an error, since it means that the shrinkwrap is + // invalid, but we can't do much better without any info. 
+ let depType = 'dependencies' + /* istanbul ignore else - dev deps are only for the root level */ + if (dep?.optional && !meta.optional) { + depType = 'optionalDependencies' + } else if (dep?.dev && !meta.dev) { + // XXX is this even reachable? + depType = 'devDependencies' + } + if (!meta[depType]) { + meta[depType] = {} + } + meta[depType][name] = meta.requires[name] + } + delete meta.requires + } + } + + #resolveMetaNode (loc, name) { + for (let path = loc; true; path = path.replace(/(^|\/)[^/]*$/, '')) { + const check = `${path}${path ? '/' : ''}node_modules/${name}` + if (this.data.packages[check]) { + return this.data.packages[check] + } + + if (!path) { + break + } + } + return null + } + + #lockFromLoc (lock, path, i = 0) { + if (!lock) { + return null + } + + if (path[i] === '') { + i++ + } + + if (i >= path.length) { + return lock + } + + if (!lock.dependencies) { + return null + } + + return this.#lockFromLoc(lock.dependencies[path[i]], path, i + 1) + } + + // pass in a path relative to the root path, or an absolute path, + // get back a /-normalized location based on root path. + #pathToLoc (path) { + return relpath(this.path, resolve(this.path, path)) + } + + delete (nodePath) { + if (!this.data) { + throw new Error('run load() before getting or setting data') + } + const location = this.#pathToLoc(nodePath) + this.#awaitingUpdate.delete(location) + + delete this.data.packages[location] + const path = location.split(/(?:^|\/)node_modules\//) + const name = path.pop() + const pLock = this.#lockFromLoc(this.data, path) + if (pLock && pLock.dependencies) { + delete pLock.dependencies[name] + } + } + + get (nodePath) { + if (!this.data) { + throw new Error('run load() before getting or setting data') + } + + const location = this.#pathToLoc(nodePath) + if (this.#awaitingUpdate.has(location)) { + this.#updateWaitingNode(location) + } + + // first try to get from the newer spot, which we know has + // all the things we need. + if (this.data.packages[location]) { + return this.data.packages[location] + } + + // otherwise, fall back to the legacy metadata, and hope for the best + // get the node in the shrinkwrap corresponding to this spot + const path = location.split(/(?:^|\/)node_modules\//) + const name = path[path.length - 1] + const lock = this.#lockFromLoc(this.data, path) + + return this.#metaFromLock(location, name, lock) + } + + #metaFromLock (location, name, lock) { + // This function tries as hard as it can to figure out the metadata + // from a lockfile which may be outdated or incomplete. Since v1 + // lockfiles used the "version" field to contain a variety of + // different possible types of data, this gets a little complicated. + if (!lock) { + return {} + } + + // try to figure out a npm-package-arg spec from the lockfile entry + // This will return null if we could not get anything valid out of it. + const spec = specFromLock(name, lock, this.path) + + if (spec.type === 'directory') { + // the "version" was a file: url to a non-tarball path + // this is a symlink dep. We don't store much metadata + // about symlinks, just the target. + const target = relpath(this.path, spec.fetchSpec) + this.data.packages[location] = { + link: true, + resolved: target, + } + // also save the link target, omitting version since we don't know + // what it is, but we know it isn't a link to itself! 
+ if (!this.data.packages[target]) { + this.#metaFromLock(target, name, { ...lock, version: null }) + } + return this.data.packages[location] + } + + const meta = {} + // when calling loadAll we'll change these into proper dep objects + if (lock.requires && typeof lock.requires === 'object') { + meta.requires = lock.requires + } + + if (lock.optional) { + meta.optional = true + } + if (lock.dev) { + meta.dev = true + } + + // the root will typically have a name from the root project's + // package.json file. + if (location === '') { + meta.name = lock.name + } + + // if we have integrity, save it now. + if (lock.integrity) { + meta.integrity = lock.integrity + } + + if (lock.version && !lock.integrity) { + // this is usually going to be a git url or symlink, but it could + // also be a registry dependency that did not have integrity at + // the time it was saved. + // Symlinks were already handled above, so that leaves git. + // + // For git, always save the full SSH url. we'll actually fetch the + // tgz most of the time, since it's faster, but it won't work for + // private repos, and we can't get back to the ssh from the tgz, + // so we store the ssh instead. + // For unknown git hosts, just resolve to the raw spec in lock.version + if (spec.type === 'git') { + meta.resolved = consistentResolve(spec, this.path, this.path) + + // return early because there is nothing else we can do with this + return this.data.packages[location] = meta + } else if (spec.registry) { + // registry dep that didn't save integrity. grab the version, and + // fall through to pick up the resolved and potentially name. + meta.version = lock.version + } + // only other possible case is a tarball without integrity. + // fall through to do what we can with the filename later. + } + + // at this point, we know that the spec is either a registry dep + // (ie, version, because locking, which means a resolved url), + // or a remote dep, or file: url. Remote deps and file urls + // have a fetchSpec equal to the fully resolved thing. + // Registry deps, we take what's in the lockfile. + if (lock.resolved || (spec.type && !spec.registry)) { + if (spec.registry) { + meta.resolved = lock.resolved + } else if (spec.type === 'file') { + meta.resolved = consistentResolve(spec, this.path, this.path, true) + } else if (spec.fetchSpec) { + meta.resolved = spec.fetchSpec + } + } + + // at this point, if still we don't have a version, do our best to + // infer it from the tarball url/file. This works a surprising + // amount of the time, even though it's not guaranteed. + if (!meta.version) { + if (spec.type === 'file' || spec.type === 'remote') { + const fromTgz = versionFromTgz(spec.name, spec.fetchSpec) || + versionFromTgz(spec.name, meta.resolved) + if (fromTgz) { + meta.version = fromTgz.version + if (fromTgz.name !== name) { + meta.name = fromTgz.name + } + } + } else if (spec.type === 'alias') { + meta.name = spec.subSpec.name + meta.version = spec.subSpec.fetchSpec + } else if (spec.type === 'version') { + meta.version = spec.fetchSpec + } + // ok, I did my best! good luck! 
+ } + + if (lock.bundled) { + meta.inBundle = true + } + + // save it for next time + return this.data.packages[location] = meta + } + + add (node) { + if (!this.data) { + throw new Error('run load() before getting or setting data') + } + + // will be actually updated on read + const loc = relpath(this.path, node.path) + if (node.path === this.path) { + this.tree = node + } + + // if we have metadata about this node, and it's a match, then + // try to decorate it. + if (node.resolved === null || node.integrity === null) { + const { + resolved, + integrity, + hasShrinkwrap, + version, + } = this.get(node.path) + + let pathFixed = null + if (resolved) { + if (!/^file:/.test(resolved)) { + pathFixed = resolved + } else { + pathFixed = `file:${resolve(this.path, resolved.slice(5))}` + } + } + + // if we have one, only set the other if it matches + // otherwise it could be for a completely different thing. + const resolvedOk = !resolved || !node.resolved || + node.resolved === pathFixed + const integrityOk = !integrity || !node.integrity || + node.integrity === integrity + const versionOk = !version || !node.version || version === node.version + + const allOk = (resolved || integrity || version) && + resolvedOk && integrityOk && versionOk + + if (allOk) { + node.resolved = node.resolved || pathFixed || null + node.integrity = node.integrity || integrity || null + node.hasShrinkwrap = node.hasShrinkwrap || hasShrinkwrap || false + } else { + // try to read off the package or node itself + const { + resolved, + integrity, + hasShrinkwrap, + } = Shrinkwrap.metaFromNode(node, this.path, this.resolveOptions) + node.resolved = node.resolved || resolved || null + node.integrity = node.integrity || integrity || null + node.hasShrinkwrap = node.hasShrinkwrap || hasShrinkwrap || false + } + } + this.#awaitingUpdate.set(loc, node) + } + + addEdge (edge) { + if (!this.yarnLock || !edge.valid) { + return + } + + const { to: node } = edge + + // if it's already set up, nothing to do + if (node.resolved !== null && node.integrity !== null) { + return + } + + // if the yarn lock is empty, nothing to do + if (!this.yarnLock.entries || !this.yarnLock.entries.size) { + return + } + + // we relativize the path here because that's how it shows up in the lock + // XXX why is this different from pathFixed in this.add?? 
+ let pathFixed = null + if (node.resolved) { + if (!/file:/.test(node.resolved)) { + pathFixed = node.resolved + } else { + pathFixed = consistentResolve(node.resolved, node.path, this.path, true) + } + } + + const spec = npa(`${node.name}@${edge.spec}`) + const entry = this.yarnLock.entries.get(`${node.name}@${edge.spec}`) + + if (!entry || + mismatch(node.version, entry.version) || + mismatch(node.integrity, entry.integrity) || + mismatch(pathFixed, entry.resolved)) { + return + } + + if (entry.resolved && yarnRegRe.test(entry.resolved) && spec.registry) { + entry.resolved = entry.resolved.replace(yarnRegRe, 'https://registry.npmjs.org/') + } + + node.integrity = node.integrity || entry.integrity || null + node.resolved = node.resolved || + consistentResolve(entry.resolved, this.path, node.path) || null + + this.#awaitingUpdate.set(relpath(this.path, node.path), node) + } + + #updateWaitingNode (loc) { + const node = this.#awaitingUpdate.get(loc) + this.#awaitingUpdate.delete(loc) + this.data.packages[loc] = Shrinkwrap.metaFromNode( + node, + this.path, + this.resolveOptions) + } + + commit () { + if (this.tree) { + if (this.yarnLock) { + this.yarnLock.fromTree(this.tree) + } + const root = Shrinkwrap.metaFromNode( + this.tree.target, + this.path, + this.resolveOptions) + this.data.packages = {} + if (Object.keys(root).length) { + this.data.packages[''] = root + } + for (const node of this.tree.root.inventory.values()) { + // only way this.tree is not root is if the root is a link to it + if (node === this.tree || node.isRoot || node.location === '') { + continue + } + const loc = relpath(this.path, node.path) + this.data.packages[loc] = Shrinkwrap.metaFromNode( + node, + this.path, + this.resolveOptions) + } + } else if (this.#awaitingUpdate.size > 0) { + for (const loc of this.#awaitingUpdate.keys()) { + this.#updateWaitingNode(loc) + } + } + + // if we haven't set it by now, use the default + if (!this.lockfileVersion) { + this.lockfileVersion = defaultLockfileVersion + } + this.data.lockfileVersion = this.lockfileVersion + + // hidden lockfiles don't include legacy metadata or a root entry + if (this.hiddenLockfile) { + delete this.data.packages[''] + delete this.data.dependencies + } else if (this.tree && this.lockfileVersion <= 3) { + this.#buildLegacyLockfile(this.tree, this.data) + } + + // lf version 1 = dependencies only + // lf version 2 = dependencies and packages + // lf version 3 = packages only + if (this.lockfileVersion >= 3) { + const { dependencies, ...data } = this.data + return data + } else if (this.lockfileVersion < 2) { + const { packages, ...data } = this.data + return data + } else { + return { ...this.data } + } + } + + #buildLegacyLockfile (node, lock, path = []) { + if (node === this.tree) { + // the root node + lock.name = node.packageName || node.name + if (node.version) { + lock.version = node.version + } + } + + // npm v6 and before tracked 'from', meaning "the request that led + // to this package being installed". However, that's inherently + // racy and non-deterministic in a world where deps are deduped + // ahead of fetch time. In order to maintain backwards compatibility + // with v6 in the lockfile, we do this trick where we pick a valid + // dep link out of the edgesIn set. Choose the edge with the fewest + // number of `node_modules` sections in the requestor path, and then + // lexically sort afterwards. 
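+    // e.g. an edge requested from '' (the root, zero node_modules sections)
+    // wins over one requested from 'node_modules/a/node_modules/b'
+    // (illustrative locations)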
+ const edge = [...node.edgesIn].filter(e => e.valid).sort((a, b) => { + const aloc = a.from.location.split('node_modules') + const bloc = b.from.location.split('node_modules') + /* istanbul ignore next - sort calling order is indeterminate */ + if (aloc.length > bloc.length) { + return 1 + } + if (bloc.length > aloc.length) { + return -1 + } + return localeCompare(aloc[aloc.length - 1], bloc[bloc.length - 1]) + })[0] + + const res = consistentResolve(node.resolved, this.path, this.path, true) + const rSpec = specFromResolved(res) + + // if we don't have anything (ie, it's extraneous) then use the resolved + // value as if that was where we got it from, since at least it's true. + // if we don't have either, just an empty object so nothing matches below. + // This will effectively just save the version and resolved, as if it's + // a standard version/range dep, which is a reasonable default. + let spec = rSpec + if (edge) { + spec = npa.resolve(node.name, edge.spec, edge.from.realpath) + } + + if (node.isLink) { + lock.version = `file:${relpath(this.path, node.realpath)}` + } else if (spec && (spec.type === 'file' || spec.type === 'remote')) { + lock.version = spec.saveSpec + } else if (spec && spec.type === 'git' || rSpec.type === 'git') { + lock.version = node.resolved + /* istanbul ignore else - don't think there are any cases where a git + * spec (or indeed, ANY npa spec) doesn't have a .raw member */ + if (spec.raw) { + lock.from = spec.raw + } + } else if (!node.isRoot && + node.package && + node.packageName && + node.packageName !== node.name) { + lock.version = `npm:${node.packageName}@${node.version}` + } else if (node.package && node.version) { + lock.version = node.version + } + + if (node.inDepBundle) { + lock.bundled = true + } + + // when we didn't resolve to git, file, or dir, and didn't request + // git, file, dir, or remote, then the resolved value is necessary. + if (node.resolved && + !node.isLink && + rSpec.type !== 'git' && + rSpec.type !== 'file' && + rSpec.type !== 'directory' && + spec.type !== 'directory' && + spec.type !== 'git' && + spec.type !== 'file' && + spec.type !== 'remote') { + lock.resolved = overrideResolves(node.resolved, this.resolveOptions) + } + + if (node.integrity) { + lock.integrity = node.integrity + } + + if (node.extraneous) { + lock.extraneous = true + } else if (!node.isLink) { + if (node.peer) { + lock.peer = true + } + + if (node.devOptional && !node.dev && !node.optional) { + lock.devOptional = true + } + + if (node.dev) { + lock.dev = true + } + + if (node.optional) { + lock.optional = true + } + } + + const depender = node.target + if (depender.edgesOut.size > 0) { + if (node !== this.tree) { + const entries = [...depender.edgesOut.entries()] + lock.requires = entries.reduce((set, [k, v]) => { + // omit peer deps from legacy lockfile requires field, because + // npm v6 doesn't handle peer deps, and this triggers some bad + // behavior if the dep can't be found in the dependencies list. + const { spec, peer } = v + if (peer) { + return set + } + if (spec.startsWith('file:')) { + // turn absolute file: paths into relative paths from the node + // this especially shows up with workspace edges when the root + // node is also a workspace in the set. 
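+            // e.g. (illustrative): spec 'file:/proj/packages/a' on a node
+            // whose realpath is '/proj' is stored as 'file:packages/a'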
+ const p = resolve(node.realpath, spec.slice('file:'.length)) + set[k] = `file:${relpath(node.realpath, p)}` + } else { + set[k] = spec + } + return set + }, {}) + } else { + lock.requires = true + } + } + + // now we walk the children, putting them in the 'dependencies' object + const { children } = node.target + if (!children.size) { + delete lock.dependencies + } else { + const kidPath = [...path, node.realpath] + const dependencies = {} + // skip any that are already in the descent path, so cyclical link + // dependencies don't blow up with ELOOP. + let found = false + for (const [name, kid] of children.entries()) { + if (path.includes(kid.realpath)) { + continue + } + dependencies[name] = this.#buildLegacyLockfile(kid, {}, kidPath) + found = true + } + if (found) { + lock.dependencies = dependencies + } + } + return lock + } + + toJSON () { + if (!this.data) { + throw new Error('run load() before getting or setting data') + } + + return this.commit() + } + + toString (options = {}) { + const data = this.toJSON() + const { format = true } = options + const defaultIndent = this.indent || 2 + const indent = format === true ? defaultIndent + : format || 0 + const eol = format ? this.newline || '\n' : '' + return stringify(data, swKeyOrder, indent).replace(/\n/g, eol) + } + + save (options = {}) { + if (!this.data) { + throw new Error('run load() before saving data') + } + + // This must be called before the lockfile conversion check below since it sets properties as part of `commit()` + const json = this.toString(options) + if ( + !this.hiddenLockfile + && this.originalLockfileVersion !== undefined + && this.originalLockfileVersion !== this.lockfileVersion + ) { + log.warn( + 'shrinkwrap', + `Converting lock file (${relative(process.cwd(), this.filename)}) from v${this.originalLockfileVersion} -> v${this.lockfileVersion}` + ) + } + + return Promise.all([ + writeFile(this.filename, json).catch(er => { + if (this.hiddenLockfile) { + // well, we did our best. + // if we reify, and there's nothing there, then it might be lacking + // a node_modules folder, but then the lockfile is not important. + // Remove the file, so that in case there WERE deps, but we just + // failed to update the file for some reason, it's not out of sync. + return rm(this.filename, { recursive: true, force: true }) + } + throw er + }), + this.yarnLock && this.yarnLock.entries.size && + writeFile(this.path + '/yarn.lock', this.yarnLock.toString()), + ]) + } +} + +module.exports = Shrinkwrap diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/signal-handling.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/signal-handling.js new file mode 100644 index 0000000000000000000000000000000000000000..18841d944ffe7829c92ef625b2d86f3f97cf1050 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/signal-handling.js @@ -0,0 +1,74 @@ +const signals = require('./signals.js') + +// for testing, expose the process being used +module.exports = Object.assign(fn => setup(fn), { process }) + +// do all of this in a setup function so that we can call it +// multiple times for multiple reifies that might be going on. +// Otherwise, Arborist.reify() is a global action, which is a +// new constraint we'd be adding with this behavior. 
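+//
+// A hypothetical usage sketch (handler name invented for illustration):
+//
+//   const onSignals = require('./signal-handling.js')
+//   const unload = onSignals(({ signal }) => rollbackAndLog(signal))
+//   // ... do the reify work, then remove the handlers:
+//   unload()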
+const setup = fn => { + const { process } = module.exports + + const sigListeners = { loaded: false } + + const unload = () => { + if (!sigListeners.loaded) { + return + } + for (const sig of signals) { + try { + process.removeListener(sig, sigListeners[sig]) + } catch { + // ignore errors + } + } + process.removeListener('beforeExit', onBeforeExit) + sigListeners.loaded = false + } + + const onBeforeExit = () => { + // this trick ensures that we exit with the same signal we caught + // Ie, if you press ^C and npm gets a SIGINT, we'll do the rollback + // and then exit with a SIGINT signal once we've removed the handler. + // The timeout is there because signals are asynchronous, so we need + // the process to NOT exit on its own, which means we have to have + // something keeping the event loop looping. Hence this hack. + unload() + process.kill(process.pid, signalReceived) + setTimeout(() => {}, 500) + } + + let signalReceived = null + const listener = (sig, fn) => () => { + signalReceived = sig + + // if we exit normally, but caught a signal which would have been fatal, + // then re-send it once we're done with whatever cleanup we have to do. + unload() + if (process.listeners(sig).length < 1) { + process.once('beforeExit', onBeforeExit) + } + + fn({ signal: sig }) + } + + // do the actual loading here + for (const sig of signals) { + sigListeners[sig] = listener(sig, fn) + const max = process.getMaxListeners() + try { + // if we call this a bunch of times, avoid triggering the warning + const { length } = process.listeners(sig) + if (length >= max) { + process.setMaxListeners(length + 1) + } + process.on(sig, sigListeners[sig]) + } catch { + // ignore errors + } + } + sigListeners.loaded = true + + return unload +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/signals.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/signals.js new file mode 100644 index 0000000000000000000000000000000000000000..8dcd585c4c06577a6e16c53b773d73a79e309fb5 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/signals.js @@ -0,0 +1,58 @@ +// copied from signal-exit + +// This is not the set of all possible signals. +// +// It IS, however, the set of all signals that trigger +// an exit on either Linux or BSD systems. Linux is a +// superset of the signal names supported on BSD, and +// the unknown signals just fail to register, so we can +// catch that easily enough. +// +// Don't bother with SIGKILL. It's uncatchable, which +// means that we can't fire any callbacks anyway. +// +// If a user does happen to register a handler on a non- +// fatal signal like SIGWINCH or something, and then +// exit, it'll end up firing `process.emit('exit')`, so +// the handler will be fired anyway. +// +// SIGBUS, SIGFPE, SIGSEGV and SIGILL, when not raised +// artificially, inherently leave the process in a +// state from which it is not safe to try and enter JS +// listeners. + +const platform = global.__ARBORIST_FAKE_PLATFORM__ || process.platform + +module.exports = [ + 'SIGABRT', + 'SIGALRM', + 'SIGHUP', + 'SIGINT', + 'SIGTERM', +] + +if (platform !== 'win32') { + module.exports.push( + 'SIGVTALRM', + 'SIGXCPU', + 'SIGXFSZ', + 'SIGUSR2', + 'SIGTRAP', + 'SIGSYS', + 'SIGQUIT', + 'SIGIOT' + // should detect profiler and enable/disable accordingly. 
+ // see #21 + // 'SIGPROF' + ) +} + +if (platform === 'linux') { + module.exports.push( + 'SIGIO', + 'SIGPOLL', + 'SIGPWR', + 'SIGSTKFLT', + 'SIGUNUSED' + ) +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/spec-from-lock.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/spec-from-lock.js new file mode 100644 index 0000000000000000000000000000000000000000..49b53c8f6aaca51d4e1f0c7434dbd0d5f0606336 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/spec-from-lock.js @@ -0,0 +1,34 @@ +const npa = require('npm-package-arg') + +// extracted from npm v6 lib/install/realize-shrinkwrap-specifier.js +const specFromLock = (name, lock, where) => { + try { + if (lock.version) { + const spec = npa.resolve(name, lock.version, where) + if (lock.integrity || spec.type === 'git') { + return spec + } + } + if (lock.from) { + // legacy metadata includes "from", but not integrity + const spec = npa.resolve(name, lock.from, where) + if (spec.registry && lock.version) { + return npa.resolve(name, lock.version, where) + } else if (!lock.resolved) { + return spec + } + } + if (lock.resolved) { + return npa.resolve(name, lock.resolved, where) + } + } catch { + // ignore errors + } + try { + return npa.resolve(name, lock.version, where) + } catch { + return {} + } +} + +module.exports = specFromLock diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/tracker.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/tracker.js new file mode 100644 index 0000000000000000000000000000000000000000..4a754d995dfcd2320f071e63f9ef141740eb23d6 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/tracker.js @@ -0,0 +1,86 @@ +const proggy = require('proggy') + +module.exports = cls => class Tracker extends cls { + #progress = new Map() + + #createTracker (key, name) { + const tracker = new proggy.Tracker(name ?? key) + tracker.on('done', () => this.#progress.delete(key)) + this.#progress.set(key, tracker) + } + + addTracker (section, subsection = null, key = null) { + if (section === null || section === undefined) { + this.#onError(`Tracker can't be null or undefined`) + } + + if (key === null) { + key = subsection + } + + const hasTracker = this.#progress.has(section) + const hasSubtracker = this.#progress.has(`${section}:${key}`) + + if (hasTracker && subsection === null) { + // 0. existing tracker, no subsection + this.#onError(`Tracker "${section}" already exists`) + } else if (!hasTracker && subsection === null) { + // 1. no existing tracker, no subsection + // Create a new progress tracker + this.#createTracker(section) + } else if (!hasTracker && subsection !== null) { + // 2. no parent tracker and subsection + this.#onError(`Parent tracker "${section}" does not exist`) + } else if (!hasTracker || !hasSubtracker) { + // 3. existing parent tracker, no subsection tracker + // Create a new subtracker and update parents + const parentTracker = this.#progress.get(section) + parentTracker.update(parentTracker.value, parentTracker.total + 1) + this.#createTracker(`${section}:${key}`, `${section}:${subsection}`) + } + // 4. 
existing parent tracker, existing subsection tracker + // skip it + } + + finishTracker (section, subsection = null, key = null) { + if (section === null || section === undefined) { + this.#onError(`Tracker can't be null or undefined`) + } + + if (key === null) { + key = subsection + } + + const hasTracker = this.#progress.has(section) + const hasSubtracker = this.#progress.has(`${section}:${key}`) + + // 0. parent tracker exists, no subsection + // Finish parent tracker and remove from this.#progress + if (hasTracker && subsection === null) { + // check if parent tracker does + // not have any remaining children + const keys = this.#progress.keys() + for (const key of keys) { + if (key.match(new RegExp(section + ':'))) { + this.finishTracker(section, key) + } + } + // remove parent tracker + this.#progress.get(section).finish() + } else if (!hasTracker && subsection === null) { + // 1. no existing parent tracker, no subsection + this.#onError(`Tracker "${section}" does not exist`) + } else if (!hasTracker || hasSubtracker) { + // 2. subtracker exists + // Finish subtracker and remove from this.#progress + const parentTracker = this.#progress.get(section) + parentTracker.update(parentTracker.value + 1) + this.#progress.get(`${section}:${key}`).finish() + } + // 3. existing parent tracker, no subsection + } + + #onError (msg) { + throw new Error(msg) + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/tree-check.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/tree-check.js new file mode 100644 index 0000000000000000000000000000000000000000..62a50bc75bdb580b847783e8337678ee550ad2f5 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/tree-check.js @@ -0,0 +1,155 @@ +const debug = require('./debug.js') + +const checkTree = (tree, checkUnreachable = true) => { + const log = [['START TREE CHECK', tree.path]] + + // this can only happen in tests where we have a "tree" object + // that isn't actually a tree. 
+ if (!tree.root || !tree.root.inventory) { + return tree + } + + const { inventory } = tree.root + const seen = new Set() + const check = (node, via = tree, viaType = 'self') => { + log.push([ + 'CHECK', + node && node.location, + via && via.location, + viaType, + 'seen=' + seen.has(node), + 'promise=' + !!(node && node.then), + 'root=' + !!(node && node.isRoot), + ]) + + if (!node || seen.has(node) || node.then) { + return + } + + seen.add(node) + + if (node.isRoot && node !== tree.root) { + throw Object.assign(new Error('double root'), { + node: node.path, + realpath: node.realpath, + tree: tree.path, + root: tree.root.path, + via: via.path, + viaType, + log, + }) + } + + if (node.root !== tree.root) { + throw Object.assign(new Error('node from other root in tree'), { + node: node.path, + realpath: node.realpath, + tree: tree.path, + root: tree.root.path, + via: via.path, + viaType, + otherRoot: node.root && node.root.path, + log, + }) + } + + if (!node.isRoot && node.inventory.size !== 0) { + throw Object.assign(new Error('non-root has non-zero inventory'), { + node: node.path, + tree: tree.path, + root: tree.root.path, + via: via.path, + viaType, + inventory: [...node.inventory.values()].map(node => + [node.path, node.location]), + log, + }) + } + + if (!node.isRoot && !inventory.has(node) && !node.dummy) { + throw Object.assign(new Error('not in inventory'), { + node: node.path, + tree: tree.path, + root: tree.root.path, + via: via.path, + viaType, + log, + }) + } + + const devEdges = [...node.edgesOut.values()].filter(e => e.dev) + if (!node.isTop && devEdges.length) { + throw Object.assign(new Error('dev edges on non-top node'), { + node: node.path, + tree: tree.path, + root: tree.root.path, + via: via.path, + viaType, + devEdges: devEdges.map(e => [e.type, e.name, e.spec, e.error]), + log, + }) + } + + if (node.path === tree.root.path && node !== tree.root && !tree.root.isLink) { + throw Object.assign(new Error('node with same path as root'), { + node: node.path, + tree: tree.path, + root: tree.root.path, + via: via.path, + viaType, + log, + }) + } + + if (!node.isLink && node.path !== node.realpath) { + throw Object.assign(new Error('non-link with mismatched path/realpath'), { + node: node.path, + tree: tree.path, + realpath: node.realpath, + root: tree.root.path, + via: via.path, + viaType, + log, + }) + } + + const { parent, fsParent, target } = node + check(parent, node, 'parent') + check(fsParent, node, 'fsParent') + check(target, node, 'target') + log.push(['CHILDREN', node.location, ...node.children.keys()]) + for (const kid of node.children.values()) { + check(kid, node, 'children') + } + for (const kid of node.fsChildren) { + check(kid, node, 'fsChildren') + } + for (const link of node.linksIn) { + check(link, node, 'linksIn') + } + for (const top of node.tops) { + check(top, node, 'tops') + } + log.push(['DONE', node.location]) + } + check(tree) + if (checkUnreachable) { + for (const node of inventory.values()) { + if (!seen.has(node) && node !== tree.root) { + throw Object.assign(new Error('unreachable in inventory'), { + node: node.path, + realpath: node.realpath, + location: node.location, + root: tree.root.path, + tree: tree.path, + log, + }) + } + } + } + return tree +} + +// should only ever run this check in debug mode +module.exports = tree => tree +debug(() => module.exports = checkTree) diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/version-from-tgz.js 
b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/version-from-tgz.js new file mode 100644 index 0000000000000000000000000000000000000000..77e3956c1c17130a639b76a5ab36b670b00a0d6c --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/version-from-tgz.js @@ -0,0 +1,47 @@ +const semver = require('semver') +const { basename } = require('node:path') +const { URL } = require('node:url') +module.exports = (name, tgz) => { + const base = basename(tgz) + if (!base.endsWith('.tgz')) { + return null + } + + if (tgz.startsWith('http:/') || tgz.startsWith('https:/')) { + const u = new URL(tgz) + // registry url? check for most likely pattern. + // either /@foo/bar/-/bar-1.2.3.tgz or + // /foo/-/foo-1.2.3.tgz, and fall through to + // basename checking. Note that registries can + // be mounted below the root url, so /a/b/-/x/y/foo/-/foo-1.2.3.tgz + // is a potential option. + const tfsplit = u.pathname.slice(1).split('/-/') + if (tfsplit.length > 1) { + const afterTF = tfsplit.pop() + if (afterTF === base) { + const pre = tfsplit.pop() + const preSplit = pre.split(/\/|%2f/i) + const project = preSplit.pop() + const scope = preSplit.pop() + return versionFromBaseScopeName(base, scope, project) + } + } + } + + const split = name.split(/\/|%2f/i) + const project = split.pop() + const scope = split.pop() + return versionFromBaseScopeName(base, scope, project) +} + +const versionFromBaseScopeName = (base, scope, name) => { + if (!base.startsWith(name + '-')) { + return null + } + + const parsed = semver.parse(base.substring(name.length + 1, base.length - 4)) + return parsed ? { + name: scope && scope.charAt(0) === '@' ? `${scope}/${name}` : name, + version: parsed.version, + } : null +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/vuln.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/vuln.js new file mode 100644 index 0000000000000000000000000000000000000000..2bffe54f2dacdc91f2a920e5e7975d7da2ea77dd --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/vuln.js @@ -0,0 +1,214 @@ +// An object representing a vulnerability either as the result of an +// advisory or due to the package in question depending exclusively on +// vulnerable versions of a dep. +// +// - name: package name +// - range: Set of vulnerable versions +// - nodes: Set of nodes affected +// - effects: Set of vulns triggered by this one +// - advisories: Set of advisories (including metavulns) causing this vuln. +// All of the entries in via are vulnerability objects returned by +// @npmcli/metavuln-calculator +// - via: dependency vulns which cause this one + +const { satisfies, simplifyRange } = require('semver') +const semverOpt = { loose: true, includePrerelease: true } + +const localeCompare = require('@isaacs/string-locale-compare')('en') +const npa = require('npm-package-arg') + +const severities = new Map([ + ['info', 0], [0, 'info'], + ['low', 1], [1, 'low'], + ['moderate', 2], [2, 'moderate'], + ['high', 3], [3, 'high'], + ['critical', 4], [4, 'critical'], + [null, -1], [-1, null], +]) + +class Vuln { + #range = null + #simpleRange = null + // assume a fix is available unless it hits a top node + // that locks it in place, setting this false or {isSemVerMajor, version}. 
+  #fixAvailable = true
+
+  constructor ({ name, advisory }) {
+    this.name = name
+    this.via = new Set()
+    this.advisories = new Set()
+    this.severity = null
+    this.effects = new Set()
+    this.topNodes = new Set()
+    this.nodes = new Set()
+    this.addAdvisory(advisory)
+    this.packument = advisory.packument
+    this.versions = advisory.versions
+  }
+
+  get fixAvailable () {
+    return this.#fixAvailable
+  }
+
+  set fixAvailable (f) {
+    this.#fixAvailable = f
+    // if there's a fix available for this at the top level, it means that
+    // it will also fix the vulns that led to it being there. to get there,
+    // we set the vias to the most "strict" of fix availables.
+    // - false: no fix is available
+    // - {name, version, isSemVerMajor} fix requires -f, is semver major
+    // - {name, version} fix requires -f, not semver major
+    // - true: fix does not require -f
+    // TODO: duped entries may require different fixes but the current
+    // structure does not support this, so the case where a top level fix
+    // corrects a duped entry may mean you have to run fix more than once
+    for (const v of this.via) {
+      // don't blow up on loops
+      if (v.fixAvailable === f) {
+        continue
+      }
+
+      if (f === false) {
+        v.fixAvailable = f
+      } else if (v.fixAvailable === true) {
+        v.fixAvailable = f
+      } else if (typeof f === 'object' && (
+        typeof v.fixAvailable !== 'object' || !v.fixAvailable.isSemVerMajor)) {
+        v.fixAvailable = f
+      }
+    }
+  }
+
+  get isDirect () {
+    for (const node of this.nodes.values()) {
+      for (const edge of node.edgesIn) {
+        if (edge.from.isProjectRoot || edge.from.isWorkspace) {
+          return true
+        }
+      }
+    }
+    return false
+  }
+
+  testSpec (spec) {
+    const specObj = npa(spec)
+    if (!specObj.registry) {
+      return true
+    }
+
+    if (specObj.subSpec) {
+      spec = specObj.subSpec.rawSpec
+    }
+
+    for (const v of this.versions) {
+      if (satisfies(v, spec) && !satisfies(v, this.range, semverOpt)) {
+        return false
+      }
+    }
+    return true
+  }
+
+  toJSON () {
+    return {
+      name: this.name,
+      severity: this.severity,
+      isDirect: this.isDirect,
+      // just loop over the advisories, since via is only Vuln objects,
+      // and calculated advisories have all the info we need
+      via: [...this.advisories].map(v => v.type === 'metavuln' ? 
v.dependency : {
+        ...v,
+        versions: undefined,
+        vulnerableVersions: undefined,
+        id: undefined,
+      }).sort((a, b) =>
+        localeCompare(String(a.source || a), String(b.source || b))),
+      effects: [...this.effects].map(v => v.name).sort(localeCompare),
+      range: this.simpleRange,
+      nodes: [...this.nodes].map(n => n.location).sort(localeCompare),
+      fixAvailable: this.#fixAvailable,
+    }
+  }
+
+  addVia (v) {
+    this.via.add(v)
+    v.effects.add(this)
+    // call the setter since we might add vias _after_ setting fixAvailable
+    this.fixAvailable = this.fixAvailable
+  }
+
+  deleteVia (v) {
+    this.via.delete(v)
+    v.effects.delete(this)
+  }
+
+  deleteAdvisory (advisory) {
+    this.advisories.delete(advisory)
+    // make sure we have the max severity of all the vulns causing this one
+    this.severity = null
+    this.#range = null
+    this.#simpleRange = null
+    // refresh severity
+    for (const advisory of this.advisories) {
+      this.addAdvisory(advisory)
+    }
+
+    // remove any effects that are no longer relevant
+    const vias = new Set([...this.advisories].map(a => a.dependency))
+    for (const via of this.via) {
+      if (!vias.has(via.name)) {
+        this.deleteVia(via)
+      }
+    }
+  }
+
+  addAdvisory (advisory) {
+    this.advisories.add(advisory)
+    const sev = severities.get(advisory.severity)
+    this.#range = null
+    this.#simpleRange = null
+    if (sev > severities.get(this.severity)) {
+      this.severity = advisory.severity
+    }
+  }
+
+  get range () {
+    if (!this.#range) {
+      this.#range = [...this.advisories].map(v => v.range).join(' || ')
+    }
+    return this.#range
+  }
+
+  get simpleRange () {
+    if (this.#simpleRange && this.#simpleRange === this.#range) {
+      return this.#simpleRange
+    }
+
+    const versions = [...this.advisories][0].versions
+    const range = this.range
+    this.#simpleRange = simplifyRange(versions, range, semverOpt)
+    this.#range = this.#simpleRange
+    return this.#simpleRange
+  }
+
+  isVulnerable (node) {
+    if (this.nodes.has(node)) {
+      return true
+    }
+
+    const { version } = node.package
+    if (!version) {
+      return false
+    }
+
+    for (const v of this.advisories) {
+      if (v.testVersion(version)) {
+        this.nodes.add(node)
+        return true
+      }
+    }
+
+    return false
+  }
+}
+
+module.exports = Vuln
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/yarn-lock.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/yarn-lock.js
new file mode 100644
index 0000000000000000000000000000000000000000..fc62806506acdcf89c2cbc36ca78e9c46e5babe8
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/lib/yarn-lock.js
@@ -0,0 +1,377 @@
+// parse a yarn lock file
+// basic format
+//
+// <request spec>[, <request spec> ...]:
+//   <key> <value>
+//   <subkey>:
+//     <key> <value>
+//
+// Assume that any key or value might be quoted, though that's only done
+// in practice if certain chars are in the string. When writing back, we follow
+// Yarn's rules for quoting, to cause minimal friction.
+//
+// The data format would support nested objects, but at this time, it
+// appears that yarn does not use that for anything, so in the interest
+// of a simpler parser algorithm, this implementation only supports a
+// single layer of sub objects.
+//
+// This doesn't deterministically define the shape of the tree, and so
+// cannot be used (on its own) for Arborist.loadVirtual.
+// But it can give us resolved, integrity, and version, which is useful
+// for Arborist.loadActual and for building the ideal tree.
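+//
+// An entry in this format might look like (illustrative values only):
+//
+//   foo@^1.0.0, foo@~1.2.0:
+//     version "1.2.3"
+//     resolved "https://registry.npmjs.org/foo/-/foo-1.2.3.tgz"
+//     integrity sha512-...
+//     dependencies:
+//       bar "^2.0.0"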
+// +// At the very least, when a yarn.lock file is present, we update it +// along the way, and save it back in Shrinkwrap.save() +// +// NIHing this rather than using @yarnpkg/lockfile because that module +// is an impenetrable 10kloc of webpack flow output, which is overkill +// for something relatively simple and tailored to Arborist's use case. + +const localeCompare = require('@isaacs/string-locale-compare')('en') +const consistentResolve = require('./consistent-resolve.js') +const { dirname } = require('node:path') +const { breadth } = require('treeverse') + +// Sort Yarn entries respecting the yarn.lock sort order +const yarnEntryPriorities = { + name: 1, + version: 2, + uid: 3, + resolved: 4, + integrity: 5, + registry: 6, + dependencies: 7, +} + +const priorityThenLocaleCompare = (a, b) => { + if (!yarnEntryPriorities[a] && !yarnEntryPriorities[b]) { + return localeCompare(a, b) + } + /* istanbul ignore next */ + return (yarnEntryPriorities[a] || 100) > (yarnEntryPriorities[b] || 100) ? 1 : -1 +} + +const quoteIfNeeded = val => { + if ( + typeof val === 'boolean' || + typeof val === 'number' || + val.startsWith('true') || + val.startsWith('false') || + /[:\s\n\\",[\]]/g.test(val) || + !/^[a-zA-Z]/g.test(val) + ) { + return JSON.stringify(val) + } + + return val +} + +// sort a key/value object into a string of JSON stringified keys and vals +const sortKV = obj => Object.keys(obj) + .sort(localeCompare) + .map(k => ` ${quoteIfNeeded(k)} ${quoteIfNeeded(obj[k])}`) + .join('\n') + +// for checking against previous entries +const match = (p, n) => + p.integrity && n.integrity ? p.integrity === n.integrity + : p.resolved && n.resolved ? p.resolved === n.resolved + : p.version && n.version ? p.version === n.version + : true + +const prefix = +`# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
+# yarn lockfile v1 + + +` + +const nullSymbol = Symbol('null') +class YarnLock { + static parse (data) { + return new YarnLock().parse(data) + } + + static fromTree (tree) { + return new YarnLock().fromTree(tree) + } + + constructor () { + this.entries = null + this.endCurrent() + } + + endCurrent () { + this.current = null + this.subkey = nullSymbol + } + + parse (data) { + const ENTRY_START = /^[^\s].*:$/ + const SUBKEY = /^ {2}[^\s]+:$/ + const SUBVAL = /^ {4}[^\s]+ .+$/ + const METADATA = /^ {2}[^\s]+ .+$/ + this.entries = new Map() + this.current = null + const linere = /([^\r\n]*)\r?\n/gm + let match + let lineNum = 0 + if (!/\n$/.test(data)) { + data += '\n' + } + while (match = linere.exec(data)) { + const line = match[1] + lineNum++ + if (line.charAt(0) === '#') { + continue + } + if (line === '') { + this.endCurrent() + continue + } + if (ENTRY_START.test(line)) { + this.endCurrent() + const specs = this.splitQuoted(line.slice(0, -1), /, */) + this.current = new YarnLockEntry(specs) + specs.forEach(spec => this.entries.set(spec, this.current)) + continue + } + if (SUBKEY.test(line)) { + this.subkey = line.slice(2, -1) + this.current[this.subkey] = {} + continue + } + if (SUBVAL.test(line) && this.current && this.current[this.subkey]) { + const subval = this.splitQuoted(line.trimLeft(), ' ') + if (subval.length === 2) { + this.current[this.subkey][subval[0]] = subval[1] + continue + } + } + // any other metadata + if (METADATA.test(line) && this.current) { + const metadata = this.splitQuoted(line.trimLeft(), ' ') + if (metadata.length === 2) { + // strip off the legacy shasum hashes + if (metadata[0] === 'resolved') { + metadata[1] = metadata[1].replace(/#.*/, '') + } + this.current[metadata[0]] = metadata[1] + continue + } + } + + throw Object.assign(new Error('invalid or corrupted yarn.lock file'), { + position: match.index, + content: match[0], + line: lineNum, + }) + } + this.endCurrent() + return this + } + + splitQuoted (str, delim) { + // a,"b,c",d"e,f => ['a','"b','c"','d"e','f'] => ['a','b,c','d"e','f'] + const split = str.split(delim) + const out = [] + let o = 0 + for (let i = 0; i < split.length; i++) { + const chunk = split[i] + if (/^".*"$/.test(chunk)) { + out[o++] = chunk.trim().slice(1, -1) + } else if (/^"/.test(chunk)) { + let collect = chunk.trimLeft().slice(1) + while (++i < split.length) { + const n = split[i] + // something that is not a slash, followed by an even number + // of slashes then a " then end => ending on an unescaped " + if (/[^\\](\\\\)*"$/.test(n)) { + collect += n.trimRight().slice(0, -1) + break + } else { + collect += n + } + } + out[o++] = collect + } else { + out[o++] = chunk.trim() + } + } + return out + } + + toString () { + return prefix + [...new Set([...this.entries.values()])] + .map(e => e.toString()) + .sort((a, b) => localeCompare(a.replace(/"/g, ''), b.replace(/"/g, ''))).join('\n\n') + '\n' + } + + fromTree (tree) { + this.entries = new Map() + // walk the tree in a deterministic order, breadth-first, alphabetical + breadth({ + tree, + visit: node => this.addEntryFromNode(node), + getChildren: node => [...node.children.values(), ...node.fsChildren] + .sort((a, b) => a.depth - b.depth || localeCompare(a.name, b.name)), + }) + return this + } + + addEntryFromNode (node) { + const specs = [...node.edgesIn] + .map(e => `${node.name}@${e.spec}`) + .sort(localeCompare) + + // Note: + // yarn will do excessive duplication in a case like this: + // root -> (x@1.x, y@1.x, z@1.x) + // y@1.x -> (x@1.1, z@2.x) + // z@1.x -> () + // z@2.x 
-> (x@1.x) + // + // where x@1.2 exists, because the "x@1.x" spec will *always* resolve + // to x@1.2, which doesn't work for y's dep on x@1.1, so you'll get this: + // + // root + // +-- x@1.2.0 + // +-- y + // | +-- x@1.1.0 + // | +-- z@2 + // | +-- x@1.2.0 + // +-- z@1 + // + // instead of this more deduped tree that arborist builds by default: + // + // root + // +-- x@1.2.0 (dep is x@1.x, from root) + // +-- y + // | +-- x@1.1.0 + // | +-- z@2 (dep on x@1.x deduped to x@1.1.0 under y) + // +-- z@1 + // + // In order to not create an invalid yarn.lock file with conflicting + // entries, AND not tell yarn to create an invalid tree, we need to + // ignore the x@1.x spec coming from z, since it's already in the entries. + // + // So, if the integrity and resolved don't match a previous entry, skip it. + // We call this method on shallower nodes first, so this is fine. + const n = this.entryDataFromNode(node) + let priorEntry = null + const newSpecs = [] + for (const s of specs) { + const prev = this.entries.get(s) + // no previous entry for this spec at all, so it's new + if (!prev) { + // if we saw a match already, then assign this spec to it as well + if (priorEntry) { + priorEntry.addSpec(s) + } else { + newSpecs.push(s) + } + continue + } + + const m = match(prev, n) + // there was a prior entry, but a different thing. skip this one + if (!m) { + continue + } + + // previous matches, but first time seeing it, so already has this spec. + // go ahead and add all the previously unseen specs, though + if (!priorEntry) { + priorEntry = prev + for (const s of newSpecs) { + priorEntry.addSpec(s) + this.entries.set(s, priorEntry) + } + newSpecs.length = 0 + continue + } + + // have a prior entry matching n, and matching the prev we just saw + // add the spec to it + priorEntry.addSpec(s) + this.entries.set(s, priorEntry) + } + + // if we never found a matching prior, then this is a whole new thing + if (!priorEntry) { + const entry = Object.assign(new YarnLockEntry(newSpecs), n) + for (const s of newSpecs) { + this.entries.set(s, entry) + } + } else { + // pick up any new info that we got for this node, so that we can + // decorate with integrity/resolved/etc. + Object.assign(priorEntry, n) + } + } + + entryDataFromNode (node) { + const n = {} + if (node.package.dependencies) { + n.dependencies = node.package.dependencies + } + if (node.package.optionalDependencies) { + n.optionalDependencies = node.package.optionalDependencies + } + if (node.version) { + n.version = node.version + } + if (node.resolved) { + n.resolved = consistentResolve( + node.resolved, + node.isLink ? dirname(node.path) : node.path, + node.root.path, + true + ) + } + if (node.integrity) { + n.integrity = node.integrity + } + + return n + } + + static get Entry () { + return YarnLockEntry + } +} + +class YarnLockEntry { + #specs + constructor (specs) { + this.#specs = new Set(specs) + this.resolved = null + this.version = null + this.integrity = null + this.dependencies = null + this.optionalDependencies = null + } + + toString () { + // sort objects to the bottom, then alphabetical + return ([...this.#specs] + .sort(localeCompare) + .map(quoteIfNeeded).join(', ') + + ':\n' + + Object.getOwnPropertyNames(this) + .filter(prop => this[prop] !== null) + .sort(priorityThenLocaleCompare) + .map(prop => + typeof this[prop] !== 'object' + ? ` ${prop} ${prop === 'integrity' ? this[prop] : JSON.stringify(this[prop])}\n` + : Object.keys(this[prop]).length === 0 ? 
'' + : ` ${prop}:\n` + sortKV(this[prop]) + '\n') + .join('')).trim() + } + + addSpec (spec) { + this.#specs.add(spec) + } +} + +module.exports = YarnLock diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/package.json new file mode 100644 index 0000000000000000000000000000000000000000..ed00181eceaec0be53bdf249e7c838060d362f92 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/arborist/package.json @@ -0,0 +1,98 @@ +{ + "name": "@npmcli/arborist", + "version": "9.1.6", + "description": "Manage node_modules trees", + "dependencies": { + "@isaacs/string-locale-compare": "^1.1.0", + "@npmcli/fs": "^4.0.0", + "@npmcli/installed-package-contents": "^3.0.0", + "@npmcli/map-workspaces": "^5.0.0", + "@npmcli/metavuln-calculator": "^9.0.2", + "@npmcli/name-from-folder": "^3.0.0", + "@npmcli/node-gyp": "^4.0.0", + "@npmcli/package-json": "^7.0.0", + "@npmcli/query": "^4.0.0", + "@npmcli/redact": "^3.0.0", + "@npmcli/run-script": "^10.0.0", + "bin-links": "^5.0.0", + "cacache": "^20.0.1", + "common-ancestor-path": "^1.0.1", + "hosted-git-info": "^9.0.0", + "json-stringify-nice": "^1.1.4", + "lru-cache": "^11.2.1", + "minimatch": "^10.0.3", + "nopt": "^8.0.0", + "npm-install-checks": "^7.1.0", + "npm-package-arg": "^13.0.0", + "npm-pick-manifest": "^11.0.1", + "npm-registry-fetch": "^19.0.0", + "pacote": "^21.0.2", + "parse-conflict-json": "^4.0.0", + "proc-log": "^5.0.0", + "proggy": "^3.0.0", + "promise-all-reject-late": "^1.0.0", + "promise-call-limit": "^3.0.1", + "semver": "^7.3.7", + "ssri": "^12.0.0", + "treeverse": "^3.0.0", + "walk-up-path": "^4.0.0" + }, + "devDependencies": { + "@npmcli/eslint-config": "^5.0.1", + "@npmcli/mock-registry": "^1.0.0", + "@npmcli/template-oss": "4.25.1", + "benchmark": "^2.1.4", + "minify-registry-metadata": "^4.0.0", + "nock": "^13.3.3", + "tap": "^16.3.8", + "tar-stream": "^3.0.0", + "tcompare": "^5.0.6" + }, + "scripts": { + "test": "tap", + "posttest": "npm run lint", + "snap": "tap", + "test-proxy": "ARBORIST_TEST_PROXY=1 tap --snapshot", + "lint": "npm run eslint", + "lintfix": "npm run eslint -- --fix", + "benchmark": "node scripts/benchmark.js", + "benchclean": "rm -rf scripts/benchmark/*/", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/cli.git", + "directory": "workspaces/arborist" + }, + "author": "GitHub Inc.", + "license": "ISC", + "files": [ + "bin/", + "lib/" + ], + "main": "lib/index.js", + "bin": { + "arborist": "bin/index.js" + }, + "tap": { + "after": "test/fixtures/cleanup.js", + "test-env": [ + "LC_ALL=sk" + ], + "timeout": "720", + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "engines": { + "node": "^20.17.0 || >=22.9.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.",
+    "version": "4.25.1",
+    "content": "../../scripts/template-oss/index.js"
+  }
+}
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..19cec97b1846830f5628807533a144313cd67532
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/README.md b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..6a948d9b11a9122ff3db037560f8c43112f3cb48
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/README.md
@@ -0,0 +1,226 @@
+# `@npmcli/config`
+
+Configuration management for the npm cli.
+
+This module is the spiritual descendant of [`npmconf`](http://npm.im/npmconf), and the code that once lived in npm's
+`lib/config/` folder.
+
+It manages the configuration files that npm uses, but importantly, does _not_ define all the configuration defaults or types, as those parts make more sense to live within the npm CLI itself.
+
+The only exceptions:
+
+- The `prefix` config value has some special semantics, setting the local prefix if specified on the CLI options and not in global mode, or the global prefix otherwise.
+- The `project` config file is loaded based on the local prefix (which can only be set by the CLI config options, and otherwise defaults to a walk up the folder tree to the first parent containing a `node_modules` folder, `package.json` file, or `package-lock.json` file.)
+- The `userconfig` value, as set by the environment and CLI (defaulting to
+  `~/.npmrc`), is used to load user configs.
+- The `globalconfig` value, as set by the environment, CLI, and
+  `userconfig` file (defaulting to `$PREFIX/etc/npmrc`) is used to load global configs.
+- A `builtin` config, read from a `npmrc` file in the root of the npm project itself, overrides all defaults.
+
+The resulting hierarchy of configs:
+
+- CLI switches.
+  eg `--some-key=some-value` on the command line.
+  These are parsed by [`nopt`](http://npm.im/nopt), which is not a great choice, but it's the one that npm has used forever, and changing it will be difficult.
+- Environment variables.
+  eg `npm_config_some_key=some_value` in the environment.
+  There is no way at this time to modify this prefix.
+- INI-formatted project configs.
+ eg `some-key = some-value` in the + `localPrefix` folder (ie, the `cwd`, or its nearest parent that contains either a `node_modules` folder or `package.json` file.) +- INI-formatted userconfig file. + eg `some-key = some-value` in `~/.npmrc`. + The `userconfig` config value can be overridden by the `cli`, `env`, or + `project` configs to change this value. +- INI-formatted globalconfig file. + eg `some-key = some-value` in the `globalPrefix` folder, which is inferred by looking at the location of the node executable, or the `prefix` setting in the `cli`, `env`, `project`, or `userconfig`. + The `globalconfig` value at any of those levels can override this. +- INI-formatted builtin config file. + eg `some-key = some-value` in `/usr/local/lib/node_modules/npm/npmrc`. + This is not configurable, and is determined by looking in the `npmPath` folder. +- Default values (passed in by npm when it loads this module). + +## USAGE + +```js +const Config = require('@npmcli/config') +const { shorthands, definitions, flatten } = require('@npmcli/config/lib/definitions') + +const conf = new Config({ + // path to the npm module being run + npmPath: resolve(__dirname, '..'), + definitions, + shorthands, + flatten, + // optional, defaults to process.argv + // argv: [] <- if you are using this package in your own cli + // and don't want to have colliding argv + argv: process.argv, + // optional, defaults to process.env + env: process.env, + // optional, defaults to process.execPath + execPath: process.execPath, + // optional, defaults to process.platform + platform: process.platform, + // optional, defaults to process.cwd() + cwd: process.cwd(), +}) + +// emits log events on the process object +// see `proc-log` for more info +process.on('log', (level, ...args) => { + console.log(level, ...args) +}) + +// returns a promise that fails if config loading fails, and +// resolves when the config object is ready for action +conf.load().then(() => { + conf.validate() + console.log('loaded ok! some-key = ' + conf.get('some-key')) +}).catch(er => { + console.error('error loading configs!', er) +}) +``` + +## API + +The `Config` class is the sole export. + +```js +const Config = require('@npmcli/config') +``` + +### static `Config.typeDefs` + +The type definitions passed to `nopt` for CLI option parsing and known configuration validation. + +### constructor `new Config(options)` + +Options: + +- `types` Types of all known config values. +Note that some are effectively given semantic value in the config loading process itself. +- `shorthands` An object mapping a shorthand value to an array of CLI arguments that replace it. +- `defaults` Default values for each of the known configuration keys. + These should be defined for all configs given a type, and must be valid. +- `npmPath` The path to the `npm` module, for loading the `builtin` config file. +- `cwd` Optional, defaults to `process.cwd()`, used for inferring the + `localPrefix` and loading the `project` config. +- `platform` Optional, defaults to `process.platform`. +Used when inferring the `globalPrefix` from the `execPath`, since this is done differently on Windows. +- `execPath` Optional, defaults to `process.execPath`. +Used to infer the + `globalPrefix`. +- `env` Optional, defaults to `process.env`. +Source of the environment variables for configuration. +- `argv` Optional, defaults to `process.argv`. +Source of the CLI options used for configuration. + +Returns a `config` object, which is not yet loaded. 
+ +Fields: + +- `config.globalPrefix` The prefix for `global` operations. +Set by the + `prefix` config value, or defaults based on the location of the + `execPath` option. +- `config.localPrefix` The prefix for `local` operations. +Set by the + `prefix` config value on the CLI only, or defaults to either the `cwd` or its nearest ancestor containing a `node_modules` folder or `package.json` file. +- `config.sources` A read-only `Map` of the file (or a comment, if no file found, or relevant) to the config level loaded from that source. +- `config.data` A `Map` of config level to `ConfigData` objects. +These objects should not be modified directly under any circumstances. + - `source` The source where this data was loaded from. + - `raw` The raw data used to generate this config data, as it was parsed initially from the environment, config file, or CLI options. + - `data` The data object reflecting the inheritance of configs up to this point in the chain. + - `loadError` Any errors encountered that prevented the loading of this config data. +- `config.list` A list sorted in priority of all the config data objects in the prototype chain. +`config.list[0]` is the `cli` level, + `config.list[1]` is the `env` level, and so on. +- `cwd` The `cwd` param +- `env` The `env` param +- `argv` The `argv` param +- `execPath` The `execPath` param +- `platform` The `platform` param +- `defaults` The `defaults` param +- `shorthands` The `shorthands` param +- `types` The `types` param +- `npmPath` The `npmPath` param +- `globalPrefix` The effective `globalPrefix` +- `localPrefix` The effective `localPrefix` +- `prefix` If `config.get('global')` is true, then `globalPrefix`, otherwise `localPrefix` +- `home` The user's home directory, found by looking at `env.HOME` or calling `os.homedir()`. +- `loaded` A boolean indicating whether or not configs are loaded +- `valid` A getter that returns `true` if all the config objects are valid. + Any data objects that have been modified with `config.set(...)` will be re-evaluated when `config.valid` is read. + +### `config.load()` + +Load configuration from the various sources of information. + +Returns a `Promise` that resolves when configuration is loaded, and fails if a fatal error is encountered. + +### `config.find(key)` + +Find the effective place in the configuration levels a given key is set. +Returns one of: `cli`, `env`, `project`, `user`, `global`, `builtin`, or +`default`. + +Returns `null` if the key is not set. + +### `config.get(key, where = 'cli')` + +Load the given key from the config stack. + +### `config.set(key, value, where = 'cli')` + +Set the key to the specified value, at the specified level in the config stack. + +### `config.delete(key, where = 'cli')` + +Delete the configuration key from the specified level in the config stack. + +### `config.validate(where)` + +Verify that all known configuration options are set to valid values, and log a warning if they are invalid. + +Invalid auth options will cause this method to throw an error with a `code` property of `ERR_INVALID_AUTH`, and a `problems` property listing the specific concerns with the current configuration. + +If `where` is not set, then all config objects are validated. + +Returns `true` if all configs are valid. + +Note that it's usually enough (and more efficient) to just check +`config.valid`, since each data object is marked for re-evaluation on every +`config.set()` operation. 
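+
+For example (a minimal sketch, assuming a loaded `conf` instance as in
+the USAGE section above, and a hypothetical `some-key` config):
+
+```js
+// read the effective value from the config stack
+const val = conf.get('some-key')
+
+// set a value at the `cli` level, then see where it is now defined
+conf.set('some-key', 'some-value')
+conf.find('some-key') // => 'cli'
+
+// data modified via set() is marked dirty and re-checked by validate()
+conf.validate()
+```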
+
+### `config.repair(problems)`
+
+Accept an optional array of problems (as thrown by `config.validate()`) and perform the necessary steps to resolve them.
+If no problems are provided, this method will call `config.validate()` internally to retrieve them.
+
+Note that you must `await config.save('user')` in order to persist the changes.
+
+### `config.isDefault(key)`
+
+Returns `true` if the value comes directly from the default definitions; if the current value for the key comes from any other source, returns `false`.
+
+This method can be used for avoiding or tweaking default values, e.g.:
+
+> Given a global default definition of foo='foo' it's possible to read that value such as:
+>
+> ```js
+> const save = config.get('foo')
+> ```
+>
+> Now in a different place of your app it's possible to avoid using the `foo` default value, by checking to see if the current config value is one that was defined by the default definitions:
+>
+> ```js
+> const save = config.isDefault('foo') ? 'bar' : config.get('foo')
+> ```
+
+### `config.save(where)`
+
+Save the config file specified by the `where` param.
+Must be one of `project`, `user`, `global`, `builtin`.
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/lib/definitions/definition.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/lib/definitions/definition.js
new file mode 100644
index 0000000000000000000000000000000000000000..26ba0c0bc14b9a4cf0d364d0f6ba853edbf98d5b
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/lib/definitions/definition.js
@@ -0,0 +1,253 @@
+// class that describes a config key we know about
+// this keeps us from defining a config key and not
+// providing a default, description, etc.
+//
+// TODO: some kind of categorization system, so we can
+// say "these are for registry access", "these are for
+// version resolution" etc.
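+//
+// An illustrative example (hypothetical key and values, not a real
+// definition from definitions.js):
+//
+//   new Definition('fancy-flag', {
+//     default: false,
+//     type: Boolean,
+//     description: 'enable the fancy behavior',
+//   })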
+
+const required = ['type', 'description', 'default', 'key']
+
+const allowed = [
+  'default',
+  'defaultDescription',
+  'deprecated',
+  'description',
+  'exclusive',
+  'flatten',
+  'hint',
+  'key',
+  'short',
+  'type',
+  'typeDescription',
+  'usage',
+  'envExport',
+]
+
+const {
+  semver: { type: semver },
+  Umask: { type: Umask },
+  url: { type: url },
+  path: { type: path },
+} = require('../type-defs.js')
+
+class Definition {
+  constructor (key, def) {
+    this.key = key
+    // if it's set falsey, don't export it; otherwise, we do by default
+    this.envExport = true
+    Object.assign(this, def)
+    this.validate()
+    if (!this.defaultDescription) {
+      this.defaultDescription = describeValue(this.default)
+    }
+    if (!this.typeDescription) {
+      this.typeDescription = describeType(this.type)
+    }
+    // hint is only used for non-boolean values
+    if (!this.hint) {
+      if (this.type === Number) {
+        this.hint = '<number>'
+      } else {
+        this.hint = `<${this.key}>`
+      }
+    }
+    if (!this.usage) {
+      this.usage = describeUsage(this)
+    }
+  }
+
+  validate () {
+    for (const req of required) {
+      if (!Object.prototype.hasOwnProperty.call(this, req)) {
+        throw new Error(`config lacks ${req}: ${this.key}`)
+      }
+    }
+    if (!this.key) {
+      throw new Error(`config lacks key: ${this.key}`)
+    }
+    for (const field of Object.keys(this)) {
+      if (!allowed.includes(field)) {
+        throw new Error(`config defines unknown field ${field}: ${this.key}`)
+      }
+    }
+  }
+
+  // a textual description of this config, suitable for help output
+  describe () {
+    const description = unindent(this.description)
+    const noEnvExport = this.envExport
+      ? ''
+      : `
+This value is not exported to the environment for child processes.
+`
+    const deprecated = !this.deprecated ? '' : `* DEPRECATED: ${unindent(this.deprecated)}\n`
+    /* eslint-disable-next-line max-len */
+    const exclusive = !this.exclusive ? 
'' : `\nThis config cannot be used with: \`${this.exclusive.join('`, `')}\`` + return wrapAll(`#### \`${this.key}\` + +* Default: ${unindent(this.defaultDescription)} +* Type: ${unindent(this.typeDescription)} +${deprecated} +${description} +${exclusive} +${noEnvExport}`) + } +} + +const describeUsage = def => { + let key = '' + + // Single type + if (!Array.isArray(def.type)) { + if (def.short) { + key = `-${def.short}|` + } + + if (def.type === Boolean && def.default !== false) { + key = `${key}--no-${def.key}` + } else { + key = `${key}--${def.key}` + } + + if (def.type !== Boolean) { + key = `${key} ${def.hint}` + } + + return key + } + + key = `--${def.key}` + if (def.short) { + key = `-${def.short}|--${def.key}` + } + + // Multiple types + let types = def.type + const multiple = types.includes(Array) + const bool = types.includes(Boolean) + + // null type means optional and doesn't currently affect usage output since + // all non-optional params have defaults so we render everything as optional + types = types.filter(t => t !== null && t !== Array && t !== Boolean) + + if (!types.length) { + return key + } + + let description + if (!types.some(t => typeof t !== 'string')) { + // Specific values, use specifics given + description = `<${types.filter(d => d).join('|')}>` + } else { + // Generic values, use hint + description = def.hint + } + + if (bool) { + // Currently none of our multi-type configs with boolean values default to + // false so all their hints should show `--no-`, if we ever add ones that + // default to false we can branch the logic here + key = `--no-${def.key}|${key}` + } + + const usage = `${key} ${description}` + if (multiple) { + return `${usage} [${usage} ...]` + } else { + return usage + } +} + +const describeType = type => { + if (Array.isArray(type)) { + const descriptions = type.filter(t => t !== Array).map(t => describeType(t)) + + // [a] => "a" + // [a, b] => "a or b" + // [a, b, c] => "a, b, or c" + // [a, Array] => "a (can be set multiple times)" + // [a, Array, b] => "a or b (can be set multiple times)" + const last = descriptions.length > 1 ? [descriptions.pop()] : [] + const oxford = descriptions.length > 1 ? ', or ' : ' or ' + const words = [descriptions.join(', ')].concat(last).join(oxford) + const multiple = type.includes(Array) ? ' (can be set multiple times)' : '' + return `${words}${multiple}` + } + + // Note: these are not quite the same as the description printed + // when validation fails. In that case, we want to give the user + // a bit more information to help them figure out what's wrong. + switch (type) { + case String: + return 'String' + case Number: + return 'Number' + case Umask: + return 'Octal numeric string in range 0000..0777 (0..511)' + case Boolean: + return 'Boolean' + case Date: + return 'Date' + case path: + return 'Path' + case semver: + return 'SemVer string' + case url: + return 'URL' + default: + return describeValue(type) + } +} + +// if it's a string, quote it. otherwise, just cast to string. +const describeValue = val => (typeof val === 'string' ? JSON.stringify(val) : String(val)) + +const unindent = s => { + // get the first \n followed by a bunch of spaces, and pluck off + // that many spaces from the start of every line. + const match = s.match(/\n +/) + return !match ? 
s.trim() : s.split(match[0]).join('\n').trim() +} + +const wrap = s => { + const cols = Math.min(Math.max(20, process.stdout.columns) || 80, 80) - 5 + return unindent(s) + .split(/[ \n]+/) + .reduce((left, right) => { + const last = left.split('\n').pop() + const join = last.length && last.length + right.length > cols ? '\n' : ' ' + return left + join + right + }) +} + +const wrapAll = s => { + let inCodeBlock = false + return s + .split('\n\n') + .map(block => { + if (inCodeBlock || block.startsWith('```')) { + inCodeBlock = !block.endsWith('```') + return block + } + + if (block.charAt(0) === '*') { + return ( + '* ' + + block + .slice(1) + .trim() + .split('\n* ') + .map(li => { + return wrap(li).replace(/\n/g, '\n ') + }) + .join('\n* ') + ) + } else { + return wrap(block) + } + }) + .join('\n\n') +} + +module.exports = Definition diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/lib/definitions/definitions.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/lib/definitions/definitions.js new file mode 100644 index 0000000000000000000000000000000000000000..739428508d2fe9c2f71f5cc639477301d5fcb1cf --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/lib/definitions/definitions.js @@ -0,0 +1,2318 @@ +const Definition = require('./definition.js') + +const ciInfo = require('ci-info') +const querystring = require('node:querystring') +const { join } = require('node:path') + +const isWindows = process.platform === 'win32' + +// used by cafile flattening to flatOptions.ca +const { readFileSync } = require('node:fs') +const maybeReadFile = file => { + try { + return readFileSync(file, 'utf8') + } catch (er) { + if (er.code !== 'ENOENT') { + throw er + } + return null + } +} + +const buildOmitList = obj => { + const include = obj.include || [] + const omit = obj.omit || [] + + const only = obj.only + if (/^prod(uction)?$/.test(only) || obj.production) { + omit.push('dev') + } else if (obj.production === false) { + include.push('dev') + } + + if (/^dev/.test(obj.also)) { + include.push('dev') + } + + if (obj.dev) { + include.push('dev') + } + + if (obj.optional === false) { + omit.push('optional') + } else if (obj.optional === true) { + include.push('optional') + } + + obj.omit = [...new Set(omit)].filter(type => !include.includes(type)) + obj.include = [...new Set(include)] + + if (obj.omit.includes('dev')) { + process.env.NODE_ENV = 'production' + } + + return obj.omit +} + +const editor = process.env.EDITOR || + process.env.VISUAL || + (isWindows ? `${process.env.SYSTEMROOT}\\notepad.exe` : 'vi') + +const shell = isWindows ? process.env.ComSpec || 'cmd' + : process.env.SHELL || 'sh' + +const { networkInterfaces } = require('node:os') +const getLocalAddresses = () => { + try { + return Object.values(networkInterfaces()).map( + int => int.map(({ address }) => address) + ).reduce((set, addrs) => set.concat(addrs), [null]) + } catch (e) { + return [null] + } +} + +const unicode = /UTF-?8$/i.test( + process.env.LC_ALL || + process.env.LC_CTYPE || + process.env.LANG +) + +// use LOCALAPPDATA on Windows, if set +// https://github.com/npm/cli/pull/899 +const cacheRoot = (isWindows && process.env.LOCALAPPDATA) || '~' +const cacheExtra = isWindows ? 'npm-cache' : '.npm' +const cache = `${cacheRoot}/${cacheExtra}` + +// TODO: refactor these type definitions so that they are less +// weird to pull out of the config module. 
+// TODO: use better type definition/validation API, nopt's is so weird. +const { + semver: { type: Semver }, + Umask: { type: Umask }, + url: { type: url }, + path: { type: path }, +} = require('../type-defs.js') + +// basic flattening function, just copy it over camelCase +const flatten = (key, obj, flatOptions) => { + const camel = key.replace(/-([a-z])/g, (_0, _1) => _1.toUpperCase()) + flatOptions[camel] = obj[key] +} + +// TODO: +// Instead of having each definition provide a flatten method, +// provide the (?list of?) flat option field(s?) that it impacts. +// When that config is set, we mark the relevant flatOption fields +// dirty. Then, a getter for that field defines how we actually +// set it. +// +// So, `save-dev`, `save-optional`, `save-prod`, et al would indicate +// that they affect the `saveType` flat option. Then the config.flat +// object has a `get saveType () { ... }` that looks at the "real" +// config settings from files etc and returns the appropriate value. +// +// Getters will also (maybe?) give us a hook to audit flat option +// usage, so we can document and group these more appropriately. +// +// This will be a problem with cases where we currently do: +// const opts = { ...npm.flatOptions, foo: 'bar' }, but we can maybe +// instead do `npm.config.set('foo', 'bar')` prior to passing the +// config object down where it needs to go. +// +// This way, when we go hunting for "where does saveType come from anyway!?" +// while fixing some Arborist bug, we won't have to hunt through too +// many places. + +// XXX: We should really deprecate all these `--save-blah` switches +// in favor of a single `--save-type` option. The unfortunate shortcut +// we took for `--save-peer --save-optional` being `--save-type=peerOptional` +// makes this tricky, and likely a breaking change. + +// Define all config keys we know about. They are indexed by their own key for +// ease of lookup later. This duplication is an optimization so that we don't +// have to do an extra function call just to "reuse" the key in both places. + +const definitions = { + _auth: new Definition('_auth', { + default: null, + type: [null, String], + description: ` + A basic-auth string to use when authenticating against the npm registry. + This will ONLY be used to authenticate against the npm registry. For other + registries you will need to scope it like "//other-registry.tld/:_auth" + + Warning: This should generally not be set via a command-line option. It + is safer to use a registry-provided authentication bearer token stored in + the ~/.npmrc file by running \`npm login\`. + `, + flatten, + }), + access: new Definition('access', { + default: null, + defaultDescription: ` + 'public' for new packages, existing packages it will not change the current level + `, + type: [null, 'restricted', 'public'], + description: ` + If you do not want your scoped package to be publicly viewable (and + installable) set \`--access=restricted\`. + + Unscoped packages cannot be set to \`restricted\`. + + Note: This defaults to not changing the current access level for existing + packages. Specifying a value of \`restricted\` or \`public\` during + publish will change the access for an existing package the same way that + \`npm access set status\` would. 
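+
+      For example, to publish a scoped package without making it publicly
+      viewable:
+
+      \`\`\`bash
+      npm publish --access=restricted
+      \`\`\`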
+ `, + flatten, + }), + all: new Definition('all', { + default: false, + type: Boolean, + short: 'a', + description: ` + When running \`npm outdated\` and \`npm ls\`, setting \`--all\` will show + all outdated or installed packages, rather than only those directly + depended upon by the current project. + `, + flatten, + }), + 'allow-same-version': new Definition('allow-same-version', { + default: false, + type: Boolean, + description: ` + Prevents throwing an error when \`npm version\` is used to set the new + version to the same value as the current version. + `, + flatten, + }), + also: new Definition('also', { + default: null, + type: [null, 'dev', 'development'], + description: ` + When set to \`dev\` or \`development\`, this is an alias for + \`--include=dev\`. + `, + deprecated: 'Please use --include=dev instead.', + flatten (key, obj, flatOptions) { + definitions.omit.flatten('omit', obj, flatOptions) + }, + }), + audit: new Definition('audit', { + default: true, + type: Boolean, + description: ` + When "true" submit audit reports alongside the current npm command to the + default registry and all registries configured for scopes. See the + documentation for [\`npm audit\`](/commands/npm-audit) for details on what + is submitted. + `, + flatten, + }), + 'audit-level': new Definition('audit-level', { + default: null, + type: [null, 'info', 'low', 'moderate', 'high', 'critical', 'none'], + description: ` + The minimum level of vulnerability for \`npm audit\` to exit with + a non-zero exit code. + `, + flatten, + }), + 'auth-type': new Definition('auth-type', { + default: 'web', + type: ['legacy', 'web'], + description: ` + What authentication strategy to use with \`login\`. + Note that if an \`otp\` config is given, this value will always be set to \`legacy\`. + `, + flatten, + }), + before: new Definition('before', { + default: null, + hint: '', + type: [null, Date], + description: ` + If passed to \`npm install\`, will rebuild the npm tree such that only + versions that were available **on or before** the given date are + installed. If there are no versions available for the current set of + dependencies, the command will error. + + If the requested version is a \`dist-tag\` and the given tag does not + pass the \`--before\` filter, the most recent version less than or equal + to that tag will be used. For example, \`foo@latest\` might install + \`foo@1.2\` even though \`latest\` is \`2.0\`. + `, + flatten, + }), + 'bin-links': new Definition('bin-links', { + default: true, + type: Boolean, + description: ` + Tells npm to create symlinks (or \`.cmd\` shims on Windows) for package + executables. + + Set to false to have it not do this. This can be used to work around the + fact that some file systems don't support symlinks, even on ostensibly + Unix systems. + `, + flatten, + }), + browser: new Definition('browser', { + default: null, + defaultDescription: ` + macOS: \`"open"\`, Windows: \`"start"\`, Others: \`"xdg-open"\` + `, + type: [null, Boolean, String], + description: ` + The browser that is called by npm commands to open websites. + + Set to \`false\` to suppress browser behavior and instead print urls to + terminal. + + Set to \`true\` to use default system URL opener. + `, + flatten, + }), + ca: new Definition('ca', { + default: null, + type: [null, String, Array], + description: ` + The Certificate Authority signing certificate that is trusted for SSL + connections to the registry. 
Values should be in PEM format (Windows + calls it "Base-64 encoded X.509 (.CER)") with newlines replaced by the + string "\\n". For example: + + \`\`\`ini + ca="-----BEGIN CERTIFICATE-----\\nXXXX\\nXXXX\\n-----END CERTIFICATE-----" + \`\`\` + + Set to \`null\` to only allow "known" registrars, or to a specific CA + cert to trust only that specific signing authority. + + Multiple CAs can be trusted by specifying an array of certificates: + + \`\`\`ini + ca[]="..." + ca[]="..." + \`\`\` + + See also the \`strict-ssl\` config. + `, + flatten, + }), + cache: new Definition('cache', { + default: cache, + defaultDescription: ` + Windows: \`%LocalAppData%\\npm-cache\`, Posix: \`~/.npm\` + `, + type: path, + description: ` + The location of npm's cache directory. + `, + flatten (key, obj, flatOptions) { + flatOptions.cache = join(obj.cache, '_cacache') + flatOptions.npxCache = join(obj.cache, '_npx') + flatOptions.tufCache = join(obj.cache, '_tuf') + }, + }), + 'cache-max': new Definition('cache-max', { + default: Infinity, + type: Number, + description: ` + \`--cache-max=0\` is an alias for \`--prefer-online\` + `, + deprecated: ` + This option has been deprecated in favor of \`--prefer-online\` + `, + flatten (key, obj, flatOptions) { + if (obj[key] <= 0) { + flatOptions.preferOnline = true + } + }, + }), + 'cache-min': new Definition('cache-min', { + default: 0, + type: Number, + description: ` + \`--cache-min=9999 (or bigger)\` is an alias for \`--prefer-offline\`. + `, + deprecated: ` + This option has been deprecated in favor of \`--prefer-offline\`. + `, + flatten (key, obj, flatOptions) { + if (obj[key] >= 9999) { + flatOptions.preferOffline = true + } + }, + }), + cafile: new Definition('cafile', { + default: null, + type: path, + description: ` + A path to a file containing one or multiple Certificate Authority signing + certificates. Similar to the \`ca\` setting, but allows for multiple + CA's, as well as for the CA information to be stored in a file on disk. + `, + flatten (key, obj, flatOptions) { + // always set to null in defaults + if (!obj.cafile) { + return + } + + const raw = maybeReadFile(obj.cafile) + if (!raw) { + return + } + + const delim = '-----END CERTIFICATE-----' + flatOptions.ca = raw.replace(/\r\n/g, '\n').split(delim) + .filter(section => section.trim()) + .map(section => section.trimLeft() + delim) + }, + }), + call: new Definition('call', { + default: '', + type: String, + short: 'c', + description: ` + Optional companion option for \`npm exec\`, \`npx\` that allows for + specifying a custom command to be run along with the installed packages. + + \`\`\`bash + npm exec --package yo --package generator-node --call "yo node" + \`\`\` + `, + flatten, + }), + cert: new Definition('cert', { + default: null, + type: [null, String], + description: ` + A client certificate to pass when accessing the registry. Values should + be in PEM format (Windows calls it "Base-64 encoded X.509 (.CER)") with + newlines replaced by the string "\\n". For example: + + \`\`\`ini + cert="-----BEGIN CERTIFICATE-----\\nXXXX\\nXXXX\\n-----END CERTIFICATE-----" + \`\`\` + + It is _not_ the path to a certificate file, though you can set a registry-scoped + "certfile" path like "//other-registry.tld/:certfile=/path/to/cert.pem". + `, + deprecated: ` + \`key\` and \`cert\` are no longer used for most registry operations. + Use registry scoped \`keyfile\` and \`certfile\` instead. 
+ Example: + //other-registry.tld/:keyfile=/path/to/key.pem + //other-registry.tld/:certfile=/path/to/cert.crt + `, + flatten, + }), + cidr: new Definition('cidr', { + default: null, + type: [null, String, Array], + description: ` + This is a list of CIDR address to be used when configuring limited access + tokens with the \`npm token create\` command. + `, + flatten, + }), + // This should never be directly used, the flattened value is the derived value + // and is sent to other modules, and is also exposed as `npm.color` for use + // inside npm itself. + color: new Definition('color', { + default: !process.env.NO_COLOR || process.env.NO_COLOR === '0', + usage: '--color|--no-color|--color always', + defaultDescription: ` + true unless the NO_COLOR environ is set to something other than '0' + `, + type: ['always', Boolean], + description: ` + If false, never shows colors. If \`"always"\` then always shows colors. + If true, then only prints color codes for tty file descriptors. + `, + flatten (key, obj, flatOptions) { + flatOptions.color = !obj.color ? false + : obj.color === 'always' ? true + : !!process.stdout.isTTY + flatOptions.logColor = !obj.color ? false + : obj.color === 'always' ? true + : !!process.stderr.isTTY + }, + }), + 'commit-hooks': new Definition('commit-hooks', { + default: true, + type: Boolean, + description: ` + Run git commit hooks when using the \`npm version\` command. + `, + flatten, + }), + cpu: new Definition('cpu', { + default: null, + type: [null, String], + description: ` + Override CPU architecture of native modules to install. + Acceptable values are same as \`cpu\` field of package.json, + which comes from \`process.arch\`. + `, + flatten, + }), + depth: new Definition('depth', { + default: null, + defaultDescription: ` + \`Infinity\` if \`--all\` is set; otherwise, \`0\` + `, + type: [null, Number], + description: ` + The depth to go when recursing packages for \`npm ls\`. + + If not set, \`npm ls\` will show only the immediate dependencies of the + root project. If \`--all\` is set, then npm will show all dependencies + by default. + `, + flatten, + }), + description: new Definition('description', { + default: true, + type: Boolean, + usage: '--no-description', + description: ` + Show the description in \`npm search\` + `, + flatten (key, obj, flatOptions) { + flatOptions.search = flatOptions.search || { limit: 20 } + flatOptions.search[key] = obj[key] + }, + }), + dev: new Definition('dev', { + default: false, + type: Boolean, + description: ` + Alias for \`--include=dev\`. + `, + deprecated: 'Please use --include=dev instead.', + flatten (key, obj, flatOptions) { + definitions.omit.flatten('omit', obj, flatOptions) + }, + }), + diff: new Definition('diff', { + default: [], + hint: '', + type: [String, Array], + description: ` + Define arguments to compare in \`npm diff\`. + `, + flatten, + }), + 'diff-ignore-all-space': new Definition('diff-ignore-all-space', { + default: false, + type: Boolean, + description: ` + Ignore whitespace when comparing lines in \`npm diff\`. + `, + flatten, + }), + 'diff-name-only': new Definition('diff-name-only', { + default: false, + type: Boolean, + description: ` + Prints only filenames when using \`npm diff\`. + `, + flatten, + }), + 'diff-no-prefix': new Definition('diff-no-prefix', { + default: false, + type: Boolean, + description: ` + Do not show any source or destination prefix in \`npm diff\` output. + + Note: this causes \`npm diff\` to ignore the \`--diff-src-prefix\` and + \`--diff-dst-prefix\` configs. 
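+
+      For example (the package spec here is illustrative only):
+
+      \`\`\`bash
+      npm diff --diff=semver@7.5.0 --diff=semver@7.5.4 --diff-no-prefix
+      \`\`\`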
+ `, + flatten, + }), + 'diff-dst-prefix': new Definition('diff-dst-prefix', { + default: 'b/', + hint: '', + type: String, + description: ` + Destination prefix to be used in \`npm diff\` output. + `, + flatten, + }), + 'diff-src-prefix': new Definition('diff-src-prefix', { + default: 'a/', + hint: '', + type: String, + description: ` + Source prefix to be used in \`npm diff\` output. + `, + flatten, + }), + 'diff-text': new Definition('diff-text', { + default: false, + type: Boolean, + description: ` + Treat all files as text in \`npm diff\`. + `, + flatten, + }), + 'diff-unified': new Definition('diff-unified', { + default: 3, + type: Number, + description: ` + The number of lines of context to print in \`npm diff\`. + `, + flatten, + }), + 'dry-run': new Definition('dry-run', { + default: false, + type: Boolean, + description: ` + Indicates that you don't want npm to make any changes and that it should + only report what it would have done. This can be passed into any of the + commands that modify your local installation, eg, \`install\`, + \`update\`, \`dedupe\`, \`uninstall\`, as well as \`pack\` and + \`publish\`. + + Note: This is NOT honored by other network related commands, eg + \`dist-tags\`, \`owner\`, etc. + `, + flatten, + }), + editor: new Definition('editor', { + default: editor, + defaultDescription: ` + The EDITOR or VISUAL environment variables, or '%SYSTEMROOT%\\notepad.exe' on Windows, + or 'vi' on Unix systems + `, + type: String, + description: ` + The command to run for \`npm edit\` and \`npm config edit\`. + `, + flatten, + }), + 'engine-strict': new Definition('engine-strict', { + default: false, + type: Boolean, + description: ` + If set to true, then npm will stubbornly refuse to install (or even + consider installing) any package that claims to not be compatible with + the current Node.js version. + + This can be overridden by setting the \`--force\` flag. + `, + flatten, + }), + 'expect-result-count': new Definition('expect-result-count', { + default: null, + type: [null, Number], + hint: '', + exclusive: ['expect-results'], + description: ` + Tells to expect a specific number of results from the command. + `, + }), + 'expect-results': new Definition('expect-results', { + default: null, + type: [null, Boolean], + exclusive: ['expect-result-count'], + description: ` + Tells npm whether or not to expect results from the command. + Can be either true (expect some results) or false (expect no results). + `, + }), + 'fetch-retries': new Definition('fetch-retries', { + default: 2, + type: Number, + description: ` + The "retries" config for the \`retry\` module to use when fetching + packages from the registry. + + npm will retry idempotent read requests to the registry in the case + of network failures or 5xx HTTP errors. + `, + flatten (key, obj, flatOptions) { + flatOptions.retry = flatOptions.retry || {} + flatOptions.retry.retries = obj[key] + }, + }), + 'fetch-retry-factor': new Definition('fetch-retry-factor', { + default: 10, + type: Number, + description: ` + The "factor" config for the \`retry\` module to use when fetching + packages. + `, + flatten (key, obj, flatOptions) { + flatOptions.retry = flatOptions.retry || {} + flatOptions.retry.factor = obj[key] + }, + }), + 'fetch-retry-maxtimeout': new Definition('fetch-retry-maxtimeout', { + default: 60000, + defaultDescription: '60000 (1 minute)', + type: Number, + description: ` + The "maxTimeout" config for the \`retry\` module to use when fetching + packages. 
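+
+      For example, a sketch of tuning retries for a slow or flaky network
+      (the values shown are illustrative, not recommendations):
+
+      \`\`\`bash
+      npm config set fetch-retries 5
+      npm config set fetch-retry-maxtimeout 120000
+      \`\`\`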
+ `, + flatten (key, obj, flatOptions) { + flatOptions.retry = flatOptions.retry || {} + flatOptions.retry.maxTimeout = obj[key] + }, + }), + 'fetch-retry-mintimeout': new Definition('fetch-retry-mintimeout', { + default: 10000, + defaultDescription: '10000 (10 seconds)', + type: Number, + description: ` + The "minTimeout" config for the \`retry\` module to use when fetching + packages. + `, + flatten (key, obj, flatOptions) { + flatOptions.retry = flatOptions.retry || {} + flatOptions.retry.minTimeout = obj[key] + }, + }), + 'fetch-timeout': new Definition('fetch-timeout', { + default: 5 * 60 * 1000, + defaultDescription: `${5 * 60 * 1000} (5 minutes)`, + type: Number, + description: ` + The maximum amount of time to wait for HTTP requests to complete. + `, + flatten (key, obj, flatOptions) { + flatOptions.timeout = obj[key] + }, + }), + force: new Definition('force', { + default: false, + type: Boolean, + short: 'f', + description: ` + Removes various protections against unfortunate side effects, common + mistakes, unnecessary performance degradation, and malicious input. + + * Allow clobbering non-npm files in global installs. + * Allow the \`npm version\` command to work on an unclean git repository. + * Allow deleting the cache folder with \`npm cache clean\`. + * Allow installing packages that have an \`engines\` declaration + requiring a different version of npm. + * Allow installing packages that have an \`engines\` declaration + requiring a different version of \`node\`, even if \`--engine-strict\` + is enabled. + * Allow \`npm audit fix\` to install modules outside your stated + dependency range (including SemVer-major changes). + * Allow unpublishing all versions of a published package. + * Allow conflicting peerDependencies to be installed in the root project. + * Implicitly set \`--yes\` during \`npm init\`. + * Allow clobbering existing values in \`npm pkg\` + * Allow unpublishing of entire packages (not just a single version). + + If you don't have a clear idea of what you want to do, it is strongly + recommended that you do not use this option! + `, + flatten, + }), + 'foreground-scripts': new Definition('foreground-scripts', { + default: false, + defaultDescription: `\`false\` unless when using \`npm pack\` or \`npm publish\` where it + defaults to \`true\``, + type: Boolean, + description: ` + Run all build scripts (ie, \`preinstall\`, \`install\`, and + \`postinstall\`) scripts for installed packages in the foreground + process, sharing standard input, output, and error with the main npm + process. + + Note that this will generally make installs run slower, and be much + noisier, but can be useful for debugging. + `, + flatten, + }), + 'format-package-lock': new Definition('format-package-lock', { + default: true, + type: Boolean, + description: ` + Format \`package-lock.json\` or \`npm-shrinkwrap.json\` as a human + readable file. + `, + flatten, + }), + fund: new Definition('fund', { + default: true, + type: Boolean, + description: ` + When "true" displays the message at the end of each \`npm install\` + acknowledging the number of dependencies looking for funding. + See [\`npm fund\`](/commands/npm-fund) for details. + `, + flatten, + }), + git: new Definition('git', { + default: 'git', + type: String, + description: ` + The command to use for git commands. If git is installed on the + computer, but is not in the \`PATH\`, then set this to the full path to + the git binary. 
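+
+      For example (the path shown is illustrative):
+
+      \`\`\`bash
+      npm config set git "/usr/local/bin/git"
+      \`\`\`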
+ `, + flatten, + }), + 'git-tag-version': new Definition('git-tag-version', { + default: true, + type: Boolean, + description: ` + Tag the commit when using the \`npm version\` command. Setting this to + false results in no commit being made at all. + `, + flatten, + }), + global: new Definition('global', { + default: false, + type: Boolean, + short: 'g', + description: ` + Operates in "global" mode, so that packages are installed into the + \`prefix\` folder instead of the current working directory. See + [folders](/configuring-npm/folders) for more on the differences in + behavior. + + * packages are installed into the \`{prefix}/lib/node_modules\` folder, + instead of the current working directory. + * bin files are linked to \`{prefix}/bin\` + * man pages are linked to \`{prefix}/share/man\` + `, + flatten: (key, obj, flatOptions) => { + flatten(key, obj, flatOptions) + if (flatOptions.global) { + flatOptions.location = 'global' + } + }, + }), + // the globalconfig has its default defined outside of this module + globalconfig: new Definition('globalconfig', { + type: path, + default: '', + defaultDescription: ` + The global --prefix setting plus 'etc/npmrc'. For example, + '/usr/local/etc/npmrc' + `, + description: ` + The config file to read for global config options. + `, + flatten, + }), + 'global-style': new Definition('global-style', { + default: false, + type: Boolean, + description: ` + Only install direct dependencies in the top level \`node_modules\`, + but hoist on deeper dependencies. + Sets \`--install-strategy=shallow\`. + `, + deprecated: ` + This option has been deprecated in favor of \`--install-strategy=shallow\` + `, + flatten (key, obj, flatOptions) { + if (obj[key]) { + obj['install-strategy'] = 'shallow' + flatOptions.installStrategy = 'shallow' + } + }, + }), + heading: new Definition('heading', { + default: 'npm', + type: String, + description: ` + The string that starts all the debugging log output. + `, + flatten, + }), + 'https-proxy': new Definition('https-proxy', { + default: null, + type: [null, url], + description: ` + A proxy to use for outgoing https requests. If the \`HTTPS_PROXY\` or + \`https_proxy\` or \`HTTP_PROXY\` or \`http_proxy\` environment variables + are set, proxy settings will be honored by the underlying + \`make-fetch-happen\` library. + `, + flatten, + }), + 'if-present': new Definition('if-present', { + default: false, + type: Boolean, + envExport: false, + description: ` + If true, npm will not exit with an error code when \`run\` is + invoked for a script that isn't defined in the \`scripts\` section of + \`package.json\`. This option can be used when it's desirable to + optionally run a script when it's present and fail if the script fails. + This is useful, for example, when running scripts that may only apply for + some builds in an otherwise generic CI setup. + `, + flatten, + }), + 'ignore-scripts': new Definition('ignore-scripts', { + default: false, + type: Boolean, + description: ` + If true, npm does not run scripts specified in package.json files. + + Note that commands explicitly intended to run a particular script, such + as \`npm start\`, \`npm stop\`, \`npm restart\`, \`npm test\`, and \`npm + run\` will still run their intended script if \`ignore-scripts\` is + set, but they will *not* run any pre- or post-scripts. 
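+
+      For example, to install dependencies without running any lifecycle
+      scripts:
+
+      \`\`\`bash
+      npm ci --ignore-scripts
+      \`\`\`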
+ `, + flatten, + }), + include: new Definition('include', { + default: [], + type: [Array, 'prod', 'dev', 'optional', 'peer'], + description: ` + Option that allows for defining which types of dependencies to install. + + This is the inverse of \`--omit=\`. + + Dependency types specified in \`--include\` will not be omitted, + regardless of the order in which omit/include are specified on the + command-line. + `, + flatten (key, obj, flatOptions) { + // just call the omit flattener, it reads from obj.include + definitions.omit.flatten('omit', obj, flatOptions) + }, + }), + 'include-staged': new Definition('include-staged', { + default: false, + type: Boolean, + description: ` + Allow installing "staged" published packages, as defined by [npm RFC PR + #92](https://github.com/npm/rfcs/pull/92). + + This is experimental, and not implemented by the npm public registry. + `, + flatten, + }), + 'include-workspace-root': new Definition('include-workspace-root', { + default: false, + type: Boolean, + envExport: false, + description: ` + Include the workspace root when workspaces are enabled for a command. + + When false, specifying individual workspaces via the \`workspace\` config, + or all workspaces via the \`workspaces\` flag, will cause npm to operate only + on the specified workspaces, and not on the root project. + `, + flatten, + }), + 'init-author-email': new Definition('init-author-email', { + default: '', + hint: '', + type: String, + description: ` + The value \`npm init\` should use by default for the package author's + email. + `, + }), + 'init-author-name': new Definition('init-author-name', { + default: '', + hint: '', + type: String, + description: ` + The value \`npm init\` should use by default for the package author's name. + `, + }), + 'init-author-url': new Definition('init-author-url', { + default: '', + type: ['', url], + hint: '', + description: ` + The value \`npm init\` should use by default for the package author's homepage. + `, + }), + 'init-license': new Definition('init-license', { + default: 'ISC', + hint: '', + type: String, + description: ` + The value \`npm init\` should use by default for the package license. + `, + }), + 'init-module': new Definition('init-module', { + default: '~/.npm-init.js', + type: path, + hint: '', + description: ` + A module that will be loaded by the \`npm init\` command. See the + documentation for the + [init-package-json](https://github.com/npm/init-package-json) module for + more information, or [npm init](/commands/npm-init). + `, + }), + 'init-type': new Definition('init-type', { + default: 'commonjs', + type: String, + hint: '', + description: ` + The value that \`npm init\` should use by default for the package.json type field. + `, + }), + 'init-version': new Definition('init-version', { + default: '1.0.0', + type: Semver, + hint: '', + description: ` + The value that \`npm init\` should use by default for the package + version number, if not already set in package.json. + `, + }), + 'init-private': new Definition('init-private', { + default: false, + type: Boolean, + description: ` + The value \`npm init\` should use by default for the package's private flag. + `, + flatten, + }), + // these "aliases" are historically supported in .npmrc files, unfortunately + // They should be removed in a future npm version. 
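+  // For example, a legacy ~/.npmrc line such as `init.author.name=Jane Doe`
+  // (the name is illustrative) still works, but the supported spelling is
+  // `init-author-name=Jane Doe`.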
+ 'init.author.email': new Definition('init.author.email', { + default: '', + type: String, + deprecated: ` + Use \`--init-author-email\` instead.`, + description: ` + Alias for \`--init-author-email\` + `, + }), + 'init.author.name': new Definition('init.author.name', { + default: '', + type: String, + deprecated: ` + Use \`--init-author-name\` instead. + `, + description: ` + Alias for \`--init-author-name\` + `, + }), + 'init.author.url': new Definition('init.author.url', { + default: '', + type: ['', url], + deprecated: ` + Use \`--init-author-url\` instead. + `, + description: ` + Alias for \`--init-author-url\` + `, + }), + 'init.license': new Definition('init.license', { + default: 'ISC', + type: String, + deprecated: ` + Use \`--init-license\` instead. + `, + description: ` + Alias for \`--init-license\` + `, + }), + 'init.module': new Definition('init.module', { + default: '~/.npm-init.js', + type: path, + deprecated: ` + Use \`--init-module\` instead. + `, + description: ` + Alias for \`--init-module\` + `, + }), + 'init.version': new Definition('init.version', { + default: '1.0.0', + type: Semver, + deprecated: ` + Use \`--init-version\` instead. + `, + description: ` + Alias for \`--init-version\` + `, + }), + 'install-links': new Definition('install-links', { + default: false, + type: Boolean, + description: ` + When set file: protocol dependencies will be packed and installed as + regular dependencies instead of creating a symlink. This option has + no effect on workspaces. + `, + flatten, + }), + 'install-strategy': new Definition('install-strategy', { + default: 'hoisted', + type: ['hoisted', 'nested', 'shallow', 'linked'], + description: ` + Sets the strategy for installing packages in node_modules. + hoisted (default): Install non-duplicated in top-level, and duplicated as + necessary within directory structure. + nested: (formerly --legacy-bundling) install in place, no hoisting. + shallow (formerly --global-style) only install direct deps at top-level. + linked: (experimental) install in node_modules/.store, link in place, + unhoisted. + `, + flatten, + }), + json: new Definition('json', { + default: false, + type: Boolean, + description: ` + Whether or not to output JSON data, rather than the normal output. + + * In \`npm pkg set\` it enables parsing set values with JSON.parse() + before saving them to your \`package.json\`. + + Not supported by all npm commands. + `, + flatten, + }), + key: new Definition('key', { + default: null, + type: [null, String], + description: ` + A client key to pass when accessing the registry. Values should be in + PEM format with newlines replaced by the string "\\n". For example: + + \`\`\`ini + key="-----BEGIN PRIVATE KEY-----\\nXXXX\\nXXXX\\n-----END PRIVATE KEY-----" + \`\`\` + + It is _not_ the path to a key file, though you can set a registry-scoped + "keyfile" path like "//other-registry.tld/:keyfile=/path/to/key.pem". + `, + deprecated: ` + \`key\` and \`cert\` are no longer used for most registry operations. + Use registry scoped \`keyfile\` and \`certfile\` instead. + Example: + //other-registry.tld/:keyfile=/path/to/key.pem + //other-registry.tld/:certfile=/path/to/cert.crt + `, + flatten, + }), + 'legacy-bundling': new Definition('legacy-bundling', { + default: false, + type: Boolean, + description: ` + Instead of hoisting package installs in \`node_modules\`, install packages + in the same manner that they are depended on. 
This may cause very deep + directory structures and duplicate package installs as there is no + de-duplicating. + Sets \`--install-strategy=nested\`. + `, + deprecated: ` + This option has been deprecated in favor of \`--install-strategy=nested\` + `, + flatten (key, obj, flatOptions) { + if (obj[key]) { + obj['install-strategy'] = 'nested' + flatOptions.installStrategy = 'nested' + } + }, + }), + 'legacy-peer-deps': new Definition('legacy-peer-deps', { + default: false, + type: Boolean, + description: ` + Causes npm to completely ignore \`peerDependencies\` when building a + package tree, as in npm versions 3 through 6. + + If a package cannot be installed because of overly strict + \`peerDependencies\` that collide, it provides a way to move forward + resolving the situation. + + This differs from \`--omit=peer\`, in that \`--omit=peer\` will avoid + unpacking \`peerDependencies\` on disk, but will still design a tree such + that \`peerDependencies\` _could_ be unpacked in a correct place. + + Use of \`legacy-peer-deps\` is not recommended, as it will not enforce + the \`peerDependencies\` contract that meta-dependencies may rely on. + `, + flatten, + }), + libc: new Definition('libc', { + default: null, + type: [null, String], + description: ` + Override libc of native modules to install. + Acceptable values are same as \`libc\` field of package.json + `, + flatten, + }), + link: new Definition('link', { + default: false, + type: Boolean, + description: ` + Used with \`npm ls\`, limiting output to only those packages that are + linked. + `, + }), + 'local-address': new Definition('local-address', { + default: null, + type: getLocalAddresses(), + typeDescription: 'IP Address', + description: ` + The IP address of the local interface to use when making connections to + the npm registry. Must be IPv4 in versions of Node prior to 0.12. + `, + flatten, + }), + location: new Definition('location', { + default: 'user', + short: 'L', + type: [ + 'global', + 'user', + 'project', + ], + defaultDescription: ` + "user" unless \`--global\` is passed, which will also set this value to "global" + `, + description: ` + When passed to \`npm config\` this refers to which config file to use. + + When set to "global" mode, packages are installed into the \`prefix\` folder + instead of the current working directory. See + [folders](/configuring-npm/folders) for more on the differences in behavior. + + * packages are installed into the \`{prefix}/lib/node_modules\` folder, + instead of the current working directory. + * bin files are linked to \`{prefix}/bin\` + * man pages are linked to \`{prefix}/share/man\` + `, + flatten: (key, obj, flatOptions) => { + flatten(key, obj, flatOptions) + if (flatOptions.global) { + flatOptions.location = 'global' + } + if (obj.location === 'global') { + flatOptions.global = true + } + }, + }), + 'lockfile-version': new Definition('lockfile-version', { + default: null, + type: [null, 1, 2, 3, '1', '2', '3'], + defaultDescription: ` + Version 3 if no lockfile, auto-converting v1 lockfiles to v3; otherwise, + maintain current lockfile version.`, + description: ` + Set the lockfile format version to be used in package-lock.json and + npm-shrinkwrap-json files. Possible options are: + + 1: The lockfile version used by npm versions 5 and 6. Lacks some data that + is used during the install, resulting in slower and possibly less + deterministic installs. Prevents lockfile churn when interoperating with + older npm versions. 
+ + 2: The default lockfile version used by npm version 7 and 8. Includes both + the version 1 lockfile data and version 3 lockfile data, for maximum + determinism and interoperability, at the expense of more bytes on disk. + + 3: Only the new lockfile information introduced in npm version 7. Smaller + on disk than lockfile version 2, but not interoperable with older npm + versions. Ideal if all users are on npm version 7 and higher. + `, + flatten: (key, obj, flatOptions) => { + flatOptions.lockfileVersion = obj[key] && parseInt(obj[key], 10) + }, + }), + loglevel: new Definition('loglevel', { + default: 'notice', + type: [ + 'silent', + 'error', + 'warn', + 'notice', + 'http', + 'info', + 'verbose', + 'silly', + ], + description: ` + What level of logs to report. All logs are written to a debug log, + with the path to that file printed if the execution of a command fails. + + Any logs of a higher level than the setting are shown. The default is + "notice". + + See also the \`foreground-scripts\` config. + `, + flatten (key, obj, flatOptions) { + flatOptions.silent = obj[key] === 'silent' + }, + }), + 'logs-dir': new Definition('logs-dir', { + default: null, + type: [null, path], + defaultDescription: ` + A directory named \`_logs\` inside the cache + `, + description: ` + The location of npm's log directory. See [\`npm + logging\`](/using-npm/logging) for more information. + `, + }), + 'logs-max': new Definition('logs-max', { + default: 10, + type: Number, + description: ` + The maximum number of log files to store. + + If set to 0, no log files will be written for the current run. + `, + }), + long: new Definition('long', { + default: false, + type: Boolean, + short: 'l', + description: ` + Show extended information in \`ls\`, \`search\`, and \`help-search\`. + `, + }), + maxsockets: new Definition('maxsockets', { + default: 15, + type: Number, + description: ` + The maximum number of connections to use per origin (protocol/host/port + combination). + `, + flatten (key, obj, flatOptions) { + flatOptions.maxSockets = obj[key] + }, + }), + message: new Definition('message', { + default: '%s', + type: String, + short: 'm', + description: ` + Commit message which is used by \`npm version\` when creating version commit. + + Any "%s" in the message will be replaced with the version number. + `, + flatten, + }), + 'node-gyp': new Definition('node-gyp', { + default: (() => { + try { + return require.resolve('node-gyp/bin/node-gyp.js') + } catch { + return '' + } + })(), + defaultDescription: ` + The path to the node-gyp bin that ships with npm + `, + type: path, + description: ` + This is the location of the "node-gyp" bin. By default it uses one that ships with npm itself. + + You can use this config to specify your own "node-gyp" to run when it is required to build a package. + `, + flatten, + }), + 'node-options': new Definition('node-options', { + default: null, + type: [null, String], + description: ` + Options to pass through to Node.js via the \`NODE_OPTIONS\` environment + variable. This does not impact how npm itself is executed but it does + impact how lifecycle scripts are called. + `, + }), + noproxy: new Definition('noproxy', { + default: '', + defaultDescription: ` + The value of the NO_PROXY environment variable + `, + type: [String, Array], + description: ` + Domain extensions that should bypass any proxies. + + Also accepts a comma-delimited string. 
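+
+      For example (hostnames illustrative):
+
+      \`\`\`bash
+      npm config set noproxy "registry.internal.example,localhost"
+      \`\`\`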
+ `, + flatten (key, obj, flatOptions) { + if (Array.isArray(obj[key])) { + flatOptions.noProxy = obj[key].join(',') + } else { + flatOptions.noProxy = obj[key] + } + }, + }), + offline: new Definition('offline', { + default: false, + type: Boolean, + description: ` + Force offline mode: no network requests will be done during install. To allow + the CLI to fill in missing cache data, see \`--prefer-offline\`. + `, + flatten, + }), + omit: new Definition('omit', { + default: process.env.NODE_ENV === 'production' ? ['dev'] : [], + defaultDescription: ` + 'dev' if the \`NODE_ENV\` environment variable is set to 'production'; + otherwise, empty. + `, + type: [Array, 'dev', 'optional', 'peer'], + description: ` + Dependency types to omit from the installation tree on disk. + + Note that these dependencies _are_ still resolved and added to the + \`package-lock.json\` or \`npm-shrinkwrap.json\` file. They are just + not physically installed on disk. + + If a package type appears in both the \`--include\` and \`--omit\` + lists, then it will be included. + + If the resulting omit list includes \`'dev'\`, then the \`NODE_ENV\` + environment variable will be set to \`'production'\` for all lifecycle + scripts. + `, + flatten (key, obj, flatOptions) { + flatOptions.omit = buildOmitList(obj) + }, + }), + 'omit-lockfile-registry-resolved': new Definition('omit-lockfile-registry-resolved', { + default: false, + type: Boolean, + description: ` + This option causes npm to create lock files without a \`resolved\` key for + registry dependencies. Subsequent installs will need to resolve tarball + endpoints with the configured registry, likely resulting in a longer install + time. + `, + flatten, + }), + only: new Definition('only', { + default: null, + type: [null, 'prod', 'production'], + deprecated: ` + Use \`--omit=dev\` to omit dev dependencies from the install. + `, + description: ` + When set to \`prod\` or \`production\`, this is an alias for + \`--omit=dev\`. + `, + flatten (key, obj, flatOptions) { + definitions.omit.flatten('omit', obj, flatOptions) + }, + }), + optional: new Definition('optional', { + default: null, + type: [null, Boolean], + deprecated: ` + Use \`--omit=optional\` to exclude optional dependencies, or + \`--include=optional\` to include them. + + Default value does install optional deps unless otherwise omitted. + `, + description: ` + Alias for --include=optional or --omit=optional + `, + flatten (key, obj, flatOptions) { + definitions.omit.flatten('omit', obj, flatOptions) + }, + }), + os: new Definition('os', { + default: null, + type: [null, String], + description: ` + Override OS of native modules to install. + Acceptable values are same as \`os\` field of package.json, + which comes from \`process.platform\`. + `, + flatten, + }), + otp: new Definition('otp', { + default: null, + type: [null, String], + description: ` + This is a one-time password from a two-factor authenticator. It's needed + when publishing or changing package permissions with \`npm access\`. + + If not set, and a registry response fails with a challenge for a one-time + password, npm will prompt on the command line for one. 
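+
+      For example (\`123456\` is a placeholder for the real code):
+
+      \`\`\`bash
+      npm publish --otp=123456
+      \`\`\`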
+ `, + flatten (key, obj, flatOptions) { + flatten(key, obj, flatOptions) + if (obj.otp) { + obj['auth-type'] = 'legacy' + flatten('auth-type', obj, flatOptions) + } + }, + }), + package: new Definition('package', { + default: [], + hint: '', + type: [String, Array], + description: ` + The package or packages to install for [\`npm exec\`](/commands/npm-exec) + `, + flatten, + }), + 'package-lock': new Definition('package-lock', { + default: true, + type: Boolean, + description: ` + If set to false, then ignore \`package-lock.json\` files when installing. + This will also prevent _writing_ \`package-lock.json\` if \`save\` is + true. + `, + flatten: (key, obj, flatOptions) => { + flatten(key, obj, flatOptions) + if (flatOptions.packageLockOnly) { + flatOptions.packageLock = true + } + }, + }), + 'package-lock-only': new Definition('package-lock-only', { + default: false, + type: Boolean, + description: ` + If set to true, the current operation will only use the \`package-lock.json\`, + ignoring \`node_modules\`. + + For \`update\` this means only the \`package-lock.json\` will be updated, + instead of checking \`node_modules\` and downloading dependencies. + + For \`list\` this means the output will be based on the tree described by the + \`package-lock.json\`, rather than the contents of \`node_modules\`. + `, + flatten: (key, obj, flatOptions) => { + flatten(key, obj, flatOptions) + if (flatOptions.packageLockOnly) { + flatOptions.packageLock = true + } + }, + }), + 'pack-destination': new Definition('pack-destination', { + default: '.', + type: String, + description: ` + Directory in which \`npm pack\` will save tarballs. + `, + flatten, + }), + parseable: new Definition('parseable', { + default: false, + type: Boolean, + short: 'p', + description: ` + Output parseable results from commands that write to standard output. For + \`npm search\`, this will be tab-separated table format. + `, + flatten, + }), + 'prefer-dedupe': new Definition('prefer-dedupe', { + default: false, + type: Boolean, + description: ` + Prefer to deduplicate packages if possible, rather than + choosing a newer version of a dependency. + `, + flatten, + }), + 'prefer-offline': new Definition('prefer-offline', { + default: false, + type: Boolean, + description: ` + If true, staleness checks for cached data will be bypassed, but missing + data will be requested from the server. To force full offline mode, use + \`--offline\`. + `, + flatten, + }), + 'prefer-online': new Definition('prefer-online', { + default: false, + type: Boolean, + description: ` + If true, staleness checks for cached data will be forced, making the CLI + look for updates immediately even for fresh package data. + `, + flatten, + }), + // `prefix` has its default defined outside of this module + prefix: new Definition('prefix', { + type: path, + short: 'C', + default: '', + defaultDescription: ` + In global mode, the folder where the node executable is installed. + Otherwise, the nearest parent folder containing either a package.json + file or a node_modules folder. + `, + description: ` + The location to install global items. If set on the command line, then + it forces non-global commands to run in the specified folder. + `, + }), + preid: new Definition('preid', { + default: '', + hint: 'prerelease-id', + type: String, + description: ` + The "prerelease identifier" to use as a prefix for the "prerelease" part + of a semver. Like the \`rc\` in \`1.2.0-rc.8\`. 
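+
+      For example, starting from version \`1.2.3\`:
+
+      \`\`\`bash
+      npm version prerelease --preid=rc   # 1.2.3 -> 1.2.4-rc.0
+      \`\`\`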
+    `,
+    flatten,
+  }),
+  production: new Definition('production', {
+    default: null,
+    type: [null, Boolean],
+    deprecated: 'Use `--omit=dev` instead.',
+    description: 'Alias for `--omit=dev`',
+    flatten (key, obj, flatOptions) {
+      definitions.omit.flatten('omit', obj, flatOptions)
+    },
+  }),
+  progress: new Definition('progress', {
+    default: !(ciInfo.isCI || !process.stderr.isTTY || !process.stdout.isTTY || process.env.TERM === 'dumb'),
+    defaultDescription: `
+      \`true\` when not in CI and both stderr and stdout are TTYs and not in a dumb terminal
+    `,
+    type: Boolean,
+    description: `
+      When set to \`true\`, npm will display a progress bar during time
+      intensive operations, if \`process.stderr\` and \`process.stdout\` are a TTY.
+
+      Set to \`false\` to suppress the progress bar.
+    `,
+    flatten (key, obj, flatOptions) {
+      // Only show progress if explicitly enabled AND we have a proper TTY environment
+      flatOptions.progress = !!obj.progress && !!process.stderr.isTTY && !!process.stdout.isTTY && process.env.TERM !== 'dumb'
+    },
+  }),
+  provenance: new Definition('provenance', {
+    default: false,
+    type: Boolean,
+    exclusive: ['provenance-file'],
+    description: `
+      When publishing from a supported cloud CI/CD system, the package will be
+      publicly linked to where it was built and published from.
+    `,
+    flatten,
+  }),
+  'provenance-file': new Definition('provenance-file', {
+    default: null,
+    type: path,
+    hint: '<file>',
+    exclusive: ['provenance'],
+    description: `
+      When publishing, the provenance bundle at the given path will be used.
+    `,
+    flatten,
+  }),
+  proxy: new Definition('proxy', {
+    default: null,
+    type: [null, false, url], // allow proxy to be disabled explicitly
+    description: `
+      A proxy to use for outgoing http requests. If the \`HTTP_PROXY\` or
+      \`http_proxy\` environment variables are set, proxy settings will be
+      honored by the underlying \`request\` library.
+    `,
+    flatten,
+  }),
+  'read-only': new Definition('read-only', {
+    default: false,
+    type: Boolean,
+    description: `
+      This is used to mark a token as unable to publish when configuring
+      limited access tokens with the \`npm token create\` command.
+    `,
+    flatten,
+  }),
+  'rebuild-bundle': new Definition('rebuild-bundle', {
+    default: true,
+    type: Boolean,
+    description: `
+      Rebuild bundled dependencies after installation.
+    `,
+    flatten,
+  }),
+  registry: new Definition('registry', {
+    default: 'https://registry.npmjs.org/',
+    type: url,
+    description: `
+      The base URL of the npm registry.
+    `,
+    flatten,
+  }),
+  'replace-registry-host': new Definition('replace-registry-host', {
+    default: 'npmjs',
+    hint: '<npmjs|never|always> | hostname',
+    type: ['npmjs', 'never', 'always', String],
+    description: `
+      Defines behavior for replacing the registry host in a lockfile with the
+      configured registry.
+
+      The default behavior is to replace package dist URLs from the default
+      registry (https://registry.npmjs.org) to the configured registry. If set to
+      "never", then use the registry value. If set to "always", then replace the
+      registry host with the configured host every time.
+
+      You may also specify a bare hostname (e.g., "registry.npmjs.org").
+    `,
+    flatten,
+  }),
+  save: new Definition('save', {
+    default: true,
+    defaultDescription: `\`true\` unless when using \`npm update\` where it
+    defaults to \`false\``,
+    usage: '-S|--save|--no-save|--save-prod|--save-dev|--save-optional|--save-peer|--save-bundle',
+    type: Boolean,
+    short: 'S',
+    description: `
+      Save installed packages to a \`package.json\` file as dependencies.
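+
+      For example (package name illustrative), to install something without
+      recording it in \`package.json\`:
+
+      \`\`\`bash
+      npm install lodash --no-save
+      \`\`\`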
+ + When used with the \`npm rm\` command, removes the dependency from + \`package.json\`. + + Will also prevent writing to \`package-lock.json\` if set to \`false\`. + `, + flatten, + }), + 'save-bundle': new Definition('save-bundle', { + default: false, + type: Boolean, + short: 'B', + description: ` + If a package would be saved at install time by the use of \`--save\`, + \`--save-dev\`, or \`--save-optional\`, then also put it in the + \`bundleDependencies\` list. + + Ignored if \`--save-peer\` is set, since peerDependencies cannot be bundled. + `, + flatten (key, obj, flatOptions) { + // XXX update arborist to just ignore it if resulting saveType is peer + // otherwise this won't have the expected effect: + // + // npm config set save-peer true + // npm i foo --save-bundle --save-prod <-- should bundle + flatOptions.saveBundle = obj['save-bundle'] && !obj['save-peer'] + }, + }), + 'save-dev': new Definition('save-dev', { + default: false, + type: Boolean, + short: 'D', + exclusive: ['save-optional', 'save-peer', 'save-prod'], + description: ` + Save installed packages to a package.json file as \`devDependencies\`. + `, + flatten (key, obj, flatOptions) { + if (!obj[key]) { + if (flatOptions.saveType === 'dev') { + delete flatOptions.saveType + } + return + } + + flatOptions.saveType = 'dev' + }, + }), + 'save-exact': new Definition('save-exact', { + default: false, + type: Boolean, + short: 'E', + description: ` + Dependencies saved to package.json will be configured with an exact + version rather than using npm's default semver range operator. + `, + flatten (key, obj, flatOptions) { + // just call the save-prefix flattener, it reads from obj['save-exact'] + definitions['save-prefix'].flatten('save-prefix', obj, flatOptions) + }, + }), + 'save-optional': new Definition('save-optional', { + default: false, + type: Boolean, + short: 'O', + exclusive: ['save-dev', 'save-peer', 'save-prod'], + description: ` + Save installed packages to a package.json file as + \`optionalDependencies\`. + `, + flatten (key, obj, flatOptions) { + if (!obj[key]) { + if (flatOptions.saveType === 'optional') { + delete flatOptions.saveType + } else if (flatOptions.saveType === 'peerOptional') { + flatOptions.saveType = 'peer' + } + return + } + + if (flatOptions.saveType === 'peerOptional') { + return + } + + if (flatOptions.saveType === 'peer') { + flatOptions.saveType = 'peerOptional' + } else { + flatOptions.saveType = 'optional' + } + }, + }), + 'save-peer': new Definition('save-peer', { + default: false, + type: Boolean, + exclusive: ['save-dev', 'save-optional', 'save-prod'], + description: ` + Save installed packages to a package.json file as \`peerDependencies\` + `, + flatten (key, obj, flatOptions) { + if (!obj[key]) { + if (flatOptions.saveType === 'peer') { + delete flatOptions.saveType + } else if (flatOptions.saveType === 'peerOptional') { + flatOptions.saveType = 'optional' + } + return + } + + if (flatOptions.saveType === 'peerOptional') { + return + } + + if (flatOptions.saveType === 'optional') { + flatOptions.saveType = 'peerOptional' + } else { + flatOptions.saveType = 'peer' + } + }, + }), + 'save-prefix': new Definition('save-prefix', { + default: '^', + type: String, + description: ` + Configure how versions of packages installed to a package.json file via + \`--save\` or \`--save-dev\` get prefixed. 
+ + For example if a package has version \`1.2.3\`, by default its version is + set to \`^1.2.3\` which allows minor upgrades for that package, but after + \`npm config set save-prefix='~'\` it would be set to \`~1.2.3\` which + only allows patch upgrades. + `, + flatten (key, obj, flatOptions) { + flatOptions.savePrefix = obj['save-exact'] ? '' : obj['save-prefix'] + obj['save-prefix'] = flatOptions.savePrefix + }, + }), + 'save-prod': new Definition('save-prod', { + default: false, + type: Boolean, + short: 'P', + exclusive: ['save-dev', 'save-optional', 'save-peer'], + description: ` + Save installed packages into \`dependencies\` specifically. This is + useful if a package already exists in \`devDependencies\` or + \`optionalDependencies\`, but you want to move it to be a non-optional + production dependency. + + This is the default behavior if \`--save\` is true, and neither + \`--save-dev\` or \`--save-optional\` are true. + `, + flatten (key, obj, flatOptions) { + if (!obj[key]) { + if (flatOptions.saveType === 'prod') { + delete flatOptions.saveType + } + return + } + + flatOptions.saveType = 'prod' + }, + }), + 'sbom-format': new Definition('sbom-format', { + default: null, + type: [ + 'cyclonedx', + 'spdx', + ], + description: ` + SBOM format to use when generating SBOMs. + `, + flatten, + }), + 'sbom-type': new Definition('sbom-type', { + default: 'library', + type: [ + 'library', + 'application', + 'framework', + ], + description: ` + The type of package described by the generated SBOM. For SPDX, this is the + value for the \`primaryPackagePurpose\` field. For CycloneDX, this is the + value for the \`type\` field. + `, + flatten, + }), + scope: new Definition('scope', { + default: '', + defaultDescription: ` + the scope of the current project, if any, or "" + `, + type: String, + hint: '<@scope>', + description: ` + Associate an operation with a scope for a scoped registry. + + Useful when logging in to or out of a private registry: + + \`\`\` + # log in, linking the scope to the custom registry + npm login --scope=@mycorp --registry=https://registry.mycorp.com + + # log out, removing the link and the auth token + npm logout --scope=@mycorp + \`\`\` + + This will cause \`@mycorp\` to be mapped to the registry for future + installation of packages specified according to the pattern + \`@mycorp/package\`. + + This will also cause \`npm init\` to create a scoped package. + + \`\`\` + # accept all defaults, and create a package named "@foo/whatever", + # instead of just named "whatever" + npm init --scope=@foo --yes + \`\`\` + `, + flatten (key, obj, flatOptions) { + const value = obj[key] + const scope = value && !/^@/.test(value) ? `@${value}` : value + flatOptions.scope = scope + // projectScope is kept for compatibility with npm-registry-fetch + flatOptions.projectScope = scope + }, + }), + 'script-shell': new Definition('script-shell', { + default: null, + defaultDescription: ` + '/bin/sh' on POSIX systems, 'cmd.exe' on Windows + `, + type: [null, String], + description: ` + The shell to use for scripts run with the \`npm exec\`, + \`npm run\` and \`npm init \` commands. + `, + flatten (key, obj, flatOptions) { + flatOptions.scriptShell = obj[key] || undefined + }, + }), + searchexclude: new Definition('searchexclude', { + default: '', + type: String, + description: ` + Space-separated options that limit the results from search. 
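+
+      For example (terms illustrative):
+
+      \`\`\`bash
+      npm search http client --searchexclude="deprecated legacy"
+      \`\`\`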
+ `, + flatten (key, obj, flatOptions) { + flatOptions.search = flatOptions.search || { limit: 20 } + flatOptions.search.exclude = obj[key].toLowerCase() + }, + }), + searchlimit: new Definition('searchlimit', { + default: 20, + type: Number, + description: ` + Number of items to limit search results to. Will not apply at all to + legacy searches. + `, + flatten (key, obj, flatOptions) { + flatOptions.search = flatOptions.search || {} + flatOptions.search.limit = obj[key] + }, + }), + searchopts: new Definition('searchopts', { + default: '', + type: String, + description: ` + Space-separated options that are always passed to search. + `, + flatten (key, obj, flatOptions) { + flatOptions.search = flatOptions.search || { limit: 20 } + flatOptions.search.opts = querystring.parse(obj[key]) + }, + }), + searchstaleness: new Definition('searchstaleness', { + default: 15 * 60, + type: Number, + description: ` + The age of the cache, in seconds, before another registry request is made + if using legacy search endpoint. + `, + flatten (key, obj, flatOptions) { + flatOptions.search = flatOptions.search || { limit: 20 } + flatOptions.search.staleness = obj[key] + }, + }), + shell: new Definition('shell', { + default: shell, + defaultDescription: ` + SHELL environment variable, or "bash" on Posix, or "cmd.exe" on Windows + `, + type: String, + description: ` + The shell to run for the \`npm explore\` command. + `, + flatten, + }), + shrinkwrap: new Definition('shrinkwrap', { + default: true, + type: Boolean, + deprecated: ` + Use the --package-lock setting instead. + `, + description: ` + Alias for --package-lock + `, + flatten (key, obj, flatOptions) { + obj['package-lock'] = obj.shrinkwrap + definitions['package-lock'].flatten('package-lock', obj, flatOptions) + }, + }), + 'sign-git-commit': new Definition('sign-git-commit', { + default: false, + type: Boolean, + description: ` + If set to true, then the \`npm version\` command will commit the new + package version using \`-S\` to add a signature. + + Note that git requires you to have set up GPG keys in your git configs + for this to work properly. + `, + flatten, + }), + 'sign-git-tag': new Definition('sign-git-tag', { + default: false, + type: Boolean, + description: ` + If set to true, then the \`npm version\` command will tag the version + using \`-s\` to add a signature. + + Note that git requires you to have set up GPG keys in your git configs + for this to work properly. + `, + flatten, + }), + 'strict-peer-deps': new Definition('strict-peer-deps', { + default: false, + type: Boolean, + description: ` + If set to \`true\`, and \`--legacy-peer-deps\` is not set, then _any_ + conflicting \`peerDependencies\` will be treated as an install failure, + even if npm could reasonably guess the appropriate resolution based on + non-peer dependency relationships. + + By default, conflicting \`peerDependencies\` deep in the dependency graph + will be resolved using the nearest non-peer dependency specification, + even if doing so will result in some packages receiving a peer dependency + outside the range set in their package's \`peerDependencies\` object. + + When such an override is performed, a warning is printed, explaining the + conflict and the packages involved. If \`--strict-peer-deps\` is set, + then this warning is treated as a failure. 
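+
+      For example, to make an install fail fast on any peer dependency
+      conflict:
+
+      \`\`\`bash
+      npm ci --strict-peer-deps
+      \`\`\`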
+    `,
+    flatten,
+  }),
+  'strict-ssl': new Definition('strict-ssl', {
+    default: true,
+    type: Boolean,
+    description: `
+      Whether or not to do SSL key validation when making requests to the
+      registry via https.
+
+      See also the \`ca\` config.
+    `,
+    flatten (key, obj, flatOptions) {
+      flatOptions.strictSSL = obj[key]
+    },
+  }),
+  tag: new Definition('tag', {
+    default: 'latest',
+    type: String,
+    description: `
+      If you ask npm to install a package and don't tell it a specific version,
+      then it will install the specified tag.
+
+      It is the tag added to the package@version specified in the
+      \`npm dist-tag add\` command, if no explicit tag is given.
+
+      When used by the \`npm diff\` command, this is the tag used to fetch the
+      tarball that will be compared with the local files by default.
+
+      If used in the \`npm publish\` command, this is the tag that will be
+      added to the package submitted to the registry.
+    `,
+    flatten (key, obj, flatOptions) {
+      flatOptions.defaultTag = obj[key]
+    },
+  }),
+  'tag-version-prefix': new Definition('tag-version-prefix', {
+    default: 'v',
+    type: String,
+    description: `
+      If set, alters the prefix used when tagging a new version when performing
+      a version increment using \`npm version\`. To remove the prefix
+      altogether, set it to the empty string: \`""\`.
+
+      Because other tools may rely on the convention that npm version tags look
+      like \`v1.0.0\`, _only use this property if it is absolutely necessary_.
+      In particular, use care when overriding this setting for public packages.
+    `,
+    flatten,
+  }),
+  timing: new Definition('timing', {
+    default: false,
+    type: Boolean,
+    description: `
+      If true, writes timing information to a process-specific JSON file in
+      the cache or \`logs-dir\`. The file name ends with \`-timing.json\`.
+
+      You can quickly view it with this [json](https://npm.im/json) command
+      line: \`cat ~/.npm/_logs/*-timing.json | npm exec -- json -g\`.
+
+      Timing information will also be reported in the terminal. To suppress this
+      while still writing the timing file, use \`--silent\`.
+    `,
+  }),
+  umask: new Definition('umask', {
+    default: 0,
+    type: Umask,
+    description: `
+      The "umask" value to use when setting the file creation mode on files and
+      folders.
+
+      Folders and executables are given a mode which is \`0o777\` masked
+      against this value. Other files are given a mode which is \`0o666\`
+      masked against this value.
+
+      Note that the underlying system will _also_ apply its own umask value to
+      files and folders that are created, and npm does not circumvent this, but
+      rather adds the \`--umask\` config to it.
+
+      Thus, the effective default umask value on most POSIX systems is 0o22,
+      meaning that folders and executables are created with a mode of 0o755 and
+      other files are created with a mode of 0o644.
+    `,
+    flatten,
+  }),
+  unicode: new Definition('unicode', {
+    default: unicode,
+    defaultDescription: `
+      false on windows, true on mac/unix systems with a unicode locale, as
+      defined by the \`LC_ALL\`, \`LC_CTYPE\`, or \`LANG\` environment variables.
+    `,
+    type: Boolean,
+    description: `
+      When set to true, npm uses unicode characters in the tree output. When
+      false, it uses ascii characters instead of unicode glyphs.
+    `,
+    flatten,
+  }),
+  'update-notifier': new Definition('update-notifier', {
+    default: true,
+    type: Boolean,
+    description: `
+      Set to false to suppress the update notification when using an older
+      version of npm than the latest.
+    `,
+  }),
+  usage: new Definition('usage', {
+    default: false,
+    type: Boolean,
+    short: ['?', 'H', 'h'],
+    description: `
+      Show short usage output about the command specified.
+    `,
+  }),
+  'user-agent': new Definition('user-agent', {
+    default: 'npm/{npm-version} ' +
+      'node/{node-version} ' +
+      '{platform} ' +
+      '{arch} ' +
+      'workspaces/{workspaces} ' +
+      '{ci}',
+    type: String,
+    description: `
+      Sets the User-Agent request header. The following fields are replaced
+      with their actual counterparts:
+
+      * \`{npm-version}\` - The npm version in use
+      * \`{node-version}\` - The Node.js version in use
+      * \`{platform}\` - The value of \`process.platform\`
+      * \`{arch}\` - The value of \`process.arch\`
+      * \`{workspaces}\` - Set to \`true\` if the \`workspaces\` or \`workspace\`
+        options are set.
+      * \`{ci}\` - The value of the \`ci-name\` config, if set, prefixed with
+        \`ci/\`, or an empty string if \`ci-name\` is empty.
+    `,
+    flatten (key, obj, flatOptions) {
+      const value = obj[key]
+      const ciName = ciInfo.name?.toLowerCase().split(' ').join('-') || null
+      let inWorkspaces = false
+      if (obj.workspaces || obj.workspace && obj.workspace.length) {
+        inWorkspaces = true
+      }
+      flatOptions.userAgent =
+        value.replace(/\{node-version\}/gi, process.version)
+          .replace(/\{npm-version\}/gi, obj['npm-version'])
+          .replace(/\{platform\}/gi, process.platform)
+          .replace(/\{arch\}/gi, process.arch)
+          .replace(/\{workspaces\}/gi, inWorkspaces)
+          .replace(/\{ci\}/gi, ciName ? `ci/${ciName}` : '')
+          .trim()
+
+      // We can't clobber the original or else subsequent flattening will fail
+      // (i.e. when we change the underlying config values)
+      // obj[key] = flatOptions.userAgent
+
+      // user-agent is a unique kind of config item that gets set from a template
+      // and ends up translated. Because of this, the normal "should we set this
+      // to process.env" check also doesn't work.
+      process.env.npm_config_user_agent = flatOptions.userAgent
+    },
+  }),
+  userconfig: new Definition('userconfig', {
+    default: '~/.npmrc',
+    type: path,
+    description: `
+      The location of user-level configuration settings.
+
+      This may be overridden by the \`npm_config_userconfig\` environment
+      variable or the \`--userconfig\` command line option, but may _not_
+      be overridden by settings in the \`globalconfig\` file.
+    `,
+  }),
+  version: new Definition('version', {
+    default: false,
+    type: Boolean,
+    short: 'v',
+    description: `
+      If true, output the npm version and exit successfully.
+
+      Only relevant when specified explicitly on the command line.
+    `,
+  }),
+  versions: new Definition('versions', {
+    default: false,
+    type: Boolean,
+    description: `
+      If true, output the npm version as well as node's \`process.versions\`
+      map and the version in the current working directory's \`package.json\`
+      file if one exists, and exit successfully.
+
+      Only relevant when specified explicitly on the command line.
+    `,
+  }),
+  viewer: new Definition('viewer', {
+    default: isWindows ? 'browser' : 'man',
+    defaultDescription: `
+      "man" on Posix, "browser" on Windows
+    `,
+    type: String,
+    description: `
+      The program to use to view help content.
+
+      Set to \`"browser"\` to view html help content in the default web browser.
+    `,
+  }),
+  which: new Definition('which', {
+    default: null,
+    hint: '<fundingSourceNumber>',
+    type: [null, Number],
+    description: `
+      If there are multiple funding sources, which 1-indexed source URL to open.
+    `,
+  }),
+  workspace: new Definition('workspace', {
+    default: [],
+    type: [String, Array],
+    hint: '<workspace-name>',
+    short: 'w',
+    envExport: false,
+    description: `
+      Enable running a command in the context of the configured workspaces of
+      the current project, while limiting the run to only the workspaces
+      defined by this configuration option.
+
+      Valid values for the \`workspace\` config are either:
+
+      * Workspace names
+      * Path to a workspace directory
+      * Path to a parent workspace directory (will result in selecting all
+        workspaces within that folder)
+
+      When set for the \`npm init\` command, this may be set to the folder of
+      a workspace which does not yet exist, to create the folder and set it
+      up as a brand new workspace within the project.
+    `,
+    flatten: (key, obj, flatOptions) => {
+      definitions['user-agent'].flatten('user-agent', obj, flatOptions)
+    },
+  }),
+  workspaces: new Definition('workspaces', {
+    default: null,
+    type: [null, Boolean],
+    envExport: false,
+    description: `
+      Set to true to run the command in the context of **all** configured
+      workspaces.
+
+      Explicitly setting this to false will cause commands like \`install\` to
+      ignore workspaces altogether.
+
+      When not set explicitly:
+
+      - Commands that operate on the \`node_modules\` tree (install, update,
+        etc.) will link workspaces into the \`node_modules\` folder.
+      - Commands that do other things (test, exec, publish, etc.) will operate
+        on the root project, _unless_ one or more workspaces are specified in
+        the \`workspace\` config.
+    `,
+    flatten: (key, obj, flatOptions) => {
+      definitions['user-agent'].flatten('user-agent', obj, flatOptions)
+
+      // TODO: this is a derived value, and should be reworked when we have a
+      // pattern for derived value
+
+      // workspacesEnabled is true whether workspaces is null or true
+      // commands contextually work with workspaces or not regardless of
+      // configuration, so we need an option specifically to disable workspaces
+      flatOptions.workspacesEnabled = obj[key] !== false
+    },
+  }),
+  'workspaces-update': new Definition('workspaces-update', {
+    default: true,
+    type: Boolean,
+    description: `
+      If set to true, the npm cli will run an update after operations that may
+      possibly change the workspaces installed to the \`node_modules\` folder.
+    `,
+    flatten,
+  }),
+  yes: new Definition('yes', {
+    default: null,
+    type: [null, Boolean],
+    short: 'y',
+    description: `
+      Automatically answer "yes" to any prompts that npm might print on
+      the command line.
+    `,
+  }),
+}
+
+module.exports = definitions
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/lib/definitions/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/lib/definitions/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..793b71ea40d6f76ee5c6897ba43fd32ea6856056
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/lib/definitions/index.js
@@ -0,0 +1,80 @@
+const definitions = require('./definitions.js')
+
+// use the defined flattening function, and copy over any scoped
+// registries and registry-specific "nerfdart" configs verbatim
+//
+// TODO: make these getters so that we only have to make dirty
+// the thing that changed, and then flatten the fields that
+// could have changed when a config.set is called.
+//
+// TODO: move nerfdart auth stuff into a nested object that
+// is only passed along to paths that end up calling npm-registry-fetch.
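+//
+// For illustration (the values here are hypothetical, not from the npm test
+// suite): a key with its own flatten definition gets translated, while a
+// scoped registry key is copied through verbatim:
+//
+//   flatten({ tag: 'beta', '@myorg:registry': 'https://registry.example.com/' })
+//   // -> { defaultTag: 'beta', '@myorg:registry': 'https://registry.example.com/' }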
+const flatten = (obj, flat = {}) => {
+  for (const [key, val] of Object.entries(obj)) {
+    const def = definitions[key]
+    if (def && def.flatten) {
+      def.flatten(key, obj, flat)
+    } else if (/@.*:registry$/i.test(key) || /^\/\//.test(key)) {
+      flat[key] = val
+    }
+  }
+  return flat
+}
+
+const definitionProps = Object.entries(definitions)
+  .reduce((acc, [key, { short = [], default: d }]) => {
+    // can be either an array or string
+    for (const s of [].concat(short)) {
+      acc.shorthands[s] = [`--${key}`]
+    }
+    acc.defaults[key] = d
+    return acc
+  }, { shorthands: {}, defaults: {} })
+
+// aliases where they get expanded into a completely different thing
+// these are NOT supported in the environment or npmrc files, only
+// expanded on the CLI.
+// TODO: when we switch off of nopt, use an arg parser that supports
+// more reasonable aliasing and short opts right in the definitions set.
+const shorthands = {
+  'enjoy-by': ['--before'],
+  d: ['--loglevel', 'info'],
+  dd: ['--loglevel', 'verbose'],
+  ddd: ['--loglevel', 'silly'],
+  quiet: ['--loglevel', 'warn'],
+  q: ['--loglevel', 'warn'],
+  s: ['--loglevel', 'silent'],
+  silent: ['--loglevel', 'silent'],
+  verbose: ['--loglevel', 'verbose'],
+  desc: ['--description'],
+  help: ['--usage'],
+  local: ['--no-global'],
+  n: ['--no-yes'],
+  no: ['--no-yes'],
+  porcelain: ['--parseable'],
+  readonly: ['--read-only'],
+  reg: ['--registry'],
+  iwr: ['--include-workspace-root'],
+  ws: ['--workspaces'],
+  ...definitionProps.shorthands,
+}
+
+// These are the configs that we can nerf-dart. Only `_auth` even has a config definition, so we have to explicitly validate the rest here.
+// This is used to validate during "npm config set" and to not warn on loading unknown configs when we see these.
+const nerfDarts = [
+  '_auth', // Has a config
+  '_authToken', // Does not have a config
+  '_password', // Does not have a config
+  'certfile', // Does not have a config
+  'email', // Does not have a config
+  'keyfile', // Does not have a config
+  'username', // Does not have a config
+]
+
+module.exports = {
+  defaults: definitionProps.defaults,
+  definitions,
+  flatten,
+  nerfDarts,
+  shorthands,
+}
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/lib/env-replace.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/lib/env-replace.js
new file mode 100644
index 0000000000000000000000000000000000000000..c347be480ed688e474cf01bfc740c8f26155436f
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/lib/env-replace.js
@@ -0,0 +1,16 @@
+// replace any ${ENV} values with the appropriate environ. An optional "?"
+// modifier, as in ${ENV?}, makes an undefined variable evaluate to ''.
+
+const envExpr = /(?<!\\)(\\*)\$\{([^${}?]+)(\?)?\}/g
+
+module.exports = (f, env) => f.replace(envExpr, (orig, esc, name, modifier) => {
+  const fallback = modifier === '?' ? '' : `$\{${name}}`
+  const val = env[name] !== undefined ? env[name] : fallback
+
+  // consume the escape chars that are relevant.
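+  // e.g. (illustrative) with env = { FOO: 'bar' }:
+  //   '${FOO}'   -> 'bar'
+  //   '\${FOO}'  -> '${FOO}'  (odd escape count suppresses the expansion)
+  //   '\\${FOO}' -> '\bar'    (even: half the backslashes survive, value expands)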
+  if (esc.length % 2) {
+    return orig.slice((esc.length + 1) / 2)
+  }
+
+  return (esc.slice(esc.length / 2)) + val
+})
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/lib/errors.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/lib/errors.js
new file mode 100644
index 0000000000000000000000000000000000000000..6161509108ff01ea5dc1fb88ea06b9ad7b08610e
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/lib/errors.js
@@ -0,0 +1,23 @@
+'use strict'
+
+class ErrInvalidAuth extends Error {
+  constructor (problems) {
+    let message = 'Invalid auth configuration found: '
+    message += problems.map((problem) => {
+      // istanbul ignore else
+      if (problem.action === 'delete') {
+        return `\`${problem.key}\` is not allowed in ${problem.where} config`
+      } else if (problem.action === 'rename') {
+        return `\`${problem.from}\` must be renamed to \`${problem.to}\` in ${problem.where} config`
+      }
+    }).join(', ')
+    message += '\nPlease run `npm config fix` to repair your configuration.'
+    super(message)
+    this.code = 'ERR_INVALID_AUTH'
+    this.problems = problems
+  }
+}
+
+module.exports = {
+  ErrInvalidAuth,
+}
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/lib/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..b56c461991c5c01e57993aa8c9b013e76ca122ec
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/lib/index.js
@@ -0,0 +1,977 @@
+// TODO: set the scope config from package.json or explicit cli config
+const { walkUp } = require('walk-up-path')
+const ini = require('ini')
+const nopt = require('nopt')
+const { log, time } = require('proc-log')
+
+const { resolve, dirname, join } = require('node:path')
+const { homedir } = require('node:os')
+const {
+  readFile,
+  writeFile,
+  chmod,
+  unlink,
+  stat,
+  mkdir,
+} = require('node:fs/promises')
+
+// TODO global-prefix and local-prefix are set by lib/set-envs.js.
This may not be the best way to persist those, if we even want to persist them (see set-envs.js) +const internalEnv = [ + 'npm-version', + 'global-prefix', + 'local-prefix', +] + +const fileExists = (...p) => stat(resolve(...p)) + .then((st) => st.isFile()) + .catch(() => false) + +const dirExists = (...p) => stat(resolve(...p)) + .then((st) => st.isDirectory()) + .catch(() => false) + +const hasOwnProperty = (obj, key) => + Object.prototype.hasOwnProperty.call(obj, key) + +const typeDefs = require('./type-defs.js') +const nerfDart = require('./nerf-dart.js') +const envReplace = require('./env-replace.js') +const parseField = require('./parse-field.js') +const setEnvs = require('./set-envs.js') + +// types that can be saved back to +const confFileTypes = new Set([ + 'global', + 'user', + 'project', +]) + +const confTypes = new Set([ + 'default', + 'builtin', + ...confFileTypes, + 'env', + 'cli', +]) + +class Config { + #loaded = false + #flatten + // populated the first time we flatten the object + #flatOptions = null + + static get typeDefs () { + return typeDefs + } + + constructor ({ + definitions, + shorthands, + flatten, + nerfDarts = [], + npmPath, + + // options just to override in tests, mostly + env = process.env, + argv = process.argv, + platform = process.platform, + execPath = process.execPath, + cwd = process.cwd(), + excludeNpmCwd = false, + }) { + this.nerfDarts = nerfDarts + this.definitions = definitions + // turn the definitions into nopt's weirdo syntax + const types = {} + const defaults = {} + this.deprecated = {} + for (const [key, def] of Object.entries(definitions)) { + defaults[key] = def.default + types[key] = def.type + if (def.deprecated) { + this.deprecated[key] = def.deprecated.trim().replace(/\n +/, '\n') + } + } + + this.#flatten = flatten + this.types = types + this.shorthands = shorthands + this.defaults = defaults + + this.npmPath = npmPath + this.npmBin = join(this.npmPath, 'bin/npm-cli.js') + this.argv = argv + this.env = env + this.execPath = execPath + this.platform = platform + this.cwd = cwd + this.excludeNpmCwd = excludeNpmCwd + + // set when we load configs + this.globalPrefix = null + this.localPrefix = null + this.localPackage = null + + // defaults to env.HOME, but will always be *something* + this.home = null + + // set up the prototype chain of config objects + const wheres = [...confTypes] + this.data = new Map() + let parent = null + for (const where of wheres) { + this.data.set(where, parent = new ConfigData(parent)) + } + + this.data.set = () => { + throw new Error('cannot change internal config data structure') + } + this.data.delete = () => { + throw new Error('cannot change internal config data structure') + } + + this.sources = new Map([]) + + this.list = [] + for (const { data } of this.data.values()) { + this.list.unshift(data) + } + Object.freeze(this.list) + + this.#loaded = false + } + + get loaded () { + return this.#loaded + } + + get prefix () { + return this.#get('global') ? this.globalPrefix : this.localPrefix + } + + // return the location where key is found. 
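+  // e.g. (illustrative) if 'registry' is set in both the user and project
+  // npmrc files, find('registry') returns 'project': the entries are scanned
+  // in reverse so that later, higher-priority locations win.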
+ find (key) { + if (!this.loaded) { + throw new Error('call config.load() before reading values') + } + + // have to look in reverse order + const entries = [...this.data.entries()] + for (let i = entries.length - 1; i > -1; i--) { + const [where, { data }] = entries[i] + if (hasOwnProperty(data, key)) { + return where + } + } + return null + } + + get (key, where) { + if (!this.loaded) { + throw new Error('call config.load() before reading values') + } + return this.#get(key, where) + } + + // we need to get values sometimes, so use this internal one to do so + // while in the process of loading. + #get (key, where = null) { + if (where !== null && !confTypes.has(where)) { + throw new Error('invalid config location param: ' + where) + } + const { data } = this.data.get(where || 'cli') + return where === null || hasOwnProperty(data, key) ? data[key] : undefined + } + + set (key, val, where = 'cli') { + if (!this.loaded) { + throw new Error('call config.load() before setting values') + } + if (!confTypes.has(where)) { + throw new Error('invalid config location param: ' + where) + } + this.#checkDeprecated(key) + const { data, raw } = this.data.get(where) + data[key] = val + if (['global', 'user', 'project'].includes(where)) { + raw[key] = val + } + + // this is now dirty, the next call to this.valid will have to check it + this.data.get(where)[_valid] = null + + // the flat options are invalidated, regenerate next time they're needed + this.#flatOptions = null + } + + get flat () { + if (this.#flatOptions) { + return this.#flatOptions + } + + // create the object for flat options passed to deps + const timeEnd = time.start('config:load:flatten') + this.#flatOptions = {} + // walk from least priority to highest + for (const { data } of this.data.values()) { + this.#flatten(data, this.#flatOptions) + } + this.#flatOptions.nodeBin = this.execPath + this.#flatOptions.npmBin = this.npmBin + timeEnd() + + return this.#flatOptions + } + + delete (key, where = 'cli') { + if (!this.loaded) { + throw new Error('call config.load() before deleting values') + } + if (!confTypes.has(where)) { + throw new Error('invalid config location param: ' + where) + } + const { data, raw } = this.data.get(where) + delete data[key] + if (['global', 'user', 'project'].includes(where)) { + delete raw[key] + } + } + + async load () { + if (this.loaded) { + throw new Error('attempting to load npm config multiple times') + } + + // first load the defaults, which sets the global prefix + this.loadDefaults() + + // next load the builtin config, as this sets new effective defaults + await this.loadBuiltinConfig() + + // cli and env are not async, and can set the prefix, relevant to project + this.loadCLI() + this.loadEnv() + + // next project config, which can affect userconfig location + await this.loadProjectConfig() + + // then user config, which can affect globalconfig location + await this.loadUserConfig() + + // last but not least, global config file + await this.loadGlobalConfig() + + // set this before calling setEnvs, so that we don't have to share + // private attributes, as that module also does a bunch of get operations + this.#loaded = true + + // set proper globalPrefix now that everything is loaded + this.globalPrefix = this.get('prefix') + + this.setEnvs() + } + + loadDefaults () { + this.loadGlobalPrefix() + this.loadHome() + + const defaultsObject = { + ...this.defaults, + prefix: this.globalPrefix, + } + + try { + // This does not have an actual definition because this is not user definable + 
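+      // (it is still exported to child processes, as npm_config_npm_version,
+      // by set-envs.js)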
defaultsObject['npm-version'] = require(join(this.npmPath, 'package.json')).version + } catch { + // in some weird state where the passed in npmPath does not have a package.json + // this will never happen in npm, but is guarded here in case this is consumed + // in other ways + tests + } + + this.#loadObject(defaultsObject, 'default', 'default values') + + const { data } = this.data.get('default') + + // if the prefix is set on cli, env, or userconfig, then we need to + // default the globalconfig file to that location, instead of the default + // global prefix. It's weird that `npm get globalconfig --prefix=/foo` + // returns `/foo/etc/npmrc`, but better to not change it at this point. + // define a custom getter, but turn into a normal prop + // if we set it. otherwise it can't be set on child objects + Object.defineProperty(data, 'globalconfig', { + get: () => resolve(this.#get('prefix'), 'etc/npmrc'), + set (value) { + Object.defineProperty(data, 'globalconfig', { + value, + configurable: true, + writable: true, + enumerable: true, + }) + }, + configurable: true, + enumerable: true, + }) + } + + loadHome () { + this.home = this.env.HOME || homedir() + } + + loadGlobalPrefix () { + if (this.globalPrefix) { + throw new Error('cannot load default global prefix more than once') + } + + if (this.env.PREFIX) { + this.globalPrefix = this.env.PREFIX + } else if (this.platform === 'win32') { + // c:\node\node.exe --> prefix=c:\node\ + this.globalPrefix = dirname(this.execPath) + } else { + // /usr/local/bin/node --> prefix=/usr/local + this.globalPrefix = dirname(dirname(this.execPath)) + + // destdir only is respected on Unix + if (this.env.DESTDIR) { + this.globalPrefix = join(this.env.DESTDIR, this.globalPrefix) + } + } + } + + loadEnv () { + const conf = Object.create(null) + for (const [envKey, envVal] of Object.entries(this.env)) { + if (!/^npm_config_/i.test(envKey) || envVal === '') { + continue + } + let key = envKey.slice('npm_config_'.length) + if (!key.startsWith('//')) { // don't normalize nerf-darted keys + key = key.replace(/(?!^)_/g, '-') // don't replace _ at the start of the key + .toLowerCase() + } + conf[key] = envVal + } + this.#loadObject(conf, 'env', 'environment') + } + + loadCLI () { + for (const s of Object.keys(this.shorthands)) { + if (s.length > 1 && this.argv.includes(`-${s}`)) { + log.warn(`-${s} is not a valid single-hyphen cli flag and will be removed in the future`) + } + } + nopt.invalidHandler = (k, val, type) => + this.invalidHandler(k, val, type, 'command line options', 'cli') + nopt.unknownHandler = this.unknownHandler + nopt.abbrevHandler = this.abbrevHandler + const conf = nopt(this.types, this.shorthands, this.argv) + nopt.invalidHandler = null + nopt.unknownHandler = null + this.parsedArgv = conf.argv + delete conf.argv + this.#loadObject(conf, 'cli', 'command line options') + } + + get valid () { + for (const [where, { valid }] of this.data.entries()) { + if (valid === false || valid === null && !this.validate(where)) { + return false + } + } + return true + } + + validate (where) { + if (!where) { + let valid = true + const authProblems = [] + + for (const entryWhere of this.data.keys()) { + // no need to validate our defaults, we know they're fine + // cli was already validated when parsed the first time + if (entryWhere === 'default' || entryWhere === 'builtin' || entryWhere === 'cli') { + continue + } + const ret = this.validate(entryWhere) + valid = valid && ret + + if (['global', 'user', 'project'].includes(entryWhere)) { + // after validating 
everything else, we look for old auth configs we no longer support + // if these keys are found, we build up a list of them and the appropriate action and + // attach it as context on the thrown error + + // first, keys that should be removed + for (const key of ['_authtoken', '-authtoken']) { + if (this.get(key, entryWhere)) { + authProblems.push({ action: 'delete', key, where: entryWhere }) + } + } + + // NOTE we pull registry without restricting to the current 'where' because we want to + // suggest scoping things to the registry they would be applied to, which is the default + // regardless of where it was defined + const nerfedReg = nerfDart(this.get('registry')) + // keys that should be nerfed but currently are not + for (const key of ['_auth', '_authToken', 'username', '_password']) { + if (this.get(key, entryWhere)) { + // username and _password must both exist in the same file to be recognized correctly + if (key === 'username' && !this.get('_password', entryWhere)) { + authProblems.push({ action: 'delete', key, where: entryWhere }) + } else if (key === '_password' && !this.get('username', entryWhere)) { + authProblems.push({ action: 'delete', key, where: entryWhere }) + } else { + authProblems.push({ + action: 'rename', + from: key, + to: `${nerfedReg}:${key}`, + where: entryWhere, + }) + } + } + } + } + } + + if (authProblems.length) { + const { ErrInvalidAuth } = require('./errors.js') + throw new ErrInvalidAuth(authProblems) + } + + return valid + } else { + const obj = this.data.get(where) + obj[_valid] = true + + nopt.invalidHandler = (k, val, type) => + this.invalidHandler(k, val, type, obj.source, where) + + nopt.clean(obj.data, this.types, typeDefs) + + nopt.invalidHandler = null + return obj[_valid] + } + } + + // fixes problems identified by validate(), accepts the 'problems' property from a thrown + // ErrInvalidAuth to avoid having to check everything again + repair (problems) { + if (!problems) { + try { + this.validate() + } catch (err) { + // coverage skipped here because we don't need to test re-throwing an error + // istanbul ignore next + if (err.code !== 'ERR_INVALID_AUTH') { + throw err + } + + problems = err.problems + } finally { + if (!problems) { + problems = [] + } + } + } + + for (const problem of problems) { + // coverage disabled for else branch because it doesn't do anything and shouldn't + // istanbul ignore else + if (problem.action === 'delete') { + this.delete(problem.key, problem.where) + } else if (problem.action === 'rename') { + const raw = this.data.get(problem.where).raw?.[problem.from] + const calculated = this.get(problem.from, problem.where) + this.set(problem.to, raw || calculated, problem.where) + this.delete(problem.from, problem.where) + } + } + } + + // Returns true if the value is coming directly from the source defined + // in default definitions, if the current value for the key config is + // coming from any other different source, returns false + isDefault (key) { + const [defaultType, ...types] = [...confTypes] + const defaultData = this.data.get(defaultType).data + + return hasOwnProperty(defaultData, key) + && types.every(type => { + const typeData = this.data.get(type).data + return !hasOwnProperty(typeData, key) + }) + } + + invalidHandler (k, val, type, source, where) { + const typeDescription = require('./type-description.js') + log.warn( + 'invalid config', + k + '=' + JSON.stringify(val), + `set in ${source}` + ) + this.data.get(where)[_valid] = false + + if (Array.isArray(type)) { + if 
(type.includes(typeDefs.url.type)) { + type = typeDefs.url.type + } else { + /* istanbul ignore if - no actual configs matching this, but + * path types SHOULD be handled this way, like URLs, for the + * same reason */ + if (type.includes(typeDefs.path.type)) { + type = typeDefs.path.type + } + } + } + + const typeDesc = typeDescription(type) + const mustBe = typeDesc + .filter(m => m !== undefined && m !== Array) + const msg = 'Must be' + this.#getOneOfKeywords(mustBe, typeDesc) + const desc = mustBe.length === 1 ? mustBe[0] + : [...new Set(mustBe.map(n => typeof n === 'string' ? n : JSON.stringify(n)))].join(', ') + log.warn('invalid config', msg, desc) + } + + abbrevHandler (short, long) { + log.warn(`Expanding --${short} to --${long}. This will stop working in the next major version of npm.`) + } + + unknownHandler (key, next) { + if (next) { + log.warn(`"${next}" is being parsed as a normal command line argument.`) + } + } + + #getOneOfKeywords (mustBe, typeDesc) { + let keyword + if (mustBe.length === 1 && typeDesc.includes(Array)) { + keyword = ' one or more' + } else if (mustBe.length > 1 && typeDesc.includes(Array)) { + keyword = ' one or more of:' + } else if (mustBe.length > 1) { + keyword = ' one of:' + } else { + keyword = '' + } + return keyword + } + + #loadObject (obj, where, source, er = null) { + // obj is the raw data read from the file + const conf = this.data.get(where) + if (conf.source) { + const m = `double-loading "${where}" configs from ${source}, ` + + `previously loaded from ${conf.source}` + throw new Error(m) + } + + if (this.sources.has(source)) { + const m = `double-loading config "${source}" as "${where}", ` + + `previously loaded as "${this.sources.get(source)}"` + throw new Error(m) + } + + conf.source = source + this.sources.set(source, where) + if (er) { + conf.loadError = er + if (er.code !== 'ENOENT') { + log.verbose('config', `error loading ${where} config`, er) + } + } else { + conf.raw = obj + for (const [key, value] of Object.entries(obj)) { + const k = envReplace(key, this.env) + const v = this.parseField(value, k) + if (where !== 'default') { + this.#checkDeprecated(k) + if (this.definitions[key]?.exclusive) { + for (const exclusive of this.definitions[key].exclusive) { + if (!this.isDefault(exclusive)) { + throw new TypeError(`--${key} cannot be provided when using --${exclusive}`) + } + } + } + } + if (where !== 'default' || key === 'npm-version') { + this.checkUnknown(where, key) + } + conf.data[k] = v + } + } + } + + checkUnknown (where, key) { + if (!this.definitions[key]) { + if (internalEnv.includes(key)) { + return + } + if (!key.includes(':')) { + log.warn(`Unknown ${where} config "${where === 'cli' ? '--' : ''}${key}". This will stop working in the next major version of npm.`) + return + } + const baseKey = key.split(':').pop() + if (!this.definitions[baseKey] && !this.nerfDarts.includes(baseKey)) { + log.warn(`Unknown ${where} config "${baseKey}" (${key}). This will stop working in the next major version of npm.`) + } + } + } + + #checkDeprecated (key) { + if (this.deprecated[key]) { + log.warn('config', key, this.deprecated[key]) + } + } + + // Parse a field, coercing it to the best type available. 
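+  // e.g. (illustrative) a path-typed value like '~/cache' resolves against
+  // the user's home directory; a Number-typed '42' becomes 42; list types
+  // split on '\n\n' and parse each element.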
+ parseField (f, key, listElement = false) { + return parseField(f, key, this, listElement) + } + + async #loadFile (file, type) { + // only catch the error from readFile, not from the loadObject call + log.silly('config', `load:file:${file}`) + await readFile(file, 'utf8').then( + data => { + const parsedConfig = ini.parse(data) + if (type === 'project' && parsedConfig.prefix) { + // Log error if prefix is mentioned in project .npmrc + /* eslint-disable-next-line max-len */ + log.error('config', `prefix cannot be changed from project config: ${file}.`) + } + return this.#loadObject(parsedConfig, type, file) + }, + er => this.#loadObject(null, type, file, er) + ) + } + + loadBuiltinConfig () { + return this.#loadFile(resolve(this.npmPath, 'npmrc'), 'builtin') + } + + async loadProjectConfig () { + // the localPrefix can be set by the CLI config, but otherwise is + // found by walking up the folder tree. either way, we load it before + // we return to make sure localPrefix is set + await this.loadLocalPrefix() + + // if we have not detected a local package json yet, try now that we + // have a local prefix + if (this.localPackage == null) { + this.localPackage = await fileExists(this.localPrefix, 'package.json') + } + + if (this.#get('global') === true || this.#get('location') === 'global') { + this.data.get('project').source = '(global mode enabled, ignored)' + this.sources.set(this.data.get('project').source, 'project') + return + } + + const projectFile = resolve(this.localPrefix, '.npmrc') + // if we're in the ~ directory, and there happens to be a node_modules + // folder (which is not TOO uncommon, it turns out), then we can end + // up loading the "project" config where the "userconfig" will be, + // which causes some calamities. So, we only load project config if + // it doesn't match what the userconfig will be. 
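+    // (e.g. a localPrefix of ~ would make projectFile ~/.npmrc, the default
+    // userconfig location)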
+ if (projectFile !== this.#get('userconfig')) { + return this.#loadFile(projectFile, 'project') + } else { + this.data.get('project').source = '(same as "user" config, ignored)' + this.sources.set(this.data.get('project').source, 'project') + } + } + + async loadLocalPrefix () { + const cliPrefix = this.#get('prefix', 'cli') + if (cliPrefix) { + this.localPrefix = cliPrefix + return + } + + const cliWorkspaces = this.#get('workspaces', 'cli') + const isGlobal = this.#get('global') || this.#get('location') === 'global' + + for (const p of walkUp(this.cwd)) { + // HACK: this is an option set in tests to stop the local prefix from being set + // on tests that are created inside the npm repo + if (this.excludeNpmCwd && p === this.npmPath) { + break + } + + const hasPackageJson = await fileExists(p, 'package.json') + + if (!this.localPrefix && (hasPackageJson || await dirExists(p, 'node_modules'))) { + this.localPrefix = p + this.localPackage = hasPackageJson + + // if workspaces are disabled, or we're in global mode, return now + if (cliWorkspaces === false || isGlobal) { + return + } + + // otherwise, continue the loop + continue + } + + if (this.localPrefix && hasPackageJson) { + const pkgJson = require('@npmcli/package-json') + // if we already set localPrefix but this dir has a package.json + // then we need to see if `p` is a workspace root by reading its package.json + // however, if reading it fails then we should just move on + const { content: pkg } = await pkgJson.normalize(p).catch(() => ({ content: {} })) + if (!pkg?.workspaces) { + continue + } + + const mapWorkspaces = require('@npmcli/map-workspaces') + const workspaces = await mapWorkspaces({ cwd: p, pkg }) + for (const w of workspaces.values()) { + if (w === this.localPrefix) { + // see if there's a .npmrc file in the workspace, if so log a warning + if (await fileExists(this.localPrefix, '.npmrc')) { + log.warn('config', `ignoring workspace config at ${this.localPrefix}/.npmrc`) + } + + // set the workspace in the default layer, which allows it to be overridden easily + const { data } = this.data.get('default') + data.workspace = [this.localPrefix] + this.localPrefix = p + this.localPackage = hasPackageJson + log.info('config', `found workspace root at ${this.localPrefix}`) + // we found a root, so we return now + return + } + } + } + } + + if (!this.localPrefix) { + this.localPrefix = this.cwd + } + } + + loadUserConfig () { + return this.#loadFile(this.#get('userconfig'), 'user') + } + + loadGlobalConfig () { + return this.#loadFile(this.#get('globalconfig'), 'global') + } + + async save (where) { + if (!this.loaded) { + throw new Error('call config.load() before saving') + } + if (!confFileTypes.has(where)) { + throw new Error('invalid config location param: ' + where) + } + + const conf = this.data.get(where) + conf[_loadError] = null + + if (where === 'user') { + // if email is nerfed, then we want to de-nerf it + const nerfed = nerfDart(this.get('registry')) + const email = this.get(`${nerfed}:email`, 'user') + if (email) { + this.delete(`${nerfed}:email`, 'user') + this.set('email', email, 'user') + } + } + + // We need the actual raw data before we called parseField so that we are + // saving the same content back to the file + const iniData = ini.stringify(conf.raw).trim() + '\n' + if (!iniData.trim()) { + // ignore the unlink error (eg, if file doesn't exist) + await unlink(conf.source).catch(() => {}) + return + } + const dir = dirname(conf.source) + await mkdir(dir, { recursive: true }) + await 
writeFile(conf.source, iniData, 'utf8') + const mode = where === 'user' ? 0o600 : 0o666 + await chmod(conf.source, mode) + } + + clearCredentialsByURI (uri, level = 'user') { + const nerfed = nerfDart(uri) + const def = nerfDart(this.get('registry')) + if (def === nerfed) { + this.delete(`-authtoken`, level) + this.delete(`_authToken`, level) + this.delete(`_authtoken`, level) + this.delete(`_auth`, level) + this.delete(`_password`, level) + this.delete(`username`, level) + // de-nerf email if it's nerfed to the default registry + const email = this.get(`${nerfed}:email`, level) + if (email) { + this.set('email', email, level) + } + } + this.delete(`${nerfed}:_authToken`, level) + this.delete(`${nerfed}:_auth`, level) + this.delete(`${nerfed}:_password`, level) + this.delete(`${nerfed}:username`, level) + this.delete(`${nerfed}:email`, level) + this.delete(`${nerfed}:certfile`, level) + this.delete(`${nerfed}:keyfile`, level) + } + + setCredentialsByURI (uri, { token, username, password, certfile, keyfile }) { + const nerfed = nerfDart(uri) + + // field that hasn't been used as documented for a LONG time, + // and as of npm 7.10.0, isn't used at all. We just always + // send auth if we have it, only to the URIs under the nerf dart. + this.delete(`${nerfed}:always-auth`, 'user') + + this.delete(`${nerfed}:email`, 'user') + if (certfile && keyfile) { + this.set(`${nerfed}:certfile`, certfile, 'user') + this.set(`${nerfed}:keyfile`, keyfile, 'user') + // cert/key may be used in conjunction with other credentials, thus no `else` + } + if (token) { + this.set(`${nerfed}:_authToken`, token, 'user') + this.delete(`${nerfed}:_password`, 'user') + this.delete(`${nerfed}:username`, 'user') + } else if (username || password) { + if (!username) { + throw new Error('must include username') + } + if (!password) { + throw new Error('must include password') + } + this.delete(`${nerfed}:_authToken`, 'user') + this.set(`${nerfed}:username`, username, 'user') + // note: not encrypted, no idea why we bothered to do this, but oh well + // protects against shoulder-hacks if password is memorable, I guess? 
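+      // e.g. (illustrative) Buffer.from('hunter2', 'utf8').toString('base64') === 'aHVudGVyMg=='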
+ const encoded = Buffer.from(password, 'utf8').toString('base64') + this.set(`${nerfed}:_password`, encoded, 'user') + } else if (!certfile || !keyfile) { + throw new Error('No credentials to set.') + } + } + + // this has to be a bit more complicated to support legacy data of all forms + getCredentialsByURI (uri) { + const nerfed = nerfDart(uri) + const def = nerfDart(this.get('registry')) + const creds = {} + + // email is handled differently, it used to always be nerfed and now it never should be + // if it's set nerfed to the default registry, then we copy it to the unnerfed key + // TODO: evaluate removing 'email' from the credentials object returned here + const email = this.get(`${nerfed}:email`) || this.get('email') + if (email) { + if (nerfed === def) { + this.set('email', email, 'user') + } + creds.email = email + } + + const certfileReg = this.get(`${nerfed}:certfile`) + const keyfileReg = this.get(`${nerfed}:keyfile`) + if (certfileReg && keyfileReg) { + creds.certfile = certfileReg + creds.keyfile = keyfileReg + // cert/key may be used in conjunction with other credentials, thus no `return` + } + + const tokenReg = this.get(`${nerfed}:_authToken`) + if (tokenReg) { + creds.token = tokenReg + return creds + } + + const userReg = this.get(`${nerfed}:username`) + const passReg = this.get(`${nerfed}:_password`) + if (userReg && passReg) { + creds.username = userReg + creds.password = Buffer.from(passReg, 'base64').toString('utf8') + const auth = `${creds.username}:${creds.password}` + creds.auth = Buffer.from(auth, 'utf8').toString('base64') + return creds + } + + const authReg = this.get(`${nerfed}:_auth`) + if (authReg) { + const authDecode = Buffer.from(authReg, 'base64').toString('utf8') + const authSplit = authDecode.split(':') + creds.username = authSplit.shift() + creds.password = authSplit.join(':') + creds.auth = authReg + return creds + } + + // at this point, nothing else is usable so just return what we do have + return creds + } + + // set up the environment object we have with npm_config_* environs + // for all configs that are different from their default values, and + // set EDITOR and HOME. 
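+  // e.g. (illustrative) a non-default cli setting such as --save-exact ends
+  // up exported to lifecycle scripts as npm_config_save_exact=true.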
+ setEnvs () { + setEnvs(this) + } +} + +const _loadError = Symbol('loadError') +const _valid = Symbol('valid') + +class ConfigData { + #data + #source = null + #raw = null + constructor (parent) { + this.#data = Object.create(parent && parent.data) + this.#raw = {} + this[_valid] = true + } + + get data () { + return this.#data + } + + get valid () { + return this[_valid] + } + + set source (s) { + if (this.#source) { + throw new Error('cannot set ConfigData source more than once') + } + this.#source = s + } + + get source () { + return this.#source + } + + set loadError (e) { + if (this[_loadError] || (Object.keys(this.#raw).length)) { + throw new Error('cannot set ConfigData loadError after load') + } + this[_loadError] = e + } + + get loadError () { + return this[_loadError] + } + + set raw (r) { + if (Object.keys(this.#raw).length || this[_loadError]) { + throw new Error('cannot set ConfigData raw after load') + } + this.#raw = r + } + + get raw () { + return this.#raw + } +} + +module.exports = Config diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/lib/nerf-dart.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/lib/nerf-dart.js new file mode 100644 index 0000000000000000000000000000000000000000..030d92a82270d2b125b16ee64590b68c05440160 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/lib/nerf-dart.js @@ -0,0 +1,18 @@ +const { URL } = require('node:url') + +/** + * Maps a URL to an identifier. + * + * Name courtesy schiffertronix media LLC, a New Jersey corporation + * + * @param {String} uri The URL to be nerfed. + * + * @returns {String} A nerfed URL. + */ +module.exports = (url) => { + const parsed = new URL(url) + const from = `${parsed.protocol}//${parsed.host}${parsed.pathname}` + const rel = new URL('.', from) + const res = `//${rel.host}${rel.pathname}` + return res +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/lib/parse-field.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/lib/parse-field.js new file mode 100644 index 0000000000000000000000000000000000000000..9ac3d21cae8b718108b33c7ff2d9ac2fb1a95982 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/lib/parse-field.js @@ -0,0 +1,86 @@ +// Parse a field, coercing it to the best type available. +const typeDefs = require('./type-defs.js') +const envReplace = require('./env-replace.js') +const { resolve } = require('node:path') + +const { parse: umaskParse } = require('./umask.js') + +const parseField = (f, key, opts, listElement = false) => { + if (typeof f !== 'string' && !Array.isArray(f)) { + return f + } + + const { platform, types, home, env } = opts + + // type can be array or a single thing. coerce to array. + const typeList = new Set([].concat(types[key])) + const isPath = typeList.has(typeDefs.path.type) + const isBool = typeList.has(typeDefs.Boolean.type) + const isString = isPath || typeList.has(typeDefs.String.type) + const isUmask = typeList.has(typeDefs.Umask.type) + const isNumber = typeList.has(typeDefs.Number.type) + const isList = !listElement && typeList.has(Array) + const isDate = typeList.has(typeDefs.Date.type) + + if (Array.isArray(f)) { + return !isList ? 
f : f.map(field => parseField(field, key, opts, true)) + } + + // now we know it's a string + f = f.trim() + + // list types get put in the environment separated by double-\n + // usually a single \n would suffice, but ca/cert configs can contain + // line breaks and multiple entries. + if (isList) { + return parseField(f.split('\n\n'), key, opts) + } + + // --foo is like --foo=true for boolean types + if (isBool && !isString && f === '') { + return true + } + + // string types can be the string 'true', 'false', etc. + // otherwise, parse these values out + if (!isString && !isPath && !isNumber) { + switch (f) { + case 'true': return true + case 'false': return false + case 'null': return null + case 'undefined': return undefined + } + } + + f = envReplace(f, env) + + if (isDate) { + return new Date(f) + } + + if (isPath) { + const homePattern = platform === 'win32' ? /^~(\/|\\)/ : /^~\// + if (homePattern.test(f) && home) { + f = resolve(home, f.slice(2)) + } else { + f = resolve(f) + } + } + + if (isUmask) { + try { + return umaskParse(f) + } catch (er) { + // let it warn later when we validate + return f + } + } + + if (isNumber && !isNaN(f)) { + f = +f + } + + return f +} + +module.exports = parseField diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/lib/set-envs.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/lib/set-envs.js new file mode 100644 index 0000000000000000000000000000000000000000..12719b56478363a53caddeb5ea3aa0af32cb015a --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/lib/set-envs.js @@ -0,0 +1,113 @@ +// Set environment variables for any non-default configs, +// so that they're already there when we run lifecycle scripts. +// +// See https://github.com/npm/rfcs/pull/90 + +// Return the env key if this is a thing that belongs in the env. +// Ie, if the key isn't a @scope, //nerf.dart, or _private, +// and the value is a string or array. Otherwise return false. +const envKey = (key, val) => { + return !/^[/@_]/.test(key) && + (typeof envVal(val) === 'string') && + `npm_config_${key.replace(/-/g, '_').toLowerCase()}` +} + +const envVal = val => Array.isArray(val) ? val.map(v => envVal(v)).join('\n\n') + : val === null || val === undefined || val === false ? '' + : typeof val === 'object' ? null + : String(val) + +const sameConfigValue = (def, val) => + !Array.isArray(val) || !Array.isArray(def) ? def === val + : sameArrayValue(def, val) + +const sameArrayValue = (def, val) => { + if (def.length !== val.length) { + return false + } + + for (let i = 0; i < def.length; i++) { + /* istanbul ignore next - there are no array configs where the default + * is not an empty array, so this loop is a no-op, but it's the correct + * thing to do if we ever DO add a config like that. */ + if (def[i] !== val[i]) { + return false + } + } + return true +} + +const setEnv = (env, rawKey, rawVal) => { + const val = envVal(rawVal) + const key = envKey(rawKey, val) + if (key && val !== null) { + env[key] = val + } +} + +const setEnvs = (config) => { + // This ensures that all npm config values that are not the defaults are + // shared appropriately with child processes, without false positives. + const { + env, + defaults, + definitions, + list: [cliConf, envConf], + } = config + + env.INIT_CWD = process.cwd() + + // if the key is deprecated, skip it always. 
+
+  // if the key is the default value,
+  //   if the environ is NOT the default value,
+  //     set the environ
+  //   else skip it, it's fine
+  // if the key is NOT the default value,
+  //   if the env is setting it, then leave it (already set)
+  //   otherwise, set the env
+  const cliSet = new Set(Object.keys(cliConf))
+  const envSet = new Set(Object.keys(envConf))
+  for (const key in cliConf) {
+    const { deprecated, envExport = true } = definitions[key] || {}
+    if (deprecated || envExport === false) {
+      continue
+    }
+
+    if (sameConfigValue(defaults[key], cliConf[key])) {
+      // config is the default, if the env thought different, then we
+      // have to set it BACK to the default in the environment.
+      if (!sameConfigValue(envConf[key], cliConf[key])) {
+        setEnv(env, key, cliConf[key])
+      }
+    } else {
+      // config is not the default. if the env wasn't the one to set
+      // it that way, then we have to put it in the env
+      if (!(envSet.has(key) && !cliSet.has(key))) {
+        setEnv(env, key, cliConf[key])
+      }
+    }
+  }
+
+  // also set some other common nice envs that we want to rely on
+  env.HOME = config.home
+  // TODO this may not be the best way to persist these
+  env.npm_config_global_prefix = config.globalPrefix
+  env.npm_config_local_prefix = config.localPrefix
+  if (cliConf.editor) {
+    env.EDITOR = cliConf.editor
+  }
+
+  // note: this doesn't affect the *current* node process, of course, since
+  // it's already started, but it does affect the options passed to scripts.
+  if (cliConf['node-options']) {
+    env.NODE_OPTIONS = cliConf['node-options']
+  }
+  // the node-gyp bin uses this so we always set it
+  env.npm_config_node_gyp = cliConf['node-gyp']
+  // this doesn't have a full definition so we manually export it here
+  env.npm_config_npm_version = cliConf['npm-version'] || 'unknown'
+  env.npm_execpath = config.npmBin
+  env.NODE = env.npm_node_execpath = config.execPath
+}
+
+module.exports = setEnvs
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/lib/type-defs.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/lib/type-defs.js
new file mode 100644
index 0000000000000000000000000000000000000000..3c9dfe19ded113e972edbd3d9b6c833f9d6e5033
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/lib/type-defs.js
@@ -0,0 +1,61 @@
+const nopt = require('nopt')
+
+const { validate: validateUmask } = require('./umask.js')
+
+class Umask {}
+class Semver {}
+const semverValid = require('semver/functions/valid')
+const validateSemver = (data, k, val) => {
+  const valid = semverValid(val)
+  if (!valid) {
+    return false
+  }
+  data[k] = valid
+}
+
+const noptValidatePath = nopt.typeDefs.path.validate
+const validatePath = (data, k, val) => {
+  if (typeof val !== 'string') {
+    return false
+  }
+  return noptValidatePath(data, k, val)
+}
+
+// add descriptions so we can validate more usefully
+module.exports = {
+  ...nopt.typeDefs,
+  semver: {
+    type: Semver,
+    validate: validateSemver,
+    description: 'full valid SemVer string',
+  },
+  Umask: {
+    type: Umask,
+    validate: validateUmask,
+    description: 'octal number in range 0o000..0o777 (0..511)',
+  },
+  url: {
+    ...nopt.typeDefs.url,
+    description: 'full url with "http://"',
+  },
+  path: {
+    ...nopt.typeDefs.path,
+    validate: validatePath,
+    description: 'valid filesystem path',
+  },
+  Number: {
+    ...nopt.typeDefs.Number,
+    description: 'numeric value',
+  },
+  Boolean: {
+    ...nopt.typeDefs.Boolean,
+    description: 'boolean value (true or
false)', + }, + Date: { + ...nopt.typeDefs.Date, + description: 'valid Date string', + }, +} + +// TODO: make nopt less of a global beast so this kludge isn't necessary +nopt.typeDefs = module.exports diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/lib/type-description.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/lib/type-description.js new file mode 100644 index 0000000000000000000000000000000000000000..f5e0d164f9edcfb6bc3a963b47cd39641f9ebfa1 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/lib/type-description.js @@ -0,0 +1,21 @@ +// return the description of the valid values of a field +// returns a string for one thing, or an array of descriptions +const typeDefs = require('./type-defs.js') +const typeDescription = t => { + if (!t || typeof t !== 'function' && typeof t !== 'object') { + return t + } + + if (Array.isArray(t)) { + return t.map(t => typeDescription(t)) + } + + for (const { type, description } of Object.values(typeDefs)) { + if (type === t) { + return description || type + } + } + + return t +} +module.exports = t => [].concat(typeDescription(t)).filter(t => t !== undefined) diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/lib/umask.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/lib/umask.js new file mode 100644 index 0000000000000000000000000000000000000000..2ddc5ca782232615eee4b3fd88054594ce5f5370 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/lib/umask.js @@ -0,0 +1,35 @@ +const parse = val => { + // this is run via nopt and parse field where everything is + // converted to a string first, ignoring coverage for now + // instead of figuring out what is happening under the hood in nopt + // istanbul ignore else + if (typeof val === 'string') { + if (/^0o?[0-7]+$/.test(val)) { + return parseInt(val.replace(/^0o?/, ''), 8) + } else if (/^[1-9][0-9]*$/.test(val)) { + return parseInt(val, 10) + } else { + throw new Error(`invalid umask value: ${val}`) + } + } else { + if (typeof val !== 'number') { + throw new Error(`invalid umask value: ${val}`) + } + val = Math.floor(val) + if (val < 0 || val > 511) { + throw new Error(`invalid umask value: ${val}`) + } + return val + } +} + +const validate = (data, k, val) => { + try { + data[k] = parse(val) + return true + } catch (er) { + return false + } +} + +module.exports = { parse, validate } diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/package.json new file mode 100644 index 0000000000000000000000000000000000000000..651e2135893f48071831873eb3a3d3437c4b9f04 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/config/package.json @@ -0,0 +1,57 @@ +{ + "name": "@npmcli/config", + "version": "10.4.2", + "files": [ + "bin/", + "lib/" + ], + "main": "lib/index.js", + "description": "Configuration management for the npm cli", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/cli.git", + "directory": "workspaces/config" + }, + "author": "GitHub Inc.", + "license": "ISC", + "scripts": { + "test": "tap", + "snap": "tap", + "lint": "npm run eslint", + "postlint": "template-oss-check", + "lintfix": "npm run eslint -- 
--fix", + "posttest": "npm run lint", + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "devDependencies": { + "@npmcli/eslint-config": "^5.0.1", + "@npmcli/mock-globals": "^1.0.0", + "@npmcli/template-oss": "4.25.1", + "tap": "^16.3.8" + }, + "dependencies": { + "@npmcli/map-workspaces": "^5.0.0", + "@npmcli/package-json": "^7.0.0", + "ci-info": "^4.0.0", + "ini": "^5.0.0", + "nopt": "^8.1.0", + "proc-log": "^5.0.0", + "semver": "^7.3.5", + "walk-up-path": "^4.0.0" + }, + "engines": { + "node": "^20.17.0 || >=22.9.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.25.1", + "content": "../../scripts/template-oss/index.js" + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/fs/LICENSE.md b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/fs/LICENSE.md new file mode 100644 index 0000000000000000000000000000000000000000..5fc208ff122e08e2ca9777f80b0551617b30ba2a --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/fs/LICENSE.md @@ -0,0 +1,20 @@ + + +ISC License + +Copyright npm, Inc. + +Permission to use, copy, modify, and/or distribute this +software for any purpose with or without fee is hereby +granted, provided that the above copyright notice and this +permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL +WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO +EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/fs/lib/common/get-options.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/fs/lib/common/get-options.js new file mode 100644 index 0000000000000000000000000000000000000000..cb5982f79077acc90cf962196f05c9a835bab7c8 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/fs/lib/common/get-options.js @@ -0,0 +1,20 @@ +// given an input that may or may not be an object, return an object that has +// a copy of every defined property listed in 'copy'. 
if the input is not an +// object, assign it to the property named by 'wrap' +const getOptions = (input, { copy, wrap }) => { + const result = {} + + if (input && typeof input === 'object') { + for (const prop of copy) { + if (input[prop] !== undefined) { + result[prop] = input[prop] + } + } + } else { + result[wrap] = input + } + + return result +} + +module.exports = getOptions diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/fs/lib/common/node.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/fs/lib/common/node.js new file mode 100644 index 0000000000000000000000000000000000000000..4d13bc037359d7d2b7cf26f62dd4231232721370 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/fs/lib/common/node.js @@ -0,0 +1,9 @@ +const semver = require('semver') + +const satisfies = (range) => { + return semver.satisfies(process.version, range, { includePrerelease: true }) +} + +module.exports = { + satisfies, +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/fs/lib/cp/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/fs/lib/cp/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..93546dfb7655bfafd17a87ab7f3391c9e68f4130 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/fs/lib/cp/LICENSE @@ -0,0 +1,15 @@ +(The MIT License) + +Copyright (c) 2011-2017 JP Richardson + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files +(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, + merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS +OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, + ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/fs/lib/cp/errors.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/fs/lib/cp/errors.js new file mode 100644 index 0000000000000000000000000000000000000000..1cd1e05d0c533da61a36f9af9cffef7185a728dc --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/fs/lib/cp/errors.js @@ -0,0 +1,129 @@ +'use strict' +const { inspect } = require('util') + +// adapted from node's internal/errors +// https://github.com/nodejs/node/blob/c8a04049/lib/internal/errors.js + +// close copy of node's internal SystemError class. +class SystemError { + constructor (code, prefix, context) { + // XXX context.code is undefined in all constructors used in cp/polyfill + // that may be a bug copied from node, maybe the constructor should use + // `code` not `errno`? 
nodejs/node#41104 + let message = `${prefix}: ${context.syscall} returned ` + + `${context.code} (${context.message})` + + if (context.path !== undefined) { + message += ` ${context.path}` + } + if (context.dest !== undefined) { + message += ` => ${context.dest}` + } + + this.code = code + Object.defineProperties(this, { + name: { + value: 'SystemError', + enumerable: false, + writable: true, + configurable: true, + }, + message: { + value: message, + enumerable: false, + writable: true, + configurable: true, + }, + info: { + value: context, + enumerable: true, + configurable: true, + writable: false, + }, + errno: { + get () { + return context.errno + }, + set (value) { + context.errno = value + }, + enumerable: true, + configurable: true, + }, + syscall: { + get () { + return context.syscall + }, + set (value) { + context.syscall = value + }, + enumerable: true, + configurable: true, + }, + }) + + if (context.path !== undefined) { + Object.defineProperty(this, 'path', { + get () { + return context.path + }, + set (value) { + context.path = value + }, + enumerable: true, + configurable: true, + }) + } + + if (context.dest !== undefined) { + Object.defineProperty(this, 'dest', { + get () { + return context.dest + }, + set (value) { + context.dest = value + }, + enumerable: true, + configurable: true, + }) + } + } + + toString () { + return `${this.name} [${this.code}]: ${this.message}` + } + + [Symbol.for('nodejs.util.inspect.custom')] (_recurseTimes, ctx) { + return inspect(this, { + ...ctx, + getters: true, + customInspect: false, + }) + } +} + +function E (code, message) { + module.exports[code] = class NodeError extends SystemError { + constructor (ctx) { + super(code, message, ctx) + } + } +} + +E('ERR_FS_CP_DIR_TO_NON_DIR', 'Cannot overwrite directory with non-directory') +E('ERR_FS_CP_EEXIST', 'Target already exists') +E('ERR_FS_CP_EINVAL', 'Invalid src or dest') +E('ERR_FS_CP_FIFO_PIPE', 'Cannot copy a FIFO pipe') +E('ERR_FS_CP_NON_DIR_TO_DIR', 'Cannot overwrite non-directory with directory') +E('ERR_FS_CP_SOCKET', 'Cannot copy a socket file') +E('ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY', 'Cannot overwrite symlink in subdirectory of self') +E('ERR_FS_CP_UNKNOWN', 'Cannot copy an unknown file type') +E('ERR_FS_EISDIR', 'Path is a directory') + +module.exports.ERR_INVALID_ARG_TYPE = class ERR_INVALID_ARG_TYPE extends Error { + constructor (name, expected, actual) { + super() + this.code = 'ERR_INVALID_ARG_TYPE' + this.message = `The ${name} argument must be ${expected}. 
Received ${typeof actual}` + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/fs/lib/cp/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/fs/lib/cp/index.js new file mode 100644 index 0000000000000000000000000000000000000000..972ce7aa12abef0ed6b327aa55b5cca9547fabda --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/fs/lib/cp/index.js @@ -0,0 +1,22 @@ +const fs = require('fs/promises') +const getOptions = require('../common/get-options.js') +const node = require('../common/node.js') +const polyfill = require('./polyfill.js') + +// node 16.7.0 added fs.cp +const useNative = node.satisfies('>=16.7.0') + +const cp = async (src, dest, opts) => { + const options = getOptions(opts, { + copy: ['dereference', 'errorOnExist', 'filter', 'force', 'preserveTimestamps', 'recursive'], + }) + + // the polyfill is tested separately from this module, no need to hack + // process.version to try to trigger it just for coverage + // istanbul ignore next + return useNative + ? fs.cp(src, dest, options) + : polyfill(src, dest, options) +} + +module.exports = cp diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/fs/lib/cp/polyfill.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/fs/lib/cp/polyfill.js new file mode 100644 index 0000000000000000000000000000000000000000..80eb10de971918910ffbc9c6543accbd9ac7015e --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/fs/lib/cp/polyfill.js @@ -0,0 +1,428 @@ +// this file is a modified version of the code in node 17.2.0 +// which is, in turn, a modified version of the fs-extra module on npm +// node core changes: +// - Use of the assert module has been replaced with core's error system. +// - All code related to the glob dependency has been removed. +// - Bring your own custom fs module is not currently supported. +// - Some basic code cleanup. 
+// changes here: +// - remove all callback related code +// - drop sync support +// - change assertions back to non-internal methods (see options.js) +// - throws ENOTDIR when rmdir gets an ENOENT for a path that exists in Windows +'use strict' + +const { + ERR_FS_CP_DIR_TO_NON_DIR, + ERR_FS_CP_EEXIST, + ERR_FS_CP_EINVAL, + ERR_FS_CP_FIFO_PIPE, + ERR_FS_CP_NON_DIR_TO_DIR, + ERR_FS_CP_SOCKET, + ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY, + ERR_FS_CP_UNKNOWN, + ERR_FS_EISDIR, + ERR_INVALID_ARG_TYPE, +} = require('./errors.js') +const { + constants: { + errno: { + EEXIST, + EISDIR, + EINVAL, + ENOTDIR, + }, + }, +} = require('os') +const { + chmod, + copyFile, + lstat, + mkdir, + readdir, + readlink, + stat, + symlink, + unlink, + utimes, +} = require('fs/promises') +const { + dirname, + isAbsolute, + join, + parse, + resolve, + sep, + toNamespacedPath, +} = require('path') +const { fileURLToPath } = require('url') + +const defaultOptions = { + dereference: false, + errorOnExist: false, + filter: undefined, + force: true, + preserveTimestamps: false, + recursive: false, +} + +async function cp (src, dest, opts) { + if (opts != null && typeof opts !== 'object') { + throw new ERR_INVALID_ARG_TYPE('options', ['Object'], opts) + } + return cpFn( + toNamespacedPath(getValidatedPath(src)), + toNamespacedPath(getValidatedPath(dest)), + { ...defaultOptions, ...opts }) +} + +function getValidatedPath (fileURLOrPath) { + const path = fileURLOrPath != null && fileURLOrPath.href + && fileURLOrPath.origin + ? fileURLToPath(fileURLOrPath) + : fileURLOrPath + return path +} + +async function cpFn (src, dest, opts) { + // Warn about using preserveTimestamps on 32-bit node + // istanbul ignore next + if (opts.preserveTimestamps && process.arch === 'ia32') { + const warning = 'Using the preserveTimestamps option in 32-bit ' + + 'node is not recommended' + process.emitWarning(warning, 'TimestampPrecisionWarning') + } + const stats = await checkPaths(src, dest, opts) + const { srcStat, destStat } = stats + await checkParentPaths(src, srcStat, dest) + if (opts.filter) { + return handleFilter(checkParentDir, destStat, src, dest, opts) + } + return checkParentDir(destStat, src, dest, opts) +} + +async function checkPaths (src, dest, opts) { + const { 0: srcStat, 1: destStat } = await getStats(src, dest, opts) + if (destStat) { + if (areIdentical(srcStat, destStat)) { + throw new ERR_FS_CP_EINVAL({ + message: 'src and dest cannot be the same', + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + if (srcStat.isDirectory() && !destStat.isDirectory()) { + throw new ERR_FS_CP_DIR_TO_NON_DIR({ + message: `cannot overwrite directory ${src} ` + + `with non-directory ${dest}`, + path: dest, + syscall: 'cp', + errno: EISDIR, + }) + } + if (!srcStat.isDirectory() && destStat.isDirectory()) { + throw new ERR_FS_CP_NON_DIR_TO_DIR({ + message: `cannot overwrite non-directory ${src} ` + + `with directory ${dest}`, + path: dest, + syscall: 'cp', + errno: ENOTDIR, + }) + } + } + + if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { + throw new ERR_FS_CP_EINVAL({ + message: `cannot copy ${src} to a subdirectory of self ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + return { srcStat, destStat } +} + +function areIdentical (srcStat, destStat) { + return destStat.ino && destStat.dev && destStat.ino === srcStat.ino && + destStat.dev === srcStat.dev +} + +function getStats (src, dest, opts) { + const statFunc = opts.dereference ? 
+ (file) => stat(file, { bigint: true }) : + (file) => lstat(file, { bigint: true }) + return Promise.all([ + statFunc(src), + statFunc(dest).catch((err) => { + // istanbul ignore next: unsure how to cover. + if (err.code === 'ENOENT') { + return null + } + // istanbul ignore next: unsure how to cover. + throw err + }), + ]) +} + +async function checkParentDir (destStat, src, dest, opts) { + const destParent = dirname(dest) + const dirExists = await pathExists(destParent) + if (dirExists) { + return getStatsForCopy(destStat, src, dest, opts) + } + await mkdir(destParent, { recursive: true }) + return getStatsForCopy(destStat, src, dest, opts) +} + +function pathExists (dest) { + return stat(dest).then( + () => true, + // istanbul ignore next: not sure when this would occur + (err) => (err.code === 'ENOENT' ? false : Promise.reject(err))) +} + +// Recursively check if dest parent is a subdirectory of src. +// It works for all file types including symlinks since it +// checks the src and dest inodes. It starts from the deepest +// parent and stops once it reaches the src parent or the root path. +async function checkParentPaths (src, srcStat, dest) { + const srcParent = resolve(dirname(src)) + const destParent = resolve(dirname(dest)) + if (destParent === srcParent || destParent === parse(destParent).root) { + return + } + let destStat + try { + destStat = await stat(destParent, { bigint: true }) + } catch (err) { + // istanbul ignore else: not sure when this would occur + if (err.code === 'ENOENT') { + return + } + // istanbul ignore next: not sure when this would occur + throw err + } + if (areIdentical(srcStat, destStat)) { + throw new ERR_FS_CP_EINVAL({ + message: `cannot copy ${src} to a subdirectory of self ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + return checkParentPaths(src, srcStat, destParent) +} + +const normalizePathToArray = (path) => + resolve(path).split(sep).filter(Boolean) + +// Return true if dest is a subdir of src, otherwise false. +// It only checks the path strings. +function isSrcSubdir (src, dest) { + const srcArr = normalizePathToArray(src) + const destArr = normalizePathToArray(dest) + return srcArr.every((cur, i) => destArr[i] === cur) +} + +async function handleFilter (onInclude, destStat, src, dest, opts, cb) { + const include = await opts.filter(src, dest) + if (include) { + return onInclude(destStat, src, dest, opts, cb) + } +} + +function startCopy (destStat, src, dest, opts) { + if (opts.filter) { + return handleFilter(getStatsForCopy, destStat, src, dest, opts) + } + return getStatsForCopy(destStat, src, dest, opts) +} + +async function getStatsForCopy (destStat, src, dest, opts) { + const statFn = opts.dereference ? 
stat : lstat + const srcStat = await statFn(src) + // istanbul ignore else: can't portably test FIFO + if (srcStat.isDirectory() && opts.recursive) { + return onDir(srcStat, destStat, src, dest, opts) + } else if (srcStat.isDirectory()) { + throw new ERR_FS_EISDIR({ + message: `${src} is a directory (not copied)`, + path: src, + syscall: 'cp', + errno: EINVAL, + }) + } else if (srcStat.isFile() || + srcStat.isCharacterDevice() || + srcStat.isBlockDevice()) { + return onFile(srcStat, destStat, src, dest, opts) + } else if (srcStat.isSymbolicLink()) { + return onLink(destStat, src, dest) + } else if (srcStat.isSocket()) { + throw new ERR_FS_CP_SOCKET({ + message: `cannot copy a socket file: ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } else if (srcStat.isFIFO()) { + throw new ERR_FS_CP_FIFO_PIPE({ + message: `cannot copy a FIFO pipe: ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + // istanbul ignore next: should be unreachable + throw new ERR_FS_CP_UNKNOWN({ + message: `cannot copy an unknown file type: ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) +} + +function onFile (srcStat, destStat, src, dest, opts) { + if (!destStat) { + return _copyFile(srcStat, src, dest, opts) + } + return mayCopyFile(srcStat, src, dest, opts) +} + +async function mayCopyFile (srcStat, src, dest, opts) { + if (opts.force) { + await unlink(dest) + return _copyFile(srcStat, src, dest, opts) + } else if (opts.errorOnExist) { + throw new ERR_FS_CP_EEXIST({ + message: `${dest} already exists`, + path: dest, + syscall: 'cp', + errno: EEXIST, + }) + } +} + +async function _copyFile (srcStat, src, dest, opts) { + await copyFile(src, dest) + if (opts.preserveTimestamps) { + return handleTimestampsAndMode(srcStat.mode, src, dest) + } + return setDestMode(dest, srcStat.mode) +} + +async function handleTimestampsAndMode (srcMode, src, dest) { + // Make sure the file is writable before setting the timestamp + // otherwise open fails with EPERM when invoked with 'r+' + // (through utimes call) + if (fileIsNotWritable(srcMode)) { + await makeFileWritable(dest, srcMode) + return setDestTimestampsAndMode(srcMode, src, dest) + } + return setDestTimestampsAndMode(srcMode, src, dest) +} + +function fileIsNotWritable (srcMode) { + return (srcMode & 0o200) === 0 +} + +function makeFileWritable (dest, srcMode) { + return setDestMode(dest, srcMode | 0o200) +} + +async function setDestTimestampsAndMode (srcMode, src, dest) { + await setDestTimestamps(src, dest) + return setDestMode(dest, srcMode) +} + +function setDestMode (dest, srcMode) { + return chmod(dest, srcMode) +} + +async function setDestTimestamps (src, dest) { + // The initial srcStat.atime cannot be trusted + // because it is modified by the read(2) system call + // (See https://nodejs.org/api/fs.html#fs_stat_time_values) + const updatedSrcStat = await stat(src) + return utimes(dest, updatedSrcStat.atime, updatedSrcStat.mtime) +} + +function onDir (srcStat, destStat, src, dest, opts) { + if (!destStat) { + return mkDirAndCopy(srcStat.mode, src, dest, opts) + } + return copyDir(src, dest, opts) +} + +async function mkDirAndCopy (srcMode, src, dest, opts) { + await mkdir(dest) + await copyDir(src, dest, opts) + return setDestMode(dest, srcMode) +} + +async function copyDir (src, dest, opts) { + const dir = await readdir(src) + for (let i = 0; i < dir.length; i++) { + const item = dir[i] + const srcItem = join(src, item) + const destItem = join(dest, item) + const { destStat } = await checkPaths(srcItem, destItem, opts) 
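+    // note: checkPaths re-validates each child pair here, so same-inode
+    // copies and dir/non-dir conflicts fail per entry before any data moves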
+ await startCopy(destStat, srcItem, destItem, opts) + } +} + +async function onLink (destStat, src, dest) { + let resolvedSrc = await readlink(src) + if (!isAbsolute(resolvedSrc)) { + resolvedSrc = resolve(dirname(src), resolvedSrc) + } + if (!destStat) { + return symlink(resolvedSrc, dest) + } + let resolvedDest + try { + resolvedDest = await readlink(dest) + } catch (err) { + // Dest exists and is a regular file or directory, + // Windows may throw UNKNOWN error. If dest already exists, + // fs throws error anyway, so no need to guard against it here. + // istanbul ignore next: can only test on windows + if (err.code === 'EINVAL' || err.code === 'UNKNOWN') { + return symlink(resolvedSrc, dest) + } + // istanbul ignore next: should not be possible + throw err + } + if (!isAbsolute(resolvedDest)) { + resolvedDest = resolve(dirname(dest), resolvedDest) + } + if (isSrcSubdir(resolvedSrc, resolvedDest)) { + throw new ERR_FS_CP_EINVAL({ + message: `cannot copy ${resolvedSrc} to a subdirectory of self ` + + `${resolvedDest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + // Do not copy if src is a subdir of dest since unlinking + // dest in this case would result in removing src contents + // and therefore a broken symlink would be created. + const srcStat = await stat(src) + if (srcStat.isDirectory() && isSrcSubdir(resolvedDest, resolvedSrc)) { + throw new ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY({ + message: `cannot overwrite ${resolvedDest} with ${resolvedSrc}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + return copyLink(resolvedSrc, dest) +} + +async function copyLink (resolvedSrc, dest) { + await unlink(dest) + return symlink(resolvedSrc, dest) +} + +module.exports = cp diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/fs/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/fs/lib/index.js new file mode 100644 index 0000000000000000000000000000000000000000..81c746304cc42803604eabfe73a212dc3c3aae4a --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/fs/lib/index.js @@ -0,0 +1,13 @@ +'use strict' + +const cp = require('./cp/index.js') +const withTempDir = require('./with-temp-dir.js') +const readdirScoped = require('./readdir-scoped.js') +const moveFile = require('./move-file.js') + +module.exports = { + cp, + withTempDir, + readdirScoped, + moveFile, +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/fs/lib/move-file.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/fs/lib/move-file.js new file mode 100644 index 0000000000000000000000000000000000000000..d56e06d384659ae95d8b205ec01533221bf41319 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/fs/lib/move-file.js @@ -0,0 +1,78 @@ +const { dirname, join, resolve, relative, isAbsolute } = require('path') +const fs = require('fs/promises') + +const pathExists = async path => { + try { + await fs.access(path) + return true + } catch (er) { + return er.code !== 'ENOENT' + } +} + +const moveFile = async (source, destination, options = {}, root = true, symlinks = []) => { + if (!source || !destination) { + throw new TypeError('`source` and `destination` file required') + } + + options = { + overwrite: true, + ...options, + } + + if (!options.overwrite && await pathExists(destination)) { + throw new Error(`The destination file exists: ${destination}`) + } + + await 
fs.mkdir(dirname(destination), { recursive: true }) + + try { + await fs.rename(source, destination) + } catch (error) { + if (error.code === 'EXDEV' || error.code === 'EPERM') { + const sourceStat = await fs.lstat(source) + if (sourceStat.isDirectory()) { + const files = await fs.readdir(source) + await Promise.all(files.map((file) => + moveFile(join(source, file), join(destination, file), options, false, symlinks) + )) + } else if (sourceStat.isSymbolicLink()) { + symlinks.push({ source, destination }) + } else { + await fs.copyFile(source, destination) + } + } else { + throw error + } + } + + if (root) { + await Promise.all(symlinks.map(async ({ source: symSource, destination: symDestination }) => { + let target = await fs.readlink(symSource) + // junction symlinks in windows will be absolute paths, so we need to + // make sure they point to the symlink destination + if (isAbsolute(target)) { + target = resolve(symDestination, relative(symSource, target)) + } + // try to determine what the actual file is so we can create the correct + // type of symlink in windows + let targetStat = 'file' + try { + targetStat = await fs.stat(resolve(dirname(symSource), target)) + if (targetStat.isDirectory()) { + targetStat = 'junction' + } + } catch { + // targetStat remains 'file' + } + await fs.symlink( + target, + symDestination, + targetStat + ) + })) + await fs.rm(source, { recursive: true, force: true }) + } +} + +module.exports = moveFile diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/fs/lib/readdir-scoped.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/fs/lib/readdir-scoped.js new file mode 100644 index 0000000000000000000000000000000000000000..cd601dfbe7486b40fa69119e31d855b4f97baa4a --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/fs/lib/readdir-scoped.js @@ -0,0 +1,20 @@ +const { readdir } = require('fs/promises') +const { join } = require('path') + +const readdirScoped = async (dir) => { + const results = [] + + for (const item of await readdir(dir)) { + if (item.startsWith('@')) { + for (const scopedItem of await readdir(join(dir, item))) { + results.push(join(item, scopedItem)) + } + } else { + results.push(item) + } + } + + return results +} + +module.exports = readdirScoped diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/fs/lib/with-temp-dir.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/fs/lib/with-temp-dir.js new file mode 100644 index 0000000000000000000000000000000000000000..0738ac4f29e1bedacf83737eb2a30cb8aefa3b51 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/fs/lib/with-temp-dir.js @@ -0,0 +1,39 @@ +const { join, sep } = require('path') + +const getOptions = require('./common/get-options.js') +const { mkdir, mkdtemp, rm } = require('fs/promises') + +// create a temp directory, ensure its permissions match its parent, then call +// the supplied function passing it the path to the directory. 
clean up after +// the function finishes, whether it throws or not +const withTempDir = async (root, fn, opts) => { + const options = getOptions(opts, { + copy: ['tmpPrefix'], + }) + // create the directory + await mkdir(root, { recursive: true }) + + const target = await mkdtemp(join(`${root}${sep}`, options.tmpPrefix || '')) + let err + let result + + try { + result = await fn(target) + } catch (_err) { + err = _err + } + + try { + await rm(target, { force: true, recursive: true }) + } catch { + // ignore errors + } + + if (err) { + throw err + } + + return result +} + +module.exports = withTempDir diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/fs/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/fs/package.json new file mode 100644 index 0000000000000000000000000000000000000000..e4063ec8752437739ffd375019a6971a8cbe110d --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/fs/package.json @@ -0,0 +1,54 @@ +{ + "name": "@npmcli/fs", + "version": "4.0.0", + "description": "filesystem utilities for the npm cli", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "snap": "tap", + "test": "tap", + "npmclilint": "npmcli-lint", + "lint": "npm run eslint", + "lintfix": "npm run eslint -- --fix", + "posttest": "npm run lint", + "postsnap": "npm run lintfix --", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/fs.git" + }, + "keywords": [ + "npm", + "oss" + ], + "author": "GitHub Inc.", + "license": "ISC", + "devDependencies": { + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", + "tap": "^16.0.1" + }, + "dependencies": { + "semver": "^7.3.5" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.23.3", + "publish": true + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..8f90f96f4c6c5076027c2a01bd117f42a8d16bb5 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE NPM DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE NPM BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, +OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, +DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS +ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS +SOFTWARE. 
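Note: taken together, the @npmcli/fs files above expose cp, moveFile, readdirScoped, and withTempDir as one small toolkit. Below is a minimal usage sketch combining withTempDir and moveFile; the paths, prefix, and file names are illustrative assumptions, not taken from the vendored sources.

const { withTempDir, moveFile } = require('@npmcli/fs')
const { writeFile } = require('fs/promises')

// stage a file in a scratch directory, then move it into its final home
// (all paths here are made up for illustration)
withTempDir('/tmp/scratch', async (tmp) => {
  const staged = `${tmp}/artifact.txt`
  await writeFile(staged, 'payload')
  // moveFile creates the destination's parent directories as needed
  await moveFile(staged, '/tmp/out/artifact.txt')
}, { tmpPrefix: 'demo-' }).catch(console.error)

Because withTempDir removes the scratch directory whether the callback resolves or throws, the move has to happen inside the callback.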
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/clone.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/clone.js new file mode 100644 index 0000000000000000000000000000000000000000..e25a4d14268216d9ea8b68709e5142f0388e5eae --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/clone.js @@ -0,0 +1,172 @@ +// The goal here is to minimize both git workload and +// the number of refs we download over the network. +// +// Every method ends up with the checked out working dir +// at the specified ref, and resolves with the git sha. + +// Only certain whitelisted hosts get shallow cloning. +// Many hosts (including GHE) don't always support it. +// A failed shallow fetch takes a LOT longer than a full +// fetch in most cases, so we skip it entirely. +// Set opts.gitShallow = true/false to force this behavior +// one way or the other. +const shallowHosts = new Set([ + 'github.com', + 'gist.github.com', + 'gitlab.com', + 'bitbucket.com', + 'bitbucket.org', +]) +// we have to use url.parse until we add the same shim that hosted-git-info has +// to handle scp:// urls +const { parse } = require('url') // eslint-disable-line node/no-deprecated-api +const path = require('path') + +const getRevs = require('./revs.js') +const spawn = require('./spawn.js') +const { isWindows } = require('./utils.js') + +const pickManifest = require('npm-pick-manifest') +const fs = require('fs/promises') + +module.exports = (repo, ref = 'HEAD', target = null, opts = {}) => + getRevs(repo, opts).then(revs => clone( + repo, + revs, + ref, + resolveRef(revs, ref, opts), + target || defaultTarget(repo, opts.cwd), + opts + )) + +const maybeShallow = (repo, opts) => { + if (opts.gitShallow === false || opts.gitShallow) { + return opts.gitShallow + } + return shallowHosts.has(parse(repo).host) +} + +const defaultTarget = (repo, /* istanbul ignore next */ cwd = process.cwd()) => + path.resolve(cwd, path.basename(repo.replace(/[/\\]?\.git$/, ''))) + +const clone = (repo, revs, ref, revDoc, target, opts) => { + if (!revDoc) { + return unresolved(repo, ref, target, opts) + } + if (revDoc.sha === revs.refs.HEAD.sha) { + return plain(repo, revDoc, target, opts) + } + if (revDoc.type === 'tag' || revDoc.type === 'branch') { + return branch(repo, revDoc, target, opts) + } + return other(repo, revDoc, target, opts) +} + +const resolveRef = (revs, ref, opts) => { + const { spec = {} } = opts + ref = spec.gitCommittish || ref + /* istanbul ignore next - will fail anyway, can't pull */ + if (!revs) { + return null + } + if (spec.gitRange) { + return pickManifest(revs, spec.gitRange, opts) + } + if (!ref) { + return revs.refs.HEAD + } + if (revs.refs[ref]) { + return revs.refs[ref] + } + if (revs.shas[ref]) { + return revs.refs[revs.shas[ref][0]] + } + return null +} + +// pull request or some other kind of advertised ref +const other = (repo, revDoc, target, opts) => { + const shallow = maybeShallow(repo, opts) + + const fetchOrigin = ['fetch', 'origin', revDoc.rawRef] + .concat(shallow ? ['--depth=1'] : []) + + const git = (args) => spawn(args, { ...opts, cwd: target }) + return fs.mkdir(target, { recursive: true }) + .then(() => git(['init'])) + .then(() => isWindows(opts) + ? 
git(['config', '--local', '--add', 'core.longpaths', 'true']) + : null) + .then(() => git(['remote', 'add', 'origin', repo])) + .then(() => git(fetchOrigin)) + .then(() => git(['checkout', revDoc.sha])) + .then(() => updateSubmodules(target, opts)) + .then(() => revDoc.sha) +} + +// tag or branches. use -b +const branch = (repo, revDoc, target, opts) => { + const args = [ + 'clone', + '-b', + revDoc.ref, + repo, + target, + '--recurse-submodules', + ] + if (maybeShallow(repo, opts)) { + args.push('--depth=1') + } + if (isWindows(opts)) { + args.push('--config', 'core.longpaths=true') + } + return spawn(args, opts).then(() => revDoc.sha) +} + +// just the head. clone it +const plain = (repo, revDoc, target, opts) => { + const args = [ + 'clone', + repo, + target, + '--recurse-submodules', + ] + if (maybeShallow(repo, opts)) { + args.push('--depth=1') + } + if (isWindows(opts)) { + args.push('--config', 'core.longpaths=true') + } + return spawn(args, opts).then(() => revDoc.sha) +} + +const updateSubmodules = async (target, opts) => { + const hasSubmodules = await fs.stat(`${target}/.gitmodules`) + .then(() => true) + .catch(() => false) + if (!hasSubmodules) { + return null + } + return spawn([ + 'submodule', + 'update', + '-q', + '--init', + '--recursive', + ], { ...opts, cwd: target }) +} + +const unresolved = (repo, ref, target, opts) => { + // can't do this one shallowly, because the ref isn't advertised + // but we can avoid checking out the working dir twice, at least + const lp = isWindows(opts) ? ['--config', 'core.longpaths=true'] : [] + const cloneArgs = ['clone', '--mirror', '-q', repo, target + '/.git'] + const git = (args) => spawn(args, { ...opts, cwd: target }) + return fs.mkdir(target, { recursive: true }) + .then(() => git(cloneArgs.concat(lp))) + .then(() => git(['init'])) + .then(() => git(['checkout', ref])) + .then(() => updateSubmodules(target, opts)) + .then(() => git(['rev-parse', '--revs-only', 'HEAD'])) + .then(({ stdout }) => stdout.trim()) +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/errors.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/errors.js new file mode 100644 index 0000000000000000000000000000000000000000..3ceaa4581166909ab65eada730b21f14ee768fc9 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/errors.js @@ -0,0 +1,36 @@ + +const maxRetry = 3 + +class GitError extends Error { + shouldRetry () { + return false + } +} + +class GitConnectionError extends GitError { + constructor () { + super('A git connection error occurred') + } + + shouldRetry (number) { + return number < maxRetry + } +} + +class GitPathspecError extends GitError { + constructor () { + super('The git reference could not be found') + } +} + +class GitUnknownError extends GitError { + constructor () { + super('An unknown git error occurred') + } +} + +module.exports = { + GitConnectionError, + GitPathspecError, + GitUnknownError, +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/find.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/find.js new file mode 100644 index 0000000000000000000000000000000000000000..34bd310b88e5d57b61566a453e9e6dac07f695af --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/find.js @@ -0,0 +1,15 @@ +const is = require('./is.js') +const { dirname } = 
require('path') + +module.exports = async ({ cwd = process.cwd(), root } = {}) => { + while (true) { + if (await is({ cwd })) { + return cwd + } + const next = dirname(cwd) + if (cwd === root || cwd === next) { + return null + } + cwd = next + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/index.js new file mode 100644 index 0000000000000000000000000000000000000000..10a65f782e6da533770b9db27b3bb6f1b812a53f --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/index.js @@ -0,0 +1,9 @@ +module.exports = { + clone: require('./clone.js'), + revs: require('./revs.js'), + spawn: require('./spawn.js'), + is: require('./is.js'), + find: require('./find.js'), + isClean: require('./is-clean.js'), + errors: require('./errors.js'), +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/is-clean.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/is-clean.js new file mode 100644 index 0000000000000000000000000000000000000000..182373be9419352d4e699a42dfa69a738909120d --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/is-clean.js @@ -0,0 +1,6 @@ +const spawn = require('./spawn.js') + +module.exports = (opts = {}) => + spawn(['status', '--porcelain=v1', '-uno'], opts) + .then(res => !res.stdout.trim().split(/\r?\n+/) + .map(l => l.trim()).filter(l => l).length) diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/is.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/is.js new file mode 100644 index 0000000000000000000000000000000000000000..f5a0e8754f10dc85ef0cb669bedadfedb159bcb6 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/is.js @@ -0,0 +1,4 @@ +// not an airtight indicator, but a good gut-check to even bother trying +const { stat } = require('fs/promises') +module.exports = ({ cwd = process.cwd() } = {}) => + stat(cwd + '/.git').then(() => true, () => false) diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/lines-to-revs.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/lines-to-revs.js new file mode 100644 index 0000000000000000000000000000000000000000..6bd7e7a4c153154c84783613f441dc946515e97c --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/lines-to-revs.js @@ -0,0 +1,147 @@ +// turn an array of lines from `git ls-remote` into a thing +// vaguely resembling a packument, where docs are a resolved ref + +const semver = require('semver') + +module.exports = lines => finish(lines.reduce(linesToRevsReducer, { + versions: {}, + 'dist-tags': {}, + refs: {}, + shas: {}, +})) + +const finish = revs => distTags(shaList(peelTags(revs))) + +// We can check out shallow clones on specific SHAs if we have a ref +const shaList = revs => { + Object.keys(revs.refs).forEach(ref => { + const doc = revs.refs[ref] + if (!revs.shas[doc.sha]) { + revs.shas[doc.sha] = [ref] + } else { + revs.shas[doc.sha].push(ref) + } + }) + return revs +} + +// Replace any tags with their ^{} counterparts, if those exist +const peelTags = revs => { + Object.keys(revs.refs).filter(ref => 
ref.endsWith('^{}')).forEach(ref => {
+    const peeled = revs.refs[ref]
+    const unpeeled = revs.refs[ref.replace(/\^\{\}$/, '')]
+    if (unpeeled) {
+      unpeeled.sha = peeled.sha
+      delete revs.refs[ref]
+    }
+  })
+  return revs
+}
+
+const distTags = revs => {
+  // not entirely sure what situations would result in an
+  // ichabod repo, but best to be careful in Sleepy Hollow anyway
+  const HEAD = revs.refs.HEAD || /* istanbul ignore next */ {}
+  const versions = Object.keys(revs.versions)
+  versions.forEach(v => {
+    // simulate a dist-tags with latest pointing at the
+    // 'latest' branch if one exists and is a version,
+    // or HEAD if not.
+    const ver = revs.versions[v]
+    if (revs.refs.latest && ver.sha === revs.refs.latest.sha) {
+      revs['dist-tags'].latest = v
+    } else if (ver.sha === HEAD.sha) {
+      revs['dist-tags'].HEAD = v
+      if (!revs.refs.latest) {
+        revs['dist-tags'].latest = v
+      }
+    }
+  })
+  return revs
+}
+
+const refType = ref => {
+  if (ref.startsWith('refs/tags/')) {
+    return 'tag'
+  }
+  if (ref.startsWith('refs/heads/')) {
+    return 'branch'
+  }
+  if (ref.startsWith('refs/pull/')) {
+    return 'pull'
+  }
+  if (ref === 'HEAD') {
+    return 'head'
+  }
+  // Could be anything, ignore for now
+  /* istanbul ignore next */
+  return 'other'
+}
+
+// return the doc, or null if we should ignore it.
+const lineToRevDoc = line => {
+  const split = line.trim().split(/\s+/, 2)
+  if (split.length < 2) {
+    return null
+  }
+
+  const sha = split[0].trim()
+  const rawRef = split[1].trim()
+  const type = refType(rawRef)
+
+  if (type === 'tag') {
+    // refs/tags/foo^{} is the 'peeled tag', ie the commit
+    // that is tagged by refs/tags/foo; they resolve to the same
+    // content, just different objects in git's data structure.
+    // But, we care about the thing the tag POINTS to, not the tag
+    // object itself, so we only look at the peeled tag refs, and
+    // ignore the pointer.
+    // For now, though, we have to save both, because some tags
+    // don't have peels, if they were not annotated.
+    const ref = rawRef.slice('refs/tags/'.length)
+    return { sha, ref, rawRef, type }
+  }
+
+  if (type === 'branch') {
+    const ref = rawRef.slice('refs/heads/'.length)
+    return { sha, ref, rawRef, type }
+  }
+
+  if (type === 'pull') {
+    // NB: merged pull requests installable with #pull/123/merge
+    // for the merged pr, or #pull/123 for the PR head
+    const ref = rawRef.slice('refs/'.length).replace(/\/head$/, '')
+    return { sha, ref, rawRef, type }
+  }
+
+  if (type === 'head') {
+    const ref = 'HEAD'
+    return { sha, ref, rawRef, type }
+  }
+
+  // at this point, all we can do is leave the ref un-munged
+  return { sha, ref: rawRef, rawRef, type }
+}
+
+const linesToRevsReducer = (revs, line) => {
+  const doc = lineToRevDoc(line)
+
+  if (!doc) {
+    return revs
+  }
+
+  revs.refs[doc.ref] = doc
+  revs.refs[doc.rawRef] = doc
+
+  if (doc.type === 'tag') {
+    // try to pull a semver value out of tags like `release-v1.2.3`
+    // which is a pretty common pattern.
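+    // e.g. both 'v1.2.3' and 'release-v1.2.3' yield version 1.2.3; refs
+    // already ending in '^{}' are skipped because peelTags() folds their
+    // sha into the base tag ref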
+ const match = !doc.ref.endsWith('^{}') && + doc.ref.match(/v?(\d+\.\d+\.\d+(?:[-+].+)?)$/) + if (match && semver.valid(match[1], true)) { + revs.versions[semver.clean(match[1], true)] = doc + } + } + + return revs +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/make-error.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/make-error.js new file mode 100644 index 0000000000000000000000000000000000000000..7540ec7c8b9f71c58b78c64867bacd3a1cb25181 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/make-error.js @@ -0,0 +1,33 @@ +const { + GitConnectionError, + GitPathspecError, + GitUnknownError, +} = require('./errors.js') + +const connectionErrorRe = new RegExp([ + 'remote error: Internal Server Error', + 'The remote end hung up unexpectedly', + 'Connection timed out', + 'Operation timed out', + 'Failed to connect to .* Timed out', + 'Connection reset by peer', + 'SSL_ERROR_SYSCALL', + 'The requested URL returned error: 503', +].join('|')) + +const missingPathspecRe = /pathspec .* did not match any file\(s\) known to git/ + +function makeError (er) { + const message = er.stderr + let gitEr + if (connectionErrorRe.test(message)) { + gitEr = new GitConnectionError(message) + } else if (missingPathspecRe.test(message)) { + gitEr = new GitPathspecError(message) + } else { + gitEr = new GitUnknownError(message) + } + return Object.assign(gitEr, er) +} + +module.exports = makeError diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/opts.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/opts.js new file mode 100644 index 0000000000000000000000000000000000000000..1e80e9efe4989c0afa860d9c816576a519afccd8 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/opts.js @@ -0,0 +1,57 @@ +const fs = require('node:fs') +const os = require('node:os') +const path = require('node:path') +const ini = require('ini') + +const gitConfigPath = path.join(os.homedir(), '.gitconfig') + +let cachedConfig = null + +// Function to load and cache the git config +const loadGitConfig = () => { + if (cachedConfig === null) { + try { + cachedConfig = {} + if (fs.existsSync(gitConfigPath)) { + const configContent = fs.readFileSync(gitConfigPath, 'utf-8') + cachedConfig = ini.parse(configContent) + } + } catch (error) { + cachedConfig = {} + } + } + return cachedConfig +} + +const checkGitConfigs = () => { + const config = loadGitConfig() + return { + sshCommandSetInConfig: config?.core?.sshCommand !== undefined, + askPassSetInConfig: config?.core?.askpass !== undefined, + } +} + +const sshCommandSetInEnv = process.env.GIT_SSH_COMMAND !== undefined +const askPassSetInEnv = process.env.GIT_ASKPASS !== undefined +const { sshCommandSetInConfig, askPassSetInConfig } = checkGitConfigs() + +// Values we want to set if they're not already defined by the end user +// This defaults to accepting new ssh host key fingerprints +const finalGitEnv = { + ...(askPassSetInEnv || askPassSetInConfig ? {} : { + GIT_ASKPASS: 'echo', + }), + ...(sshCommandSetInEnv || sshCommandSetInConfig ? 
{} : { + GIT_SSH_COMMAND: 'ssh -oStrictHostKeyChecking=accept-new', + }), +} + +module.exports = (opts = {}) => ({ + stdioString: true, + ...opts, + shell: false, + env: opts.env || { ...finalGitEnv, ...process.env }, +}) + +// Export the loadGitConfig function for testing +module.exports.loadGitConfig = loadGitConfig diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/revs.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/revs.js new file mode 100644 index 0000000000000000000000000000000000000000..ebcc848fa34584bf100eaac248d218bc7d5e2c6c --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/revs.js @@ -0,0 +1,22 @@ +const spawn = require('./spawn.js') +const { LRUCache } = require('lru-cache') +const linesToRevs = require('./lines-to-revs.js') + +const revsCache = new LRUCache({ + max: 100, + ttl: 5 * 60 * 1000, +}) + +module.exports = async (repo, opts = {}) => { + if (!opts.noGitRevCache) { + const cached = revsCache.get(repo) + if (cached) { + return cached + } + } + + const { stdout } = await spawn(['ls-remote', repo], opts) + const revs = linesToRevs(stdout.trim().split('\n')) + revsCache.set(repo, revs) + return revs +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/spawn.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/spawn.js new file mode 100644 index 0000000000000000000000000000000000000000..03c1cbde2154779a9bd7097e188487c6564b1819 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/spawn.js @@ -0,0 +1,44 @@ +const spawn = require('@npmcli/promise-spawn') +const promiseRetry = require('promise-retry') +const { log } = require('proc-log') +const makeError = require('./make-error.js') +const makeOpts = require('./opts.js') + +module.exports = (gitArgs, opts = {}) => { + const whichGit = require('./which.js') + const gitPath = whichGit(opts) + + if (gitPath instanceof Error) { + return Promise.reject(gitPath) + } + + // undocumented option, mostly only here for tests + const args = opts.allowReplace || gitArgs[0] === '--no-replace-objects' + ? 
gitArgs + : ['--no-replace-objects', ...gitArgs] + + let retryOpts = opts.retry + if (retryOpts === null || retryOpts === undefined) { + retryOpts = { + retries: opts.fetchRetries || 2, + factor: opts.fetchRetryFactor || 10, + maxTimeout: opts.fetchRetryMaxtimeout || 60000, + minTimeout: opts.fetchRetryMintimeout || 1000, + } + } + return promiseRetry((retryFn, number) => { + if (number !== 1) { + log.silly('git', `Retrying git command: ${ + args.join(' ')} attempt # ${number}`) + } + + return spawn(gitPath, args, makeOpts(opts)) + .catch(er => { + const gitError = makeError(er) + if (!gitError.shouldRetry(number)) { + throw gitError + } + retryFn(gitError) + }) + }, retryOpts) +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/utils.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/utils.js new file mode 100644 index 0000000000000000000000000000000000000000..fcd9578a19597da11eb49d60df38ae72a538fc43 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/utils.js @@ -0,0 +1,3 @@ +const isWindows = opts => (opts.fakePlatform || process.platform) === 'win32' + +exports.isWindows = isWindows diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/which.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/which.js new file mode 100644 index 0000000000000000000000000000000000000000..dc2a1ad212166316976f36f08610ffc870a461e4 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/lib/which.js @@ -0,0 +1,18 @@ +const which = require('which') + +let gitPath +try { + gitPath = which.sync('git') +} catch { + // ignore errors +} + +module.exports = (opts = {}) => { + if (opts.git) { + return opts.git + } + if (!gitPath || opts.git === false) { + return Object.assign(new Error('No git binary found in $PATH'), { code: 'ENOGIT' }) + } + return gitPath +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/package.json new file mode 100644 index 0000000000000000000000000000000000000000..f4e844bccab0dba685461107dd864387b665e1ca --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/git/package.json @@ -0,0 +1,58 @@ +{ + "name": "@npmcli/git", + "version": "7.0.0", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "description": "a util for spawning git from npm CLI contexts", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/git.git" + }, + "author": "GitHub Inc.", + "license": "ISC", + "scripts": { + "lint": "npm run eslint", + "snap": "tap", + "test": "tap", + "posttest": "npm run lint", + "postlint": "template-oss-check", + "lintfix": "npm run eslint -- --fix", + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" + }, + "tap": { + "timeout": 600, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "devDependencies": { + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.24.1", + "npm-package-arg": "^13.0.0", + "slash": "^3.0.0", + "tap": "^16.0.1" + }, + "dependencies": { + "@npmcli/promise-spawn": "^8.0.0", + "ini": "^5.0.0", + "lru-cache": "^11.2.1", + "npm-pick-manifest": "^11.0.1", + "proc-log": "^5.0.0", + "promise-retry": "^2.0.1", 
+ "semver": "^7.3.5", + "which": "^5.0.0" + }, + "engines": { + "node": "^20.17.0 || >=22.9.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.24.1", + "publish": true + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/installed-package-contents/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/installed-package-contents/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..19cec97b1846830f5628807533a144313cd67532 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/installed-package-contents/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/installed-package-contents/README.md b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/installed-package-contents/README.md new file mode 100644 index 0000000000000000000000000000000000000000..edd23bd26d64c81ec95f1eeb1c390ed7b1b70c00 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/installed-package-contents/README.md @@ -0,0 +1,109 @@ +# @npmcli/installed-package-contents + +Get the list of files installed in a package in node_modules, including +bundled dependencies. + +This is useful if you want to remove a package node from the tree _without_ +removing its child nodes, for example to extract a new version of the +dependency into place safely. + +It's sort of the reflection of [npm-packlist](http://npm.im/npm-packlist), +but for listing out the _installed_ files rather than the files that _will_ +be installed. This is of course a much simpler operation, because we don't +have to handle ignore files or package.json `files` lists. + +## USAGE + +```js +// programmatic usage +const pkgContents = require('@npmcli/installed-package-contents') + +pkgContents({ path: 'node_modules/foo', depth: 1 }).then(files => { + // files is an array of items that need to be passed to + // rimraf or moved out of the way to make the folder empty + // if foo bundled dependencies, those will be included. + // It will not traverse into child directories, because we set + // depth:1 in the options. + // If the folder doesn't exist, this returns an empty array. +}) + +pkgContents({ path: 'node_modules/foo', depth: Infinity }).then(files => { + // setting depth:Infinity tells it to keep walking forever + // until it hits something that isn't a directory, so we'll + // just get the list of all files, but not their containing + // directories. 
+})
+```
+
+As a CLI:
+
+```bash
+$ installed-package-contents node_modules/bundle-some -d1
+node_modules/.bin/some
+node_modules/bundle-some/package.json
+node_modules/bundle-some/node_modules/@scope/baz
+node_modules/bundle-some/node_modules/.bin/foo
+node_modules/bundle-some/node_modules/foo
+```
+
+CLI options:
+
+```
+Usage:
+  installed-package-contents <path> [-d<n> --depth=<n>]
+
+Lists the files installed for a package specified by <path>.
+
+Options:
+  -d<n> --depth=<n>   Provide a numeric value ("Infinity" is allowed)
+                      to specify how deep in the file tree to traverse.
+                      Default=1
+  -h --help           Show this usage information
+```
+
+## OPTIONS
+
+* `depth` Number, default `1`. How deep to traverse through folders to get
+  contents. Typically you'd want to set this to either `1` (to get the
+  surface files and folders) or `Infinity` (to get all files), but any
+  other positive number is supported as well. If set to `0` or a
+  negative number, returns the path provided and (if it is a package) its
+  set of linked bins.
+* `path` Required. Path to the package in `node_modules` where traversal
+  should begin.
+
+## RETURN VALUE
+
+A Promise that resolves to an array of fully-resolved files and folders
+matching the criteria. This includes all bundled dependencies in
+`node_modules`, and any linked executables in `node_modules/.bin` that the
+package caused to be installed.
+
+An empty or missing package folder will return an empty array. Empty
+directories _within_ package contents are listed, even if the `depth`
+argument would cause them to be traversed into.
+
+## CAVEAT
+
+If using this module to generate a list of files that should be recursively
+removed to clear away the package, note that this will leave empty
+directories behind in certain cases:
+
+- If all child packages are bundled dependencies, then the
+  `node_modules` folder will remain.
+- If all child packages within a given scope were bundled dependencies,
+  then the `node_modules/@scope` folder will remain.
+- If all linked bin scripts were removed, then an empty `node_modules/.bin`
+  folder will remain.
+
+In the interest of speed and algorithmic complexity, this module does _not_
+do a subsequent readdir to see if it would remove all directory entries,
+though it would be easier to look at if it returned `node_modules` or
+`.bin` in that case rather than the contents. However, if the intent is to
+pass these arguments to `rimraf`, it hardly makes sense to do _two_
+`readdir` calls just so that we can have the luxury of having to make a
+third.
+
+Since the primary use case is to delete a package's contents so that they
+can be re-filled with a new version of that package, this caveat does not
+pose a problem. Empty directories are already ignored by both npm and git.
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/installed-package-contents/bin/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/installed-package-contents/bin/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..7b83b23bf168c002e4daa36f670cd9c13640828e
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/installed-package-contents/bin/index.js
@@ -0,0 +1,44 @@
+#! /usr/bin/env node
+
+const { relative } = require('path')
+const pkgContents = require('../')
+
+const usage = `Usage:
+  installed-package-contents <path> [-d<n> --depth=<n>]
+
+Lists the files installed for a package specified by <path>.
+
+Options:
+  -d<n> --depth=<n>   Provide a numeric value ("Infinity" is allowed)
+                      to specify how deep in the file tree to traverse.
+                      Default=1
+  -h --help           Show this usage information`
+
+const options = {}
+
+process.argv.slice(2).forEach(arg => {
+  let match
+  if ((match = arg.match(/^(?:--depth=|-d)([0-9]+|Infinity)/))) {
+    options.depth = +match[1]
+  } else if (arg === '-h' || arg === '--help') {
+    console.log(usage)
+    process.exit(0)
+  } else {
+    options.path = arg
+  }
+})
+
+if (!options.path) {
+  console.error('ERROR: no path provided')
+  console.error(usage)
+  process.exit(1)
+}
+
+const cwd = process.cwd()
+
+pkgContents(options)
+  .then(list => list.sort().forEach(p => console.log(relative(cwd, p))))
+  .catch(/* istanbul ignore next - pretty unusual */ er => {
+    console.error(er)
+    process.exit(1)
+  })
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/installed-package-contents/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/installed-package-contents/lib/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..ab1486cd01d003e564433983600fe716a2b2add1
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/installed-package-contents/lib/index.js
@@ -0,0 +1,181 @@
+// to GET CONTENTS for folder at PATH (which may be a PACKAGE):
+// - if PACKAGE, read path/package.json
+//   - if bins in ../node_modules/.bin, add those to result
+// - if depth >= maxDepth, add PATH to result, and finish
+// - readdir(PATH, with file types)
+// - add all FILEs in PATH to result
+// - if PARENT:
+//   - if depth < maxDepth, add GET CONTENTS of all DIRs in PATH
+//   - else, add all DIRs in PATH
+// - if no parent
+//   - if no bundled deps,
+//     - if depth < maxDepth, add GET CONTENTS of DIRs in path except
+//       node_modules
+//     - else, add all DIRs in path other than node_modules
+//   - if has bundled deps,
+//     - get list of bundled deps
+//     - add GET CONTENTS of bundled deps, PACKAGE=true, depth + 1
+
+const bundled = require('npm-bundled')
+const { readFile, readdir, stat } = require('fs/promises')
+const { resolve, basename, dirname } = require('path')
+const normalizePackageBin = require('npm-normalize-package-bin')
+
+const readPackage = ({ path, packageJsonCache }) => packageJsonCache.has(path)
+  ? Promise.resolve(packageJsonCache.get(path))
+  : readFile(path).then(json => {
+    const pkg = normalizePackageBin(JSON.parse(json))
+    packageJsonCache.set(path, pkg)
+    return pkg
+  }).catch(() => null)
+
+// just normalize bundle deps and bin, that's all we care about here.
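+// e.g. a legacy `bundledDependencies: ['a']` becomes `bundleDependencies: ['a']`,
+// `bundleDependencies: true` expands to every key of dependencies and
+// optionalDependencies, and an object form is reduced to its keys.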
+const normalized = Symbol('package data has been normalized') +const rpj = ({ path, packageJsonCache }) => readPackage({ path, packageJsonCache }) + .then(pkg => { + if (!pkg || pkg[normalized]) { + return pkg + } + if (pkg.bundledDependencies && !pkg.bundleDependencies) { + pkg.bundleDependencies = pkg.bundledDependencies + delete pkg.bundledDependencies + } + const bd = pkg.bundleDependencies + if (bd === true) { + pkg.bundleDependencies = [ + ...Object.keys(pkg.dependencies || {}), + ...Object.keys(pkg.optionalDependencies || {}), + ] + } + if (typeof bd === 'object' && !Array.isArray(bd)) { + pkg.bundleDependencies = Object.keys(bd) + } + pkg[normalized] = true + return pkg + }) + +const pkgContents = async ({ + path, + depth = 1, + currentDepth = 0, + pkg = null, + result = null, + packageJsonCache = null, +}) => { + if (!result) { + result = new Set() + } + + if (!packageJsonCache) { + packageJsonCache = new Map() + } + + if (pkg === true) { + return rpj({ path: path + '/package.json', packageJsonCache }) + .then(p => pkgContents({ + path, + depth, + currentDepth, + pkg: p, + result, + packageJsonCache, + })) + } + + if (pkg) { + // add all bins to result if they exist + if (pkg.bin) { + const dir = dirname(path) + const scope = basename(dir) + const nm = /^@.+/.test(scope) ? dirname(dir) : dir + + const binFiles = [] + Object.keys(pkg.bin).forEach(b => { + const base = resolve(nm, '.bin', b) + binFiles.push(base, base + '.cmd', base + '.ps1') + }) + + const bins = await Promise.all( + binFiles.map(b => stat(b).then(() => b).catch(() => null)) + ) + bins.filter(b => b).forEach(b => result.add(b)) + } + } + + if (currentDepth >= depth) { + result.add(path) + return result + } + + // we'll need bundle list later, so get that now in parallel + const [dirEntries, bundleDeps] = await Promise.all([ + readdir(path, { withFileTypes: true }), + currentDepth === 0 && pkg && pkg.bundleDependencies + ? bundled({ path, packageJsonCache }) : null, + ]).catch(() => []) + + // not a thing, probably a missing folder + if (!dirEntries) { + return result + } + + // empty folder, just add the folder itself to the result + if (!dirEntries.length && !bundleDeps && currentDepth !== 0) { + result.add(path) + return result + } + + const recursePromises = [] + + for (const entry of dirEntries) { + const p = resolve(path, entry.name) + if (entry.isDirectory() === false) { + result.add(p) + continue + } + + if (currentDepth !== 0 || entry.name !== 'node_modules') { + if (currentDepth < depth - 1) { + recursePromises.push(pkgContents({ + path: p, + packageJsonCache, + depth, + currentDepth: currentDepth + 1, + result, + })) + } else { + result.add(p) + } + continue + } + } + + if (bundleDeps) { + // bundle deps are all folders + // we always recurse to get pkg bins, but if currentDepth is too high, + // it'll return early before walking their contents. 
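+    // e.g. for node_modules/foo bundling `bar`, this recurses into
+    // node_modules/foo/node_modules/bar with pkg:true, so bar's bin links
+    // under node_modules/foo/node_modules/.bin are still collected.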
+ recursePromises.push(...bundleDeps.map(dep => { + const p = resolve(path, 'node_modules', dep) + return pkgContents({ + path: p, + packageJsonCache, + pkg: true, + depth, + currentDepth: currentDepth + 1, + result, + }) + })) + } + + if (recursePromises.length) { + await Promise.all(recursePromises) + } + + return result +} + +module.exports = ({ path, ...opts }) => pkgContents({ + path: resolve(path), + ...opts, + pkg: true, +}).then(results => [...results]) diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/installed-package-contents/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/installed-package-contents/package.json new file mode 100644 index 0000000000000000000000000000000000000000..d5b68a737daf49f7f96737a9481c95a911c47392 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/installed-package-contents/package.json @@ -0,0 +1,52 @@ +{ + "name": "@npmcli/installed-package-contents", + "version": "3.0.0", + "description": "Get the list of files installed in a package in node_modules, including bundled dependencies", + "author": "GitHub Inc.", + "main": "lib/index.js", + "bin": { + "installed-package-contents": "bin/index.js" + }, + "license": "ISC", + "scripts": { + "test": "tap", + "snap": "tap", + "lint": "npm run eslint", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run eslint -- --fix", + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" + }, + "devDependencies": { + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", + "tap": "^16.3.0" + }, + "dependencies": { + "npm-bundled": "^4.0.0", + "npm-normalize-package-bin": "^4.0.0" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/installed-package-contents.git" + }, + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^18.17.0 || >=20.5.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.23.3", + "publish": true + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/map-workspaces/LICENSE.md b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/map-workspaces/LICENSE.md new file mode 100644 index 0000000000000000000000000000000000000000..5fc208ff122e08e2ca9777f80b0551617b30ba2a --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/map-workspaces/LICENSE.md @@ -0,0 +1,20 @@ + + +ISC License + +Copyright npm, Inc. + +Permission to use, copy, modify, and/or distribute this +software for any purpose with or without fee is hereby +granted, provided that the above copyright notice and this +permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL +WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO +EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/map-workspaces/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/map-workspaces/lib/index.js new file mode 100644 index 0000000000000000000000000000000000000000..f38b8cd33b74f766cdf1d3cc4338631bbebe0e82 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/map-workspaces/lib/index.js @@ -0,0 +1,226 @@ +const path = require('path') + +const getName = require('@npmcli/name-from-folder') +const { minimatch } = require('minimatch') +const pkgJson = require('@npmcli/package-json') +const { glob } = require('glob') + +function appendNegatedPatterns (allPatterns) { + const patterns = [] + const negatedPatterns = [] + for (let pattern of allPatterns) { + const excl = pattern.match(/^!+/) + if (excl) { + pattern = pattern.slice(excl[0].length) + } + + // strip off any / or ./ from the start of the pattern. /foo => foo + pattern = pattern.replace(/^\.?\/+/, '') + + // an odd number of ! means a negated pattern. !!foo ==> foo + const negate = excl && excl[0].length % 2 === 1 + if (negate) { + negatedPatterns.push(pattern) + } else { + // remove negated patterns that appeared before this pattern to avoid + // ignoring paths that were matched afterwards + // e.g: ['packages/**', '!packages/b/**', 'packages/b/a'] + // in the above list, the last pattern overrides the negated pattern + // right before it. In effect, the above list would become: + // ['packages/**', 'packages/b/a'] + // The order matters here which is why we must do it inside the loop + // as opposed to doing it all together at the end. + for (let i = 0; i < negatedPatterns.length; ++i) { + const negatedPattern = negatedPatterns[i] + if (minimatch(pattern, negatedPattern)) { + negatedPatterns.splice(i, 1) + } + } + patterns.push(pattern) + } + } + + // use the negated patterns to eagerly remove all the patterns that + // can be removed to avoid unnecessary crawling + for (const negated of negatedPatterns) { + for (const pattern of minimatch.match(patterns, negated)) { + patterns.splice(patterns.indexOf(pattern), 1) + } + } + return { patterns, negatedPatterns } +} + +function getPatterns (workspaces) { + const workspacesDeclaration = + Array.isArray(workspaces.packages) + ? workspaces.packages + : workspaces + + if (!Array.isArray(workspacesDeclaration)) { + throw getError({ + message: 'workspaces config expects an Array', + code: 'EWORKSPACESCONFIG', + }) + } + + return appendNegatedPatterns(workspacesDeclaration) +} + +function getPackageName (pkg, pathname) { + return pkg.name || getName(pathname) +} + +// make sure glob pattern only matches folders +function getGlobPattern (pattern) { + pattern = pattern.replace(/\\/g, '/') + return pattern.endsWith('/') + ? 
pattern + : `${pattern}/` +} + +function getError ({ Type = TypeError, message, code }) { + return Object.assign(new Type(message), { code }) +} + +function reverseResultMap (map) { + return new Map(Array.from(map, item => item.reverse())) +} + +async function mapWorkspaces (opts = {}) { + if (!opts || !opts.pkg) { + throw getError({ + message: 'mapWorkspaces missing pkg info', + code: 'EMAPWORKSPACESPKG', + }) + } + if (!opts.cwd) { + opts.cwd = process.cwd() + } + + const { workspaces = [] } = opts.pkg + const { patterns, negatedPatterns } = getPatterns(workspaces) + const results = new Map() + + if (!patterns.length && !negatedPatterns.length) { + return results + } + + const seen = new Map() + const getGlobOpts = () => ({ + ...opts, + ignore: [ + ...opts.ignore || [], + '**/node_modules/**', + // just ignore the negated patterns to avoid unnecessary crawling + ...negatedPatterns, + ], + }) + + let matches = await glob(patterns.map((p) => getGlobPattern(p)), getGlobOpts()) + // preserves glob@8 behavior + matches = matches.sort((a, b) => a.localeCompare(b, 'en')) + + // we must preserve the order of results according to the given list of + // workspace patterns + const orderedMatches = [] + for (const pattern of patterns) { + orderedMatches.push(...matches.filter((m) => { + return minimatch(m, pattern, { partial: true, windowsPathsNoEscape: true }) + })) + } + + for (const match of orderedMatches) { + let pkg + try { + pkg = await pkgJson.normalize(path.join(opts.cwd, match)) + } catch (err) { + if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { + continue + } else { + throw err + } + } + + const name = getPackageName(pkg.content, pkg.path) + + let seenPackagePathnames = seen.get(name) + if (!seenPackagePathnames) { + seenPackagePathnames = new Set() + seen.set(name, seenPackagePathnames) + } + seenPackagePathnames.add(pkg.path) + } + + const errorMessageArray = ['must not have multiple workspaces with the same name'] + for (const [packageName, seenPackagePathnames] of seen) { + if (seenPackagePathnames.size > 1) { + addDuplicateErrorMessages(errorMessageArray, packageName, seenPackagePathnames) + } else { + results.set(packageName, seenPackagePathnames.values().next().value) + } + } + + if (errorMessageArray.length > 1) { + throw getError({ + Type: Error, + message: errorMessageArray.join('\n'), + code: 'EDUPLICATEWORKSPACE', + }) + } + + return results +} + +function addDuplicateErrorMessages (messageArray, packageName, packagePathnames) { + messageArray.push( + `package '${packageName}' has conflicts in the following paths:` + ) + + for (const packagePathname of packagePathnames) { + messageArray.push( + ' ' + packagePathname + ) + } +} + +mapWorkspaces.virtual = function (opts = {}) { + if (!opts || !opts.lockfile) { + throw getError({ + message: 'mapWorkspaces.virtual missing lockfile info', + code: 'EMAPWORKSPACESLOCKFILE', + }) + } + if (!opts.cwd) { + opts.cwd = process.cwd() + } + + const { packages = {} } = opts.lockfile + const { workspaces = [] } = packages[''] || {} + // uses a pathname-keyed map in order to negate the exact items + const results = new Map() + const { patterns, negatedPatterns } = getPatterns(workspaces) + if (!patterns.length && !negatedPatterns.length) { + return results + } + negatedPatterns.push('**/node_modules/**') + + const packageKeys = Object.keys(packages) + for (const pattern of negatedPatterns) { + for (const packageKey of minimatch.match(packageKeys, pattern)) { + packageKeys.splice(packageKeys.indexOf(packageKey), 1) + } + } + + for (const 
pattern of patterns) { + for (const packageKey of minimatch.match(packageKeys, pattern)) { + const packagePathname = path.join(opts.cwd, packageKey) + const name = getPackageName(packages[packageKey], packagePathname) + results.set(packagePathname, name) + } + } + + // Invert pathname-keyed to a proper name-to-pathnames Map + return reverseResultMap(results) +} + +module.exports = mapWorkspaces diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/map-workspaces/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/map-workspaces/package.json new file mode 100644 index 0000000000000000000000000000000000000000..fb77ea8615c1cad1dd56ccbb75dcd14966139e9b --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/map-workspaces/package.json @@ -0,0 +1,61 @@ +{ + "name": "@npmcli/map-workspaces", + "version": "5.0.0", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^20.17.0 || >=22.9.0" + }, + "description": "Retrieves a name:pathname Map for a given workspaces config", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/map-workspaces.git" + }, + "keywords": [ + "npm", + "npmcli", + "libnpm", + "cli", + "workspaces", + "map-workspaces" + ], + "author": "GitHub Inc.", + "license": "ISC", + "scripts": { + "lint": "npm run eslint", + "pretest": "npm run lint", + "test": "tap", + "snap": "tap", + "postlint": "template-oss-check", + "lintfix": "npm run eslint -- --fix", + "posttest": "npm run lint", + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" + }, + "tap": { + "check-coverage": true, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "devDependencies": { + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.25.0", + "tap": "^16.0.1" + }, + "dependencies": { + "@npmcli/name-from-folder": "^3.0.0", + "@npmcli/package-json": "^7.0.0", + "glob": "^11.0.3", + "minimatch": "^10.0.3" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.25.0", + "publish": "true" + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/metavuln-calculator/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/metavuln-calculator/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..19cec97b1846830f5628807533a144313cd67532 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/metavuln-calculator/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/metavuln-calculator/lib/advisory.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/metavuln-calculator/lib/advisory.js new file mode 100644 index 0000000000000000000000000000000000000000..1f4554963d7e48dfa35955faac864d51578761eb --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/metavuln-calculator/lib/advisory.js @@ -0,0 +1,435 @@ +const hash = require('./hash.js') +const semver = require('semver') +const semverOpt = { includePrerelease: true, loose: true } +const getDepSpec = require('./get-dep-spec.js') + +// any fields that we don't want in the cache need to be hidden +const _source = Symbol('source') +const _packument = Symbol('packument') +const _versionVulnMemo = Symbol('versionVulnMemo') +const _updated = Symbol('updated') +const _options = Symbol('options') +const _specVulnMemo = Symbol('specVulnMemo') +const _testVersion = Symbol('testVersion') +const _testVersions = Symbol('testVersions') +const _calculateRange = Symbol('calculateRange') +const _markVulnerable = Symbol('markVulnerable') +const _testSpec = Symbol('testSpec') + +class Advisory { + constructor (name, source, options = {}) { + this.source = source.id + this[_source] = source + this[_options] = options + this.name = name + if (!source.name) { + source.name = name + } + + this.dependency = source.name + + if (this.type === 'advisory') { + this.title = source.title + this.url = source.url + } else { + this.title = `Depends on vulnerable versions of ${source.name}` + this.url = null + } + + this.severity = source.severity || 'high' + this.versions = [] + this.vulnerableVersions = [] + this.cwe = source.cwe + this.cvss = source.cvss + + // advisories have the range, metavulns do not + // if an advisory doesn't specify range, assume all are vulnerable + this.range = this.type === 'advisory' ? source.vulnerable_versions || '*' + : null + + this.id = hash(this) + + this[_packument] = null + // memoized list of which versions are vulnerable + this[_versionVulnMemo] = new Map() + // memoized list of which dependency specs are vulnerable + this[_specVulnMemo] = new Map() + this[_updated] = false + } + + // true if we updated from what we had in cache + get updated () { + return this[_updated] + } + + get type () { + return this.dependency === this.name ? 'advisory' : 'metavuln' + } + + get packument () { + return this[_packument] + } + + // load up the data from a cache entry and a fetched packument + load (cached, packument) { + // basic data integrity gutcheck + if (!cached || typeof cached !== 'object') { + throw new TypeError('invalid cached data, expected object') + } + + if (!packument || typeof packument !== 'object') { + throw new TypeError('invalid packument data, expected object') + } + + if (cached.id && cached.id !== this.id) { + throw Object.assign(new Error('loading from incorrect cache entry'), { + expected: this.id, + actual: cached.id, + }) + } + if (packument.name !== this.name) { + throw Object.assign(new Error('loading from incorrect packument'), { + expected: this.name, + actual: packument.name, + }) + } + if (this[_packument]) { + throw new Error('advisory object already loaded') + } + + // if we have a range from the initialization, and the cached + // data has a *different* range, then we know we have to recalc. 
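+    // (e.g. the advisory was amended upstream to cover a wider range)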
+ // just don't use the cached data, so we will definitely not match later + if (!this.range || cached.range && cached.range === this.range) { + Object.assign(this, cached) + } + + this[_packument] = packument + + const pakuVersions = Object.keys(packument.versions || {}) + const allVersions = new Set([...pakuVersions, ...this.versions]) + const versionsAdded = [] + const versionsRemoved = [] + for (const v of allVersions) { + if (!this.versions.includes(v)) { + versionsAdded.push(v) + this.versions.push(v) + } else if (!pakuVersions.includes(v)) { + versionsRemoved.push(v) + } + } + + // strip out any removed versions from our lists, and sort by semver + this.versions = semver.sort(this.versions.filter(v => + !versionsRemoved.includes(v)), semverOpt) + + // if no changes, then just return what we got from cache + // versions added or removed always means we changed + // otherwise, advisories change if the range changes, and + // metavulns change if the source was updated + const unchanged = this.type === 'advisory' + ? this.range && this.range === cached.range + : !this[_source].updated + + // if the underlying source changed, by an advisory updating the + // range, or a source advisory being updated, then we have to re-check + // otherwise, only recheck the new ones. + this.vulnerableVersions = !unchanged ? [] + : semver.sort(this.vulnerableVersions.filter(v => + !versionsRemoved.includes(v)), semverOpt) + + if (unchanged && !versionsAdded.length && !versionsRemoved.length) { + // nothing added or removed, nothing to do here. use the cached copy. + return this + } + + this[_updated] = true + + // test any versions newly added + if (!unchanged || versionsAdded.length) { + this[_testVersions](unchanged ? versionsAdded : this.versions) + } + this.vulnerableVersions = semver.sort(this.vulnerableVersions, semverOpt) + + // metavulns have to calculate their range, since cache is invalidated + // advisories just get their range from the advisory above + if (this.type === 'metavuln') { + this[_calculateRange]() + } + + return this + } + + [_calculateRange] () { + // calling semver.simplifyRange with a massive list of versions, and those + // versions all concatenated with `||` is a geometric CPU explosion! + // we can try to be a *little* smarter up front by doing x-y for all + // contiguous version sets in the list + const ranges = [] + this.versions = semver.sort(this.versions, semverOpt) + this.vulnerableVersions = semver.sort(this.vulnerableVersions, semverOpt) + for (let v = 0, vulnVer = 0; v < this.versions.length; v++) { + // figure out the vulnerable subrange + const vr = [this.versions[v]] + while (v < this.versions.length) { + if (this.versions[v] !== this.vulnerableVersions[vulnVer]) { + // we don't test prerelease versions, so just skip past it + if (/-/.test(this.versions[v])) { + v++ + continue + } + break + } + if (vr.length > 1) { + vr[1] = this.versions[v] + } else { + vr.push(this.versions[v]) + } + v++ + vulnVer++ + } + // it'll either be just the first version, which means no overlap, + // or the start and end versions, which might be the same version + if (vr.length > 1) { + const tail = this.versions[this.versions.length - 1] + ranges.push(vr[1] === tail ? `>=${vr[0]}` + : vr[0] === vr[1] ? vr[0] + : vr.join(' - ')) + } + } + const metavuln = ranges.join(' || ').trim() + this.range = !metavuln ? 
'<0.0.0-0' + : semver.simplifyRange(this.versions, metavuln, semverOpt) + } + + // returns true if marked as vulnerable, false if ok + // spec is a dependency specifier, for metavuln cases + // where the version might not be in the packument. if + // we have the packument and spec is not provided, then + // we use the dependency version from the manifest. + testVersion (version, spec = null) { + const sv = String(version) + if (this[_versionVulnMemo].has(sv)) { + return this[_versionVulnMemo].get(sv) + } + + const result = this[_testVersion](version, spec) + if (result) { + this[_markVulnerable](version) + } + this[_versionVulnMemo].set(sv, !!result) + return result + } + + [_markVulnerable] (version) { + const sv = String(version) + if (!this.vulnerableVersions.includes(sv)) { + this.vulnerableVersions.push(sv) + } + } + + [_testVersion] (version, spec) { + const sv = String(version) + if (this.vulnerableVersions.includes(sv)) { + return true + } + + if (this.type === 'advisory') { + // advisory, just test range + return semver.satisfies(version, this.range, semverOpt) + } + + // check the dependency of this version on the vulnerable dep + // if we got a version that's not in the packument, fall back on + // the spec provided, if possible. + const mani = this[_packument]?.versions?.[version] || { + dependencies: { + [this.dependency]: spec, + }, + } + + if (!spec) { + spec = getDepSpec(mani, this.dependency) + } + + // no dep, no vuln + if (spec === null) { + return false + } + + if (!semver.validRange(spec, semverOpt)) { + // not a semver range, nothing we can hope to do about it + return true + } + + const bd = mani.bundleDependencies + const bundled = bd && bd.includes(this[_source].name) + // XXX if bundled, then semver.intersects() means vulnerable + // else, pick a manifest and see if it can't be avoided + // try to pick a version of the dep that isn't vulnerable + const avoid = this[_source].range + + if (bundled) { + return semver.intersects(spec, avoid, semverOpt) + } + + return this[_source].testSpec(spec) + } + + testSpec (spec) { + // testing all the versions is a bit costly, and the spec tends to stay + // consistent across multiple versions, so memoize this as well, in case + // we're testing lots of versions. + const memo = this[_specVulnMemo] + if (memo.has(spec)) { + return memo.get(spec) + } + + const res = this[_testSpec](spec) + memo.set(spec, res) + return res + } + + [_testSpec] (spec) { + for (const v of this.versions) { + const satisfies = semver.satisfies(v, spec, semverOpt) + if (!satisfies) { + continue + } + if (!this.testVersion(v)) { + return false + } + } + // either vulnerable, or not installable because nothing satisfied + // either way, best avoided. + return true + } + + [_testVersions] (versions) { + if (!versions.length) { + return + } + + // set of lists of versions + const versionSets = new Set() + versions = semver.sort(versions.map(v => semver.parse(v, semverOpt))) + + // start out with the versions grouped by major and minor + let last = versions[0].major + '.' + versions[0].minor + let list = [] + versionSets.add(list) + for (const v of versions) { + const k = v.major + '.' + v.minor + if (k !== last) { + last = k + list = [] + versionSets.add(list) + } + list.push(v) + } + + for (const set of versionSets) { + // it's common to have version lists like: + // 1.0.0 + // 1.0.1-alpha.0 + // 1.0.1-alpha.1 + // ... + // 1.0.1-alpha.999 + // 1.0.1 + // 1.0.2-alpha.0 + // ... 
+ // 1.0.2-alpha.99 + // 1.0.2 + // with a huge number of prerelease versions that are not installable + // anyway. + // If mid has a prerelease tag, and set[0] does not, then walk it + // back until we hit a non-prerelease version + // If mid has a prerelease tag, and set[set.length-1] does not, + // then walk it forward until we hit a version without a prerelease tag + // Similarly, if the head/tail is a prerelease, but there is a non-pr + // version in the set, then start there instead. + let h = 0 + const origHeadVuln = this.testVersion(set[h]) + while (h < set.length && /-/.test(String(set[h]))) { + h++ + } + + // don't filter out the whole list! they might all be pr's + if (h === set.length) { + h = 0 + } else if (origHeadVuln) { + // if the original was vulnerable, assume so are all of these + for (let hh = 0; hh < h; hh++) { + this[_markVulnerable](set[hh]) + } + } + + let t = set.length - 1 + const origTailVuln = this.testVersion(set[t]) + while (t > h && /-/.test(String(set[t]))) { + t-- + } + + // don't filter out the whole list! might all be pr's + if (t === h) { + t = set.length - 1 + } else if (origTailVuln) { + // if original tail was vulnerable, assume these are as well + for (let tt = set.length - 1; tt > t; tt--) { + this[_markVulnerable](set[tt]) + } + } + + const headVuln = h === 0 ? origHeadVuln + : this.testVersion(set[h]) + + const tailVuln = t === set.length - 1 ? origTailVuln + : this.testVersion(set[t]) + + // if head and tail both vulnerable, whole list is thrown out + if (headVuln && tailVuln) { + for (let v = h; v < t; v++) { + this[_markVulnerable](set[v]) + } + continue + } + + // if length is 2 or 1, then we marked them all already + if (t < h + 2) { + continue + } + + const mid = Math.floor(set.length / 2) + const pre = set.slice(0, mid) + const post = set.slice(mid) + + // if the parent list wasn't prereleases, then drop pr tags + // from end of the pre list, and beginning of the post list, + // marking as vulnerable if the midpoint item we picked is. + if (!/-/.test(String(pre[0]))) { + const midVuln = this.testVersion(pre[pre.length - 1]) + while (/-/.test(String(pre[pre.length - 1]))) { + const v = pre.pop() + if (midVuln) { + this[_markVulnerable](v) + } + } + } + + if (!/-/.test(String(post[post.length - 1]))) { + const midVuln = this.testVersion(post[0]) + while (/-/.test(String(post[0]))) { + const v = post.shift() + if (midVuln) { + this[_markVulnerable](v) + } + } + } + + versionSets.add(pre) + versionSets.add(post) + } + } +} + +module.exports = Advisory diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/metavuln-calculator/lib/get-dep-spec.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/metavuln-calculator/lib/get-dep-spec.js new file mode 100644 index 0000000000000000000000000000000000000000..8d1d72b8c46eb5e4b4e8c5d2517ce71e772267e9 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/metavuln-calculator/lib/get-dep-spec.js @@ -0,0 +1,15 @@ +module.exports = (mani, name) => { + // skip dev because that only matters at the root, + // where we aren't fetching a manifest from the registry + // with multiple versions anyway. + const { + dependencies: deps = {}, + optionalDependencies: optDeps = {}, + peerDependencies: peerDeps = {}, + } = mani + + return deps && typeof deps[name] === 'string' ? deps[name] + : optDeps && typeof optDeps[name] === 'string' ? optDeps[name] + : peerDeps && typeof peerDeps[name] === 'string' ? 
peerDeps[name] + : null +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/metavuln-calculator/lib/hash.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/metavuln-calculator/lib/hash.js new file mode 100644 index 0000000000000000000000000000000000000000..634bf99de04947cfa74e1c52556eb8337a28d5f9 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/metavuln-calculator/lib/hash.js @@ -0,0 +1,5 @@ +const { createHash } = require('crypto') + +module.exports = ({ name, source }) => createHash('sha512') + .update(JSON.stringify([name, source])) + .digest('base64') diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/metavuln-calculator/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/metavuln-calculator/lib/index.js new file mode 100644 index 0000000000000000000000000000000000000000..971409b5bad44ec9d12a87df449c60ad1428c6dd --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/metavuln-calculator/lib/index.js @@ -0,0 +1,128 @@ +// this is the public class that is used by consumers. +// the Advisory class handles all the calculation, and this +// class handles all the IO with the registry and cache. +const pacote = require('pacote') +const cacache = require('cacache') +const { time } = require('proc-log') +const Advisory = require('./advisory.js') +const { homedir } = require('os') +const jsonParse = require('json-parse-even-better-errors') + +const _packument = Symbol('packument') +const _cachePut = Symbol('cachePut') +const _cacheGet = Symbol('cacheGet') +const _cacheData = Symbol('cacheData') +const _packuments = Symbol('packuments') +const _cache = Symbol('cache') +const _options = Symbol('options') +const _advisories = Symbol('advisories') +const _calculate = Symbol('calculate') + +class Calculator { + constructor (options = {}) { + this[_options] = { ...options } + this[_cache] = this[_options].cache || (homedir() + '/.npm/_cacache') + this[_options].cache = this[_cache] + this[_packuments] = new Map() + this[_cacheData] = new Map() + this[_advisories] = new Map() + } + + get cache () { + return this[_cache] + } + + get options () { + return { ...this[_options] } + } + + async calculate (name, source) { + const k = `security-advisory:${name}:${source.id}` + if (this[_advisories].has(k)) { + return this[_advisories].get(k) + } + + const p = this[_calculate](name, source) + this[_advisories].set(k, p) + return p + } + + async [_calculate] (name, source) { + const k = `security-advisory:${name}:${source.id}` + const timeEnd = time.start(`metavuln:calculate:${k}`) + const advisory = new Advisory(name, source, this[_options]) + // load packument and cached advisory + const [cached, packument] = await Promise.all([ + this[_cacheGet](advisory), + this[_packument](name), + ]) + const timeEndLoad = time.start(`metavuln:load:${k}`) + advisory.load(cached, packument) + timeEndLoad() + if (advisory.updated) { + await this[_cachePut](advisory) + } + this[_advisories].set(k, advisory) + timeEnd() + return advisory + } + + async [_cachePut] (advisory) { + const { name, id } = advisory + const key = `security-advisory:${name}:${id}` + const timeEnd = time.start(`metavuln:cache:put:${key}`) + const data = JSON.stringify(advisory) + const options = { ...this[_options] } + this[_cacheData].set(key, jsonParse(data)) + await cacache.put(this[_cache], key, data, 
options).catch(() => {}) + timeEnd() + } + + async [_cacheGet] (advisory) { + const { name, id } = advisory + const key = `security-advisory:${name}:${id}` + /* istanbul ignore if - should be impossible, since we memoize the + * advisory object itself using the same key, just being cautious */ + if (this[_cacheData].has(key)) { + return this[_cacheData].get(key) + } + + const timeEnd = time.start(`metavuln:cache:get:${key}`) + const p = cacache.get(this[_cache], key, { ...this[_options] }) + .catch(() => ({ data: '{}' })) + .then(({ data }) => { + data = jsonParse(data) + timeEnd() + this[_cacheData].set(key, data) + return data + }) + this[_cacheData].set(key, p) + return p + } + + async [_packument] (name) { + if (this[_packuments].has(name)) { + return this[_packuments].get(name) + } + + const timeEnd = time.start(`metavuln:packument:${name}`) + const p = pacote.packument(name, { ...this[_options] }) + .catch(() => { + // presumably not something from the registry. + // an empty packument will have an effective range of * + return { + name, + versions: {}, + } + }) + .then(paku => { + timeEnd() + this[_packuments].set(name, paku) + return paku + }) + this[_packuments].set(name, p) + return p + } +} + +module.exports = Calculator diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/metavuln-calculator/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/metavuln-calculator/package.json new file mode 100644 index 0000000000000000000000000000000000000000..9d17000653c0e77636d1ef79e939b5b40679fe16 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/metavuln-calculator/package.json @@ -0,0 +1,62 @@ +{ + "name": "@npmcli/metavuln-calculator", + "version": "9.0.2", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "description": "Calculate meta-vulnerabilities from package security advisories", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/metavuln-calculator.git" + }, + "author": "GitHub Inc.", + "license": "ISC", + "scripts": { + "test": "tap", + "posttest": "npm run lint", + "snap": "tap", + "postsnap": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", + "lintfix": "npm run eslint -- --fix", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force" + }, + "tap": { + "check-coverage": true, + "coverage-map": "map.js", + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "devDependencies": { + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.25.0", + "require-inject": "^1.4.4", + "tap": "^16.0.1" + }, + "dependencies": { + "cacache": "^20.0.0", + "json-parse-even-better-errors": "^4.0.0", + "pacote": "^21.0.0", + "proc-log": "^5.0.0", + "semver": "^7.3.5" + }, + "engines": { + "node": "^20.17.0 || >=22.9.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", + "version": "4.25.0", + "publish": "true", + "ciVersions": [ + "16.14.0", + "16.x", + "18.0.0", + "18.x" + ] + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/name-from-folder/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/name-from-folder/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..d24a9fca761c837d4ceebd5060ec8e26129799cb --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/name-from-folder/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL WARRANTIES WITH REGARD +TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, +DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS +ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS +SOFTWARE. diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/name-from-folder/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/name-from-folder/lib/index.js new file mode 100644 index 0000000000000000000000000000000000000000..afb1dbb76297f7d48595e28112d6993ce0e90630 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/name-from-folder/lib/index.js @@ -0,0 +1,7 @@ +const { basename, dirname } = require('path') + +const getName = (parent, base) => + parent.charAt(0) === '@' ? `${parent}/${base}` : base + +module.exports = dir => dir ? getName(basename(dirname(dir)), basename(dir)) + : false diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/name-from-folder/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/name-from-folder/package.json new file mode 100644 index 0000000000000000000000000000000000000000..323edd81d22fb431ce4875064273a1ea8db83083 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/name-from-folder/package.json @@ -0,0 +1,45 @@ +{ + "name": "@npmcli/name-from-folder", + "version": "3.0.0", + "files": [ + "bin/", + "lib/" + ], + "main": "lib/index.js", + "description": "Get the package name from a folder path", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/name-from-folder.git" + }, + "author": "GitHub Inc.", + "license": "ISC", + "scripts": { + "test": "tap", + "snap": "tap", + "lint": "npm run eslint", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run eslint -- --fix", + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" + }, + "devDependencies": { + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", + "tap": "^16.3.2" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", + "version": "4.23.3", + "publish": true + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/node-gyp/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/node-gyp/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..3609cabca453507dda944f8b738cd9773372e5e1 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/node-gyp/LICENSE @@ -0,0 +1,7 @@ +ISC License: + +Copyright (c) 2023 by GitHub Inc. + +Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/node-gyp/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/node-gyp/lib/index.js new file mode 100644 index 0000000000000000000000000000000000000000..cdf18560e0ca23ffdffda14663add340c7f21613 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/node-gyp/lib/index.js @@ -0,0 +1,14 @@ +const util = require('util') +const fs = require('fs') +const { stat } = fs.promises || { stat: util.promisify(fs.stat) } + +async function isNodeGypPackage (path) { + return await stat(`${path}/binding.gyp`) + .then(st => st.isFile()) + .catch(() => false) +} + +module.exports = { + isNodeGypPackage, + defaultGypInstallScript: 'node-gyp rebuild', +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/node-gyp/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/node-gyp/package.json new file mode 100644 index 0000000000000000000000000000000000000000..3be9663a39de04e86b18bf8b57ab04960256febf --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/node-gyp/package.json @@ -0,0 +1,50 @@ +{ + "name": "@npmcli/node-gyp", + "version": "4.0.0", + "description": "Tools for dealing with node-gyp packages", + "scripts": { + "test": "tap", + "lint": "npm run eslint", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run eslint -- --fix", + "snap": "tap", + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/node-gyp.git" + }, + "keywords": [ + "npm", + "cli", + "node-gyp" + ], + "files": [ + "bin/", + "lib/" + ], + "main": "lib/index.js", + "author": "GitHub Inc.", + "license": "ISC", + "devDependencies": { + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", + "tap": "^16.0.1" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by 
@npmcli/template-oss. Edits may be overwritten.", + "version": "4.23.3", + "publish": true + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/package-json/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/package-json/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..6a1f3708f6d70e651684dff51bd2fee840bb21a4 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/package-json/LICENSE @@ -0,0 +1,18 @@ +ISC License + +Copyright GitHub Inc. + +Permission to use, copy, modify, and/or distribute this +software for any purpose with or without fee is hereby +granted, provided that the above copyright notice and this +permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL +WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO +EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/package-json/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/package-json/lib/index.js new file mode 100644 index 0000000000000000000000000000000000000000..fabe5fbcda7bc564ace2f23c3d30938e0cbaf0ef --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/package-json/lib/index.js @@ -0,0 +1,296 @@ +const { readFile, writeFile } = require('node:fs/promises') +const { resolve } = require('node:path') +const parseJSON = require('json-parse-even-better-errors') + +const updateDeps = require('./update-dependencies.js') +const updateScripts = require('./update-scripts.js') +const updateWorkspaces = require('./update-workspaces.js') +const { normalize, syncNormalize } = require('./normalize.js') +const { read, parse } = require('./read-package.js') +const { packageSort } = require('./sort.js') + +// a list of handy specialized helper functions that take +// care of special cases that are handled by the npm cli +const knownSteps = new Set([ + updateDeps, + updateScripts, + updateWorkspaces, +]) + +// list of all keys that are handled by "knownSteps" helpers +const knownKeys = new Set([ + ...updateDeps.knownKeys, + 'scripts', + 'workspaces', +]) + +class PackageJson { + // npm pkg fix + static fixSteps = Object.freeze([ + 'binRefs', + 'bundleDependencies', + 'fixName', + 'fixVersionField', + 'fixRepositoryField', + 'fixDependencies', + 'devDependencies', + 'scriptpath', + ]) + + static normalizeSteps = Object.freeze([ + '_id', + '_attributes', + 'bundledDependencies', + 'bundleDependencies', + 'optionalDedupe', + 'scripts', + 'funding', + 'bin', + 'binDir', + ]) + + static prepareSteps = Object.freeze([ + '_id', + '_attributes', + 'bundledDependencies', + 'bundleDependencies', + 'bundleDependenciesDeleteFalse', + 'gypfile', + 'serverjs', + 'scriptpath', + 'authors', + 'readme', + 'mans', + 'binDir', + 'gitHead', + 'fillTypes', + 'normalizeData', + 'binRefs', + ]) + + // create a new empty package.json, so we can save at the given path even + // though we didn't start from a parsed 
file + static async create (path, opts = {}) { + const p = new PackageJson() + await p.create(path) + if (opts.data) { + return p.update(opts.data) + } + return p + } + + // Loads a package.json at given path and JSON parses + static async load (path, opts = {}) { + const p = new PackageJson() + // Avoid try/catch if we aren't going to create + if (!opts.create) { + return p.load(path) + } + + try { + return await p.load(path) + } catch (err) { + if (!err.message.startsWith('Could not read package.json')) { + throw err + } + return await p.create(path) + } + } + + // npm pkg fix + static async fix (path, opts) { + const p = new PackageJson() + await p.load(path, true) + return p.fix(opts) + } + + // read-package-json compatible behavior + static async prepare (path, opts) { + const p = new PackageJson() + await p.load(path, true) + return p.prepare(opts) + } + + // read-package-json-fast compatible behavior + static async normalize (path, opts) { + const p = new PackageJson() + await p.load(path) + return p.normalize(opts) + } + + #path + #manifest + #readFileContent = '' + #canSave = true + + // Load content from given path + async load (path, parseIndex) { + this.#path = path + let parseErr + try { + this.#readFileContent = await read(this.filename) + } catch (err) { + if (!parseIndex) { + throw err + } + parseErr = err + } + + if (parseErr) { + const indexFile = resolve(this.path, 'index.js') + let indexFileContent + try { + indexFileContent = await readFile(indexFile, 'utf8') + } catch (err) { + throw parseErr + } + try { + this.fromComment(indexFileContent) + } catch (err) { + throw parseErr + } + // This wasn't a package.json so prevent saving + this.#canSave = false + return this + } + + return this.fromJSON(this.#readFileContent) + } + + // Load data from a JSON string/buffer + fromJSON (data) { + this.#manifest = parse(data) + return this + } + + // Manually set data from an existing object + fromContent (data) { + if (!data || typeof data !== 'object') { + throw new Error('Content data must be an object') + } + this.#manifest = data + this.#canSave = false + return this + } + + // Load data from a comment + // /**package { "name": "foo", "version": "1.2.3", ... } **/ + fromComment (data) { + data = data.split(/^\/\*\*package(?:\s|$)/m) + + if (data.length < 2) { + throw new Error('File has no package in comments') + } + data = data[1] + data = data.split(/\*\*\/$/m) + + if (data.length < 2) { + throw new Error('File has no package in comments') + } + data = data[0] + data = data.replace(/^\s*\*/mg, '') + + this.#manifest = parseJSON(data) + return this + } + + get content () { + return this.#manifest + } + + get path () { + return this.#path + } + + get filename () { + if (this.path) { + return resolve(this.path, 'package.json') + } + return undefined + } + + create (path) { + this.#path = path + this.#manifest = {} + return this + } + + // This should be the ONLY way to set content in the manifest + update (content) { + if (!this.content) { + throw new Error('Can not update without content. 
Please `load` or `create`') + } + + for (const step of knownSteps) { + this.#manifest = step({ content, originalContent: this.content }) + } + + // unknown properties will just be overwitten + for (const [key, value] of Object.entries(content)) { + if (!knownKeys.has(key)) { + this.content[key] = value + } + } + + return this + } + + async save ({ sort } = {}) { + if (!this.#canSave) { + throw new Error('No package.json to save to') + } + const { + [Symbol.for('indent')]: indent, + [Symbol.for('newline')]: newline, + ...rest + } = this.content + + const format = indent === undefined ? ' ' : indent + const eol = newline === undefined ? '\n' : newline + + const content = sort ? packageSort(rest) : rest + + const fileContent = `${ + JSON.stringify(content, null, format) + }\n` + .replace(/\n/g, eol) + + if (fileContent.trim() !== this.#readFileContent.trim()) { + const written = await writeFile(this.filename, fileContent) + this.#readFileContent = fileContent + return written + } + } + + // steps is NOT overrideable here because this is a legacy function that's not being used in new places + syncNormalize (opts = {}) { + opts.steps = this.constructor.normalizeSteps.filter(s => s !== '_attributes') + syncNormalize(this, opts) + return this + } + + async normalize (opts = {}) { + if (!opts.steps) { + opts.steps = this.constructor.normalizeSteps + } + await normalize(this, opts) + return this + } + + async prepare (opts = {}) { + if (!opts.steps) { + opts.steps = this.constructor.prepareSteps + } + await normalize(this, opts) + return this + } + + async fix (opts = {}) { + // This one is not overridable + opts.steps = this.constructor.fixSteps + await normalize(this, opts) + return this + } +} + +module.exports = PackageJson diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/package-json/lib/normalize-data.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/package-json/lib/normalize-data.js new file mode 100644 index 0000000000000000000000000000000000000000..1c1a36984c5e9be4406fb6adb4cd0035dbff4eac --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/package-json/lib/normalize-data.js @@ -0,0 +1,254 @@ +// Originally normalize-package-data + +const { URL } = require('node:url') +const hostedGitInfo = require('hosted-git-info') +const validateLicense = require('validate-npm-package-license') + +const typos = { + dependancies: 'dependencies', + dependecies: 'dependencies', + depdenencies: 'dependencies', + devEependencies: 'devDependencies', + depends: 'dependencies', + 'dev-dependencies': 'devDependencies', + devDependences: 'devDependencies', + devDepenencies: 'devDependencies', + devdependencies: 'devDependencies', + repostitory: 'repository', + repo: 'repository', + prefereGlobal: 'preferGlobal', + hompage: 'homepage', + hampage: 'homepage', + autohr: 'author', + autor: 'author', + contributers: 'contributors', + publicationConfig: 'publishConfig', + script: 'scripts', +} + +const isEmail = str => str.includes('@') && (str.indexOf('@') < str.lastIndexOf('.')) + +// Extracts description from contents of a readme file in markdown format +function extractDescription (description) { + // the first block of text before the first heading that isn't the first line heading + const lines = description.trim().split('\n') + let start = 0 + // skip initial empty lines and lines that start with # + while (lines[start]?.trim().match(/^(#|$)/)) { + start++ + } + let end = start + 1 + // 
keep going till we get to the end or an empty line + while (end < lines.length && lines[end].trim()) { + end++ + } + return lines.slice(start, end).join(' ').trim() +} + +function stringifyPerson (person) { + if (typeof person !== 'string') { + const name = person.name || '' + const u = person.url || person.web + const wrappedUrl = u ? (' (' + u + ')') : '' + const e = person.email || person.mail + const wrappedEmail = e ? (' <' + e + '>') : '' + person = name + wrappedEmail + wrappedUrl + } + const matchedName = person.match(/^([^(<]+)/) + const matchedUrl = person.match(/\(([^()]+)\)/) + const matchedEmail = person.match(/<([^<>]+)>/) + const parsed = {} + if (matchedName?.[0].trim()) { + parsed.name = matchedName[0].trim() + } + if (matchedEmail) { + parsed.email = matchedEmail[1] + } + if (matchedUrl) { + parsed.url = matchedUrl[1] + } + return parsed +} + +function normalizeData (data, changes) { + // fixDescriptionField + if (data.description && typeof data.description !== 'string') { + changes?.push(`'description' field should be a string`) + delete data.description + } + if (data.readme && !data.description && data.readme !== 'ERROR: No README data found!') { + data.description = extractDescription(data.readme) + } + if (data.description === undefined) { + delete data.description + } + if (!data.description) { + changes?.push('No description') + } + + // fixModulesField + if (data.modules) { + changes?.push(`modules field is deprecated`) + delete data.modules + } + + // fixFilesField + const files = data.files + if (files && !Array.isArray(files)) { + changes?.push(`Invalid 'files' member`) + delete data.files + } else if (data.files) { + data.files = data.files.filter(function (file) { + if (!file || typeof file !== 'string') { + changes?.push(`Invalid filename in 'files' list: ${file}`) + return false + } else { + return true + } + }) + } + + // fixManField + if (data.man && typeof data.man === 'string') { + data.man = [data.man] + } + + // fixBugsField + if (!data.bugs && data.repository?.url) { + const hosted = hostedGitInfo.fromUrl(data.repository.url) + if (hosted && hosted.bugs()) { + data.bugs = { url: hosted.bugs() } + } + } else if (data.bugs) { + if (typeof data.bugs === 'string') { + if (isEmail(data.bugs)) { + data.bugs = { email: data.bugs } + } else if (URL.canParse(data.bugs)) { + data.bugs = { url: data.bugs } + } else { + changes?.push(`Bug string field must be url, email, or {email,url}`) + } + } else { + for (const k in data.bugs) { + if (['web', 'name'].includes(k)) { + changes?.push(`bugs['${k}'] should probably be bugs['url'].`) + data.bugs.url = data.bugs[k] + delete data.bugs[k] + } + } + const oldBugs = data.bugs + data.bugs = {} + if (oldBugs.url) { + if (URL.canParse(oldBugs.url)) { + data.bugs.url = oldBugs.url + } else { + changes?.push('bugs.url field must be a string url. Deleted.') + } + } + if (oldBugs.email) { + if (typeof (oldBugs.email) === 'string' && isEmail(oldBugs.email)) { + data.bugs.email = oldBugs.email + } else { + changes?.push('bugs.email field must be a string email. Deleted.') + } + } + } + if (!data.bugs.email && !data.bugs.url) { + delete data.bugs + changes?.push('Normalized value of bugs field is an empty object. 
Deleted.') + } + } + // fixKeywordsField + if (typeof data.keywords === 'string') { + data.keywords = data.keywords.split(/,\s+/) + } + if (data.keywords && !Array.isArray(data.keywords)) { + delete data.keywords + changes?.push(`keywords should be an array of strings`) + } else if (data.keywords) { + data.keywords = data.keywords.filter(function (kw) { + if (typeof kw !== 'string' || !kw) { + changes?.push(`keywords should be an array of strings`) + return false + } else { + return true + } + }) + } + // fixBundleDependenciesField + const bdd = 'bundledDependencies' + const bd = 'bundleDependencies' + if (data[bdd] && !data[bd]) { + data[bd] = data[bdd] + delete data[bdd] + } + if (data[bd] && !Array.isArray(data[bd])) { + changes?.push(`Invalid 'bundleDependencies' list. Must be array of package names`) + delete data[bd] + } else if (data[bd]) { + data[bd] = data[bd].filter(function (filtered) { + if (!filtered || typeof filtered !== 'string') { + changes?.push(`Invalid bundleDependencies member: ${filtered}`) + return false + } else { + if (!data.dependencies) { + data.dependencies = {} + } + if (!Object.prototype.hasOwnProperty.call(data.dependencies, filtered)) { + changes?.push(`Non-dependency in bundleDependencies: ${filtered}`) + data.dependencies[filtered] = '*' + } + return true + } + }) + } + // fixHomepageField + if (!data.homepage && data.repository && data.repository.url) { + const hosted = hostedGitInfo.fromUrl(data.repository.url) + if (hosted) { + data.homepage = hosted.docs() + } + } + if (data.homepage) { + if (typeof data.homepage !== 'string') { + changes?.push('homepage field must be a string url. Deleted.') + delete data.homepage + } else { + if (!URL.canParse(data.homepage)) { + data.homepage = 'http://' + data.homepage + } + } + } + // fixReadmeField + if (!data.readme) { + changes?.push('No README data') + data.readme = 'ERROR: No README data found!' 
+ } + // fixLicenseField + const license = data.license || data.licence + if (!license) { + changes?.push('No license field.') + } else if (typeof (license) !== 'string' || license.length < 1 || license.trim() === '') { + changes?.push('license should be a valid SPDX license expression') + } else if (!validateLicense(license).validForNewPackages) { + changes?.push('license should be a valid SPDX license expression') + } + // fixPeople + if (data.author) { + data.author = stringifyPerson(data.author) + } + ['maintainers', 'contributors'].forEach(function (set) { + if (!Array.isArray(data[set])) { + return + } + data[set] = data[set].map(stringifyPerson) + }) + // fixTypos + for (const d in typos) { + if (Object.prototype.hasOwnProperty.call(data, d)) { + changes?.push(`${d} should probably be ${typos[d]}.`) + } + } +} + +module.exports = { normalizeData } diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/package-json/lib/normalize.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/package-json/lib/normalize.js new file mode 100644 index 0000000000000000000000000000000000000000..f65e6ad7ba2c438dbd32fbfb4c41f86cff613eb3 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/package-json/lib/normalize.js @@ -0,0 +1,614 @@ +const valid = require('semver/functions/valid') +const clean = require('semver/functions/clean') +const fs = require('node:fs/promises') +const path = require('node:path') +const { log } = require('proc-log') +const moduleBuiltin = require('node:module') + +/** + * @type {import('hosted-git-info')} + */ +let _hostedGitInfo +function lazyHostedGitInfo () { + if (!_hostedGitInfo) { + _hostedGitInfo = require('hosted-git-info') + } + return _hostedGitInfo +} + +/** + * @type {import('glob').glob} + */ +let _glob +function lazyLoadGlob () { + if (!_glob) { + _glob = require('glob').glob + } + return _glob +} + +// used to be npm-normalize-package-bin +function normalizePackageBin (pkg, changes) { + if (pkg.bin) { + if (typeof pkg.bin === 'string' && pkg.name) { + changes?.push('"bin" was converted to an object') + pkg.bin = { [pkg.name]: pkg.bin } + } else if (Array.isArray(pkg.bin)) { + changes?.push('"bin" was converted to an object') + pkg.bin = pkg.bin.reduce((acc, k) => { + acc[path.basename(k)] = k + return acc + }, {}) + } + if (typeof pkg.bin === 'object') { + for (const binKey in pkg.bin) { + if (typeof pkg.bin[binKey] !== 'string') { + delete pkg.bin[binKey] + changes?.push(`removed invalid "bin[${binKey}]"`) + continue + } + const base = path.basename(secureAndUnixifyPath(binKey)) + if (!base) { + delete pkg.bin[binKey] + changes?.push(`removed invalid "bin[${binKey}]"`) + continue + } + + const binTarget = secureAndUnixifyPath(pkg.bin[binKey]) + + if (!binTarget) { + delete pkg.bin[binKey] + changes?.push(`removed invalid "bin[${binKey}]"`) + continue + } + + if (base !== binKey) { + delete pkg.bin[binKey] + changes?.push(`"bin[${binKey}]" was renamed to "bin[${base}]"`) + } + if (binTarget !== pkg.bin[binKey]) { + changes?.push(`"bin[${base}]" script name ${binTarget} was invalid and removed`) + } + pkg.bin[base] = binTarget + } + + if (Object.keys(pkg.bin).length === 0) { + changes?.push('empty "bin" was removed') + delete pkg.bin + } + + return pkg + } + } + delete pkg.bin +} + +function normalizePackageMan (pkg, changes) { + if (pkg.man) { + const mans = [] + for (const man of (Array.isArray(pkg.man) ? 
pkg.man : [pkg.man])) {
+ if (typeof man !== 'string') {
+ changes?.push(`removed invalid "man [${man}]"`)
+ } else {
+ mans.push(secureAndUnixifyPath(man))
+ }
+ }
+
+ if (!mans.length) {
+ changes?.push('empty "man" was removed')
+ } else {
+ pkg.man = mans
+ return pkg
+ }
+ }
+ delete pkg.man
+}
+
+function isCorrectlyEncodedName (spec) {
+ return !spec.match(/[/@\s+%:]/) &&
+ spec === encodeURIComponent(spec)
+}
+
+function isValidScopedPackageName (spec) {
+ if (spec.charAt(0) !== '@') {
+ return false
+ }
+
+ const rest = spec.slice(1).split('/')
+ if (rest.length !== 2) {
+ return false
+ }
+
+ return rest[0] && rest[1] &&
+ rest[0] === encodeURIComponent(rest[0]) &&
+ rest[1] === encodeURIComponent(rest[1])
+}
+
+function unixifyPath (ref) {
+ return ref.replace(/\\|:/g, '/')
+}
+
+function secureAndUnixifyPath (ref) {
+ const secured = unixifyPath(path.join('.', path.join('/', unixifyPath(ref))))
+ return secured.startsWith('./') ? '' : secured
+}
+
+// Only steps that can be run synchronously. There are some object constructors (i.e. Arborist Node) that need synchronous normalization so here we are.
+function syncSteps (pkg, { strict, steps, changes, allowLegacyCase }) {
+ const data = pkg.content
+ const pkgId = `${data.name ?? ''}@${data.version ?? ''}`
+
+ // name and version are load bearing so we have to clean them up first
+ if (steps.includes('fixName') || steps.includes('fixNameField') || steps.includes('normalizeData')) {
+ if (!data.name && !strict) {
+ changes?.push('Missing "name" field was set to an empty string')
+ data.name = ''
+ } else {
+ if (typeof data.name !== 'string') {
+ throw new Error('name field must be a string.')
+ }
+ if (!strict) {
+ const name = data.name.trim()
+ if (data.name !== name) {
+ changes?.push(`Whitespace was trimmed from "name"`)
+ data.name = name
+ }
+ }
+
+ if (data.name.startsWith('.') ||
+ !(isValidScopedPackageName(data.name) || isCorrectlyEncodedName(data.name)) ||
+ (strict && (!allowLegacyCase) && data.name !== data.name.toLowerCase()) ||
+ data.name.toLowerCase() === 'node_modules' ||
+ data.name.toLowerCase() === 'favicon.ico') {
+ throw new Error('Invalid name: ' + JSON.stringify(data.name))
+ }
+ }
+ }
+
+ if (steps.includes('fixName')) {
+ // Check for conflicts with builtin modules
+ if (moduleBuiltin.builtinModules.includes(data.name)) {
+ log.warn('package-json', pkgId, `Package name "${data.name}" conflicts with a Node.js built-in module name`)
+ }
+ }
+
+ if (steps.includes('fixVersionField') || steps.includes('normalizeData')) {
+ // allow "loose" semver 1.0 versions in non-strict mode
+ // enforce strict semver 2.0 compliance in strict mode
+ const loose = !strict
+ if (!data.version) {
+ data.version = ''
+ } else {
+ if (!valid(data.version, loose)) {
+ throw new Error(`Invalid version: "${data.version}"`)
+ }
+ const version = clean(data.version, loose)
+ if (version !== data.version) {
+ changes?.push(`"version" was cleaned and set to "${version}"`)
+ data.version = version
+ }
+ }
+ }
+
+ // remove attributes that start with "_"
+ if (steps.includes('_attributes')) {
+ for (const key in data) {
+ if (key.startsWith('_')) {
+ changes?.push(`"${key}" was removed`)
+ delete pkg.content[key]
+ }
+ }
+ }
+
+ // build the "_id" attribute
+ if (steps.includes('_id')) {
+ if (data.name && data.version) {
+ changes?.push(`"_id" was set to ${pkgId}`)
+ data._id = pkgId
+ }
+ }
+
+ // fix bundledDependencies typo
+ if (steps.includes('bundledDependencies')) {
+ if (data.bundleDependencies === undefined &&
data.bundledDependencies !== undefined) { + data.bundleDependencies = data.bundledDependencies + changes?.push(`Deleted incorrect "bundledDependencies"`) + } + delete data.bundledDependencies + } + + // expand "bundleDependencies: true or translate from object" + if (steps.includes('bundleDependencies')) { + const bd = data.bundleDependencies + if (bd === false && !steps.includes('bundleDependenciesDeleteFalse')) { + changes?.push(`"bundleDependencies" was changed from "false" to "[]"`) + data.bundleDependencies = [] + } else if (bd === true) { + changes?.push(`"bundleDependencies" was auto-populated from "dependencies"`) + data.bundleDependencies = Object.keys(data.dependencies || {}) + } else if (bd && typeof bd === 'object') { + if (!Array.isArray(bd)) { + changes?.push(`"bundleDependencies" was changed from an object to an array`) + data.bundleDependencies = Object.keys(bd) + } + } else if ('bundleDependencies' in data) { + changes?.push(`"bundleDependencies" was removed`) + delete data.bundleDependencies + } + } + + // it was once common practice to list deps both in optionalDependencies and + // in dependencies, to support npm versions that did not know about + // optionalDependencies. This is no longer a relevant need, so duplicating + // the deps in two places is unnecessary and excessive. + if (steps.includes('optionalDedupe')) { + if (data.dependencies && + data.optionalDependencies && typeof data.optionalDependencies === 'object') { + for (const name in data.optionalDependencies) { + changes?.push(`optionalDependencies."${name}" was removed`) + delete data.dependencies[name] + } + if (!Object.keys(data.dependencies).length) { + changes?.push(`Empty "optionalDependencies" was removed`) + delete data.dependencies + } + } + } + + // strip "node_modules/.bin" from scripts entries + // remove invalid scripts entries (non-strings) + if ((steps.includes('scripts') || steps.includes('scriptpath')) && data.scripts !== undefined) { + const spre = /^(\.[/\\])?node_modules[/\\].bin[\\/]/ + if (typeof data.scripts === 'object') { + for (const name in data.scripts) { + if (typeof data.scripts[name] !== 'string') { + delete data.scripts[name] + changes?.push(`Invalid scripts."${name}" was removed`) + } else if (steps.includes('scriptpath') && spre.test(data.scripts[name])) { + data.scripts[name] = data.scripts[name].replace(spre, '') + changes?.push(`scripts entry "${name}" was fixed to remove node_modules/.bin reference`) + } + } + } else { + changes?.push(`Removed invalid "scripts"`) + delete data.scripts + } + } + + if (steps.includes('funding')) { + if (data.funding && typeof data.funding === 'string') { + data.funding = { url: data.funding } + changes?.push(`"funding" was changed to an object with a url attribute`) + } + } + + // "normalizeData" from "read-package-json", which was just a call through to + // "normalize-package-data". 
We only call the "fixer" functions because + // outside of that it was also clobbering _id (which we already conditionally + // do) and also adding the gypfile script (which we also already + // conditionally do) + + // Some steps are isolated so we can do a limited subset of these in `fix` + if (steps.includes('fixRepositoryField') || steps.includes('normalizeData')) { + if (data.repositories) { + changes?.push(`"repository" was set to the first entry in "repositories" (${data.repository})`) + data.repository = data.repositories[0] + } + if (data.repository) { + if (typeof data.repository === 'string') { + changes?.push('"repository" was changed from a string to an object') + data.repository = { + type: 'git', + url: data.repository, + } + } + if (data.repository.url) { + const hosted = lazyHostedGitInfo().fromUrl(data.repository.url) + let r + if (hosted) { + if (hosted.getDefaultRepresentation() === 'shortcut') { + r = hosted.https() + } else { + r = hosted.toString() + } + if (r !== data.repository.url) { + changes?.push(`"repository.url" was normalized to "${r}"`) + data.repository.url = r + } + } + } + } + } + + if (steps.includes('fixDependencies') || steps.includes('normalizeData')) { + // peerDependencies? + // devDependencies is meaningless here, it's ignored on an installed package + for (const type of ['dependencies', 'devDependencies', 'optionalDependencies']) { + if (data[type]) { + let secondWarning = true + if (typeof data[type] === 'string') { + changes?.push(`"${type}" was converted from a string into an object`) + data[type] = data[type].trim().split(/[\n\r\s\t ,]+/) + secondWarning = false + } + if (Array.isArray(data[type])) { + if (secondWarning) { + changes?.push(`"${type}" was converted from an array into an object`) + } + const o = {} + for (const d of data[type]) { + if (typeof d === 'string') { + const dep = d.trim().split(/(:?[@\s><=])/) + const dn = dep.shift() + const dv = dep.join('').replace(/^@/, '').trim() + o[dn] = dv + } + } + data[type] = o + } + } + } + // normalize-package-data used to put optional dependencies BACK into + // dependencies here, we no longer do this + + for (const deps of ['dependencies', 'devDependencies']) { + if (deps in data) { + if (!data[deps] || typeof data[deps] !== 'object') { + changes?.push(`Removed invalid "${deps}"`) + delete data[deps] + } else { + for (const d in data[deps]) { + const r = data[deps][d] + if (typeof r !== 'string') { + changes?.push(`Removed invalid "${deps}.${d}"`) + delete data[deps][d] + } + const hosted = lazyHostedGitInfo().fromUrl(data[deps][d])?.toString() + if (hosted && hosted !== data[deps][d]) { + changes?.push(`Normalized git reference to "${deps}.${d}"`) + data[deps][d] = hosted.toString() + } + } + } + } + } + } + + // TODO some of this is duplicated in other steps here, a future breaking change may be able to remove the duplicates involved in this step + if (steps.includes('normalizeData')) { + const { normalizeData } = require('./normalize-data.js') + normalizeData(data, changes) + } +} + +// Steps that require await, distinct from sync-steps.js +async function asyncSteps (pkg, { steps, root, changes }) { + const data = pkg.content + const scripts = data.scripts || {} + const pkgId = `${data.name ?? ''}@${data.version ?? 
''}` + + // add "install" attribute if any "*.gyp" files exist + if (steps.includes('gypfile')) { + if (!scripts.install && !scripts.preinstall && data.gypfile !== false) { + const files = await lazyLoadGlob()('*.gyp', { cwd: pkg.path }) + if (files.length) { + scripts.install = 'node-gyp rebuild' + data.scripts = scripts + data.gypfile = true + changes?.push(`"scripts.install" was set to "node-gyp rebuild"`) + changes?.push(`"gypfile" was set to "true"`) + } + } + } + + // add "start" attribute if "server.js" exists + if (steps.includes('serverjs') && !scripts.start) { + try { + await fs.access(path.join(pkg.path, 'server.js')) + scripts.start = 'node server.js' + data.scripts = scripts + changes?.push('"scripts.start" was set to "node server.js"') + } catch { + // do nothing + } + } + + // populate "authors" attribute + if (steps.includes('authors') && !data.contributors) { + try { + const authorData = await fs.readFile(path.join(pkg.path, 'AUTHORS'), 'utf8') + const authors = authorData.split(/\r?\n/g) + .map(line => line.replace(/^\s*#.*$/, '').trim()) + .filter(line => line) + data.contributors = authors + changes?.push('"contributors" was auto-populated with the contents of the "AUTHORS" file') + } catch { + // do nothing + } + } + + // populate "readme" attribute + if (steps.includes('readme') && !data.readme) { + const mdre = /\.m?a?r?k?d?o?w?n?$/i + const files = await lazyLoadGlob()('{README,README.*}', { + cwd: pkg.path, + nocase: true, + mark: true, + }) + let readmeFile + for (const file of files) { + // don't accept directories. + if (!file.endsWith(path.sep)) { + if (file.match(mdre)) { + readmeFile = file + break + } + if (file.endsWith('README')) { + readmeFile = file + } + } + } + if (readmeFile) { + const readmeData = await fs.readFile(path.join(pkg.path, readmeFile), 'utf8') + data.readme = readmeData + data.readmeFilename = readmeFile + changes?.push(`"readme" was set to the contents of ${readmeFile}`) + changes?.push(`"readmeFilename" was set to ${readmeFile}`) + } + if (!data.readme) { + data.readme = 'ERROR: No README data found!' + } + } + + // expand directories.man + if (steps.includes('mans')) { + if (data.directories?.man && !data.man) { + const manDir = secureAndUnixifyPath(data.directories.man) + const cwd = path.resolve(pkg.path, manDir) + const files = await lazyLoadGlob()('**/*.[0-9]', { cwd }) + data.man = files.map(man => + path.relative(pkg.path, path.join(cwd, man)).split(path.sep).join('/') + ) + } + normalizePackageMan(data, changes) + } + + // expand "directories.bin" + if (steps.includes('binDir') && data.directories?.bin && !data.bin) { + const binPath = secureAndUnixifyPath(data.directories.bin) + const bins = await lazyLoadGlob()('**', { cwd: path.resolve(pkg.path, binPath) }) + data.bin = bins.reduce((acc, binFile) => { + if (binFile && !binFile.startsWith('.')) { + const binName = path.basename(binFile) + // binPath is already cleaned and unixified, no need to path.join here. 
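+ // e.g. (hypothetical illustration, not a case from this repo's tests):
+ // directories.bin = './bin' containing a file 'cli.js' yields
+ // data.bin = { 'cli.js': 'bin/cli.js' }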
+ acc[binName] = `${binPath}/${secureAndUnixifyPath(binFile)}` + } + return acc + }, {}) + } else if (steps.includes('bin') || steps.includes('binDir') || steps.includes('binRefs')) { + normalizePackageBin(data, changes) + } + + // populate "gitHead" attribute + if (steps.includes('gitHead') && !data.gitHead) { + const git = require('@npmcli/git') + const gitRoot = await git.find({ cwd: pkg.path, root }) + let head + if (gitRoot) { + try { + head = await fs.readFile(path.resolve(gitRoot, '.git/HEAD'), 'utf8') + } catch (err) { + // do nothing + } + } + let headData + if (head) { + if (head.startsWith('ref: ')) { + const headRef = head.replace(/^ref: /, '').trim() + const headFile = path.resolve(gitRoot, '.git', headRef) + try { + headData = await fs.readFile(headFile, 'utf8') + headData = headData.replace(/^ref: /, '').trim() + } catch (err) { + // do nothing + } + if (!headData) { + const packFile = path.resolve(gitRoot, '.git/packed-refs') + try { + let refs = await fs.readFile(packFile, 'utf8') + if (refs) { + refs = refs.split('\n') + for (let i = 0; i < refs.length; i++) { + const match = refs[i].match(/^([0-9a-f]{40}) (.+)$/) + if (match && match[2].trim() === headRef) { + headData = match[1] + break + } + } + } + } catch { + // do nothing + } + } + } else { + headData = head.trim() + } + } + if (headData) { + data.gitHead = headData + } + } + + // populate "types" attribute + if (steps.includes('fillTypes')) { + const index = data.main || 'index.js' + + if (typeof index !== 'string') { + throw new TypeError('The "main" attribute must be of type string.') + } + + // TODO exports is much more complicated than this in verbose format + // We need to support for instance + + // "exports": { + // ".": [ + // { + // "default": "./lib/npm.js" + // }, + // "./lib/npm.js" + // ], + // "./package.json": "./package.json" + // }, + // as well as conditional exports + + // if (data.exports && typeof data.exports === 'string') { + // index = data.exports + // } + + // if (data.exports && data.exports['.']) { + // index = data.exports['.'] + // if (typeof index !== 'string') { + // } + // } + const extless = path.join(path.dirname(index), path.basename(index, path.extname(index))) + const dts = `./${extless}.d.ts` + const hasDTSFields = 'types' in data || 'typings' in data + if (!hasDTSFields) { + try { + await fs.access(path.join(pkg.path, dts)) + data.types = dts.split(path.sep).join('/') + } catch { + // do nothing + } + } + } + + // Warn if the bin references don't point to anything. This might be better + // in normalize-package-data if it had access to the file path. + if (steps.includes('binRefs') && data.bin instanceof Object) { + for (const key in data.bin) { + try { + await fs.access(path.resolve(pkg.path, data.bin[key])) + } catch { + log.warn('package-json', pkgId, `No bin file found at ${data.bin[key]}`) + // XXX: should a future breaking change delete bin entries that cannot be accessed? + } + } + } +} + +// We don't want the `changes` array in here by default because this is a hot path for parsing packuments during install. The calling method passes it in if it wants to track changes. 
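+// Hypothetical usage sketch (not part of this module): assumes `pkg` is a
+// PackageJson-like object exposing `content` and `path`, as callers in this
+// package provide:
+//
+//   const { normalize } = require('./normalize.js')
+//   const changes = []
+//   await normalize(pkg, { steps: ['gypfile', 'serverjs'], changes })
+//   // `changes` now lists each field the selected steps rewrote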
+async function normalize (pkg, opts) { + if (!pkg.content) { + throw new Error('Can not normalize without content') + } + await asyncSteps(pkg, opts) + // the normalizeData part of this needs to be the last thing ran, so sync comes second + syncSteps(pkg, opts) +} + +function syncNormalize (pkg, opts) { + syncSteps(pkg, opts) +} + +module.exports = { normalize, syncNormalize } diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/package-json/lib/read-package.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/package-json/lib/read-package.js new file mode 100644 index 0000000000000000000000000000000000000000..d6c86ce388e6ca539eaad8fbb4231e9e887f9e36 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/package-json/lib/read-package.js @@ -0,0 +1,39 @@ +// This is JUST the code needed to open a package.json file and parse it. +// It's isolated out so that code needing to parse a package.json file can do so in the same way as this module does, without needing to require the whole module, or needing to require the underlying parsing library. + +const { readFile } = require('fs/promises') +const parseJSON = require('json-parse-even-better-errors') + +async function read (filename) { + try { + const data = await readFile(filename, 'utf8') + return data + } catch (err) { + err.message = `Could not read package.json: ${err}` + throw err + } +} + +function parse (data) { + try { + const content = parseJSON(data) + return content + } catch (err) { + err.message = `Invalid package.json: ${err}` + throw err + } +} + +// This is what most external libs will use. +// PackageJson will call read and parse separately +async function readPackage (filename) { + const data = await read(filename) + const content = parse(data) + return content +} + +module.exports = { + read, + parse, + readPackage, +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/package-json/lib/sort.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/package-json/lib/sort.js new file mode 100644 index 0000000000000000000000000000000000000000..0bd0d5199da583a195c67edb14435c7c8897e703 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/package-json/lib/sort.js @@ -0,0 +1,101 @@ +/** + * arbitrary sort order for package.json largely pulled from: + * https://github.com/keithamus/sort-package-json/blob/main/defaultRules.md + * + * cross checked with: + * https://github.com/npm/types/blob/main/types/index.d.ts#L104 + * https://docs.npmjs.com/cli/configuring-npm/package-json + */ +function packageSort (json) { + const { + name, + version, + private: isPrivate, + description, + keywords, + homepage, + bugs, + repository, + funding, + license, + author, + maintainers, + contributors, + type, + imports, + exports, + main, + browser, + types, + bin, + man, + directories, + files, + workspaces, + scripts, + config, + dependencies, + devDependencies, + peerDependencies, + peerDependenciesMeta, + optionalDependencies, + bundledDependencies, + bundleDependencies, + engines, + os, + cpu, + publishConfig, + devEngines, + licenses, + overrides, + ...rest + } = json + + return { + ...(typeof name !== 'undefined' ? { name } : {}), + ...(typeof version !== 'undefined' ? { version } : {}), + ...(typeof isPrivate !== 'undefined' ? { private: isPrivate } : {}), + ...(typeof description !== 'undefined' ? 
{ description } : {}), + ...(typeof keywords !== 'undefined' ? { keywords } : {}), + ...(typeof homepage !== 'undefined' ? { homepage } : {}), + ...(typeof bugs !== 'undefined' ? { bugs } : {}), + ...(typeof repository !== 'undefined' ? { repository } : {}), + ...(typeof funding !== 'undefined' ? { funding } : {}), + ...(typeof license !== 'undefined' ? { license } : {}), + ...(typeof author !== 'undefined' ? { author } : {}), + ...(typeof maintainers !== 'undefined' ? { maintainers } : {}), + ...(typeof contributors !== 'undefined' ? { contributors } : {}), + ...(typeof type !== 'undefined' ? { type } : {}), + ...(typeof imports !== 'undefined' ? { imports } : {}), + ...(typeof exports !== 'undefined' ? { exports } : {}), + ...(typeof main !== 'undefined' ? { main } : {}), + ...(typeof browser !== 'undefined' ? { browser } : {}), + ...(typeof types !== 'undefined' ? { types } : {}), + ...(typeof bin !== 'undefined' ? { bin } : {}), + ...(typeof man !== 'undefined' ? { man } : {}), + ...(typeof directories !== 'undefined' ? { directories } : {}), + ...(typeof files !== 'undefined' ? { files } : {}), + ...(typeof workspaces !== 'undefined' ? { workspaces } : {}), + ...(typeof scripts !== 'undefined' ? { scripts } : {}), + ...(typeof config !== 'undefined' ? { config } : {}), + ...(typeof dependencies !== 'undefined' ? { dependencies } : {}), + ...(typeof devDependencies !== 'undefined' ? { devDependencies } : {}), + ...(typeof peerDependencies !== 'undefined' ? { peerDependencies } : {}), + ...(typeof peerDependenciesMeta !== 'undefined' ? { peerDependenciesMeta } : {}), + ...(typeof optionalDependencies !== 'undefined' ? { optionalDependencies } : {}), + ...(typeof bundledDependencies !== 'undefined' ? { bundledDependencies } : {}), + ...(typeof bundleDependencies !== 'undefined' ? { bundleDependencies } : {}), + ...(typeof engines !== 'undefined' ? { engines } : {}), + ...(typeof os !== 'undefined' ? { os } : {}), + ...(typeof cpu !== 'undefined' ? { cpu } : {}), + ...(typeof publishConfig !== 'undefined' ? { publishConfig } : {}), + ...(typeof devEngines !== 'undefined' ? { devEngines } : {}), + ...(typeof licenses !== 'undefined' ? { licenses } : {}), + ...(typeof overrides !== 'undefined' ? 
{ overrides } : {}), + ...rest, + } +} + +module.exports = { + packageSort, +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/package-json/lib/update-dependencies.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/package-json/lib/update-dependencies.js new file mode 100644 index 0000000000000000000000000000000000000000..7259949ab661d8dcc8da969d599adc6ab8bc97b0 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/package-json/lib/update-dependencies.js @@ -0,0 +1,75 @@ +const depTypes = new Set([ + 'dependencies', + 'optionalDependencies', + 'devDependencies', + 'peerDependencies', +]) + +// sort alphabetically all types of deps for a given package +const orderDeps = (content) => { + for (const type of depTypes) { + if (content && content[type]) { + content[type] = Object.keys(content[type]) + .sort((a, b) => a.localeCompare(b, 'en')) + .reduce((res, key) => { + res[key] = content[type][key] + return res + }, {}) + } + } + return content +} + +const updateDependencies = ({ content, originalContent }) => { + const pkg = orderDeps({ + ...content, + }) + + // optionalDependencies don't need to be repeated in two places + if (pkg.dependencies) { + if (pkg.optionalDependencies) { + for (const name of Object.keys(pkg.optionalDependencies)) { + delete pkg.dependencies[name] + } + } + } + + const result = { ...originalContent } + + // loop through all types of dependencies and update package json pkg + for (const type of depTypes) { + if (pkg[type]) { + result[type] = pkg[type] + } + + // prune empty type props from resulting object + const emptyDepType = + pkg[type] + && typeof pkg === 'object' + && Object.keys(pkg[type]).length === 0 + if (emptyDepType) { + delete result[type] + } + } + + // if original package.json had dep in peerDeps AND deps, preserve that. 
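+ // e.g. (hypothetical): an original content of
+ //   { dependencies: { foo: '^1.0.0' }, peerDependencies: { foo: '^1.0.0' } }
+ // updated with peerDependencies { foo: '^2.0.0' } keeps "foo" in
+ // dependencies as well, now at '^2.0.0'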
+ const { dependencies: origProd, peerDependencies: origPeer } = + originalContent || {} + const { peerDependencies: newPeer } = result + if (origProd && origPeer && newPeer) { + // we have original prod/peer deps, and new peer deps + // copy over any that were in both in the original + for (const name of Object.keys(origPeer)) { + if (origProd[name] !== undefined && newPeer[name] !== undefined) { + result.dependencies = result.dependencies || {} + result.dependencies[name] = newPeer[name] + } + } + } + + return result +} + +updateDependencies.knownKeys = depTypes + +module.exports = updateDependencies diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/package-json/lib/update-scripts.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/package-json/lib/update-scripts.js new file mode 100644 index 0000000000000000000000000000000000000000..30495e54cc3c78db17983eb351f48b63b2001c45 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/package-json/lib/update-scripts.js @@ -0,0 +1,29 @@ +const updateScripts = ({ content, originalContent = {} }) => { + const newScripts = content.scripts + + if (!newScripts) { + return originalContent + } + + // validate scripts content being appended + const hasInvalidScripts = () => + Object.entries(newScripts) + .some(([key, value]) => + typeof key !== 'string' || typeof value !== 'string') + if (hasInvalidScripts()) { + throw Object.assign( + new TypeError( + 'package.json scripts should be a key-value pair of strings.'), + { code: 'ESCRIPTSINVALID' } + ) + } + + return { + ...originalContent, + scripts: { + ...newScripts, + }, + } +} + +module.exports = updateScripts diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/package-json/lib/update-workspaces.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/package-json/lib/update-workspaces.js new file mode 100644 index 0000000000000000000000000000000000000000..04bf63230636fdf4d2b510c5be0893be5d4a5b75 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/package-json/lib/update-workspaces.js @@ -0,0 +1,26 @@ +const updateWorkspaces = ({ content, originalContent = {} }) => { + const newWorkspaces = content.workspaces + + if (!newWorkspaces) { + return originalContent + } + + // validate workspaces content being appended + const hasInvalidWorkspaces = () => + newWorkspaces.some(w => !(typeof w === 'string')) + if (!newWorkspaces.length || hasInvalidWorkspaces()) { + throw Object.assign( + new TypeError('workspaces should be an array of strings.'), + { code: 'EWORKSPACESINVALID' } + ) + } + + return { + ...originalContent, + workspaces: [ + ...newWorkspaces, + ], + } +} + +module.exports = updateWorkspaces diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/package-json/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/package-json/package.json new file mode 100644 index 0000000000000000000000000000000000000000..46c39c22a19007eef2581568f6f03fa4eafbfaf9 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/package-json/package.json @@ -0,0 +1,59 @@ +{ + "name": "@npmcli/package-json", + "version": "7.0.1", + "description": "Programmatic API to update package.json", + "keywords": [ + "npm", + "oss" + ], + "repository": { + "type": "git", + "url": 
"git+https://github.com/npm/package-json.git" + }, + "license": "ISC", + "author": "GitHub Inc.", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "snap": "tap", + "test": "tap", + "lint": "npm run eslint", + "lintfix": "npm run eslint -- --fix", + "posttest": "npm run lint", + "postsnap": "npm run lintfix --", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" + }, + "dependencies": { + "@npmcli/git": "^7.0.0", + "glob": "^11.0.3", + "hosted-git-info": "^9.0.0", + "json-parse-even-better-errors": "^4.0.0", + "proc-log": "^5.0.0", + "semver": "^7.5.3", + "validate-npm-package-license": "^3.0.4" + }, + "devDependencies": { + "@npmcli/eslint-config": "^5.1.0", + "@npmcli/template-oss": "4.25.0", + "tap": "^16.0.1" + }, + "engines": { + "node": "^20.17.0 || >=22.9.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.25.0", + "publish": "true" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/promise-spawn/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/promise-spawn/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..8f90f96f4c6c5076027c2a01bd117f42a8d16bb5 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/promise-spawn/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE NPM DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE NPM BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, +OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, +DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS +ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS +SOFTWARE. 
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/promise-spawn/lib/escape.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/promise-spawn/lib/escape.js new file mode 100644 index 0000000000000000000000000000000000000000..9aca8bde70a6e9e8f0823e56b24eac812f8f148d --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/promise-spawn/lib/escape.js @@ -0,0 +1,68 @@ +'use strict' + +// eslint-disable-next-line max-len +// this code adapted from: https://blogs.msdn.microsoft.com/twistylittlepassagesallalike/2011/04/23/everyone-quotes-command-line-arguments-the-wrong-way/ +const cmd = (input, doubleEscape) => { + if (!input.length) { + return '""' + } + + let result + if (!/[ \t\n\v"]/.test(input)) { + result = input + } else { + result = '"' + for (let i = 0; i <= input.length; ++i) { + let slashCount = 0 + while (input[i] === '\\') { + ++i + ++slashCount + } + + if (i === input.length) { + result += '\\'.repeat(slashCount * 2) + break + } + + if (input[i] === '"') { + result += '\\'.repeat(slashCount * 2 + 1) + result += input[i] + } else { + result += '\\'.repeat(slashCount) + result += input[i] + } + } + result += '"' + } + + // and finally, prefix shell meta chars with a ^ + result = result.replace(/[ !%^&()<>|"]/g, '^$&') + if (doubleEscape) { + result = result.replace(/[ !%^&()<>|"]/g, '^$&') + } + + return result +} + +const sh = (input) => { + if (!input.length) { + return `''` + } + + if (!/[\t\n\r "#$&'()*;<>?\\`|~]/.test(input)) { + return input + } + + // replace single quotes with '\'' and wrap the whole result in a fresh set of quotes + const result = `'${input.replace(/'/g, `'\\''`)}'` + // if the input string already had single quotes around it, clean those up + .replace(/^(?:'')+(?!$)/, '') + .replace(/\\'''/g, `\\'`) + + return result +} + +module.exports = { + cmd, + sh, +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/promise-spawn/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/promise-spawn/lib/index.js new file mode 100644 index 0000000000000000000000000000000000000000..1faf62c9157df2ed5e1543bd246be2d84b324964 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/promise-spawn/lib/index.js @@ -0,0 +1,218 @@ +'use strict' + +const { spawn } = require('child_process') +const os = require('os') +const which = require('which') + +const escape = require('./escape.js') + +// 'extra' object is for decorating the error a bit more +const promiseSpawn = (cmd, args, opts = {}, extra = {}) => { + if (opts.shell) { + return spawnWithShell(cmd, args, opts, extra) + } + + let resolve, reject + const promise = new Promise((_resolve, _reject) => { + resolve = _resolve + reject = _reject + }) + + // Create error here so we have a more useful stack trace when rejecting + const closeError = new Error('command failed') + + const stdout = [] + const stderr = [] + + const getResult = (result) => ({ + cmd, + args, + ...result, + ...stdioResult(stdout, stderr, opts), + ...extra, + }) + const rejectWithOpts = (er, erOpts) => { + const resultError = getResult(erOpts) + reject(Object.assign(er, resultError)) + } + + const proc = spawn(cmd, args, opts) + promise.stdin = proc.stdin + promise.process = proc + + proc.on('error', rejectWithOpts) + + if (proc.stdout) { + proc.stdout.on('data', c => stdout.push(c)) + proc.stdout.on('error', rejectWithOpts) 
+ } + + if (proc.stderr) { + proc.stderr.on('data', c => stderr.push(c)) + proc.stderr.on('error', rejectWithOpts) + } + + proc.on('close', (code, signal) => { + if (code || signal) { + rejectWithOpts(closeError, { code, signal }) + } else { + resolve(getResult({ code, signal })) + } + }) + + return promise +} + +const spawnWithShell = (cmd, args, opts, extra) => { + let command = opts.shell + // if shell is set to true, we use a platform default. we can't let the core + // spawn method decide this for us because we need to know what shell is in use + // ahead of time so that we can escape arguments properly. we don't need coverage here. + if (command === true) { + // istanbul ignore next + command = process.platform === 'win32' ? (process.env.ComSpec || 'cmd.exe') : 'sh' + } + + const options = { ...opts, shell: false } + const realArgs = [] + let script = cmd + + // first, determine if we're in windows because if we are we need to know if we're + // running an .exe or a .cmd/.bat since the latter requires extra escaping + const isCmd = /(?:^|\\)cmd(?:\.exe)?$/i.test(command) + if (isCmd) { + let doubleEscape = false + + // find the actual command we're running + let initialCmd = '' + let insideQuotes = false + for (let i = 0; i < cmd.length; ++i) { + const char = cmd.charAt(i) + if (char === ' ' && !insideQuotes) { + break + } + + initialCmd += char + if (char === '"' || char === "'") { + insideQuotes = !insideQuotes + } + } + + let pathToInitial + try { + pathToInitial = which.sync(initialCmd, { + path: (options.env && findInObject(options.env, 'PATH')) || process.env.PATH, + pathext: (options.env && findInObject(options.env, 'PATHEXT')) || process.env.PATHEXT, + }).toLowerCase() + } catch (err) { + pathToInitial = initialCmd.toLowerCase() + } + + doubleEscape = pathToInitial.endsWith('.cmd') || pathToInitial.endsWith('.bat') + for (const arg of args) { + script += ` ${escape.cmd(arg, doubleEscape)}` + } + realArgs.push('/d', '/s', '/c', script) + options.windowsVerbatimArguments = true + } else { + for (const arg of args) { + script += ` ${escape.sh(arg)}` + } + realArgs.push('-c', script) + } + + return promiseSpawn(command, realArgs, options, extra) +} + +// open a file with the default application as defined by the user's OS +const open = (_args, opts = {}, extra = {}) => { + const options = { ...opts, shell: true } + const args = [].concat(_args) + + let platform = process.platform + // process.platform === 'linux' may actually indicate WSL, if that's the case + // open the argument with sensible-browser which is pre-installed + // In WSL, set the default browser using, for example, + // export BROWSER="/mnt/c/Program Files (x86)/Google/Chrome/Application/chrome.exe" + // or + // export BROWSER="/mnt/c/Program Files (x86)/Microsoft/Edge/Application/msedge.exe" + // To permanently set the default browser, add the appropriate entry to your shell's + // RC file, e.g. .bashrc or .zshrc. + if (platform === 'linux' && os.release().toLowerCase().includes('microsoft')) { + platform = 'wsl' + if (!process.env.BROWSER) { + return Promise.reject( + new Error('Set the BROWSER environment variable to your desired browser.')) + } + } + + let command = options.command + if (!command) { + if (platform === 'win32') { + // spawnWithShell does not do the additional os.release() check, so we + // have to force the shell here to make sure we treat WSL as windows. 
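+ // e.g. (hypothetical): promiseSpawn.open('https://www.npmjs.com') here
+ // ultimately runs: cmd.exe /d /s /c start "" https://www.npmjs.com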
+ options.shell = process.env.ComSpec + // also, the start command accepts a title so to make sure that we don't + // accidentally interpret the first arg as the title, we stick an empty + // string immediately after the start command + command = 'start ""' + } else if (platform === 'wsl') { + command = 'sensible-browser' + } else if (platform === 'darwin') { + command = 'open' + } else { + command = 'xdg-open' + } + } + + return spawnWithShell(command, args, options, extra) +} +promiseSpawn.open = open + +const isPipe = (stdio = 'pipe', fd) => { + if (stdio === 'pipe' || stdio === null) { + return true + } + + if (Array.isArray(stdio)) { + return isPipe(stdio[fd], fd) + } + + return false +} + +const stdioResult = (stdout, stderr, { stdioString = true, stdio }) => { + const result = { + stdout: null, + stderr: null, + } + + // stdio is [stdin, stdout, stderr] + if (isPipe(stdio, 1)) { + result.stdout = Buffer.concat(stdout) + if (stdioString) { + result.stdout = result.stdout.toString().trim() + } + } + + if (isPipe(stdio, 2)) { + result.stderr = Buffer.concat(stderr) + if (stdioString) { + result.stderr = result.stderr.toString().trim() + } + } + + return result +} + +// case insensitive lookup in an object +const findInObject = (obj, key) => { + key = key.toLowerCase() + for (const objKey of Object.keys(obj).sort()) { + if (objKey.toLowerCase() === key) { + return obj[objKey] + } + } +} + +module.exports = promiseSpawn diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/promise-spawn/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/promise-spawn/package.json new file mode 100644 index 0000000000000000000000000000000000000000..1436659a446126d534aca2ff4309bd98725408ec --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/promise-spawn/package.json @@ -0,0 +1,51 @@ +{ + "name": "@npmcli/promise-spawn", + "version": "8.0.3", + "files": [ + "bin/", + "lib/" + ], + "main": "./lib/index.js", + "description": "spawn processes the way the npm cli likes to do", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/promise-spawn.git" + }, + "author": "GitHub Inc.", + "license": "ISC", + "scripts": { + "test": "tap", + "snap": "tap", + "lint": "npm run eslint", + "lintfix": "npm run eslint -- --fix", + "posttest": "npm run lint", + "postsnap": "npm run lintfix --", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" + }, + "tap": { + "check-coverage": true, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "devDependencies": { + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.25.0", + "spawk": "^1.7.1", + "tap": "^16.0.1" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", + "version": "4.25.0", + "publish": true + }, + "dependencies": { + "which": "^5.0.0" + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/query/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/query/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..5fc208ff122e08e2ca9777f80b0551617b30ba2a --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/query/LICENSE @@ -0,0 +1,20 @@ + + +ISC License + +Copyright npm, Inc. + +Permission to use, copy, modify, and/or distribute this +software for any purpose with or without fee is hereby +granted, provided that the above copyright notice and this +permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL +WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO +EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/query/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/query/lib/index.js new file mode 100644 index 0000000000000000000000000000000000000000..db7dc345a8c7549855a11131b178f3bd922b9e70 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/query/lib/index.js @@ -0,0 +1,255 @@ +'use strict' + +const parser = require('postcss-selector-parser') + +const arrayDelimiter = Symbol('arrayDelimiter') + +const escapeSlashes = str => + str.replace(/\//g, '\\/') + +const unescapeSlashes = str => + str.replace(/\\\//g, '/') + +// recursively fixes up any :attr pseudo-class found +const fixupAttr = astNode => { + const properties = [] + const matcher = {} + for (const selectorAstNode of astNode.nodes) { + const [firstAstNode] = selectorAstNode.nodes + if (firstAstNode.type === 'tag') { + properties.push(firstAstNode.value) + } + } + + const lastSelectorAstNode = astNode.nodes.pop() + const [attributeAstNode] = lastSelectorAstNode.nodes + + if (attributeAstNode.value === ':attr') { + const appendParts = fixupAttr(attributeAstNode) + properties.push(arrayDelimiter, ...appendParts.lookupProperties) + matcher.qualifiedAttribute = appendParts.attributeMatcher.qualifiedAttribute + matcher.operator = appendParts.attributeMatcher.operator + matcher.value = appendParts.attributeMatcher.value + + // backwards compatibility + matcher.attribute = appendParts.attributeMatcher.attribute + + if (appendParts.attributeMatcher.insensitive) { + matcher.insensitive = true + } + } else { + if (attributeAstNode.type !== 'attribute') { + throw Object.assign( + new Error('`:attr` pseudo-class expects an attribute matcher as the last value'), + { code: 'EQUERYATTR' } + ) + } + + matcher.qualifiedAttribute = unescapeSlashes(attributeAstNode.qualifiedAttribute) + matcher.operator = attributeAstNode.operator + matcher.value = attributeAstNode.value + + // backwards compatibility + matcher.attribute = matcher.qualifiedAttribute + + if (attributeAstNode.insensitive) { + matcher.insensitive = true + } + } + + astNode.lookupProperties = properties + astNode.attributeMatcher = 
matcher + astNode.nodes.length = 0 + return astNode +} + +// fixed up nested pseudo nodes will have their internal selectors moved +// to a new root node that will be referenced by the `nestedNode` property, +// this tweak makes it simpler to reuse `retrieveNodesFromParsedAst` to +// recursively parse and extract results from the internal selectors +const fixupNestedPseudo = astNode => { + // create a new ast root node and relocate any children + // selectors of the current ast node to this new root + const newRootNode = parser.root() + astNode.nestedNode = newRootNode + newRootNode.nodes = [...astNode.nodes] + + // clean up the ast by removing the children nodes from the + // current ast node while also cleaning up their `parent` refs + astNode.nodes.length = 0 + for (const currAstNode of newRootNode.nodes) { + currAstNode.parent = newRootNode + } + + // recursively fixup nodes of any nested selector + transformAst(newRootNode) +} + +// :semver(, [version|range|selector], [function]) +// note: the first or second parameter must be a static version or range +const fixupSemverSpecs = astNode => { + // if we have three nodes, the last is the semver function to use, pull that out first + if (astNode.nodes.length === 3) { + const funcNode = astNode.nodes.pop().nodes[0] + if (funcNode.type === 'tag') { + astNode.semverFunc = funcNode.value + } else if (funcNode.type === 'string') { + // a string is always in some type of quotes, we don't want those so slice them off + astNode.semverFunc = funcNode.value.slice(1, -1) + } else { + // anything that isn't a tag or a string isn't a function name + throw Object.assign( + new Error('`:semver` pseudo-class expects a function name as last value'), + { code: 'ESEMVERFUNC' } + ) + } + } + + // now if we have 1 node, it's a static value + // istanbul ignore else + if (astNode.nodes.length === 1) { + const semverNode = astNode.nodes.pop() + astNode.semverValue = semverNode.nodes.reduce((res, next) => `${res}${String(next)}`, '') + } else if (astNode.nodes.length === 2) { + // and if we have two nodes, one of them is a static value and we need to determine which it is + for (let i = 0; i < astNode.nodes.length; ++i) { + const type = astNode.nodes[i].nodes[0].type + // the type of the first child may be combinator for ranges, such as >14 + if (type === 'tag' || type === 'combinator') { + const semverNode = astNode.nodes.splice(i, 1)[0] + astNode.semverValue = semverNode.nodes.reduce((res, next) => `${res}${String(next)}`, '') + astNode.semverPosition = i + break + } + } + + if (typeof astNode.semverValue === 'undefined') { + throw Object.assign( + new Error('`:semver` pseudo-class expects a static value in the first or second position'), + { code: 'ESEMVERVALUE' } + ) + } + } + + // if we got here, the last remaining child should be attribute selector + if (astNode.nodes.length === 1) { + fixupAttr(astNode) + } else { + // if we don't have a selector, we default to `[version]` + astNode.attributeMatcher = { + insensitive: false, + attribute: 'version', + qualifiedAttribute: 'version', + } + astNode.lookupProperties = [] + } + + astNode.nodes.length = 0 +} + +const fixupTypes = astNode => { + const [valueAstNode] = astNode.nodes[0].nodes + const { value } = valueAstNode || {} + astNode.typeValue = value + astNode.nodes.length = 0 +} + +const fixupPaths = astNode => { + astNode.pathValue = unescapeSlashes(String(astNode.nodes[0])) + astNode.nodes.length = 0 +} + +const fixupOutdated = astNode => { + if (astNode.nodes.length) { + astNode.outdatedKind = 
String(astNode.nodes[0]) + astNode.nodes.length = 0 + } +} + +const fixupVuln = astNode => { + const vulns = [] + if (astNode.nodes.length) { + for (const selector of astNode.nodes) { + const vuln = {} + for (const node of selector.nodes) { + if (node.type !== 'attribute') { + throw Object.assign( + new Error(':vuln pseudo-class only accepts attribute matchers or "cwe" tag'), + { code: 'EQUERYATTR' } + ) + } + if (!['severity', 'cwe'].includes(node._attribute)) { + throw Object.assign( + new Error(':vuln pseudo-class only matches "severity" and "cwe" attributes'), + { code: 'EQUERYATTR' } + ) + } + if (!node.operator) { + node.operator = '=' + node.value = '*' + } + if (node.operator !== '=') { + throw Object.assign( + new Error(':vuln pseudo-class attribute selector only accepts "=" operator', node), + { code: 'EQUERYATTR' } + ) + } + if (!vuln[node._attribute]) { + vuln[node._attribute] = [] + } + vuln[node._attribute].push(node._value) + } + vulns.push(vuln) + } + astNode.vulns = vulns + astNode.nodes.length = 0 + } +} + +// a few of the supported ast nodes need to be tweaked in order to properly be +// interpreted as proper arborist query selectors, namely semver ranges from +// both ids and :semver pseudo-class selectors need to be translated from what +// are usually multiple ast nodes, such as: tag:1, class:.0, class:.0 to a +// single `1.0.0` value, other pseudo-class selectors also get preprocessed in +// order to make it simpler to execute later when traversing each ast node +// using rootNode.walk(), such as :path, :type, etc. transformAst handles all +// these modifications to the parsed ast by doing an extra, initial traversal +// of the parsed ast from the query and modifying the parsed nodes accordingly +const transformAst = selector => { + selector.walk((nextAstNode) => { + switch (nextAstNode.value) { + case ':attr': + return fixupAttr(nextAstNode) + case ':is': + case ':has': + case ':not': + return fixupNestedPseudo(nextAstNode) + case ':path': + return fixupPaths(nextAstNode) + case ':semver': + return fixupSemverSpecs(nextAstNode) + case ':type': + return fixupTypes(nextAstNode) + case ':outdated': + return fixupOutdated(nextAstNode) + case ':vuln': + return fixupVuln(nextAstNode) + } + }) +} + +const queryParser = (query) => { + // if query is an empty string or any falsy + // value, just returns an empty result + if (!query) { + return [] + } + + return parser(transformAst) + .astSync(escapeSlashes(query), { lossless: false }) +} + +module.exports = { + parser: queryParser, + arrayDelimiter, +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/query/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/query/package.json new file mode 100644 index 0000000000000000000000000000000000000000..20660b227834d9ea29d50a17b03ce95c00b73ced --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/query/package.json @@ -0,0 +1,64 @@ +{ + "name": "@npmcli/query", + "version": "4.0.1", + "description": "npm query parser and tools", + "main": "lib/index.js", + "scripts": { + "test": "tap", + "lint": "npm run eslint", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run eslint -- --fix", + "snap": "tap", + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" + }, + "contributors": [ + { + "name": "Ruy Adorno", + "url": "https://ruyadorno.com", + "twitter": "ruyadorno" + } + 
], + "keywords": [ + "ast", + "npm", + "npmcli", + "parser", + "postcss", + "postcss-selector-parser", + "query" + ], + "author": "GitHub Inc.", + "license": "ISC", + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^18.17.0 || >=20.5.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.23.3", + "publish": true + }, + "devDependencies": { + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", + "tap": "^16.2.0" + }, + "dependencies": { + "postcss-selector-parser": "^7.0.0" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/query.git" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/redact/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/redact/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..c21644115c85d05d5ad071e4471d6d81b2fa709d --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/redact/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2024 npm + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/redact/lib/deep-map.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/redact/lib/deep-map.js new file mode 100644 index 0000000000000000000000000000000000000000..c14857c2c01b17deb2686a7f8c233445e82ee155 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/redact/lib/deep-map.js @@ -0,0 +1,71 @@ +const { serializeError } = require('./error') + +const deepMap = (input, handler = v => v, path = ['$'], seen = new Set([input])) => { + // this is in an effort to maintain bole's error logging behavior + if (path.join('.') === '$' && input instanceof Error) { + return deepMap({ err: serializeError(input) }, handler, path, seen) + } + if (input instanceof Error) { + return deepMap(serializeError(input), handler, path, seen) + } + // allows for non-Node.js environments, such as workers + if (typeof Buffer !== 'undefined' && input instanceof Buffer) { + return `[unable to log instanceof buffer]` + } + if (input instanceof Uint8Array) { + return `[unable to log instanceof Uint8Array]` + } + + if (Array.isArray(input)) { + const result = [] + for (let i = 0; i < input.length; i++) { + const element = input[i] + const elementPath = [...path, i] + if (element instanceof Object) { + if (!seen.has(element)) { // avoid getting stuck in circular reference + seen.add(element) + result.push(deepMap(handler(element, elementPath), handler, elementPath, seen)) + } + } else { + result.push(handler(element, elementPath)) + } + } + return result + } + + if (input === null) { + return null + } else if (typeof input === 'object' || typeof input === 'function') { + const result = {} + + for (const propertyName of Object.getOwnPropertyNames(input)) { + // skip logging internal properties + if (propertyName.startsWith('_')) { + continue + } + + try { + const property = input[propertyName] + const propertyPath = [...path, propertyName] + if (property instanceof Object) { + if (!seen.has(property)) { // avoid getting stuck in circular reference + seen.add(property) + result[propertyName] = deepMap( + handler(property, propertyPath), handler, propertyPath, seen + ) + } + } else { + result[propertyName] = handler(property, propertyPath) + } + } catch (err) { + // a getter may throw an error + result[propertyName] = `[error getting value: ${err.message}]` + } + } + return result + } + + return handler(input, path) +} + +module.exports = { deepMap } diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/redact/lib/error.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/redact/lib/error.js new file mode 100644 index 0000000000000000000000000000000000000000..e374b3902a28532552be392606c16378ab26cb1f --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/redact/lib/error.js @@ -0,0 +1,28 @@ +/** takes an error object and serializes it to a plain object */ +function serializeError (input) { + if (!(input instanceof Error)) { + if (typeof input === 'string') { + const error = new Error(`attempted to serialize a non-error, string String, "${input}"`) + return serializeError(error) + } + const error = new Error(`attempted to serialize a non-error, ${typeof input} ${input?.constructor?.name}`) + return serializeError(error) + } + // different error objects store status code differently + // AxiosError uses `status`, other services use `statusCode` + const
statusCode = input.statusCode ?? input.status + // CAUTION: what we serialize here gets added to the size of logs + return { + errorType: input.errorType ?? input.constructor.name, + ...(input.message ? { message: input.message } : {}), + ...(input.stack ? { stack: input.stack } : {}), + // think of this as error code + ...(input.code ? { code: input.code } : {}), + // think of this as http status code + ...(statusCode ? { statusCode } : {}), + } +} + +module.exports = { + serializeError, +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/redact/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/redact/lib/index.js new file mode 100644 index 0000000000000000000000000000000000000000..9b10c7f6a0081d23529e2842796b56dbb1e414db --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/redact/lib/index.js @@ -0,0 +1,44 @@ +const matchers = require('./matchers') +const { redactUrlPassword } = require('./utils') + +const REPLACE = '***' + +const redact = (value) => { + if (typeof value !== 'string' || !value) { + return value + } + return redactUrlPassword(value, REPLACE) + .replace(matchers.NPM_SECRET.pattern, `npm_${REPLACE}`) + .replace(matchers.UUID.pattern, REPLACE) +} + +// split on \s|= similar to how nopt parses options +const splitAndRedact = (str) => { + // stateful regex, don't move out of this scope + const splitChars = /[\s=]/g + + let match = null + let result = '' + let index = 0 + while (match = splitChars.exec(str)) { + result += redact(str.slice(index, match.index)) + match[0] + index = splitChars.lastIndex + } + + return result + redact(str.slice(index)) +} + +// replaces auth info in an array of arguments or in a string +const redactLog = (arg) => { + if (typeof arg === 'string') { + return splitAndRedact(arg) + } else if (Array.isArray(arg)) { + return arg.map((a) => typeof a === 'string' ?
splitAndRedact(a) : a) + } + return arg +} + +module.exports = { + redact, + redactLog, +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/redact/lib/matchers.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/redact/lib/matchers.js new file mode 100644 index 0000000000000000000000000000000000000000..854ba8e1cbda140d79257f2657ae373fb676c567 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/redact/lib/matchers.js @@ -0,0 +1,88 @@ +const TYPE_REGEX = 'regex' +const TYPE_URL = 'url' +const TYPE_PATH = 'path' + +const NPM_SECRET = { + type: TYPE_REGEX, + pattern: /\b(npms?_)[a-zA-Z0-9]{36,48}\b/gi, + replacement: `[REDACTED_NPM_SECRET]`, +} + +const AUTH_HEADER = { + type: TYPE_REGEX, + pattern: /\b(Basic\s+|Bearer\s+)[\w+=\-.]+\b/gi, + replacement: `[REDACTED_AUTH_HEADER]`, +} + +const JSON_WEB_TOKEN = { + type: TYPE_REGEX, + pattern: /\b[A-Za-z0-9-_]{10,}(?!\.\d+\.)\.[A-Za-z0-9-_]{3,}\.[A-Za-z0-9-_]{20,}\b/gi, + replacement: `[REDACTED_JSON_WEB_TOKEN]`, +} + +const UUID = { + type: TYPE_REGEX, + pattern: /\b[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}\b/gi, + replacement: `[REDACTED_UUID]`, +} + +const URL_MATCHER = { + type: TYPE_REGEX, + pattern: /(?:https?|ftp):\/\/[^\s/"$.?#].[^\s"]*/gi, + replacement: '[REDACTED_URL]', +} + +const DEEP_HEADER_AUTHORIZATION = { + type: TYPE_PATH, + predicate: ({ path }) => path.endsWith('.headers.authorization'), + replacement: '[REDACTED_HEADER_AUTHORIZATION]', +} + +const DEEP_HEADER_SET_COOKIE = { + type: TYPE_PATH, + predicate: ({ path }) => path.endsWith('.headers.set-cookie'), + replacement: '[REDACTED_HEADER_SET_COOKIE]', +} + +const DEEP_HEADER_COOKIE = { + type: TYPE_PATH, + predicate: ({ path }) => path.endsWith('.headers.cookie'), + replacement: '[REDACTED_HEADER_COOKIE]', +} + +const REWRITE_REQUEST = { + type: TYPE_PATH, + predicate: ({ path }) => path.endsWith('.request'), + replacement: (input) => ({ + method: input?.method, + path: input?.path, + headers: input?.headers, + url: input?.url, + }), +} + +const REWRITE_RESPONSE = { + type: TYPE_PATH, + predicate: ({ path }) => path.endsWith('.response'), + replacement: (input) => ({ + data: input?.data, + status: input?.status, + headers: input?.headers, + }), +} + +module.exports = { + TYPE_REGEX, + TYPE_URL, + TYPE_PATH, + NPM_SECRET, + AUTH_HEADER, + JSON_WEB_TOKEN, + UUID, + URL_MATCHER, + DEEP_HEADER_AUTHORIZATION, + DEEP_HEADER_SET_COOKIE, + DEEP_HEADER_COOKIE, + REWRITE_REQUEST, + REWRITE_RESPONSE, +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/redact/lib/server.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/redact/lib/server.js new file mode 100644 index 0000000000000000000000000000000000000000..555e37dcc1f54c42ceddc34be7720cbf4c2533b9 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/redact/lib/server.js @@ -0,0 +1,59 @@ +const { + AUTH_HEADER, + JSON_WEB_TOKEN, + NPM_SECRET, + DEEP_HEADER_AUTHORIZATION, + DEEP_HEADER_SET_COOKIE, + REWRITE_REQUEST, + REWRITE_RESPONSE, + DEEP_HEADER_COOKIE, +} = require('./matchers') + +const { + redactUrlMatcher, + redactUrlPasswordMatcher, + redactMatchers, +} = require('./utils') + +const { serializeError } = require('./error') + +const { deepMap } = require('./deep-map') + +const _redact = redactMatchers( + NPM_SECRET, + AUTH_HEADER, + JSON_WEB_TOKEN, + 
DEEP_HEADER_AUTHORIZATION, + DEEP_HEADER_SET_COOKIE, + DEEP_HEADER_COOKIE, + REWRITE_REQUEST, + REWRITE_RESPONSE, + redactUrlMatcher( + redactUrlPasswordMatcher() + ) +) + +const redact = (input) => deepMap(input, (value, path) => _redact(value, { path })) + +/** takes an error and returns a new error keeping some custom properties */ +function redactError (input) { + const { message, ...data } = serializeError(input) + const output = new Error(redact(message)) + return Object.assign(output, redact(data)) +} + +/** runs a function within try / catch and throws error wrapped in redactError */ +function redactThrow (func) { + if (typeof func !== 'function') { + throw new Error('redactThrow expects a function') + } + return async (...args) => { + try { + return await func(...args) + } catch (error) { + throw redactError(error) + } + } +} + +module.exports = { redact, redactError, redactThrow } diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/redact/lib/utils.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/redact/lib/utils.js new file mode 100644 index 0000000000000000000000000000000000000000..8395ab25fc373e4ab8ba3a2a7cee490094a7373d --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/redact/lib/utils.js @@ -0,0 +1,202 @@ +const { + URL_MATCHER, + TYPE_URL, + TYPE_REGEX, + TYPE_PATH, +} = require('./matchers') + +/** + * creates a string of asterisks, + * enforcing a minimum length for security purposes + */ +const asterisk = (length = 0) => { + length = typeof length === 'string' ? length.length : length + if (length < 8) { + return '*'.repeat(8) + } + return '*'.repeat(length) +} + +/** + * escapes all special regex chars + * @see https://stackoverflow.com/a/9310752 + * @see https://github.com/tc39/proposal-regex-escaping + */ +const escapeRegExp = (text) => { + return text.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, `\\$&`) +} + +/** + * provides a regex "or" of the url versions of a string + */ +const urlEncodeRegexGroup = (value) => { + const decoded = decodeURIComponent(value) + const encoded = encodeURIComponent(value) + const union = [...new Set([encoded, decoded, value])].map(escapeRegExp).join('|') + return union +} + +/** + * a tagged template literal that returns a regex and ensures all variables are escaped + */ +const urlEncodeRegexTag = (strings, ...values) => { + let pattern = '' + for (let i = 0; i < values.length; i++) { + pattern += strings[i] + `(${urlEncodeRegexGroup(values[i])})` + } + pattern += strings[strings.length - 1] + return new RegExp(pattern) +} + +/** + * creates a matcher for redacting url hostname + */ +const redactUrlHostnameMatcher = ({ hostname, replacement } = {}) => ({ + type: TYPE_URL, + predicate: ({ url }) => url.hostname === hostname, + pattern: ({ url }) => { + return urlEncodeRegexTag`(^${url.protocol}//${url.username}:.+@)?${url.hostname}` + }, + replacement: `$1${replacement || asterisk()}`, +}) + +/** + * creates a matcher for redacting url search / query parameter values + */ +const redactUrlSearchParamsMatcher = ({ param, replacement } = {}) => ({ + type: TYPE_URL, + predicate: ({ url }) => url.searchParams.has(param), + pattern: ({ url }) => urlEncodeRegexTag`(${param}=)${url.searchParams.get(param)}`, + replacement: `$1${replacement || asterisk()}`, +}) + +/** creates a matcher for redacting the url password */ +const redactUrlPasswordMatcher = ({ replacement } = {}) => ({ + type: TYPE_URL, + predicate: ({ url }) => url.password, +
pattern: ({ url }) => urlEncodeRegexTag`(^${url.protocol}//${url.username}:)${url.password}`, + replacement: `$1${replacement || asterisk()}`, +}) + +const redactUrlReplacement = (...matchers) => (subValue) => { + try { + const url = new URL(subValue) + return redactMatchers(...matchers)(subValue, { url }) + } catch (err) { + return subValue + } +} + +/** + * creates a matcher / submatcher for urls; this function allows you to first + * collect all urls within a larger string and then pass those urls to a + * submatcher + * + * @example + * console.log("this will first match all urls, then pass those urls to the password matcher") + * redactMatchers(redactUrlMatcher(redactUrlPasswordMatcher())) + * + * @example + * console.log( + * "this will assume you are passing in a string that is a url, and will redact the password" + * ) + * redactMatchers(redactUrlPasswordMatcher()) + * + */ +const redactUrlMatcher = (...matchers) => { + return { + ...URL_MATCHER, + replacement: redactUrlReplacement(...matchers), + } +} + +const matcherFunctions = { + [TYPE_REGEX]: (matcher) => (value) => { + if (typeof value === 'string') { + value = value.replace(matcher.pattern, matcher.replacement) + } + return value + }, + [TYPE_URL]: (matcher) => (value, ctx) => { + if (typeof value === 'string') { + try { + const url = ctx?.url || new URL(value) + const { predicate, pattern } = matcher + const predicateValue = predicate({ url }) + if (predicateValue) { + value = value.replace(pattern({ url }), matcher.replacement) + } + } catch (_e) { + return value + } + } + return value + }, + [TYPE_PATH]: (matcher) => (value, ctx) => { + const rawPath = ctx?.path + const path = rawPath.join('.').toLowerCase() + const { predicate, replacement } = matcher + const replace = typeof replacement === 'function' ? replacement : () => replacement + const shouldRun = predicate({ rawPath, path }) + if (shouldRun) { + value = replace(value, { rawPath, path }) + } + return value + }, +} + +/** converts a matcher to a function */ +const redactMatcher = (matcher) => { + return matcherFunctions[matcher.type](matcher) +} + +/** converts a series of matchers to a function */ +const redactMatchers = (...matchers) => (value, ctx) => { + const flatMatchers = matchers.flat() + return flatMatchers.reduce((result, matcher) => { + const fn = (typeof matcher === 'function') ?
matcher : redactMatcher(matcher) + return fn(result, ctx) + }, value) +} + +/** + * replacement handler, keeping $1 (if it exists) and replacing the + * rest of the string with asterisks, maintaining string length + */ +const redactDynamicReplacement = () => (value, start) => { + if (typeof start === 'number') { + return asterisk(value) + } + return start + asterisk(value.substring(start.length).length) +} + +/** + * replacement handler, keeping $1 (if it exists) and replacing the + * rest of the string with a fixed number of asterisks + */ +const redactFixedReplacement = (length) => (_value, start) => { + if (typeof start === 'number') { + return asterisk(length) + } + return start + asterisk(length) +} + +const redactUrlPassword = (value, replacement) => { + return redactMatchers(redactUrlPasswordMatcher({ replacement }))(value) +} + +module.exports = { + asterisk, + escapeRegExp, + urlEncodeRegexGroup, + urlEncodeRegexTag, + redactUrlHostnameMatcher, + redactUrlSearchParamsMatcher, + redactUrlPasswordMatcher, + redactUrlMatcher, + redactUrlReplacement, + redactDynamicReplacement, + redactFixedReplacement, + redactMatchers, + redactUrlPassword, +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/redact/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/redact/package.json new file mode 100644 index 0000000000000000000000000000000000000000..b5070113b1330c002ec49eac43bd8b59ba814573 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/redact/package.json @@ -0,0 +1,52 @@ +{ + "name": "@npmcli/redact", + "version": "3.2.2", + "description": "Redact sensitive npm information from output", + "main": "lib/index.js", + "exports": { + ".": "./lib/index.js", + "./server": "./lib/server.js", + "./package.json": "./package.json" + }, + "scripts": { + "test": "tap", + "lint": "npm run eslint", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run eslint -- --fix", + "snap": "tap", + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" + }, + "keywords": [], + "author": "GitHub Inc.", + "license": "ISC", + "files": [ + "bin/", + "lib/" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/npm/redact.git" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.24.3", + "publish": true + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ], + "timeout": 120 + }, + "devDependencies": { + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.24.3", + "tap": "^16.3.10" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/run-script/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/run-script/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..19cec97b1846830f5628807533a144313cd67532 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/run-script/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. 
+ +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/run-script/lib/is-server-package.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/run-script/lib/is-server-package.js new file mode 100644 index 0000000000000000000000000000000000000000..c36c40d4898d5cbfd93b5025548047572a95d2e1 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/run-script/lib/is-server-package.js @@ -0,0 +1,11 @@ +const { stat } = require('node:fs/promises') +const { resolve } = require('node:path') + +module.exports = async path => { + try { + const st = await stat(resolve(path, 'server.js')) + return st.isFile() + } catch (er) { + return false + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/run-script/lib/make-spawn-args.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/run-script/lib/make-spawn-args.js new file mode 100644 index 0000000000000000000000000000000000000000..1c9f02c062f72645b75947204a53457e181384e4 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/run-script/lib/make-spawn-args.js @@ -0,0 +1,53 @@ +/* eslint camelcase: "off" */ +const setPATH = require('./set-path.js') +const { resolve } = require('path') + +let npm_config_node_gyp + +const makeSpawnArgs = options => { + const { + args, + binPaths, + cmd, + env, + event, + nodeGyp, + path, + scriptShell = true, + stdio, + stdioString, + } = options + + if (nodeGyp) { + // npm already pulled this from env and passes it in to options + npm_config_node_gyp = nodeGyp + } else if (env.npm_config_node_gyp) { + // legacy mode for standalone user + npm_config_node_gyp = env.npm_config_node_gyp + } else { + // default + npm_config_node_gyp = require.resolve('node-gyp/bin/node-gyp.js') + } + + const spawnEnv = setPATH(path, binPaths, { + // we need to at least save the PATH environment var + ...process.env, + ...env, + npm_package_json: resolve(path, 'package.json'), + npm_lifecycle_event: event, + npm_lifecycle_script: cmd, + npm_config_node_gyp, + }) + + const spawnOpts = { + env: spawnEnv, + stdioString, + stdio, + cwd: path, + shell: scriptShell, + } + + return [cmd, args, spawnOpts] +} + +module.exports = makeSpawnArgs diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp new file mode 100644 index 0000000000000000000000000000000000000000..5bec64d961a3a9fc8ec007d33c24a7a55c45b9b6 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp @@ -0,0 +1,2 @@ +#!/usr/bin/env sh +node "$npm_config_node_gyp" "$@" diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp.cmd 
b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp.cmd new file mode 100644 index 0000000000000000000000000000000000000000..4c6987ac9868b2240e6260f0295a85936a2fe1bc --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp.cmd @@ -0,0 +1 @@ +@node "%npm_config_node_gyp%" %* diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/run-script/lib/package-envs.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/run-script/lib/package-envs.js new file mode 100644 index 0000000000000000000000000000000000000000..612f850fb076caa1e046c0c1aec3ee57f4804118 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/run-script/lib/package-envs.js @@ -0,0 +1,29 @@ +const packageEnvs = (vals, prefix, env = {}) => { + for (const [key, val] of Object.entries(vals)) { + if (val === undefined) { + continue + } else if (val === null || val === false) { + env[`${prefix}${key}`] = '' + } else if (Array.isArray(val)) { + val.forEach((item, index) => { + packageEnvs({ [`${key}_${index}`]: item }, `${prefix}`, env) + }) + } else if (typeof val === 'object') { + packageEnvs(val, `${prefix}${key}_`, env) + } else { + env[`${prefix}${key}`] = String(val) + } + } + return env +} + +// https://github.com/npm/rfcs/pull/183 defines which fields we put into the environment +module.exports = pkg => { + return packageEnvs({ + name: pkg.name, + version: pkg.version, + config: pkg.config, + engines: pkg.engines, + bin: pkg.bin, + }, 'npm_package_') +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/run-script/lib/run-script-pkg.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/run-script/lib/run-script-pkg.js new file mode 100644 index 0000000000000000000000000000000000000000..161caebb98d975c3dac2e1303bd094d1d1696f74 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/run-script/lib/run-script-pkg.js @@ -0,0 +1,114 @@ +const makeSpawnArgs = require('./make-spawn-args.js') +const promiseSpawn = require('@npmcli/promise-spawn') +const packageEnvs = require('./package-envs.js') +const { isNodeGypPackage, defaultGypInstallScript } = require('@npmcli/node-gyp') +const signalManager = require('./signal-manager.js') +const isServerPackage = require('./is-server-package.js') + +const runScriptPkg = async options => { + const { + args = [], + binPaths = false, + env = {}, + event, + nodeGyp, + path, + pkg, + scriptShell, + // how long to wait for a process.kill signal + // only exposed here so that we can make the test go a bit faster. + signalTimeout = 500, + stdio = 'pipe', + stdioString, + } = options + + const { scripts = {}, gypfile } = pkg + let cmd = null + if (options.cmd) { + cmd = options.cmd + } else if (pkg.scripts && pkg.scripts[event]) { + cmd = pkg.scripts[event] + } else if ( + // If there is no preinstall or install script, default to rebuilding node-gyp packages. 
+ event === 'install' && + !scripts.install && + !scripts.preinstall && + gypfile !== false && + await isNodeGypPackage(path) + ) { + cmd = defaultGypInstallScript + } else if (event === 'start' && await isServerPackage(path)) { + cmd = 'node server.js' + } + + if (!cmd) { + return { code: 0, signal: null } + } + + let inputEnd = () => {} + if (stdio === 'inherit') { + let banner + if (pkg._id) { + banner = `\n> ${pkg._id} ${event}\n` + } else { + banner = `\n> ${event}\n` + } + banner += `> ${cmd.trim().replace(/\n/g, '\n> ')}` + if (args.length) { + banner += ` ${args.join(' ')}` + } + banner += '\n' + const { output, input } = require('proc-log') + output.standard(banner) + inputEnd = input.start() + } + + const [spawnShell, spawnArgs, spawnOpts] = makeSpawnArgs({ + args, + binPaths, + cmd, + env: { ...env, ...packageEnvs(pkg) }, + event, + nodeGyp, + path, + scriptShell, + stdio, + stdioString, + }) + + const p = promiseSpawn(spawnShell, spawnArgs, spawnOpts, { + event, + script: cmd, + pkgid: pkg._id, + path, + }) + + if (stdio === 'inherit') { + signalManager.add(p.process) + } + + if (p.stdin) { + p.stdin.end() + } + + return p.catch(er => { + const { signal } = er + // coverage disabled because win32 never emits signals + /* istanbul ignore next */ + if (stdio === 'inherit' && signal) { + // by the time we reach here, the child has already exited. we send the + // signal back to ourselves again so that npm will exit with the same + // status as the child + process.kill(process.pid, signal) + + // just in case we don't die, reject after 500ms + // this also keeps the node process open long enough to actually + // get the signal, rather than terminating gracefully. + return new Promise((res, rej) => setTimeout(() => rej(er), signalTimeout)) + } else { + throw er + } + }).finally(inputEnd) +} + +module.exports = runScriptPkg diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/run-script/lib/run-script.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/run-script/lib/run-script.js new file mode 100644 index 0000000000000000000000000000000000000000..b00304c8d6e7f562c1736ec1603ec5172b856a75 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/run-script/lib/run-script.js @@ -0,0 +1,15 @@ +const PackageJson = require('@npmcli/package-json') +const runScriptPkg = require('./run-script-pkg.js') +const validateOptions = require('./validate-options.js') +const isServerPackage = require('./is-server-package.js') + +const runScript = async options => { + validateOptions(options) + if (options.pkg) { + return runScriptPkg(options) + } + const { content: pkg } = await PackageJson.normalize(options.path) + return runScriptPkg({ ...options, pkg }) +} + +module.exports = Object.assign(runScript, { isServerPackage }) diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/run-script/lib/set-path.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/run-script/lib/set-path.js new file mode 100644 index 0000000000000000000000000000000000000000..c59c270d9969a05a686cd9acd70ac45bdcc1af30 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/run-script/lib/set-path.js @@ -0,0 +1,45 @@ +const { resolve, dirname, delimiter } = require('path') +// the path here is relative, even though it does not need to be +// in order to make the posix tests pass in windows +const nodeGypPath = 
resolve(__dirname, '../lib/node-gyp-bin') + +// Windows typically calls its PATH environ 'Path', but this is not +// guaranteed, nor is it guaranteed to be the only one. Merge them +// all together in the order they appear in the object. +const setPATH = (projectPath, binPaths, env) => { + const PATH = Object.keys(env).filter(p => /^path$/i.test(p) && env[p]) + .map(p => env[p].split(delimiter)) + .reduce((set, p) => set.concat(p.filter(concatted => !set.includes(concatted))), []) + .join(delimiter) + + const pathArr = [] + if (binPaths) { + pathArr.push(...binPaths) + } + // unshift the ./node_modules/.bin from every folder + // walk up until dirname() does nothing, at the root + // XXX we should specify a cwd that we don't go above + let p = projectPath + let pp + do { + pathArr.push(resolve(p, 'node_modules', '.bin')) + pp = p + p = dirname(p) + } while (p !== pp) + pathArr.push(nodeGypPath, PATH) + + const pathVal = pathArr.join(delimiter) + + // XXX include the node-gyp-bin path somehow? Probably better for + // npm or arborist or whoever to just provide that by putting it in + // the PATH environ, since that's preserved anyway. + for (const key of Object.keys(env)) { + if (/^path$/i.test(key)) { + env[key] = pathVal + } + } + + return env +} + +module.exports = setPATH diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/run-script/lib/signal-manager.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/run-script/lib/signal-manager.js new file mode 100644 index 0000000000000000000000000000000000000000..a099a4af2b9be37c1a946cd518979397ef5eb85c --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/run-script/lib/signal-manager.js @@ -0,0 +1,50 @@ +const runningProcs = new Set() +let handlersInstalled = false + +const forwardedSignals = [ + 'SIGINT', + 'SIGTERM', +] + +// no-op, this is so receiving the signal doesn't cause us to exit immediately +// instead, we exit after all children have exited when we re-send the signal +// to ourselves. 
see the catch handler at the bottom of run-script-pkg.js +const handleSignal = signal => { + for (const proc of runningProcs) { + proc.kill(signal) + } +} + +const setupListeners = () => { + for (const signal of forwardedSignals) { + process.on(signal, handleSignal) + } + handlersInstalled = true +} + +const cleanupListeners = () => { + if (runningProcs.size === 0) { + for (const signal of forwardedSignals) { + process.removeListener(signal, handleSignal) + } + handlersInstalled = false + } +} + +const add = proc => { + runningProcs.add(proc) + if (!handlersInstalled) { + setupListeners() + } + + proc.once('exit', () => { + runningProcs.delete(proc) + cleanupListeners() + }) +} + +module.exports = { + add, + handleSignal, + forwardedSignals, +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/run-script/lib/validate-options.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/run-script/lib/validate-options.js new file mode 100644 index 0000000000000000000000000000000000000000..8d855916ecd151bc10524e0806f55f6caadd62e3 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/run-script/lib/validate-options.js @@ -0,0 +1,39 @@ +const validateOptions = options => { + if (typeof options !== 'object' || !options) { + throw new TypeError('invalid options object provided to runScript') + } + + const { + event, + path, + scriptShell, + env = {}, + stdio = 'pipe', + args = [], + cmd, + } = options + + if (!event || typeof event !== 'string') { + throw new TypeError('valid event not provided to runScript') + } + if (!path || typeof path !== 'string') { + throw new TypeError('valid path not provided to runScript') + } + if (scriptShell !== undefined && typeof scriptShell !== 'string') { + throw new TypeError('invalid scriptShell option provided to runScript') + } + if (typeof env !== 'object' || !env) { + throw new TypeError('invalid env option provided to runScript') + } + if (typeof stdio !== 'string' && !Array.isArray(stdio)) { + throw new TypeError('invalid stdio option provided to runScript') + } + if (!Array.isArray(args) || args.some(a => typeof a !== 'string')) { + throw new TypeError('invalid args option provided to runScript') + } + if (cmd !== undefined && typeof cmd !== 'string') { + throw new TypeError('invalid cmd option provided to runScript') + } +} + +module.exports = validateOptions diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/run-script/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/run-script/package.json new file mode 100644 index 0000000000000000000000000000000000000000..2873f7cbf91c52e4459a60664c75cd712b8df668 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@npmcli/run-script/package.json @@ -0,0 +1,54 @@ +{ + "name": "@npmcli/run-script", + "version": "10.0.0", + "description": "Run a lifecycle script for a package (descendant of npm-lifecycle)", + "author": "GitHub Inc.", + "license": "ISC", + "scripts": { + "test": "tap", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", + "lintfix": "npm run eslint -- --fix", + "postlint": "template-oss-check", + "snap": "tap", + "posttest": "npm run lint", + "template-oss-apply": "template-oss-apply --force" + }, + "devDependencies": { + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.25.0", + "spawk": "^1.8.1", + "tap": "^16.0.1" + }, + 
"dependencies": { + "@npmcli/node-gyp": "^4.0.0", + "@npmcli/package-json": "^7.0.0", + "@npmcli/promise-spawn": "^8.0.0", + "node-gyp": "^11.0.0", + "proc-log": "^5.0.0", + "which": "^5.0.0" + }, + "files": [ + "bin/", + "lib/" + ], + "main": "lib/run-script.js", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/run-script.git" + }, + "engines": { + "node": "^20.17.0 || >=22.9.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.25.0", + "publish": "true" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..261eeb9e9f8b2b4b0d119366dda99c6fd7d35c64 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/examples/is-default-value.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/examples/is-default-value.js new file mode 100644 index 0000000000000000000000000000000000000000..0a67972b71d135a4b75953d2fe2b43bf9a0c585e --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/examples/is-default-value.js @@ -0,0 +1,25 @@ +'use strict'; + +// This example shows how to understand if a default value is used or not. + +// 1. const { parseArgs } = require('node:util'); // from node +// 2. 
const { parseArgs } = require('@pkgjs/parseargs'); // from package +const { parseArgs } = require('..'); // in repo + +const options = { + file: { short: 'f', type: 'string', default: 'FOO' }, +}; + +const { values, tokens } = parseArgs({ options, tokens: true }); + +const isFileDefault = !tokens.some((token) => token.kind === 'option' && + token.name === 'file' +); + +console.log(values); +console.log(`Is the file option [${values.file}] the default value? ${isFileDefault}`); + +// Try the following: +// node is-default-value.js +// node is-default-value.js -f FILE +// node is-default-value.js --file FILE diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/examples/limit-long-syntax.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/examples/limit-long-syntax.js new file mode 100644 index 0000000000000000000000000000000000000000..943e643ee9553b3877b6b7ed58e69dcb9e94fb39 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/examples/limit-long-syntax.js @@ -0,0 +1,35 @@ +'use strict'; + +// This is an example of using tokens to add a custom behaviour. +// +// Require the use of `=` for long options and values by blocking +// the use of space separated values. +// So allow `--foo=bar`, and not allow `--foo bar`. +// +// Note: this is not a common behaviour, most CLIs allow both forms. + +// 1. const { parseArgs } = require('node:util'); // from node +// 2. const { parseArgs } = require('@pkgjs/parseargs'); // from package +const { parseArgs } = require('..'); // in repo + +const options = { + file: { short: 'f', type: 'string' }, + log: { type: 'string' }, +}; + +const { values, tokens } = parseArgs({ options, tokens: true }); + +const badToken = tokens.find((token) => token.kind === 'option' && + token.value != null && + token.rawName.startsWith('--') && + !token.inlineValue +); +if (badToken) { + throw new Error(`Option value for '${badToken.rawName}' must be inline, like '${badToken.rawName}=VALUE'`); +} + +console.log(values); + +// Try the following: +// node limit-long-syntax.js -f FILE --log=LOG +// node limit-long-syntax.js --file FILE diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/examples/negate.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/examples/negate.js new file mode 100644 index 0000000000000000000000000000000000000000..b6634690a4a0c03d4c50239bc16fa38440dea5ae --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/examples/negate.js @@ -0,0 +1,43 @@ +'use strict'; + +// This example is used in the documentation. + +// How might I add my own support for --no-foo? + +// 1. const { parseArgs } = require('node:util'); // from node +// 2. const { parseArgs } = require('@pkgjs/parseargs'); // from package +const { parseArgs } = require('..'); // in repo + +const options = { + 'color': { type: 'boolean' }, + 'no-color': { type: 'boolean' }, + 'logfile': { type: 'string' }, + 'no-logfile': { type: 'boolean' }, +}; +const { values, tokens } = parseArgs({ options, tokens: true }); + +// Reprocess the option tokens and overwrite the returned values. 
+tokens + .filter((token) => token.kind === 'option') + .forEach((token) => { + if (token.name.startsWith('no-')) { + // Store foo:false for --no-foo + const positiveName = token.name.slice(3); + values[positiveName] = false; + delete values[token.name]; + } else { + // Resave value so last one wins if both --foo and --no-foo. + values[token.name] = token.value ?? true; + } + }); + +const color = values.color; +const logfile = values.logfile ?? 'default.log'; + +console.log({ logfile, color }); + +// Try the following: +// node negate.js +// node negate.js --no-logfile --no-color +// node negate.js --logfile=test.log --color +// node negate.js --no-logfile --logfile=test.log --color --no-color diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/examples/no-repeated-options.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/examples/no-repeated-options.js new file mode 100644 index 0000000000000000000000000000000000000000..0c324688af03058fcd0a8b0b5dbdcb188e902f50 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/examples/no-repeated-options.js @@ -0,0 +1,31 @@ +'use strict'; + +// This is an example of using tokens to add a custom behaviour. +// +// Throw an error if an option is used more than once. + +// 1. const { parseArgs } = require('node:util'); // from node +// 2. const { parseArgs } = require('@pkgjs/parseargs'); // from package +const { parseArgs } = require('..'); // in repo + +const options = { + ding: { type: 'boolean', short: 'd' }, + beep: { type: 'boolean', short: 'b' } +}; +const { values, tokens } = parseArgs({ options, tokens: true }); + +const seenBefore = new Set(); +tokens.forEach((token) => { + if (token.kind !== 'option') return; + if (seenBefore.has(token.name)) { + throw new Error(`option '${token.name}' used multiple times`); + } + seenBefore.add(token.name); +}); + +console.log(values); + +// Try the following: +// node no-repeated-options --ding --beep +// node no-repeated-options --beep -b +// node no-repeated-options -ddd diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/examples/ordered-options.mjs b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/examples/ordered-options.mjs new file mode 100644 index 0000000000000000000000000000000000000000..8ab7367b8bbb1120db7fc9f9365353af0ab9377e --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/examples/ordered-options.mjs @@ -0,0 +1,41 @@ +// This is an example of using tokens to add a custom behaviour. +// +// This adds an option order check so that --some-unstable-option +// may only be used after --enable-experimental-options +// +// Note: this is not a common behaviour, the order of different options +// does not usually matter.
+ +import { parseArgs } from '../index.js'; + +function findTokenIndex(tokens, target) { + return tokens.findIndex((token) => token.kind === 'option' && + token.name === target + ); +} + +const experimentalName = 'enable-experimental-options'; +const unstableName = 'some-unstable-option'; + +const options = { + [experimentalName]: { type: 'boolean' }, + [unstableName]: { type: 'boolean' }, +}; + +const { values, tokens } = parseArgs({ options, tokens: true }); + +const experimentalIndex = findTokenIndex(tokens, experimentalName); +const unstableIndex = findTokenIndex(tokens, unstableName); +if (unstableIndex !== -1 && + ((experimentalIndex === -1) || (unstableIndex < experimentalIndex))) { + throw new Error(`'--${experimentalName}' must be specified before '--${unstableName}'`); +} + +console.log(values); + +/* eslint-disable max-len */ +// Try the following: +// node ordered-options.mjs +// node ordered-options.mjs --some-unstable-option +// node ordered-options.mjs --some-unstable-option --enable-experimental-options +// node ordered-options.mjs --enable-experimental-options --some-unstable-option diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/examples/simple-hard-coded.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/examples/simple-hard-coded.js new file mode 100644 index 0000000000000000000000000000000000000000..eff04c2a60fa21dffb1b293c27068d0a0af3c963 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/examples/simple-hard-coded.js @@ -0,0 +1,26 @@ +'use strict'; + +// This example is used in the documentation. + +// 1. const { parseArgs } = require('node:util'); // from node +// 2. const { parseArgs } = require('@pkgjs/parseargs'); // from package +const { parseArgs } = require('..'); // in repo + +const args = ['-f', '--bar', 'b']; +const options = { + foo: { + type: 'boolean', + short: 'f' + }, + bar: { + type: 'string' + } +}; +const { + values, + positionals +} = parseArgs({ args, options }); +console.log(values, positionals); + +// Try the following: +// node simple-hard-coded.js diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/index.js new file mode 100644 index 0000000000000000000000000000000000000000..b1004c7b72f271f4f529b1597d643a9272e3cf06 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/index.js @@ -0,0 +1,396 @@ +'use strict'; + +const { + ArrayPrototypeForEach, + ArrayPrototypeIncludes, + ArrayPrototypeMap, + ArrayPrototypePush, + ArrayPrototypePushApply, + ArrayPrototypeShift, + ArrayPrototypeSlice, + ArrayPrototypeUnshiftApply, + ObjectEntries, + ObjectPrototypeHasOwnProperty: ObjectHasOwn, + StringPrototypeCharAt, + StringPrototypeIndexOf, + StringPrototypeSlice, + StringPrototypeStartsWith, +} = require('./internal/primordials'); + +const { + validateArray, + validateBoolean, + validateBooleanArray, + validateObject, + validateString, + validateStringArray, + validateUnion, +} = require('./internal/validators'); + +const { + kEmptyObject, +} = require('./internal/util'); + +const { + findLongOptionForShort, + isLoneLongOption, + isLoneShortOption, + isLongOptionAndValue, + isOptionValue, + isOptionLikeValue, + isShortOptionAndValue, + isShortOptionGroup, + useDefaultValueOption, + objectGetOwn, + 
optionsGetOwn, +} = require('./utils'); + +const { + codes: { + ERR_INVALID_ARG_VALUE, + ERR_PARSE_ARGS_INVALID_OPTION_VALUE, + ERR_PARSE_ARGS_UNKNOWN_OPTION, + ERR_PARSE_ARGS_UNEXPECTED_POSITIONAL, + }, +} = require('./internal/errors'); + +function getMainArgs() { + // Work out where to slice process.argv for user supplied arguments. + + // Check node options for scenarios where user CLI args follow executable. + const execArgv = process.execArgv; + if (ArrayPrototypeIncludes(execArgv, '-e') || + ArrayPrototypeIncludes(execArgv, '--eval') || + ArrayPrototypeIncludes(execArgv, '-p') || + ArrayPrototypeIncludes(execArgv, '--print')) { + return ArrayPrototypeSlice(process.argv, 1); + } + + // Normally first two arguments are executable and script, then CLI arguments + return ArrayPrototypeSlice(process.argv, 2); +} + +/** + * In strict mode, throw for possible usage errors like --foo --bar + * + * @param {object} token - from tokens as available from parseArgs + */ +function checkOptionLikeValue(token) { + if (!token.inlineValue && isOptionLikeValue(token.value)) { + // Only show short example if user used short option. + const example = StringPrototypeStartsWith(token.rawName, '--') ? + `'${token.rawName}=-XYZ'` : + `'--${token.name}=-XYZ' or '${token.rawName}-XYZ'`; + const errorMessage = `Option '${token.rawName}' argument is ambiguous. +Did you forget to specify the option argument for '${token.rawName}'? +To specify an option argument starting with a dash use ${example}.`; + throw new ERR_PARSE_ARGS_INVALID_OPTION_VALUE(errorMessage); + } +} + +/** + * In strict mode, throw for usage errors. + * + * @param {object} config - from config passed to parseArgs + * @param {object} token - from tokens as available from parseArgs + */ +function checkOptionUsage(config, token) { + if (!ObjectHasOwn(config.options, token.name)) { + throw new ERR_PARSE_ARGS_UNKNOWN_OPTION( + token.rawName, config.allowPositionals); + } + + const short = optionsGetOwn(config.options, token.name, 'short'); + const shortAndLong = `${short ? `-${short}, ` : ''}--${token.name}`; + const type = optionsGetOwn(config.options, token.name, 'type'); + if (type === 'string' && typeof token.value !== 'string') { + throw new ERR_PARSE_ARGS_INVALID_OPTION_VALUE(`Option '${shortAndLong} <value>' argument missing`); + } + // (Idiomatic test for undefined||null, expecting undefined.) + if (type === 'boolean' && token.value != null) { + throw new ERR_PARSE_ARGS_INVALID_OPTION_VALUE(`Option '${shortAndLong}' does not take an argument`); + } +} + + +/** + * Store the option value in `values`. + * + * @param {string} longOption - long option name e.g. 'foo' + * @param {string|undefined} optionValue - value from user args + * @param {object} options - option configs, from parseArgs({ options }) + * @param {object} values - option values returned in `values` by parseArgs + */ +function storeOption(longOption, optionValue, options, values) { + if (longOption === '__proto__') { + return; // No. Just no. + } + + // We store based on the option value rather than option type, + // preserving the user's intent for the author to deal with. + const newValue = optionValue ?? true; + if (optionsGetOwn(options, longOption, 'multiple')) { + // Always store value in array, including for boolean. + // values[longOption] starts out not present, + // first value is added as new array [newValue], + // subsequent values are pushed to existing array.
+ // (note: values has null prototype, so simpler usage) + if (values[longOption]) { + ArrayPrototypePush(values[longOption], newValue); + } else { + values[longOption] = [newValue]; + } + } else { + values[longOption] = newValue; + } +} + +/** + * Store the default option value in `values`. + * + * @param {string} longOption - long option name e.g. 'foo' + * @param {string + * | boolean + * | string[] + * | boolean[]} optionValue - default value from option config + * @param {object} values - option values returned in `values` by parseArgs + */ +function storeDefaultOption(longOption, optionValue, values) { + if (longOption === '__proto__') { + return; // No. Just no. + } + + values[longOption] = optionValue; +} + +/** + * Process args and turn into identified tokens: + * - option (along with value, if any) + * - positional + * - option-terminator + * + * @param {string[]} args - from parseArgs({ args }) or mainArgs + * @param {object} options - option configs, from parseArgs({ options }) + */ +function argsToTokens(args, options) { + const tokens = []; + let index = -1; + let groupCount = 0; + + const remainingArgs = ArrayPrototypeSlice(args); + while (remainingArgs.length > 0) { + const arg = ArrayPrototypeShift(remainingArgs); + const nextArg = remainingArgs[0]; + if (groupCount > 0) + groupCount--; + else + index++; + + // Check if `arg` is an options terminator. + // Guideline 10 in https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap12.html + if (arg === '--') { + // Everything after a bare '--' is considered a positional argument. + ArrayPrototypePush(tokens, { kind: 'option-terminator', index }); + ArrayPrototypePushApply( + tokens, ArrayPrototypeMap(remainingArgs, (arg) => { + return { kind: 'positional', index: ++index, value: arg }; + }) + ); + break; // Finished processing args, leave while loop. + } + + if (isLoneShortOption(arg)) { + // e.g. '-f' + const shortOption = StringPrototypeCharAt(arg, 1); + const longOption = findLongOptionForShort(shortOption, options); + let value; + let inlineValue; + if (optionsGetOwn(options, longOption, 'type') === 'string' && + isOptionValue(nextArg)) { + // e.g. '-f', 'bar' + value = ArrayPrototypeShift(remainingArgs); + inlineValue = false; + } + ArrayPrototypePush( + tokens, + { kind: 'option', name: longOption, rawName: arg, + index, value, inlineValue }); + if (value != null) ++index; + continue; + } + + if (isShortOptionGroup(arg, options)) { + // Expand -fXzy to -f -X -z -y + const expanded = []; + for (let index = 1; index < arg.length; index++) { + const shortOption = StringPrototypeCharAt(arg, index); + const longOption = findLongOptionForShort(shortOption, options); + if (optionsGetOwn(options, longOption, 'type') !== 'string' || + index === arg.length - 1) { + // Boolean option, or last short in group. Well formed. + ArrayPrototypePush(expanded, `-${shortOption}`); + } else { + // String option in middle. Yuck. + // Expand -abfFILE to -a -b -fFILE + ArrayPrototypePush(expanded, `-${StringPrototypeSlice(arg, index)}`); + break; // finished short group + } + } + ArrayPrototypeUnshiftApply(remainingArgs, expanded); + groupCount = expanded.length; + continue; + } + + if (isShortOptionAndValue(arg, options)) { + // e.g. 
-fFILE + const shortOption = StringPrototypeCharAt(arg, 1); + const longOption = findLongOptionForShort(shortOption, options); + const value = StringPrototypeSlice(arg, 2); + ArrayPrototypePush( + tokens, + { kind: 'option', name: longOption, rawName: `-${shortOption}`, + index, value, inlineValue: true }); + continue; + } + + if (isLoneLongOption(arg)) { + // e.g. '--foo' + const longOption = StringPrototypeSlice(arg, 2); + let value; + let inlineValue; + if (optionsGetOwn(options, longOption, 'type') === 'string' && + isOptionValue(nextArg)) { + // e.g. '--foo', 'bar' + value = ArrayPrototypeShift(remainingArgs); + inlineValue = false; + } + ArrayPrototypePush( + tokens, + { kind: 'option', name: longOption, rawName: arg, + index, value, inlineValue }); + if (value != null) ++index; + continue; + } + + if (isLongOptionAndValue(arg)) { + // e.g. --foo=bar + const equalIndex = StringPrototypeIndexOf(arg, '='); + const longOption = StringPrototypeSlice(arg, 2, equalIndex); + const value = StringPrototypeSlice(arg, equalIndex + 1); + ArrayPrototypePush( + tokens, + { kind: 'option', name: longOption, rawName: `--${longOption}`, + index, value, inlineValue: true }); + continue; + } + + ArrayPrototypePush(tokens, { kind: 'positional', index, value: arg }); + } + + return tokens; +} + +const parseArgs = (config = kEmptyObject) => { + const args = objectGetOwn(config, 'args') ?? getMainArgs(); + const strict = objectGetOwn(config, 'strict') ?? true; + const allowPositionals = objectGetOwn(config, 'allowPositionals') ?? !strict; + const returnTokens = objectGetOwn(config, 'tokens') ?? false; + const options = objectGetOwn(config, 'options') ?? { __proto__: null }; + // Bundle these up for passing to strict-mode checks. + const parseConfig = { args, strict, options, allowPositionals }; + + // Validate input configuration. + validateArray(args, 'args'); + validateBoolean(strict, 'strict'); + validateBoolean(allowPositionals, 'allowPositionals'); + validateBoolean(returnTokens, 'tokens'); + validateObject(options, 'options'); + ArrayPrototypeForEach( + ObjectEntries(options), + ({ 0: longOption, 1: optionConfig }) => { + validateObject(optionConfig, `options.${longOption}`); + + // type is required + const optionType = objectGetOwn(optionConfig, 'type'); + validateUnion(optionType, `options.${longOption}.type`, ['string', 'boolean']); + + if (ObjectHasOwn(optionConfig, 'short')) { + const shortOption = optionConfig.short; + validateString(shortOption, `options.${longOption}.short`); + if (shortOption.length !== 1) { + throw new ERR_INVALID_ARG_VALUE( + `options.${longOption}.short`, + shortOption, + 'must be a single character' + ); + } + } + + const multipleOption = objectGetOwn(optionConfig, 'multiple'); + if (ObjectHasOwn(optionConfig, 'multiple')) { + validateBoolean(multipleOption, `options.${longOption}.multiple`); + } + + const defaultValue = objectGetOwn(optionConfig, 'default'); + if (defaultValue !== undefined) { + let validator; + switch (optionType) { + case 'string': + validator = multipleOption ? validateStringArray : validateString; + break; + + case 'boolean': + validator = multipleOption ? 
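+          // (e.g. a config of { type: 'boolean', multiple: true,
+          // default: [true, false] } passes, while a default of 'yes' would
+          // make the chosen validator throw ERR_INVALID_ARG_TYPE.)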
validateBooleanArray : validateBoolean; + break; + } + validator(defaultValue, `options.${longOption}.default`); + } + } + ); + + // Phase 1: identify tokens + const tokens = argsToTokens(args, options); + + // Phase 2: process tokens into parsed option values and positionals + const result = { + values: { __proto__: null }, + positionals: [], + }; + if (returnTokens) { + result.tokens = tokens; + } + ArrayPrototypeForEach(tokens, (token) => { + if (token.kind === 'option') { + if (strict) { + checkOptionUsage(parseConfig, token); + checkOptionLikeValue(token); + } + storeOption(token.name, token.value, options, result.values); + } else if (token.kind === 'positional') { + if (!allowPositionals) { + throw new ERR_PARSE_ARGS_UNEXPECTED_POSITIONAL(token.value); + } + ArrayPrototypePush(result.positionals, token.value); + } + }); + + // Phase 3: fill in default values for missing args + ArrayPrototypeForEach(ObjectEntries(options), ({ 0: longOption, + 1: optionConfig }) => { + const mustSetDefault = useDefaultValueOption(longOption, + optionConfig, + result.values); + if (mustSetDefault) { + storeDefaultOption(longOption, + objectGetOwn(optionConfig, 'default'), + result.values); + } + }); + + + return result; +}; + +module.exports = { + parseArgs, +}; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/internal/errors.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/internal/errors.js new file mode 100644 index 0000000000000000000000000000000000000000..e1b237b5b16395895e310d18d0733410c940d17a --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/internal/errors.js @@ -0,0 +1,47 @@ +'use strict'; + +class ERR_INVALID_ARG_TYPE extends TypeError { + constructor(name, expected, actual) { + super(`${name} must be ${expected} got ${actual}`); + this.code = 'ERR_INVALID_ARG_TYPE'; + } +} + +class ERR_INVALID_ARG_VALUE extends TypeError { + constructor(arg1, arg2, expected) { + super(`The property ${arg1} ${expected}. Received '${arg2}'`); + this.code = 'ERR_INVALID_ARG_VALUE'; + } +} + +class ERR_PARSE_ARGS_INVALID_OPTION_VALUE extends Error { + constructor(message) { + super(message); + this.code = 'ERR_PARSE_ARGS_INVALID_OPTION_VALUE'; + } +} + +class ERR_PARSE_ARGS_UNKNOWN_OPTION extends Error { + constructor(option, allowPositionals) { + const suggestDashDash = allowPositionals ? `. To specify a positional argument starting with a '-', place it at the end of the command after '--', as in '-- ${JSON.stringify(option)}` : ''; + super(`Unknown option '${option}'${suggestDashDash}`); + this.code = 'ERR_PARSE_ARGS_UNKNOWN_OPTION'; + } +} + +class ERR_PARSE_ARGS_UNEXPECTED_POSITIONAL extends Error { + constructor(positional) { + super(`Unexpected argument '${positional}'. 
This command does not take positional arguments`); + this.code = 'ERR_PARSE_ARGS_UNEXPECTED_POSITIONAL'; + } +} + +module.exports = { + codes: { + ERR_INVALID_ARG_TYPE, + ERR_INVALID_ARG_VALUE, + ERR_PARSE_ARGS_INVALID_OPTION_VALUE, + ERR_PARSE_ARGS_UNKNOWN_OPTION, + ERR_PARSE_ARGS_UNEXPECTED_POSITIONAL, + } +}; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/internal/primordials.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/internal/primordials.js new file mode 100644 index 0000000000000000000000000000000000000000..63e23ab117a9ccd324f792113c7a62e2f17e8ea2 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/internal/primordials.js @@ -0,0 +1,393 @@ +/* +This file is copied from https://github.com/nodejs/node/blob/v14.19.3/lib/internal/per_context/primordials.js +under the following license: + +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +*/ + +'use strict'; + +/* eslint-disable node-core/prefer-primordials */ + +// This file subclasses and stores the JS builtins that come from the VM +// so that Node.js's builtin modules do not need to later look these up from +// the global proxy, which can be mutated by users. + +// Use of primordials have sometimes a dramatic impact on performance, please +// benchmark all changes made in performance-sensitive areas of the codebase. +// See: https://github.com/nodejs/node/pull/38248 + +const primordials = {}; + +const { + defineProperty: ReflectDefineProperty, + getOwnPropertyDescriptor: ReflectGetOwnPropertyDescriptor, + ownKeys: ReflectOwnKeys, +} = Reflect; + +// `uncurryThis` is equivalent to `func => Function.prototype.call.bind(func)`. +// It is using `bind.bind(call)` to avoid using `Function.prototype.bind` +// and `Function.prototype.call` after it may have been mutated by users. +const { apply, bind, call } = Function.prototype; +const uncurryThis = bind.bind(call); +primordials.uncurryThis = uncurryThis; + +// `applyBind` is equivalent to `func => Function.prototype.apply.bind(func)`. +// It is using `bind.bind(apply)` to avoid using `Function.prototype.bind` +// and `Function.prototype.apply` after it may have been mutated by users. 
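+// (Illustration: uncurryThis(Array.prototype.slice)([1, 2, 3], 1) behaves like
+// [1, 2, 3].slice(1), and keeps working even if user code later replaces
+// Array.prototype.slice; applyBind does the same for apply-style calls.)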
+const applyBind = bind.bind(apply); +primordials.applyBind = applyBind; + +// Methods that accept a variable number of arguments, and thus it's useful to +// also create `${prefix}${key}Apply`, which uses `Function.prototype.apply`, +// instead of `Function.prototype.call`, and thus doesn't require iterator +// destructuring. +const varargsMethods = [ + // 'ArrayPrototypeConcat' is omitted, because it performs the spread + // on its own for arrays and array-likes with a truthy + // @@isConcatSpreadable symbol property. + 'ArrayOf', + 'ArrayPrototypePush', + 'ArrayPrototypeUnshift', + // 'FunctionPrototypeCall' is omitted, since there's 'ReflectApply' + // and 'FunctionPrototypeApply'. + 'MathHypot', + 'MathMax', + 'MathMin', + 'StringPrototypeConcat', + 'TypedArrayOf', +]; + +function getNewKey(key) { + return typeof key === 'symbol' ? + `Symbol${key.description[7].toUpperCase()}${key.description.slice(8)}` : + `${key[0].toUpperCase()}${key.slice(1)}`; +} + +function copyAccessor(dest, prefix, key, { enumerable, get, set }) { + ReflectDefineProperty(dest, `${prefix}Get${key}`, { + value: uncurryThis(get), + enumerable + }); + if (set !== undefined) { + ReflectDefineProperty(dest, `${prefix}Set${key}`, { + value: uncurryThis(set), + enumerable + }); + } +} + +function copyPropsRenamed(src, dest, prefix) { + for (const key of ReflectOwnKeys(src)) { + const newKey = getNewKey(key); + const desc = ReflectGetOwnPropertyDescriptor(src, key); + if ('get' in desc) { + copyAccessor(dest, prefix, newKey, desc); + } else { + const name = `${prefix}${newKey}`; + ReflectDefineProperty(dest, name, desc); + if (varargsMethods.includes(name)) { + ReflectDefineProperty(dest, `${name}Apply`, { + // `src` is bound as the `this` so that the static `this` points + // to the object it was defined on, + // e.g.: `ArrayOfApply` gets a `this` of `Array`: + value: applyBind(desc.value, src), + }); + } + } + } +} + +function copyPropsRenamedBound(src, dest, prefix) { + for (const key of ReflectOwnKeys(src)) { + const newKey = getNewKey(key); + const desc = ReflectGetOwnPropertyDescriptor(src, key); + if ('get' in desc) { + copyAccessor(dest, prefix, newKey, desc); + } else { + const { value } = desc; + if (typeof value === 'function') { + desc.value = value.bind(src); + } + + const name = `${prefix}${newKey}`; + ReflectDefineProperty(dest, name, desc); + if (varargsMethods.includes(name)) { + ReflectDefineProperty(dest, `${name}Apply`, { + value: applyBind(value, src), + }); + } + } + } +} + +function copyPrototype(src, dest, prefix) { + for (const key of ReflectOwnKeys(src)) { + const newKey = getNewKey(key); + const desc = ReflectGetOwnPropertyDescriptor(src, key); + if ('get' in desc) { + copyAccessor(dest, prefix, newKey, desc); + } else { + const { value } = desc; + if (typeof value === 'function') { + desc.value = uncurryThis(value); + } + + const name = `${prefix}${newKey}`; + ReflectDefineProperty(dest, name, desc); + if (varargsMethods.includes(name)) { + ReflectDefineProperty(dest, `${name}Apply`, { + value: applyBind(value), + }); + } + } + } +} + +// Create copies of configurable value properties of the global object +[ + 'Proxy', + 'globalThis', +].forEach((name) => { + // eslint-disable-next-line no-restricted-globals + primordials[name] = globalThis[name]; +}); + +// Create copies of URI handling functions +[ + decodeURI, + decodeURIComponent, + encodeURI, + encodeURIComponent, +].forEach((fn) => { + primordials[fn.name] = fn; +}); + +// Create copies of the namespace objects +[ + 'JSON', + 'Math', + 
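+  // (copyPropsRenamed flattens each namespace into prefixed keys, e.g.
+  // Math.max becomes primordials.MathMax, plus primordials.MathMaxApply
+  // because 'MathMax' appears in varargsMethods above.)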
'Proxy', + 'Reflect', +].forEach((name) => { + // eslint-disable-next-line no-restricted-globals + copyPropsRenamed(global[name], primordials, name); +}); + +// Create copies of intrinsic objects +[ + 'Array', + 'ArrayBuffer', + 'BigInt', + 'BigInt64Array', + 'BigUint64Array', + 'Boolean', + 'DataView', + 'Date', + 'Error', + 'EvalError', + 'Float32Array', + 'Float64Array', + 'Function', + 'Int16Array', + 'Int32Array', + 'Int8Array', + 'Map', + 'Number', + 'Object', + 'RangeError', + 'ReferenceError', + 'RegExp', + 'Set', + 'String', + 'Symbol', + 'SyntaxError', + 'TypeError', + 'URIError', + 'Uint16Array', + 'Uint32Array', + 'Uint8Array', + 'Uint8ClampedArray', + 'WeakMap', + 'WeakSet', +].forEach((name) => { + // eslint-disable-next-line no-restricted-globals + const original = global[name]; + primordials[name] = original; + copyPropsRenamed(original, primordials, name); + copyPrototype(original.prototype, primordials, `${name}Prototype`); +}); + +// Create copies of intrinsic objects that require a valid `this` to call +// static methods. +// Refs: https://www.ecma-international.org/ecma-262/#sec-promise.all +[ + 'Promise', +].forEach((name) => { + // eslint-disable-next-line no-restricted-globals + const original = global[name]; + primordials[name] = original; + copyPropsRenamedBound(original, primordials, name); + copyPrototype(original.prototype, primordials, `${name}Prototype`); +}); + +// Create copies of abstract intrinsic objects that are not directly exposed +// on the global object. +// Refs: https://tc39.es/ecma262/#sec-%typedarray%-intrinsic-object +[ + { name: 'TypedArray', original: Reflect.getPrototypeOf(Uint8Array) }, + { name: 'ArrayIterator', original: { + prototype: Reflect.getPrototypeOf(Array.prototype[Symbol.iterator]()), + } }, + { name: 'StringIterator', original: { + prototype: Reflect.getPrototypeOf(String.prototype[Symbol.iterator]()), + } }, +].forEach(({ name, original }) => { + primordials[name] = original; + // The static %TypedArray% methods require a valid `this`, but can't be bound, + // as they need a subclass constructor as the receiver: + copyPrototype(original, primordials, name); + copyPrototype(original.prototype, primordials, `${name}Prototype`); +}); + +/* eslint-enable node-core/prefer-primordials */ + +const { + ArrayPrototypeForEach, + FunctionPrototypeCall, + Map, + ObjectFreeze, + ObjectSetPrototypeOf, + Set, + SymbolIterator, + WeakMap, + WeakSet, +} = primordials; + +// Because these functions are used by `makeSafe`, which is exposed +// on the `primordials` object, it's important to use const references +// to the primordials that they use: +const createSafeIterator = (factory, next) => { + class SafeIterator { + constructor(iterable) { + this._iterator = factory(iterable); + } + next() { + return next(this._iterator); + } + [SymbolIterator]() { + return this; + } + } + ObjectSetPrototypeOf(SafeIterator.prototype, null); + ObjectFreeze(SafeIterator.prototype); + ObjectFreeze(SafeIterator); + return SafeIterator; +}; + +primordials.SafeArrayIterator = createSafeIterator( + primordials.ArrayPrototypeSymbolIterator, + primordials.ArrayIteratorPrototypeNext +); +primordials.SafeStringIterator = createSafeIterator( + primordials.StringPrototypeSymbolIterator, + primordials.StringIteratorPrototypeNext +); + +const copyProps = (src, dest) => { + ArrayPrototypeForEach(ReflectOwnKeys(src), (key) => { + if (!ReflectGetOwnPropertyDescriptor(dest, key)) { + ReflectDefineProperty( + dest, + key, + ReflectGetOwnPropertyDescriptor(src, key)); + } + }); 
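+  // (Only keys missing from `dest` are copied, so anything already defined on
+  // the safe copy, e.g. an overridden method on a Safe* subclass, wins.)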
+}; + +const makeSafe = (unsafe, safe) => { + if (SymbolIterator in unsafe.prototype) { + const dummy = new unsafe(); + let next; // We can reuse the same `next` method. + + ArrayPrototypeForEach(ReflectOwnKeys(unsafe.prototype), (key) => { + if (!ReflectGetOwnPropertyDescriptor(safe.prototype, key)) { + const desc = ReflectGetOwnPropertyDescriptor(unsafe.prototype, key); + if ( + typeof desc.value === 'function' && + desc.value.length === 0 && + SymbolIterator in (FunctionPrototypeCall(desc.value, dummy) ?? {}) + ) { + const createIterator = uncurryThis(desc.value); + next = next ?? uncurryThis(createIterator(dummy).next); + const SafeIterator = createSafeIterator(createIterator, next); + desc.value = function() { + return new SafeIterator(this); + }; + } + ReflectDefineProperty(safe.prototype, key, desc); + } + }); + } else { + copyProps(unsafe.prototype, safe.prototype); + } + copyProps(unsafe, safe); + + ObjectSetPrototypeOf(safe.prototype, null); + ObjectFreeze(safe.prototype); + ObjectFreeze(safe); + return safe; +}; +primordials.makeSafe = makeSafe; + +// Subclass the constructors because we need to use their prototype +// methods later. +// Defining the `constructor` is necessary here to avoid the default +// constructor which uses the user-mutable `%ArrayIteratorPrototype%.next`. +primordials.SafeMap = makeSafe( + Map, + class SafeMap extends Map { + constructor(i) { super(i); } // eslint-disable-line no-useless-constructor + } +); +primordials.SafeWeakMap = makeSafe( + WeakMap, + class SafeWeakMap extends WeakMap { + constructor(i) { super(i); } // eslint-disable-line no-useless-constructor + } +); +primordials.SafeSet = makeSafe( + Set, + class SafeSet extends Set { + constructor(i) { super(i); } // eslint-disable-line no-useless-constructor + } +); +primordials.SafeWeakSet = makeSafe( + WeakSet, + class SafeWeakSet extends WeakSet { + constructor(i) { super(i); } // eslint-disable-line no-useless-constructor + } +); + +ObjectSetPrototypeOf(primordials, null); +ObjectFreeze(primordials); + +module.exports = primordials; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/internal/util.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/internal/util.js new file mode 100644 index 0000000000000000000000000000000000000000..b9b8fe5b8d7c02ec295f7b7b01d0bf3ef9fbccf1 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/internal/util.js @@ -0,0 +1,14 @@ +'use strict'; + +// This is a placeholder for util.js in node.js land. + +const { + ObjectCreate, + ObjectFreeze, +} = require('./primordials'); + +const kEmptyObject = ObjectFreeze(ObjectCreate(null)); + +module.exports = { + kEmptyObject, +}; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/internal/validators.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/internal/validators.js new file mode 100644 index 0000000000000000000000000000000000000000..b5ac4fb501eff454ad583570027f8b1aa044ca43 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/internal/validators.js @@ -0,0 +1,89 @@ +'use strict'; + +// This file is a proxy of the original file located at: +// https://github.com/nodejs/node/blob/main/lib/internal/validators.js +// Every addition or modification to this file must be evaluated +// during the PR review. 
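+// (For illustration: validateString(42, 'options.foo.short') throws
+// ERR_INVALID_ARG_TYPE with the message
+// "options.foo.short must be String got 42", per ./errors.js above.)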
+ +const { + ArrayIsArray, + ArrayPrototypeIncludes, + ArrayPrototypeJoin, +} = require('./primordials'); + +const { + codes: { + ERR_INVALID_ARG_TYPE + } +} = require('./errors'); + +function validateString(value, name) { + if (typeof value !== 'string') { + throw new ERR_INVALID_ARG_TYPE(name, 'String', value); + } +} + +function validateUnion(value, name, union) { + if (!ArrayPrototypeIncludes(union, value)) { + throw new ERR_INVALID_ARG_TYPE(name, `('${ArrayPrototypeJoin(union, '|')}')`, value); + } +} + +function validateBoolean(value, name) { + if (typeof value !== 'boolean') { + throw new ERR_INVALID_ARG_TYPE(name, 'Boolean', value); + } +} + +function validateArray(value, name) { + if (!ArrayIsArray(value)) { + throw new ERR_INVALID_ARG_TYPE(name, 'Array', value); + } +} + +function validateStringArray(value, name) { + validateArray(value, name); + for (let i = 0; i < value.length; i++) { + validateString(value[i], `${name}[${i}]`); + } +} + +function validateBooleanArray(value, name) { + validateArray(value, name); + for (let i = 0; i < value.length; i++) { + validateBoolean(value[i], `${name}[${i}]`); + } +} + +/** + * @param {unknown} value + * @param {string} name + * @param {{ + * allowArray?: boolean, + * allowFunction?: boolean, + * nullable?: boolean + * }} [options] + */ +function validateObject(value, name, options) { + const useDefaultOptions = options == null; + const allowArray = useDefaultOptions ? false : options.allowArray; + const allowFunction = useDefaultOptions ? false : options.allowFunction; + const nullable = useDefaultOptions ? false : options.nullable; + if ((!nullable && value === null) || + (!allowArray && ArrayIsArray(value)) || + (typeof value !== 'object' && ( + !allowFunction || typeof value !== 'function' + ))) { + throw new ERR_INVALID_ARG_TYPE(name, 'Object', value); + } +} + +module.exports = { + validateArray, + validateObject, + validateString, + validateStringArray, + validateUnion, + validateBoolean, + validateBooleanArray, +}; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/package.json new file mode 100644 index 0000000000000000000000000000000000000000..0bcc05c0d4a3ec6b9616824c5ed25feb899d76ec --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/package.json @@ -0,0 +1,36 @@ +{ + "name": "@pkgjs/parseargs", + "version": "0.11.0", + "description": "Polyfill of future proposal for `util.parseArgs()`", + "engines": { + "node": ">=14" + }, + "main": "index.js", + "exports": { + ".": "./index.js", + "./package.json": "./package.json" + }, + "scripts": { + "coverage": "c8 --check-coverage tape 'test/*.js'", + "test": "c8 tape 'test/*.js'", + "posttest": "eslint .", + "fix": "npm run posttest -- --fix" + }, + "repository": { + "type": "git", + "url": "git@github.com:pkgjs/parseargs.git" + }, + "keywords": [], + "author": "", + "license": "MIT", + "bugs": { + "url": "https://github.com/pkgjs/parseargs/issues" + }, + "homepage": "https://github.com/pkgjs/parseargs#readme", + "devDependencies": { + "c8": "^7.10.0", + "eslint": "^8.2.0", + "eslint-plugin-node-core": "iansu/eslint-plugin-node-core", + "tape": "^5.2.2" + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/utils.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/utils.js new file mode 
100644 index 0000000000000000000000000000000000000000..d7f420a2339246233300ad2cdb2619f79261e4b8 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@pkgjs/parseargs/utils.js @@ -0,0 +1,198 @@ +'use strict'; + +const { + ArrayPrototypeFind, + ObjectEntries, + ObjectPrototypeHasOwnProperty: ObjectHasOwn, + StringPrototypeCharAt, + StringPrototypeIncludes, + StringPrototypeStartsWith, +} = require('./internal/primordials'); + +const { + validateObject, +} = require('./internal/validators'); + +// These are internal utilities to make the parsing logic easier to read, and +// add lots of detail for the curious. They are in a separate file to allow +// unit testing, although that is not essential (this could be rolled into +// main file and just tested implicitly via API). +// +// These routines are for internal use, not for export to client. + +/** + * Return the named property, but only if it is an own property. + */ +function objectGetOwn(obj, prop) { + if (ObjectHasOwn(obj, prop)) + return obj[prop]; +} + +/** + * Return the named options property, but only if it is an own property. + */ +function optionsGetOwn(options, longOption, prop) { + if (ObjectHasOwn(options, longOption)) + return objectGetOwn(options[longOption], prop); +} + +/** + * Determines if the argument may be used as an option value. + * @example + * isOptionValue('V') // returns true + * isOptionValue('-v') // returns true (greedy) + * isOptionValue('--foo') // returns true (greedy) + * isOptionValue(undefined) // returns false + */ +function isOptionValue(value) { + if (value == null) return false; + + // Open Group Utility Conventions are that an option-argument + // is the argument after the option, and may start with a dash. + return true; // greedy! +} + +/** + * Detect whether there is possible confusion and user may have omitted + * the option argument, like `--port --verbose` when `port` of type:string. + * In strict mode we throw errors if value is option-like. + */ +function isOptionLikeValue(value) { + if (value == null) return false; + + return value.length > 1 && StringPrototypeCharAt(value, 0) === '-'; +} + +/** + * Determines if `arg` is just a short option. + * @example '-f' + */ +function isLoneShortOption(arg) { + return arg.length === 2 && + StringPrototypeCharAt(arg, 0) === '-' && + StringPrototypeCharAt(arg, 1) !== '-'; +} + +/** + * Determines if `arg` is a lone long option. + * @example + * isLoneLongOption('a') // returns false + * isLoneLongOption('-a') // returns false + * isLoneLongOption('--foo') // returns true + * isLoneLongOption('--foo=bar') // returns false + */ +function isLoneLongOption(arg) { + return arg.length > 2 && + StringPrototypeStartsWith(arg, '--') && + !StringPrototypeIncludes(arg, '=', 3); +} + +/** + * Determines if `arg` is a long option and value in the same argument. + * @example + * isLongOptionAndValue('--foo') // returns false + * isLongOptionAndValue('--foo=bar') // returns true + */ +function isLongOptionAndValue(arg) { + return arg.length > 2 && + StringPrototypeStartsWith(arg, '--') && + StringPrototypeIncludes(arg, '=', 3); +} + +/** + * Determines if `arg` is a short option group. + * + * See Guideline 5 of the [Open Group Utility Conventions](https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap12.html). + * One or more options without option-arguments, followed by at most one + * option that takes an option-argument, should be accepted when grouped + * behind one '-' delimiter. 
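+ * (When such a group is accepted, argsToTokens expands it; e.g. '-bfFILE'
+ * with a string-typed 'f' becomes '-b' followed by '-fFILE'.)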
+ * @example + * isShortOptionGroup('-a', {}) // returns false + * isShortOptionGroup('-ab', {}) // returns true + * // -fb is an option and a value, not a short option group + * isShortOptionGroup('-fb', { + * options: { f: { type: 'string' } } + * }) // returns false + * isShortOptionGroup('-bf', { + * options: { f: { type: 'string' } } + * }) // returns true + * // -bfb is an edge case, return true and caller sorts it out + * isShortOptionGroup('-bfb', { + * options: { f: { type: 'string' } } + * }) // returns true + */ +function isShortOptionGroup(arg, options) { + if (arg.length <= 2) return false; + if (StringPrototypeCharAt(arg, 0) !== '-') return false; + if (StringPrototypeCharAt(arg, 1) === '-') return false; + + const firstShort = StringPrototypeCharAt(arg, 1); + const longOption = findLongOptionForShort(firstShort, options); + return optionsGetOwn(options, longOption, 'type') !== 'string'; +} + +/** + * Determine if arg is a short string option followed by its value. + * @example + * isShortOptionAndValue('-a', {}); // returns false + * isShortOptionAndValue('-ab', {}); // returns false + * isShortOptionAndValue('-fFILE', { + * options: { foo: { short: 'f', type: 'string' }} + * }) // returns true + */ +function isShortOptionAndValue(arg, options) { + validateObject(options, 'options'); + + if (arg.length <= 2) return false; + if (StringPrototypeCharAt(arg, 0) !== '-') return false; + if (StringPrototypeCharAt(arg, 1) === '-') return false; + + const shortOption = StringPrototypeCharAt(arg, 1); + const longOption = findLongOptionForShort(shortOption, options); + return optionsGetOwn(options, longOption, 'type') === 'string'; +} + +/** + * Find the long option associated with a short option. Looks for a configured + * `short` and returns the short option itself if a long option is not found. + * @example + * findLongOptionForShort('a', {}) // returns 'a' + * findLongOptionForShort('b', { + * options: { bar: { short: 'b' } } + * }) // returns 'bar' + */ +function findLongOptionForShort(shortOption, options) { + validateObject(options, 'options'); + const longOptionEntry = ArrayPrototypeFind( + ObjectEntries(options), + ({ 1: optionConfig }) => objectGetOwn(optionConfig, 'short') === shortOption + ); + return longOptionEntry?.[0] ?? shortOption; +} + +/** + * Check if the given option includes a default value + * and that option has not been set by the input args. + * + * @param {string} longOption - long option name e.g. 
'foo' + * @param {object} optionConfig - the option configuration properties + * @param {object} values - option values returned in `values` by parseArgs + */ +function useDefaultValueOption(longOption, optionConfig, values) { + return objectGetOwn(optionConfig, 'default') !== undefined && + values[longOption] === undefined; +} + +module.exports = { + findLongOptionForShort, + isLoneLongOption, + isLoneShortOption, + isLongOptionAndValue, + isOptionValue, + isOptionLikeValue, + isShortOptionAndValue, + isShortOptionGroup, + useDefaultValueOption, + objectGetOwn, + optionsGetOwn, +}; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/bundle/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/bundle/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..e9e7c1679a09dfcb0793682d99f5129e206a8abd --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/bundle/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2023 The Sigstore Authors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/bundle/dist/build.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/bundle/dist/build.js new file mode 100644 index 0000000000000000000000000000000000000000..ade736407554c6ffb9bf963998aed704e9b793fa --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/bundle/dist/build.js @@ -0,0 +1,100 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toMessageSignatureBundle = toMessageSignatureBundle; +exports.toDSSEBundle = toDSSEBundle; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const protobuf_specs_1 = require("@sigstore/protobuf-specs"); +const bundle_1 = require("./bundle"); +// Message signature bundle - $case: 'messageSignature' +function toMessageSignatureBundle(options) { + return { + mediaType: options.certificateChain + ? bundle_1.BUNDLE_V02_MEDIA_TYPE + : bundle_1.BUNDLE_V03_MEDIA_TYPE, + content: { + $case: 'messageSignature', + messageSignature: { + messageDigest: { + algorithm: protobuf_specs_1.HashAlgorithm.SHA2_256, + digest: options.digest, + }, + signature: options.signature, + }, + }, + verificationMaterial: toVerificationMaterial(options), + }; +} +// DSSE envelope bundle - $case: 'dsseEnvelope' +function toDSSEBundle(options) { + return { + mediaType: options.certificateChain + ? bundle_1.BUNDLE_V02_MEDIA_TYPE + : bundle_1.BUNDLE_V03_MEDIA_TYPE, + content: { + $case: 'dsseEnvelope', + dsseEnvelope: toEnvelope(options), + }, + verificationMaterial: toVerificationMaterial(options), + }; +} +function toEnvelope(options) { + return { + payloadType: options.artifactType, + payload: options.artifact, + signatures: [toSignature(options)], + }; +} +function toSignature(options) { + return { + keyid: options.keyHint || '', + sig: options.signature, + }; +} +// Verification material +function toVerificationMaterial(options) { + return { + content: toKeyContent(options), + tlogEntries: [], + timestampVerificationData: { rfc3161Timestamps: [] }, + }; +} +function toKeyContent(options) { + if (options.certificate) { + if (options.certificateChain) { + return { + $case: 'x509CertificateChain', + x509CertificateChain: { + certificates: [{ rawBytes: options.certificate }], + }, + }; + } + else { + return { + $case: 'certificate', + certificate: { rawBytes: options.certificate }, + }; + } + } + else { + return { + $case: 'publicKey', + publicKey: { + hint: options.keyHint || '', + }, + }; + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/bundle/dist/bundle.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/bundle/dist/bundle.js new file mode 100644 index 0000000000000000000000000000000000000000..eb67a0ddc17bbbb8a22d1e2a125a6d9392c47042 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/bundle/dist/bundle.js @@ -0,0 +1,24 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.BUNDLE_V03_MEDIA_TYPE = exports.BUNDLE_V03_LEGACY_MEDIA_TYPE = exports.BUNDLE_V02_MEDIA_TYPE = exports.BUNDLE_V01_MEDIA_TYPE = void 0; +exports.isBundleWithCertificateChain = isBundleWithCertificateChain; +exports.isBundleWithPublicKey = isBundleWithPublicKey; +exports.isBundleWithMessageSignature = isBundleWithMessageSignature; +exports.isBundleWithDsseEnvelope = isBundleWithDsseEnvelope; +exports.BUNDLE_V01_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.1'; +exports.BUNDLE_V02_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.2'; +exports.BUNDLE_V03_LEGACY_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.3'; +exports.BUNDLE_V03_MEDIA_TYPE = 
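+// (Note the naming switch: the v0.1/v0.2 and legacy v0.3 types carry the
+// version as a ';version=' media-type parameter, while the current v0.3
+// type, assigned next, embeds the version in the subtype itself.)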
'application/vnd.dev.sigstore.bundle.v0.3+json'; +// Type guards for bundle variants. +function isBundleWithCertificateChain(b) { + return b.verificationMaterial.content.$case === 'x509CertificateChain'; +} +function isBundleWithPublicKey(b) { + return b.verificationMaterial.content.$case === 'publicKey'; +} +function isBundleWithMessageSignature(b) { + return b.content.$case === 'messageSignature'; +} +function isBundleWithDsseEnvelope(b) { + return b.content.$case === 'dsseEnvelope'; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/bundle/dist/error.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/bundle/dist/error.js new file mode 100644 index 0000000000000000000000000000000000000000..f84295323b812ef3f33a18748e3a7cc5d3cfe443 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/bundle/dist/error.js @@ -0,0 +1,25 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ValidationError = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +class ValidationError extends Error { + constructor(message, fields) { + super(message); + this.fields = fields; + } +} +exports.ValidationError = ValidationError; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/bundle/dist/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/bundle/dist/index.js new file mode 100644 index 0000000000000000000000000000000000000000..1b012acad4d85b166ff1460c3d877e70fee08f34 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/bundle/dist/index.js @@ -0,0 +1,43 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isBundleV01 = exports.assertBundleV02 = exports.assertBundleV01 = exports.assertBundleLatest = exports.assertBundle = exports.envelopeToJSON = exports.envelopeFromJSON = exports.bundleToJSON = exports.bundleFromJSON = exports.ValidationError = exports.isBundleWithPublicKey = exports.isBundleWithMessageSignature = exports.isBundleWithDsseEnvelope = exports.isBundleWithCertificateChain = exports.BUNDLE_V03_MEDIA_TYPE = exports.BUNDLE_V03_LEGACY_MEDIA_TYPE = exports.BUNDLE_V02_MEDIA_TYPE = exports.BUNDLE_V01_MEDIA_TYPE = exports.toMessageSignatureBundle = exports.toDSSEBundle = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +var build_1 = require("./build"); +Object.defineProperty(exports, "toDSSEBundle", { enumerable: true, get: function () { return build_1.toDSSEBundle; } }); +Object.defineProperty(exports, "toMessageSignatureBundle", { enumerable: true, get: function () { return build_1.toMessageSignatureBundle; } }); +var bundle_1 = require("./bundle"); +Object.defineProperty(exports, "BUNDLE_V01_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V01_MEDIA_TYPE; } }); +Object.defineProperty(exports, "BUNDLE_V02_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V02_MEDIA_TYPE; } }); +Object.defineProperty(exports, "BUNDLE_V03_LEGACY_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V03_LEGACY_MEDIA_TYPE; } }); +Object.defineProperty(exports, "BUNDLE_V03_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V03_MEDIA_TYPE; } }); +Object.defineProperty(exports, "isBundleWithCertificateChain", { enumerable: true, get: function () { return bundle_1.isBundleWithCertificateChain; } }); +Object.defineProperty(exports, "isBundleWithDsseEnvelope", { enumerable: true, get: function () { return bundle_1.isBundleWithDsseEnvelope; } }); +Object.defineProperty(exports, "isBundleWithMessageSignature", { enumerable: true, get: function () { return bundle_1.isBundleWithMessageSignature; } }); +Object.defineProperty(exports, "isBundleWithPublicKey", { enumerable: true, get: function () { return bundle_1.isBundleWithPublicKey; } }); +var error_1 = require("./error"); +Object.defineProperty(exports, "ValidationError", { enumerable: true, get: function () { return error_1.ValidationError; } }); +var serialized_1 = require("./serialized"); +Object.defineProperty(exports, "bundleFromJSON", { enumerable: true, get: function () { return serialized_1.bundleFromJSON; } }); +Object.defineProperty(exports, "bundleToJSON", { enumerable: true, get: function () { return serialized_1.bundleToJSON; } }); +Object.defineProperty(exports, "envelopeFromJSON", { enumerable: true, get: function () { return serialized_1.envelopeFromJSON; } }); +Object.defineProperty(exports, "envelopeToJSON", { enumerable: true, get: function () { return serialized_1.envelopeToJSON; } }); +var validate_1 = require("./validate"); +Object.defineProperty(exports, "assertBundle", { enumerable: true, get: function () { return validate_1.assertBundle; } }); +Object.defineProperty(exports, "assertBundleLatest", { enumerable: true, get: function () { return validate_1.assertBundleLatest; } }); +Object.defineProperty(exports, "assertBundleV01", { enumerable: true, get: function () { return validate_1.assertBundleV01; } }); +Object.defineProperty(exports, "assertBundleV02", { enumerable: true, get: function () { return validate_1.assertBundleV02; } }); +Object.defineProperty(exports, "isBundleV01", { enumerable: true, get: function () { return validate_1.isBundleV01; } }); diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/bundle/dist/serialized.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/bundle/dist/serialized.js new file mode 100644 index 0000000000000000000000000000000000000000..be0d2a2d54d0921f3031f830e9f5d011ffbcb655 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/bundle/dist/serialized.js @@ -0,0 +1,49 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.envelopeToJSON = exports.envelopeFromJSON = 
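+// (bundleFromJSON below re-runs the version-specific assertions on every
+// deserialization, so e.g. bundleFromJSON(bundleToJSON(bundle)) both
+// round-trips and re-validates the bundle's shape.)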
exports.bundleToJSON = exports.bundleFromJSON = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const protobuf_specs_1 = require("@sigstore/protobuf-specs"); +const bundle_1 = require("./bundle"); +const validate_1 = require("./validate"); +const bundleFromJSON = (obj) => { + const bundle = protobuf_specs_1.Bundle.fromJSON(obj); + switch (bundle.mediaType) { + case bundle_1.BUNDLE_V01_MEDIA_TYPE: + (0, validate_1.assertBundleV01)(bundle); + break; + case bundle_1.BUNDLE_V02_MEDIA_TYPE: + (0, validate_1.assertBundleV02)(bundle); + break; + default: + (0, validate_1.assertBundleLatest)(bundle); + break; + } + return bundle; +}; +exports.bundleFromJSON = bundleFromJSON; +const bundleToJSON = (bundle) => { + return protobuf_specs_1.Bundle.toJSON(bundle); +}; +exports.bundleToJSON = bundleToJSON; +const envelopeFromJSON = (obj) => { + return protobuf_specs_1.Envelope.fromJSON(obj); +}; +exports.envelopeFromJSON = envelopeFromJSON; +const envelopeToJSON = (envelope) => { + return protobuf_specs_1.Envelope.toJSON(envelope); +}; +exports.envelopeToJSON = envelopeToJSON; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/bundle/dist/utility.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/bundle/dist/utility.js new file mode 100644 index 0000000000000000000000000000000000000000..c8ad2e549bdc6801e0d1c80b0308d4b9bd4985ce --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/bundle/dist/utility.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/bundle/dist/validate.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/bundle/dist/validate.js new file mode 100644 index 0000000000000000000000000000000000000000..21b8b5ee293ba199af03d1aef58c98d61467424f --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/bundle/dist/validate.js @@ -0,0 +1,199 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.assertBundle = assertBundle; +exports.assertBundleV01 = assertBundleV01; +exports.isBundleV01 = isBundleV01; +exports.assertBundleV02 = assertBundleV02; +exports.assertBundleLatest = assertBundleLatest; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +const error_1 = require("./error"); +// Performs basic validation of a Sigstore bundle to ensure that all required +// fields are populated. This is not a complete validation of the bundle, but +// rather a check that the bundle is in a valid state to be processed by the +// rest of the code. +function assertBundle(b) { + const invalidValues = validateBundleBase(b); + if (invalidValues.length > 0) { + throw new error_1.ValidationError('invalid bundle', invalidValues); + } +} +// Asserts that the given bundle conforms to the v0.1 bundle format. +function assertBundleV01(b) { + const invalidValues = []; + invalidValues.push(...validateBundleBase(b)); + invalidValues.push(...validateInclusionPromise(b)); + if (invalidValues.length > 0) { + throw new error_1.ValidationError('invalid v0.1 bundle', invalidValues); + } +} +// Type guard to determine if Bundle is a v0.1 bundle. +function isBundleV01(b) { + try { + assertBundleV01(b); + return true; + } + catch (e) { + return false; + } +} +// Asserts that the given bundle conforms to the v0.2 bundle format. +function assertBundleV02(b) { + const invalidValues = []; + invalidValues.push(...validateBundleBase(b)); + invalidValues.push(...validateInclusionProof(b)); + if (invalidValues.length > 0) { + throw new error_1.ValidationError('invalid v0.2 bundle', invalidValues); + } +} +// Asserts that the given bundle conforms to the newest (0.3) bundle format. +function assertBundleLatest(b) { + const invalidValues = []; + invalidValues.push(...validateBundleBase(b)); + invalidValues.push(...validateInclusionProof(b)); + invalidValues.push(...validateNoCertificateChain(b)); + if (invalidValues.length > 0) { + throw new error_1.ValidationError('invalid bundle', invalidValues); + } +} +function validateBundleBase(b) { + const invalidValues = []; + // Media type validation + if (b.mediaType === undefined || + (!b.mediaType.match(/^application\/vnd\.dev\.sigstore\.bundle\+json;version=\d\.\d/) && + !b.mediaType.match(/^application\/vnd\.dev\.sigstore\.bundle\.v\d\.\d\+json/))) { + invalidValues.push('mediaType'); + } + // Content-related validation + if (b.content === undefined) { + invalidValues.push('content'); + } + else { + switch (b.content.$case) { + case 'messageSignature': + if (b.content.messageSignature.messageDigest === undefined) { + invalidValues.push('content.messageSignature.messageDigest'); + } + else { + if (b.content.messageSignature.messageDigest.digest.length === 0) { + invalidValues.push('content.messageSignature.messageDigest.digest'); + } + } + if (b.content.messageSignature.signature.length === 0) { + invalidValues.push('content.messageSignature.signature'); + } + break; + case 'dsseEnvelope': + if (b.content.dsseEnvelope.payload.length === 0) { + invalidValues.push('content.dsseEnvelope.payload'); + } + if (b.content.dsseEnvelope.signatures.length !== 1) { + invalidValues.push('content.dsseEnvelope.signatures'); + } + else { + if (b.content.dsseEnvelope.signatures[0].sig.length === 0) { + invalidValues.push('content.dsseEnvelope.signatures[0].sig'); + } + } + break; + } + } + // Verification material-related validation + if (b.verificationMaterial === undefined) { + invalidValues.push('verificationMaterial'); + } + else { + if (b.verificationMaterial.content === undefined) { + invalidValues.push('verificationMaterial.content'); + } + else { + switch (b.verificationMaterial.content.$case) { + case 'x509CertificateChain': + if (b.verificationMaterial.content.x509CertificateChain.certificates + .length === 0) { + 
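+ // Editor's note (not from the upstream source): an empty certificate
+ // list is reported once for the whole array; a certificate with empty
+ // rawBytes is reported below with its index in the chain.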
invalidValues.push('verificationMaterial.content.x509CertificateChain.certificates'); + } + b.verificationMaterial.content.x509CertificateChain.certificates.forEach((cert, i) => { + if (cert.rawBytes.length === 0) { + invalidValues.push(`verificationMaterial.content.x509CertificateChain.certificates[${i}].rawBytes`); + } + }); + break; + case 'certificate': + if (b.verificationMaterial.content.certificate.rawBytes.length === 0) { + invalidValues.push('verificationMaterial.content.certificate.rawBytes'); + } + break; + } + } + if (b.verificationMaterial.tlogEntries === undefined) { + invalidValues.push('verificationMaterial.tlogEntries'); + } + else { + if (b.verificationMaterial.tlogEntries.length > 0) { + b.verificationMaterial.tlogEntries.forEach((entry, i) => { + if (entry.logId === undefined) { + invalidValues.push(`verificationMaterial.tlogEntries[${i}].logId`); + } + if (entry.kindVersion === undefined) { + invalidValues.push(`verificationMaterial.tlogEntries[${i}].kindVersion`); + } + }); + } + } + } + return invalidValues; +} +// Necessary for V01 bundles +function validateInclusionPromise(b) { + const invalidValues = []; + if (b.verificationMaterial && + b.verificationMaterial.tlogEntries?.length > 0) { + b.verificationMaterial.tlogEntries.forEach((entry, i) => { + if (entry.inclusionPromise === undefined) { + invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionPromise`); + } + }); + } + return invalidValues; +} +// Necessary for V02 and later bundles +function validateInclusionProof(b) { + const invalidValues = []; + if (b.verificationMaterial && + b.verificationMaterial.tlogEntries?.length > 0) { + b.verificationMaterial.tlogEntries.forEach((entry, i) => { + if (entry.inclusionProof === undefined) { + invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionProof`); + } + else { + if (entry.inclusionProof.checkpoint === undefined) { + invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionProof.checkpoint`); + } + } + }); + } + return invalidValues; +} +// Necessary for V03 and later bundles +function validateNoCertificateChain(b) { + const invalidValues = []; + /* istanbul ignore next */ + if (b.verificationMaterial?.content?.$case === 'x509CertificateChain') { + invalidValues.push('verificationMaterial.content.$case'); + } + return invalidValues; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/bundle/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/bundle/package.json new file mode 100644 index 0000000000000000000000000000000000000000..03291b2159b79077ff4f23a7d4c263d5176f1495 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/bundle/package.json @@ -0,0 +1,35 @@ +{ + "name": "@sigstore/bundle", + "version": "4.0.0", + "description": "Sigstore bundle type", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "clean": "shx rm -rf dist *.tsbuildinfo", + "build": "tsc --build", + "test": "jest" + }, + "files": [ + "dist", + "store" + ], + "author": "bdehamer@github.com", + "license": "Apache-2.0", + "repository": { + "type": "git", + "url": "git+https://github.com/sigstore/sigstore-js.git" + }, + "bugs": { + "url": "https://github.com/sigstore/sigstore-js/issues" + }, + "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/bundle#readme", + "publishConfig": { + "provenance": true + }, + "dependencies": { + "@sigstore/protobuf-specs": "^0.5.0" + }, + 
"engines": { + "node": "^20.17.0 || >=22.9.0" + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..e9e7c1679a09dfcb0793682d99f5129e206a8abd --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2023 The Sigstore Authors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/asn1/error.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/asn1/error.js new file mode 100644 index 0000000000000000000000000000000000000000..17d93b0f7e706425681ea4245dd9c46d318b9dcf --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/asn1/error.js @@ -0,0 +1,24 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ASN1TypeError = exports.ASN1ParseError = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +class ASN1ParseError extends Error { +} +exports.ASN1ParseError = ASN1ParseError; +class ASN1TypeError extends Error { +} +exports.ASN1TypeError = ASN1TypeError; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/asn1/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/asn1/index.js new file mode 100644 index 0000000000000000000000000000000000000000..348b2ea4022e5ef9f26758b3435da1d612a489ee --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/asn1/index.js @@ -0,0 +1,20 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ASN1Obj = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +var obj_1 = require("./obj"); +Object.defineProperty(exports, "ASN1Obj", { enumerable: true, get: function () { return obj_1.ASN1Obj; } }); diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/asn1/length.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/asn1/length.js new file mode 100644 index 0000000000000000000000000000000000000000..cb7ebf09dbefa49e9aca6aaa868ecc63fcdb5d6b --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/asn1/length.js @@ -0,0 +1,62 @@ +"use strict"; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.decodeLength = decodeLength; +exports.encodeLength = encodeLength; +const error_1 = require("./error"); +// Decodes the length of a DER-encoded ASN.1 element from the supplied stream. +// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-encoded-length-and-value-bytes +function decodeLength(stream) { + const buf = stream.getUint8(); + // If the most significant bit is UNSET the length is just the value of the + // byte. + if ((buf & 0x80) === 0x00) { + return buf; + } + // Otherwise, the lower 7 bits of the first byte indicate the number of bytes + // that follow to encode the length. + const byteCount = buf & 0x7f; + // Ensure the encoded length can safely fit in a JS number. + if (byteCount > 6) { + throw new error_1.ASN1ParseError('length exceeds 6 byte limit'); + } + // Iterate over the bytes that encode the length. + let len = 0; + for (let i = 0; i < byteCount; i++) { + len = len * 256 + stream.getUint8(); + } + // This is a valid ASN.1 length encoding, but we don't support it. + if (len === 0) { + throw new error_1.ASN1ParseError('indefinite length encoding not supported'); + } + return len; +} +// Translates the supplied value to a DER-encoded length. +function encodeLength(len) { + if (len < 128) { + return Buffer.from([len]); + } + // Bitwise operations on large numbers are not supported in JS, so we need to + // use BigInts.
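+ // Editor's note (illustrative, not from the upstream source): for
+ // example, encodeLength(300) yields Buffer [0x82, 0x01, 0x2c] -- the
+ // leading 0x82 byte (0x80 | 2) says two length bytes follow, and
+ // 0x012c === 300; lengths under 128 use the short form returned above.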
+ let val = BigInt(len); + const bytes = []; + while (val > 0n) { + bytes.unshift(Number(val & 255n)); + val = val >> 8n; + } + return Buffer.from([0x80 | bytes.length, ...bytes]); +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/asn1/obj.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/asn1/obj.js new file mode 100644 index 0000000000000000000000000000000000000000..5f9ac9cdbc49360bfa8576e65ad46c8709590313 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/asn1/obj.js @@ -0,0 +1,152 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ASN1Obj = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const stream_1 = require("../stream"); +const error_1 = require("./error"); +const length_1 = require("./length"); +const parse_1 = require("./parse"); +const tag_1 = require("./tag"); +class ASN1Obj { + constructor(tag, value, subs) { + this.tag = tag; + this.value = value; + this.subs = subs; + } + // Constructs an ASN.1 object from a Buffer of DER-encoded bytes. + static parseBuffer(buf) { + return parseStream(new stream_1.ByteStream(buf)); + } + toDER() { + const valueStream = new stream_1.ByteStream(); + if (this.subs.length > 0) { + for (const sub of this.subs) { + valueStream.appendView(sub.toDER()); + } + } + else { + valueStream.appendView(this.value); + } + const value = valueStream.buffer; + // Concat tag/length/value + const obj = new stream_1.ByteStream(); + obj.appendChar(this.tag.toDER()); + obj.appendView((0, length_1.encodeLength)(value.length)); + obj.appendView(value); + return obj.buffer; + } + ///////////////////////////////////////////////////////////////////////////// + // Convenience methods for parsing ASN.1 primitives into JS types + // Returns the ASN.1 object's value as a boolean. Throws an error if the + // object is not a boolean. + toBoolean() { + if (!this.tag.isBoolean()) { + throw new error_1.ASN1TypeError('not a boolean'); + } + return (0, parse_1.parseBoolean)(this.value); + } + // Returns the ASN.1 object's value as a BigInt. Throws an error if the + // object is not an integer. + toInteger() { + if (!this.tag.isInteger()) { + throw new error_1.ASN1TypeError('not an integer'); + } + return (0, parse_1.parseInteger)(this.value); + } + // Returns the ASN.1 object's value as an OID string. Throws an error if the + // object is not an OID. + toOID() { + if (!this.tag.isOID()) { + throw new error_1.ASN1TypeError('not an OID'); + } + return (0, parse_1.parseOID)(this.value); + } + // Returns the ASN.1 object's value as a Date. Throws an error if the object + // is not either a UTCTime or a GeneralizedTime. 
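+ // Editor's note (illustrative, not from the upstream source): a UTCTime
+ // value like "260822153000Z" parses to 2026-08-22T15:30:00Z (two-digit
+ // years below 50 map to 20xx), while GeneralizedTime uses a four-digit
+ // year, e.g. "20260822153000Z".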
+ toDate() { + switch (true) { + case this.tag.isUTCTime(): + return (0, parse_1.parseTime)(this.value, true); + case this.tag.isGeneralizedTime(): + return (0, parse_1.parseTime)(this.value, false); + default: + throw new error_1.ASN1TypeError('not a date'); + } + } + // Returns the ASN.1 object's value as a number[] where each number is the + // value of a bit in the bit string. Throws an error if the object is not a + // bit string. + toBitString() { + if (!this.tag.isBitString()) { + throw new error_1.ASN1TypeError('not a bit string'); + } + return (0, parse_1.parseBitString)(this.value); + } +} +exports.ASN1Obj = ASN1Obj; +///////////////////////////////////////////////////////////////////////////// +// Internal stream parsing functions +function parseStream(stream) { + // Parse tag, length, and value from stream + const tag = new tag_1.ASN1Tag(stream.getUint8()); + const len = (0, length_1.decodeLength)(stream); + const value = stream.slice(stream.position, len); + const start = stream.position; + let subs = []; + // If the object is constructed, parse its children. Sometimes, children + // are embedded in OCTETSTRING objects, so we need to check those + // for children as well. + if (tag.constructed) { + subs = collectSubs(stream, len); + } + else if (tag.isOctetString()) { + // Attempt to parse children of OCTETSTRING objects. If anything fails, + // assume the object is not constructed and treat as primitive. + try { + subs = collectSubs(stream, len); + } + catch (e) { + // Fail silently and treat as primitive + } + } + // If there are no children, move stream cursor to the end of the object + if (subs.length === 0) { + stream.seek(start + len); + } + return new ASN1Obj(tag, value, subs); +} +function collectSubs(stream, len) { + // Calculate end of object content + const end = stream.position + len; + // Make sure there are enough bytes left in the stream. This should never + // happen, because it'll get caught when the stream is sliced in parseStream. + // Leaving as an extra check just in case. + /* istanbul ignore if */ + if (end > stream.length) { + throw new error_1.ASN1ParseError('invalid length'); + } + // Parse all children + const subs = []; + while (stream.position < end) { + subs.push(parseStream(stream)); + } + // When we're done parsing children, we should be at the end of the object + if (stream.position !== end) { + throw new error_1.ASN1ParseError('invalid length'); + } + return subs; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/asn1/parse.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/asn1/parse.js new file mode 100644 index 0000000000000000000000000000000000000000..7fbb42632c60e833f4e95ce6395c33e3be7e66c6 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/asn1/parse.js @@ -0,0 +1,124 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseInteger = parseInteger; +exports.parseStringASCII = parseStringASCII; +exports.parseTime = parseTime; +exports.parseOID = parseOID; +exports.parseBoolean = parseBoolean; +exports.parseBitString = parseBitString; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License.
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const RE_TIME_SHORT_YEAR = /^(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})(\.\d{3})?Z$/; +const RE_TIME_LONG_YEAR = /^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})(\.\d{3})?Z$/; +// Parse a BigInt from the DER-encoded buffer +// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-integer +function parseInteger(buf) { + let pos = 0; + const end = buf.length; + let val = buf[pos]; + const neg = val > 0x7f; + // Consume any padding bytes + const pad = neg ? 0xff : 0x00; + while (val == pad && ++pos < end) { + val = buf[pos]; + } + // Calculate remaining bytes to read + const len = end - pos; + if (len === 0) + return BigInt(neg ? -1 : 0); + // Handle two's complement for negative numbers + val = neg ? val - 256 : val; + // Parse remaining bytes + let n = BigInt(val); + for (let i = pos + 1; i < end; ++i) { + n = n * BigInt(256) + BigInt(buf[i]); + } + return n; +} +// Parse an ASCII string from the DER-encoded buffer +// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-basic-types#boolean +function parseStringASCII(buf) { + return buf.toString('ascii'); +} +// Parse a Date from the DER-encoded buffer +// https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.5.1 +function parseTime(buf, shortYear) { + const timeStr = parseStringASCII(buf); + // Parse the time string into matches - captured groups start at index 1 + const m = shortYear + ? RE_TIME_SHORT_YEAR.exec(timeStr) + : RE_TIME_LONG_YEAR.exec(timeStr); + if (!m) { + throw new Error('invalid time'); + } + // Translate dates with a 2-digit year to 4 digits per the spec + if (shortYear) { + let year = Number(m[1]); + year += year >= 50 ? 
1900 : 2000; + m[1] = year.toString(); + } + // Translate to ISO8601 format and parse + return new Date(`${m[1]}-${m[2]}-${m[3]}T${m[4]}:${m[5]}:${m[6]}Z`); +} +// Parse an OID from the DER-encoded buffer +// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-object-identifier +function parseOID(buf) { + let pos = 0; + const end = buf.length; + // Consume first byte which encodes the first two OID components + let n = buf[pos++]; + const first = Math.floor(n / 40); + const second = n % 40; + let oid = `${first}.${second}`; + // Consume remaining bytes + let val = 0; + for (; pos < end; ++pos) { + n = buf[pos]; + val = (val << 7) + (n & 0x7f); + // If the left-most bit is NOT set, then this is the last byte in the + // sequence and we can add the value to the OID and reset the accumulator + if ((n & 0x80) === 0) { + oid += `.${val}`; + val = 0; + } + } + return oid; +} +// Parse a boolean from the DER-encoded buffer +// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-basic-types#boolean +function parseBoolean(buf) { + return buf[0] !== 0; +} +// Parse a bit string from the DER-encoded buffer +// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-bit-string +function parseBitString(buf) { + // First byte tells us how many unused bits are in the last byte + const unused = buf[0]; + const start = 1; + const end = buf.length; + const bits = []; + for (let i = start; i < end; ++i) { + const byte = buf[i]; + // The skip value is only used for the last byte + const skip = i === end - 1 ? unused : 0; + // Iterate over each bit in the byte (most significant first) + for (let j = 7; j >= skip; --j) { + // Read the bit and add it to the bit string + bits.push((byte >> j) & 0x01); + } + } + return bits; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/asn1/tag.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/asn1/tag.js new file mode 100644 index 0000000000000000000000000000000000000000..84dd938d049aa580717f76114a64bfc798c4c86d --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/asn1/tag.js @@ -0,0 +1,86 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ASN1Tag = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License.
+*/ +const error_1 = require("./error"); +const UNIVERSAL_TAG = { + BOOLEAN: 0x01, + INTEGER: 0x02, + BIT_STRING: 0x03, + OCTET_STRING: 0x04, + OBJECT_IDENTIFIER: 0x06, + SEQUENCE: 0x10, + SET: 0x11, + PRINTABLE_STRING: 0x13, + UTC_TIME: 0x17, + GENERALIZED_TIME: 0x18, +}; +const TAG_CLASS = { + UNIVERSAL: 0x00, + APPLICATION: 0x01, + CONTEXT_SPECIFIC: 0x02, + PRIVATE: 0x03, +}; +// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-encoded-tag-bytes +class ASN1Tag { + constructor(enc) { + // Bits 0 through 4 are the tag number + this.number = enc & 0x1f; + // Bit 5 is the constructed bit + this.constructed = (enc & 0x20) === 0x20; + // Bit 6 & 7 are the class + this.class = enc >> 6; + if (this.number === 0x1f) { + throw new error_1.ASN1ParseError('long form tags not supported'); + } + if (this.class === TAG_CLASS.UNIVERSAL && this.number === 0x00) { + throw new error_1.ASN1ParseError('unsupported tag 0x00'); + } + } + isUniversal() { + return this.class === TAG_CLASS.UNIVERSAL; + } + isContextSpecific(num) { + const res = this.class === TAG_CLASS.CONTEXT_SPECIFIC; + return num !== undefined ? res && this.number === num : res; + } + isBoolean() { + return this.isUniversal() && this.number === UNIVERSAL_TAG.BOOLEAN; + } + isInteger() { + return this.isUniversal() && this.number === UNIVERSAL_TAG.INTEGER; + } + isBitString() { + return this.isUniversal() && this.number === UNIVERSAL_TAG.BIT_STRING; + } + isOctetString() { + return this.isUniversal() && this.number === UNIVERSAL_TAG.OCTET_STRING; + } + isOID() { + return (this.isUniversal() && this.number === UNIVERSAL_TAG.OBJECT_IDENTIFIER); + } + isUTCTime() { + return this.isUniversal() && this.number === UNIVERSAL_TAG.UTC_TIME; + } + isGeneralizedTime() { + return this.isUniversal() && this.number === UNIVERSAL_TAG.GENERALIZED_TIME; + } + toDER() { + return this.number | (this.constructed ? 0x20 : 0x00) | (this.class << 6); + } +} +exports.ASN1Tag = ASN1Tag; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/crypto.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/crypto.js new file mode 100644 index 0000000000000000000000000000000000000000..296b5ba43e86a0dc5c62b36ee34203f2fe26a013 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/crypto.js @@ -0,0 +1,60 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createPublicKey = createPublicKey; +exports.digest = digest; +exports.verify = verify; +exports.bufferEqual = bufferEqual; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +const crypto_1 = __importDefault(require("crypto")); +function createPublicKey(key, type = 'spki') { + if (typeof key === 'string') { + return crypto_1.default.createPublicKey(key); + } + else { + return crypto_1.default.createPublicKey({ key, format: 'der', type: type }); + } +} +function digest(algorithm, ...data) { + const hash = crypto_1.default.createHash(algorithm); + for (const d of data) { + hash.update(d); + } + return hash.digest(); +} +function verify(data, key, signature, algorithm) { + // The try/catch is to work around an issue in Node 14.x where verify throws + // an error in some scenarios if the signature is invalid. + try { + return crypto_1.default.verify(algorithm, data, key, signature); + } + catch (e) { + /* istanbul ignore next */ + return false; + } +} +function bufferEqual(a, b) { + try { + return crypto_1.default.timingSafeEqual(a, b); + } + catch { + /* istanbul ignore next */ + return false; + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/dsse.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/dsse.js new file mode 100644 index 0000000000000000000000000000000000000000..ca7b63630e2ba90d5e86dd763e8624627ce54f32 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/dsse.js @@ -0,0 +1,30 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.preAuthEncoding = preAuthEncoding; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const PAE_PREFIX = 'DSSEv1'; +// DSSE Pre-Authentication Encoding +function preAuthEncoding(payloadType, payload) { + const prefix = [ + PAE_PREFIX, + payloadType.length, + payloadType, + payload.length, + '', + ].join(' '); + return Buffer.concat([Buffer.from(prefix, 'ascii'), payload]); +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/encoding.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/encoding.js new file mode 100644 index 0000000000000000000000000000000000000000..7113af66db4c2db22612289e8194572ced1d2d10 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/encoding.js @@ -0,0 +1,27 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.base64Encode = base64Encode; +exports.base64Decode = base64Decode; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +*/ +const BASE64_ENCODING = 'base64'; +const UTF8_ENCODING = 'utf-8'; +function base64Encode(str) { + return Buffer.from(str, UTF8_ENCODING).toString(BASE64_ENCODING); +} +function base64Decode(str) { + return Buffer.from(str, BASE64_ENCODING).toString(UTF8_ENCODING); +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/index.js new file mode 100644 index 0000000000000000000000000000000000000000..49859d84db7561836b05853859e49724dc17b749 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/index.js @@ -0,0 +1,66 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +Object.defineProperty(exports, "__esModule", { value: true }); +exports.X509SCTExtension = exports.X509Certificate = exports.EXTENSION_OID_SCT = exports.ByteStream = exports.RFC3161Timestamp = exports.pem = exports.json = exports.encoding = exports.dsse = exports.crypto = exports.ASN1Obj = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +var asn1_1 = require("./asn1"); +Object.defineProperty(exports, "ASN1Obj", { enumerable: true, get: function () { return asn1_1.ASN1Obj; } }); +exports.crypto = __importStar(require("./crypto")); +exports.dsse = __importStar(require("./dsse")); +exports.encoding = __importStar(require("./encoding")); +exports.json = __importStar(require("./json")); +exports.pem = __importStar(require("./pem")); +var rfc3161_1 = require("./rfc3161"); +Object.defineProperty(exports, "RFC3161Timestamp", { enumerable: true, get: function () { return rfc3161_1.RFC3161Timestamp; } }); +var stream_1 = require("./stream"); +Object.defineProperty(exports, "ByteStream", { enumerable: true, get: function () { return stream_1.ByteStream; } }); +var x509_1 = require("./x509"); +Object.defineProperty(exports, "EXTENSION_OID_SCT", { enumerable: true, get: function () { return x509_1.EXTENSION_OID_SCT; } }); +Object.defineProperty(exports, "X509Certificate", { enumerable: true, get: function () { return x509_1.X509Certificate; } }); +Object.defineProperty(exports, "X509SCTExtension", { enumerable: true, get: function () { return x509_1.X509SCTExtension; } }); diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/json.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/json.js new file mode 100644 index 0000000000000000000000000000000000000000..7808d033b98cc990c64305dc827f93527b6420ca --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/json.js @@ -0,0 +1,60 @@ +"use strict"; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.canonicalize = canonicalize; +// JSON canonicalization per https://github.com/cyberphone/json-canonicalization +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function canonicalize(object) { + let buffer = ''; + if (object === null || typeof object !== 'object' || object.toJSON != null) { + // Primitives or toJSONable objects + buffer += JSON.stringify(object); + } + else if (Array.isArray(object)) { + // Array - maintain element order + buffer += '['; + let first = true; + object.forEach((element) => { + if (!first) { + buffer += ','; + } + first = false; + // recursive call + buffer += canonicalize(element); + }); + buffer += ']'; + } + else { + // Object - Sort properties before serializing + buffer += '{'; + let first = true; + Object.keys(object) + .sort() + .forEach((property) => { + if (!first) { + buffer += ','; + } + first = false; + buffer += JSON.stringify(property); + buffer += ':'; + // recursive call + buffer += canonicalize(object[property]); + }); + buffer += '}'; + } + return buffer; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/oid.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/oid.js new file mode 100644 index 0000000000000000000000000000000000000000..ac7a643067ad023c7981614fc01b143aef5399e0 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/oid.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SHA2_HASH_ALGOS = exports.ECDSA_SIGNATURE_ALGOS = void 0; +exports.ECDSA_SIGNATURE_ALGOS = { + '1.2.840.10045.4.3.1': 'sha224', + '1.2.840.10045.4.3.2': 'sha256', + '1.2.840.10045.4.3.3': 'sha384', + '1.2.840.10045.4.3.4': 'sha512', +}; +exports.SHA2_HASH_ALGOS = { + '2.16.840.1.101.3.4.2.1': 'sha256', + '2.16.840.1.101.3.4.2.2': 'sha384', + '2.16.840.1.101.3.4.2.3': 'sha512', +}; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/pem.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/pem.js new file mode 100644 index 0000000000000000000000000000000000000000..f1241d28d586ec3e71d87a86d4d86be0c908df0d --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/pem.js @@ -0,0 +1,43 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toDER = toDER; +exports.fromDER = fromDER; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +const PEM_HEADER = /-----BEGIN (.*)-----/; +const PEM_FOOTER = /-----END (.*)-----/; +function toDER(certificate) { + let der = ''; + certificate.split('\n').forEach((line) => { + if (line.match(PEM_HEADER) || line.match(PEM_FOOTER)) { + return; + } + der += line; + }); + return Buffer.from(der, 'base64'); +} +// Translates a DER-encoded buffer into a PEM-encoded string. Standard PEM +// encoding dictates that each certificate should have a trailing newline after +// the footer. +function fromDER(certificate, type = 'CERTIFICATE') { + // Base64-encode the certificate. + const der = certificate.toString('base64'); + // Split the certificate into lines of 64 characters. + const lines = der.match(/.{1,64}/g) || ''; + return [`-----BEGIN ${type}-----`, ...lines, `-----END ${type}-----`] + .join('\n') + .concat('\n'); +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/rfc3161/error.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/rfc3161/error.js new file mode 100644 index 0000000000000000000000000000000000000000..b9b549b0bb32355c02ac4c01ac64a5b64eec4568 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/rfc3161/error.js @@ -0,0 +1,21 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.RFC3161TimestampVerificationError = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +class RFC3161TimestampVerificationError extends Error { +} +exports.RFC3161TimestampVerificationError = RFC3161TimestampVerificationError; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/rfc3161/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/rfc3161/index.js new file mode 100644 index 0000000000000000000000000000000000000000..b77ecf1c7d50c29b4f2d22cc3ab7c3ad33145b08 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/rfc3161/index.js @@ -0,0 +1,20 @@ +"use strict"; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.RFC3161Timestamp = void 0; +var timestamp_1 = require("./timestamp"); +Object.defineProperty(exports, "RFC3161Timestamp", { enumerable: true, get: function () { return timestamp_1.RFC3161Timestamp; } }); diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/rfc3161/timestamp.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/rfc3161/timestamp.js new file mode 100644 index 0000000000000000000000000000000000000000..982fb5e6126e816c250a5dab5cb13f0ff1a2a8bc --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/rfc3161/timestamp.js @@ -0,0 +1,211 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +Object.defineProperty(exports, "__esModule", { value: true }); +exports.RFC3161Timestamp = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +const asn1_1 = require("../asn1"); +const crypto = __importStar(require("../crypto")); +const oid_1 = require("../oid"); +const error_1 = require("./error"); +const tstinfo_1 = require("./tstinfo"); +const OID_PKCS9_CONTENT_TYPE_SIGNED_DATA = '1.2.840.113549.1.7.2'; +const OID_PKCS9_CONTENT_TYPE_TSTINFO = '1.2.840.113549.1.9.16.1.4'; +const OID_PKCS9_MESSAGE_DIGEST_KEY = '1.2.840.113549.1.9.4'; +class RFC3161Timestamp { + constructor(asn1) { + this.root = asn1; + } + static parse(der) { + const asn1 = asn1_1.ASN1Obj.parseBuffer(der); + return new RFC3161Timestamp(asn1); + } + get status() { + return this.pkiStatusInfoObj.subs[0].toInteger(); + } + get contentType() { + return this.contentTypeObj.toOID(); + } + get eContentType() { + return this.eContentTypeObj.toOID(); + } + get signingTime() { + return this.tstInfo.genTime; + } + get signerIssuer() { + return this.signerSidObj.subs[0].value; + } + get signerSerialNumber() { + return this.signerSidObj.subs[1].value; + } + get signerDigestAlgorithm() { + const oid = this.signerDigestAlgorithmObj.subs[0].toOID(); + return oid_1.SHA2_HASH_ALGOS[oid]; + } + get signatureAlgorithm() { + const oid = this.signatureAlgorithmObj.subs[0].toOID(); + return oid_1.ECDSA_SIGNATURE_ALGOS[oid]; + } + get signatureValue() { + return this.signatureValueObj.value; + } + get tstInfo() { + // Need to unpack tstInfo from an OCTET STRING + return new tstinfo_1.TSTInfo(this.eContentObj.subs[0].subs[0]); + } + verify(data, publicKey) { + if (!this.timeStampTokenObj) { + throw new error_1.RFC3161TimestampVerificationError('timeStampToken is missing'); + } + // Check for expected ContentInfo content type + if (this.contentType !== OID_PKCS9_CONTENT_TYPE_SIGNED_DATA) { + throw new error_1.RFC3161TimestampVerificationError(`incorrect content type: ${this.contentType}`); + } + // Check for expected encapsulated content type + if (this.eContentType !== OID_PKCS9_CONTENT_TYPE_TSTINFO) { + throw new error_1.RFC3161TimestampVerificationError(`incorrect encapsulated content type: ${this.eContentType}`); + } + // Check that the tstInfo references the correct artifact + this.tstInfo.verify(data); + // Check that the signed message digest matches the tstInfo + this.verifyMessageDigest(); + // Check that the signature is valid for the signed attributes + this.verifySignature(publicKey); + } + verifyMessageDigest() { + // Check that the tstInfo matches the signed data + const tstInfoDigest = crypto.digest(this.signerDigestAlgorithm, this.tstInfo.raw); + const expectedDigest = this.messageDigestAttributeObj.subs[1].subs[0].value; + if (!crypto.bufferEqual(tstInfoDigest, expectedDigest)) { + throw new error_1.RFC3161TimestampVerificationError('signed data does not match tstInfo'); + } + } + verifySignature(key) { + // Encode the signed attributes for verification + const signedAttrs = this.signedAttrsObj.toDER(); + signedAttrs[0] = 0x31; // Change context-specific tag to SET + // Check that the signature is valid for the signed attributes + const verified = crypto.verify(signedAttrs, key, this.signatureValue, this.signatureAlgorithm); + if (!verified) { + throw new error_1.RFC3161TimestampVerificationError('signature verification failed'); + } + } + // https://www.rfc-editor.org/rfc/rfc3161#section-2.4.2 + get pkiStatusInfoObj() { + // pkiStatusInfo is the first element of the timestamp response sequence + return this.root.subs[0]; + } + // https://www.rfc-editor.org/rfc/rfc3161#section-2.4.2 + get timeStampTokenObj() { + // timeStampToken is the first element of the 
timestamp response sequence + return this.root.subs[1]; + } + // https://datatracker.ietf.org/doc/html/rfc5652#section-3 + get contentTypeObj() { + return this.timeStampTokenObj.subs[0]; + } + // https://www.rfc-editor.org/rfc/rfc5652#section-3 + get signedDataObj() { + const obj = this.timeStampTokenObj.subs.find((sub) => sub.tag.isContextSpecific(0x00)); + return obj.subs[0]; + } + // https://datatracker.ietf.org/doc/html/rfc5652#section-5.1 + get encapContentInfoObj() { + return this.signedDataObj.subs[2]; + } + // https://datatracker.ietf.org/doc/html/rfc5652#section-5.1 + get signerInfosObj() { + // SignerInfos is the last element of the signed data sequence + const sd = this.signedDataObj; + return sd.subs[sd.subs.length - 1]; + } + // https://www.rfc-editor.org/rfc/rfc5652#section-5.1 + get signerInfoObj() { + // Only supporting one signer + return this.signerInfosObj.subs[0]; + } + // https://datatracker.ietf.org/doc/html/rfc5652#section-5.2 + get eContentTypeObj() { + return this.encapContentInfoObj.subs[0]; + } + // https://datatracker.ietf.org/doc/html/rfc5652#section-5.2 + get eContentObj() { + return this.encapContentInfoObj.subs[1]; + } + // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3 + get signedAttrsObj() { + const signedAttrs = this.signerInfoObj.subs.find((sub) => sub.tag.isContextSpecific(0x00)); + return signedAttrs; + } + // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3 + get messageDigestAttributeObj() { + const messageDigest = this.signedAttrsObj.subs.find((sub) => sub.subs[0].tag.isOID() && + sub.subs[0].toOID() === OID_PKCS9_MESSAGE_DIGEST_KEY); + return messageDigest; + } + // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3 + get signerSidObj() { + return this.signerInfoObj.subs[1]; + } + // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3 + get signerDigestAlgorithmObj() { + // digestAlgorithm is the 3rd element of the signerInfo sequence + return this.signerInfoObj.subs[2]; + } + // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3 + get signatureAlgorithmObj() { + // signatureAlgorithm is the 5th element of the signerInfo sequence + return this.signerInfoObj.subs[4]; + } + // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3 + get signatureValueObj() { + // signature is the 6th element of the signerInfo sequence + return this.signerInfoObj.subs[5]; + } +} +exports.RFC3161Timestamp = RFC3161Timestamp; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js new file mode 100644 index 0000000000000000000000000000000000000000..d5001c42c108f2f163074ad066a416c6e2bacc22 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js @@ -0,0 +1,71 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TSTInfo = void 0; +const crypto = __importStar(require("../crypto")); +const oid_1 = require("../oid"); +const error_1 = require("./error"); +class TSTInfo { + constructor(asn1) { + this.root = asn1; + } + get version() { + return this.root.subs[0].toInteger(); + } + get genTime() { + return this.root.subs[4].toDate(); + } + get messageImprintHashAlgorithm() { + const oid = this.messageImprintObj.subs[0].subs[0].toOID(); + return oid_1.SHA2_HASH_ALGOS[oid]; + } + get messageImprintHashedMessage() { + return this.messageImprintObj.subs[1].value; + } + get raw() { + return this.root.toDER(); + } + verify(data) { + const digest = crypto.digest(this.messageImprintHashAlgorithm, data); + if (!crypto.bufferEqual(digest, this.messageImprintHashedMessage)) { + throw new error_1.RFC3161TimestampVerificationError('message imprint does not match artifact'); + } + } + // https://www.rfc-editor.org/rfc/rfc3161#section-2.4.2 + get messageImprintObj() { + return this.root.subs[2]; + } +} +exports.TSTInfo = TSTInfo; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/stream.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/stream.js new file mode 100644 index 0000000000000000000000000000000000000000..0a24f8582eb23a15b30f563c0624506d1e253626 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/stream.js @@ -0,0 +1,115 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ByteStream = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +class StreamError extends Error { +} +class ByteStream { + constructor(buffer) { + this.start = 0; + if (buffer) { + this.buf = buffer; + this.view = Buffer.from(buffer); + } + else { + this.buf = new ArrayBuffer(0); + this.view = Buffer.from(this.buf); + } + } + get buffer() { + return this.view.subarray(0, this.start); + } + get length() { + return this.view.byteLength; + } + get position() { + return this.start; + } + seek(position) { + this.start = position; + } + // Returns a Buffer containing the specified number of bytes starting at the + // given start position. 
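+ // Illustrative usage (editorial sketch, not part of the vendored module; the byte values are invented): + //   const bs = new ByteStream(Buffer.from([0xde, 0xad, 0xbe, 0xef])); + //   bs.slice(1, 2); // => <Buffer ad be> + //   bs.slice(3, 2); // throws StreamError('request past end of buffer')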
+ slice(start, len) { + const end = start + len; + if (end > this.length) { + throw new StreamError('request past end of buffer'); + } + return this.view.subarray(start, end); + } + appendChar(char) { + this.ensureCapacity(1); + this.view[this.start] = char; + this.start += 1; + } + appendUint16(num) { + this.ensureCapacity(2); + const value = new Uint16Array([num]); + const view = new Uint8Array(value.buffer); + this.view[this.start] = view[1]; + this.view[this.start + 1] = view[0]; + this.start += 2; + } + appendUint24(num) { + this.ensureCapacity(3); + const value = new Uint32Array([num]); + const view = new Uint8Array(value.buffer); + this.view[this.start] = view[2]; + this.view[this.start + 1] = view[1]; + this.view[this.start + 2] = view[0]; + this.start += 3; + } + appendView(view) { + this.ensureCapacity(view.length); + this.view.set(view, this.start); + this.start += view.length; + } + getBlock(size) { + if (size <= 0) { + return Buffer.alloc(0); + } + if (this.start + size > this.view.length) { + throw new Error('request past end of buffer'); + } + const result = this.view.subarray(this.start, this.start + size); + this.start += size; + return result; + } + getUint8() { + return this.getBlock(1)[0]; + } + getUint16() { + const block = this.getBlock(2); + return (block[0] << 8) | block[1]; + } + ensureCapacity(size) { + if (this.start + size > this.view.byteLength) { + const blockSize = ByteStream.BLOCK_SIZE + (size > ByteStream.BLOCK_SIZE ? size : 0); + this.realloc(this.view.byteLength + blockSize); + } + } + realloc(size) { + const newArray = new ArrayBuffer(size); + const newView = Buffer.from(newArray); + // Copy the old buffer into the new one + newView.set(this.view); + this.buf = newArray; + this.view = newView; + } +} +exports.ByteStream = ByteStream; +ByteStream.BLOCK_SIZE = 1024; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/x509/cert.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/x509/cert.js new file mode 100644 index 0000000000000000000000000000000000000000..83aee7d1215a4c6cd1d25e015d0bd00c4120d915 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/x509/cert.js @@ -0,0 +1,241 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +Object.defineProperty(exports, "__esModule", { value: true }); +exports.X509Certificate = exports.EXTENSION_OID_SCT = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const asn1_1 = require("../asn1"); +const crypto = __importStar(require("../crypto")); +const oid_1 = require("../oid"); +const pem = __importStar(require("../pem")); +const ext_1 = require("./ext"); +const EXTENSION_OID_SUBJECT_KEY_ID = '2.5.29.14'; +const EXTENSION_OID_KEY_USAGE = '2.5.29.15'; +const EXTENSION_OID_SUBJECT_ALT_NAME = '2.5.29.17'; +const EXTENSION_OID_BASIC_CONSTRAINTS = '2.5.29.19'; +const EXTENSION_OID_AUTHORITY_KEY_ID = '2.5.29.35'; +exports.EXTENSION_OID_SCT = '1.3.6.1.4.1.11129.2.4.2'; +class X509Certificate { + constructor(asn1) { + this.root = asn1; + } + static parse(cert) { + const der = typeof cert === 'string' ? 
pem.toDER(cert) : cert; + const asn1 = asn1_1.ASN1Obj.parseBuffer(der); + return new X509Certificate(asn1); + } + get tbsCertificate() { + return this.tbsCertificateObj; + } + get version() { + // version number is the first element of the version context specific tag + const ver = this.versionObj.subs[0].toInteger(); + return `v${(ver + BigInt(1)).toString()}`; + } + get serialNumber() { + return this.serialNumberObj.value; + } + get notBefore() { + // notBefore is the first element of the validity sequence + return this.validityObj.subs[0].toDate(); + } + get notAfter() { + // notAfter is the second element of the validity sequence + return this.validityObj.subs[1].toDate(); + } + get issuer() { + return this.issuerObj.value; + } + get subject() { + return this.subjectObj.value; + } + get publicKey() { + return this.subjectPublicKeyInfoObj.toDER(); + } + get signatureAlgorithm() { + const oid = this.signatureAlgorithmObj.subs[0].toOID(); + return oid_1.ECDSA_SIGNATURE_ALGOS[oid]; + } + get signatureValue() { + // Signature value is a bit string, so we need to skip the first byte + return this.signatureValueObj.value.subarray(1); + } + get subjectAltName() { + const ext = this.extSubjectAltName; + return ext?.uri || /* istanbul ignore next */ ext?.rfc822Name; + } + get extensions() { + // The extension list is the first (and only) element of the extensions + // context specific tag + /* istanbul ignore next */ + const extSeq = this.extensionsObj?.subs[0]; + /* istanbul ignore next */ + return extSeq?.subs || []; + } + get extKeyUsage() { + const ext = this.findExtension(EXTENSION_OID_KEY_USAGE); + return ext ? new ext_1.X509KeyUsageExtension(ext) : undefined; + } + get extBasicConstraints() { + const ext = this.findExtension(EXTENSION_OID_BASIC_CONSTRAINTS); + return ext ? new ext_1.X509BasicConstraintsExtension(ext) : undefined; + } + get extSubjectAltName() { + const ext = this.findExtension(EXTENSION_OID_SUBJECT_ALT_NAME); + return ext ? new ext_1.X509SubjectAlternativeNameExtension(ext) : undefined; + } + get extAuthorityKeyID() { + const ext = this.findExtension(EXTENSION_OID_AUTHORITY_KEY_ID); + return ext ? new ext_1.X509AuthorityKeyIDExtension(ext) : undefined; + } + get extSubjectKeyID() { + const ext = this.findExtension(EXTENSION_OID_SUBJECT_KEY_ID); + return ext + ? new ext_1.X509SubjectKeyIDExtension(ext) + : /* istanbul ignore next */ undefined; + } + get extSCT() { + const ext = this.findExtension(exports.EXTENSION_OID_SCT); + return ext ? new ext_1.X509SCTExtension(ext) : undefined; + } + get isCA() { + const ca = this.extBasicConstraints?.isCA || false; + // If the KeyUsage extension is present, keyCertSign must be set + /* istanbul ignore else */ + if (this.extKeyUsage) { + return ca && this.extKeyUsage.keyCertSign; + } + // TODO: test coverage for this case + /* istanbul ignore next */ + return ca; + } + extension(oid) { + const ext = this.findExtension(oid); + return ext ? 
new ext_1.X509Extension(ext) : undefined; + } + verify(issuerCertificate) { + // Use the issuer's public key if provided, otherwise use the subject's + const publicKey = issuerCertificate?.publicKey || this.publicKey; + const key = crypto.createPublicKey(publicKey); + return crypto.verify(this.tbsCertificate.toDER(), key, this.signatureValue, this.signatureAlgorithm); + } + validForDate(date) { + return this.notBefore <= date && date <= this.notAfter; + } + equals(other) { + return this.root.toDER().equals(other.root.toDER()); + } + // Creates a copy of the certificate with a new buffer + clone() { + const der = this.root.toDER(); + const clone = Buffer.alloc(der.length); + der.copy(clone); + return X509Certificate.parse(clone); + } + findExtension(oid) { + // Find the extension with the given OID. The OID will always be the first + // element of the extension sequence + return this.extensions.find((ext) => ext.subs[0].toOID() === oid); + } + ///////////////////////////////////////////////////////////////////////////// + // The following properties use the documented x509 structure to locate the + // desired ASN.1 object + // https://www.rfc-editor.org/rfc/rfc5280#section-4.1 + // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.1.1 + get tbsCertificateObj() { + // tbsCertificate is the first element of the certificate sequence + return this.root.subs[0]; + } + // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.1.2 + get signatureAlgorithmObj() { + // signatureAlgorithm is the second element of the certificate sequence + return this.root.subs[1]; + } + // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.1.3 + get signatureValueObj() { + // signatureValue is the third element of the certificate sequence + return this.root.subs[2]; + } + // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.1 + get versionObj() { + // version is the first element of the tbsCertificate sequence + return this.tbsCertificateObj.subs[0]; + } + // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.2 + get serialNumberObj() { + // serialNumber is the second element of the tbsCertificate sequence + return this.tbsCertificateObj.subs[1]; + } + // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.4 + get issuerObj() { + // issuer is the fourth element of the tbsCertificate sequence + return this.tbsCertificateObj.subs[3]; + } + // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.5 + get validityObj() { + // validity is the fifth element of the tbsCertificate sequence + return this.tbsCertificateObj.subs[4]; + } + // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.6 + get subjectObj() { + // subject is the sixth element of the tbsCertificate sequence + return this.tbsCertificateObj.subs[5]; + } + // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.7 + get subjectPublicKeyInfoObj() { + // subjectPublicKeyInfo is the seventh element of the tbsCertificate sequence + return this.tbsCertificateObj.subs[6]; + } + // Extensions can't be located by index because their position varies. 
Instead, + // we need to find the extensions context specific tag + // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.9 + get extensionsObj() { + return this.tbsCertificateObj.subs.find((sub) => sub.tag.isContextSpecific(0x03)); + } +} +exports.X509Certificate = X509Certificate; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/x509/ext.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/x509/ext.js new file mode 100644 index 0000000000000000000000000000000000000000..1d481261b0aa693e3605c433331db1ee5b33eb61 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/x509/ext.js @@ -0,0 +1,145 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.X509SCTExtension = exports.X509SubjectKeyIDExtension = exports.X509AuthorityKeyIDExtension = exports.X509SubjectAlternativeNameExtension = exports.X509KeyUsageExtension = exports.X509BasicConstraintsExtension = exports.X509Extension = void 0; +const stream_1 = require("../stream"); +const sct_1 = require("./sct"); +// https://www.rfc-editor.org/rfc/rfc5280#section-4.1 +class X509Extension { + constructor(asn1) { + this.root = asn1; + } + get oid() { + return this.root.subs[0].toOID(); + } + get critical() { + // The critical field is optional and will be the second element of the + // extension sequence if present. Default to false if not present. + return this.root.subs.length === 3 ? this.root.subs[1].toBoolean() : false; + } + get value() { + return this.extnValueObj.value; + } + get valueObj() { + return this.extnValueObj; + } + get extnValueObj() { + // The extnValue field will be the last element of the extension sequence + return this.root.subs[this.root.subs.length - 1]; + } +} +exports.X509Extension = X509Extension; +// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.9 +class X509BasicConstraintsExtension extends X509Extension { + get isCA() { + return this.sequence.subs[0]?.toBoolean() ?? false; + } + get pathLenConstraint() { + return this.sequence.subs.length > 1 + ? this.sequence.subs[1].toInteger() + : undefined; + } + // The extnValue field contains a single sequence wrapping the isCA and + // pathLenConstraint. + get sequence() { + return this.extnValueObj.subs[0]; + } +} +exports.X509BasicConstraintsExtension = X509BasicConstraintsExtension; +// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.3 +class X509KeyUsageExtension extends X509Extension { + get digitalSignature() { + return this.bitString[0] === 1; + } + get keyCertSign() { + return this.bitString[5] === 1; + } + get crlSign() { + return this.bitString[6] === 1; + } + // The extnValue field contains a single bit string which is a bit mask + // indicating which key usages are enabled. + get bitString() { + return this.extnValueObj.subs[0].toBitString(); + } +} +exports.X509KeyUsageExtension = X509KeyUsageExtension; +// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.6 +class X509SubjectAlternativeNameExtension extends X509Extension { + get rfc822Name() { + return this.findGeneralName(0x01)?.value.toString('ascii'); + } + get uri() { + return this.findGeneralName(0x06)?.value.toString('ascii'); + } + // Retrieve the value of an otherName with the given OID. + otherName(oid) { + const otherName = this.findGeneralName(0x00); + if (otherName === undefined) { + return undefined; + } + // The otherName is a sequence containing an OID and a value. 
+ // Need to check that the OID matches the one we're looking for. + const otherNameOID = otherName.subs[0].toOID(); + if (otherNameOID !== oid) { + return undefined; + } + // The otherNameValue is a sequence containing the actual value. + const otherNameValue = otherName.subs[1]; + return otherNameValue.subs[0].value.toString('ascii'); + } + findGeneralName(tag) { + return this.generalNames.find((gn) => gn.tag.isContextSpecific(tag)); + } + // The extnValue field contains a sequence of GeneralNames. + get generalNames() { + return this.extnValueObj.subs[0].subs; + } +} +exports.X509SubjectAlternativeNameExtension = X509SubjectAlternativeNameExtension; +// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.1 +class X509AuthorityKeyIDExtension extends X509Extension { + get keyIdentifier() { + return this.findSequenceMember(0x00)?.value; + } + findSequenceMember(tag) { + return this.sequence.subs.find((el) => el.tag.isContextSpecific(tag)); + } + // The extnValue field contains a single sequence wrapping the keyIdentifier + get sequence() { + return this.extnValueObj.subs[0]; + } +} +exports.X509AuthorityKeyIDExtension = X509AuthorityKeyIDExtension; +// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.2 +class X509SubjectKeyIDExtension extends X509Extension { + get keyIdentifier() { + return this.extnValueObj.subs[0].value; + } +} +exports.X509SubjectKeyIDExtension = X509SubjectKeyIDExtension; +// https://www.rfc-editor.org/rfc/rfc6962#section-3.3 +class X509SCTExtension extends X509Extension { + constructor(asn1) { + super(asn1); + } + get signedCertificateTimestamps() { + const buf = this.extnValueObj.subs[0].value; + const stream = new stream_1.ByteStream(buf); + // The overall list length is encoded in the first two bytes -- note this + // is the length of the list in bytes, NOT the number of SCTs in the list + const end = stream.getUint16() + 2; + const sctList = []; + while (stream.position < end) { + // Read the length of the next SCT + const sctLength = stream.getUint16(); + // Slice out the bytes for the next SCT and parse it + const sct = stream.getBlock(sctLength); + sctList.push(sct_1.SignedCertificateTimestamp.parse(sct)); + } + if (stream.position !== end) { + throw new Error('SCT list length does not match actual length'); + } + return sctList; + } +} +exports.X509SCTExtension = X509SCTExtension; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/x509/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/x509/index.js new file mode 100644 index 0000000000000000000000000000000000000000..cdd77e58f37d5a7ffe1dcfdd33ad8bb3733d59cb --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/x509/index.js @@ -0,0 +1,23 @@ +"use strict"; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.X509SCTExtension = exports.X509Certificate = exports.EXTENSION_OID_SCT = void 0; +var cert_1 = require("./cert"); +Object.defineProperty(exports, "EXTENSION_OID_SCT", { enumerable: true, get: function () { return cert_1.EXTENSION_OID_SCT; } }); +Object.defineProperty(exports, "X509Certificate", { enumerable: true, get: function () { return cert_1.X509Certificate; } }); +var ext_1 = require("./ext"); +Object.defineProperty(exports, "X509SCTExtension", { enumerable: true, get: function () { return ext_1.X509SCTExtension; } }); diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/x509/sct.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/x509/sct.js new file mode 100644 index 0000000000000000000000000000000000000000..55885e3b307427c164efcaa2c78e063a652890e6 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/dist/x509/sct.js @@ -0,0 +1,151 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SignedCertificateTimestamp = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +const crypto = __importStar(require("../crypto")); +const stream_1 = require("../stream"); +class SignedCertificateTimestamp { + constructor(options) { + this.version = options.version; + this.logID = options.logID; + this.timestamp = options.timestamp; + this.extensions = options.extensions; + this.hashAlgorithm = options.hashAlgorithm; + this.signatureAlgorithm = options.signatureAlgorithm; + this.signature = options.signature; + } + get datetime() { + return new Date(Number(this.timestamp.readBigInt64BE())); + } + // Returns the hash algorithm used to generate the SCT's signature. + // https://www.rfc-editor.org/rfc/rfc5246#section-7.4.1.4.1 + get algorithm() { + switch (this.hashAlgorithm) { + /* istanbul ignore next */ + case 0: + return 'none'; + /* istanbul ignore next */ + case 1: + return 'md5'; + /* istanbul ignore next */ + case 2: + return 'sha1'; + /* istanbul ignore next */ + case 3: + return 'sha224'; + case 4: + return 'sha256'; + /* istanbul ignore next */ + case 5: + return 'sha384'; + /* istanbul ignore next */ + case 6: + return 'sha512'; + /* istanbul ignore next */ + default: + return 'unknown'; + } + } + verify(preCert, key) { + // Assemble the digitally-signed struct (the data over which the signature + // was generated). + // https://www.rfc-editor.org/rfc/rfc6962#section-3.2 + const stream = new stream_1.ByteStream(); + stream.appendChar(this.version); + stream.appendChar(0x00); // SignatureType = certificate_timestamp(0) + stream.appendView(this.timestamp); + stream.appendUint16(0x01); // LogEntryType = precert_entry(1) + stream.appendView(preCert); + stream.appendUint16(this.extensions.byteLength); + /* istanbul ignore next - extensions are very uncommon */ + if (this.extensions.byteLength > 0) { + stream.appendView(this.extensions); + } + return crypto.verify(stream.buffer, key, this.signature, this.algorithm); + } + // Parses a SignedCertificateTimestamp from a buffer. SCTs are encoded using + // TLS encoding which means the fields and lengths of most fields are + // specified as part of the SCT and TLS specs. + // https://www.rfc-editor.org/rfc/rfc6962#section-3.2 + // https://www.rfc-editor.org/rfc/rfc5246#section-7.4.1.4.1 + static parse(buf) { + const stream = new stream_1.ByteStream(buf); + // Version - enum { v1(0), (255) } + const version = stream.getUint8(); + // Log ID - struct { opaque key_id[32]; } + const logID = stream.getBlock(32); + // Timestamp - uint64 + const timestamp = stream.getBlock(8); + // Extensions - opaque extensions<0..2^16-1>; + const extensionLength = stream.getUint16(); + const extensions = stream.getBlock(extensionLength); + // Hash algo - enum { sha256(4), . . . 
(255) } + const hashAlgorithm = stream.getUint8(); + // Signature algo - enum { anonymous(0), rsa(1), dsa(2), ecdsa(3), (255) } + const signatureAlgorithm = stream.getUint8(); + // Signature - opaque signature<0..2^16-1>; + const sigLength = stream.getUint16(); + const signature = stream.getBlock(sigLength); + // Check that we read the entire buffer + if (stream.position !== buf.length) { + throw new Error('SCT buffer length mismatch'); + } + return new SignedCertificateTimestamp({ + version, + logID, + timestamp, + extensions, + hashAlgorithm, + signatureAlgorithm, + signature, + }); + } +} +exports.SignedCertificateTimestamp = SignedCertificateTimestamp; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/package.json new file mode 100644 index 0000000000000000000000000000000000000000..7d2f8d5de3f7a89e3e2d8351cb6c6224fd17d9a7 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/core/package.json @@ -0,0 +1,31 @@ +{ + "name": "@sigstore/core", + "version": "3.0.0", + "description": "Base library for Sigstore", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "clean": "shx rm -rf dist *.tsbuildinfo", + "build": "tsc --build", + "test": "jest" + }, + "files": [ + "dist" + ], + "author": "bdehamer@github.com", + "license": "Apache-2.0", + "repository": { + "type": "git", + "url": "git+https://github.com/sigstore/sigstore-js.git" + }, + "bugs": { + "url": "https://github.com/sigstore/sigstore-js/issues" + }, + "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/core#readme", + "publishConfig": { + "provenance": true + }, + "engines": { + "node": "^20.17.0 || >=22.9.0" + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..e9e7c1679a09dfcb0793682d99f5129e206a8abd --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2023 The Sigstore Authors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js new file mode 100644 index 0000000000000000000000000000000000000000..5c4f37bfaf3fb14c98b947698eafe53bc4f1fbac --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js @@ -0,0 +1,59 @@ +"use strict"; +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.7.5 +// protoc v6.30.2 +// source: envelope.proto +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Signature = exports.Envelope = void 0; +exports.Envelope = { + fromJSON(object) { + return { + payload: isSet(object.payload) ? Buffer.from(bytesFromBase64(object.payload)) : Buffer.alloc(0), + payloadType: isSet(object.payloadType) ? 
globalThis.String(object.payloadType) : "", + signatures: globalThis.Array.isArray(object?.signatures) + ? object.signatures.map((e) => exports.Signature.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.payload.length !== 0) { + obj.payload = base64FromBytes(message.payload); + } + if (message.payloadType !== "") { + obj.payloadType = message.payloadType; + } + if (message.signatures?.length) { + obj.signatures = message.signatures.map((e) => exports.Signature.toJSON(e)); + } + return obj; + }, +}; +exports.Signature = { + fromJSON(object) { + return { + sig: isSet(object.sig) ? Buffer.from(bytesFromBase64(object.sig)) : Buffer.alloc(0), + keyid: isSet(object.keyid) ? globalThis.String(object.keyid) : "", + }; + }, + toJSON(message) { + const obj = {}; + if (message.sig.length !== 0) { + obj.sig = base64FromBytes(message.sig); + } + if (message.keyid !== "") { + obj.keyid = message.keyid; + } + return obj; + }, +}; +function bytesFromBase64(b64) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); +} +function base64FromBytes(arr) { + return globalThis.Buffer.from(arr).toString("base64"); +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js new file mode 100644 index 0000000000000000000000000000000000000000..6138fef5672fc2dea027535cb5dbd0f8889aa7a2 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js @@ -0,0 +1,174 @@ +"use strict"; +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.7.5 +// protoc v6.30.2 +// source: events.proto +Object.defineProperty(exports, "__esModule", { value: true }); +exports.CloudEventBatch = exports.CloudEvent_CloudEventAttributeValue = exports.CloudEvent_AttributesEntry = exports.CloudEvent = void 0; +/* eslint-disable */ +const any_1 = require("./google/protobuf/any"); +const timestamp_1 = require("./google/protobuf/timestamp"); +exports.CloudEvent = { + fromJSON(object) { + return { + id: isSet(object.id) ? globalThis.String(object.id) : "", + source: isSet(object.source) ? globalThis.String(object.source) : "", + specVersion: isSet(object.specVersion) ? globalThis.String(object.specVersion) : "", + type: isSet(object.type) ? globalThis.String(object.type) : "", + attributes: isObject(object.attributes) + ? Object.entries(object.attributes).reduce((acc, [key, value]) => { + acc[key] = exports.CloudEvent_CloudEventAttributeValue.fromJSON(value); + return acc; + }, {}) + : {}, + data: isSet(object.binaryData) + ? { $case: "binaryData", binaryData: Buffer.from(bytesFromBase64(object.binaryData)) } + : isSet(object.textData) + ? { $case: "textData", textData: globalThis.String(object.textData) } + : isSet(object.protoData) + ? 
{ $case: "protoData", protoData: any_1.Any.fromJSON(object.protoData) } + : undefined, + }; + }, + toJSON(message) { + const obj = {}; + if (message.id !== "") { + obj.id = message.id; + } + if (message.source !== "") { + obj.source = message.source; + } + if (message.specVersion !== "") { + obj.specVersion = message.specVersion; + } + if (message.type !== "") { + obj.type = message.type; + } + if (message.attributes) { + const entries = Object.entries(message.attributes); + if (entries.length > 0) { + obj.attributes = {}; + entries.forEach(([k, v]) => { + obj.attributes[k] = exports.CloudEvent_CloudEventAttributeValue.toJSON(v); + }); + } + } + if (message.data?.$case === "binaryData") { + obj.binaryData = base64FromBytes(message.data.binaryData); + } + else if (message.data?.$case === "textData") { + obj.textData = message.data.textData; + } + else if (message.data?.$case === "protoData") { + obj.protoData = any_1.Any.toJSON(message.data.protoData); + } + return obj; + }, +}; +exports.CloudEvent_AttributesEntry = { + fromJSON(object) { + return { + key: isSet(object.key) ? globalThis.String(object.key) : "", + value: isSet(object.value) ? exports.CloudEvent_CloudEventAttributeValue.fromJSON(object.value) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + if (message.key !== "") { + obj.key = message.key; + } + if (message.value !== undefined) { + obj.value = exports.CloudEvent_CloudEventAttributeValue.toJSON(message.value); + } + return obj; + }, +}; +exports.CloudEvent_CloudEventAttributeValue = { + fromJSON(object) { + return { + attr: isSet(object.ceBoolean) + ? { $case: "ceBoolean", ceBoolean: globalThis.Boolean(object.ceBoolean) } + : isSet(object.ceInteger) + ? { $case: "ceInteger", ceInteger: globalThis.Number(object.ceInteger) } + : isSet(object.ceString) + ? { $case: "ceString", ceString: globalThis.String(object.ceString) } + : isSet(object.ceBytes) + ? { $case: "ceBytes", ceBytes: Buffer.from(bytesFromBase64(object.ceBytes)) } + : isSet(object.ceUri) + ? { $case: "ceUri", ceUri: globalThis.String(object.ceUri) } + : isSet(object.ceUriRef) + ? { $case: "ceUriRef", ceUriRef: globalThis.String(object.ceUriRef) } + : isSet(object.ceTimestamp) + ? { $case: "ceTimestamp", ceTimestamp: fromJsonTimestamp(object.ceTimestamp) } + : undefined, + }; + }, + toJSON(message) { + const obj = {}; + if (message.attr?.$case === "ceBoolean") { + obj.ceBoolean = message.attr.ceBoolean; + } + else if (message.attr?.$case === "ceInteger") { + obj.ceInteger = Math.round(message.attr.ceInteger); + } + else if (message.attr?.$case === "ceString") { + obj.ceString = message.attr.ceString; + } + else if (message.attr?.$case === "ceBytes") { + obj.ceBytes = base64FromBytes(message.attr.ceBytes); + } + else if (message.attr?.$case === "ceUri") { + obj.ceUri = message.attr.ceUri; + } + else if (message.attr?.$case === "ceUriRef") { + obj.ceUriRef = message.attr.ceUriRef; + } + else if (message.attr?.$case === "ceTimestamp") { + obj.ceTimestamp = message.attr.ceTimestamp.toISOString(); + } + return obj; + }, +}; +exports.CloudEventBatch = { + fromJSON(object) { + return { + events: globalThis.Array.isArray(object?.events) ? 
object.events.map((e) => exports.CloudEvent.fromJSON(e)) : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.events?.length) { + obj.events = message.events.map((e) => exports.CloudEvent.toJSON(e)); + } + return obj; + }, +}; +function bytesFromBase64(b64) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); +} +function base64FromBytes(arr) { + return globalThis.Buffer.from(arr).toString("base64"); +} +function fromTimestamp(t) { + let millis = (globalThis.Number(t.seconds) || 0) * 1_000; + millis += (t.nanos || 0) / 1_000_000; + return new globalThis.Date(millis); +} +function fromJsonTimestamp(o) { + if (o instanceof globalThis.Date) { + return o; + } + else if (typeof o === "string") { + return new globalThis.Date(o); + } + else { + return fromTimestamp(timestamp_1.Timestamp.fromJSON(o)); + } +} +function isObject(value) { + return typeof value === "object" && value !== null; +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js new file mode 100644 index 0000000000000000000000000000000000000000..b4d9ccc781c2f96ba82c62dbccf8f36e96e464b1 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js @@ -0,0 +1,141 @@ +"use strict"; +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.7.5 +// protoc v6.30.2 +// source: google/api/field_behavior.proto +Object.defineProperty(exports, "__esModule", { value: true }); +exports.FieldBehavior = void 0; +exports.fieldBehaviorFromJSON = fieldBehaviorFromJSON; +exports.fieldBehaviorToJSON = fieldBehaviorToJSON; +/* eslint-disable */ +/** + * An indicator of the behavior of a given field (for example, that a field + * is required in requests, or given as output but ignored as input). + * This **does not** change the behavior in protocol buffers itself; it only + * denotes the behavior and may affect how API tooling handles the field. + * + * Note: This enum **may** receive new values in the future. + */ +var FieldBehavior; +(function (FieldBehavior) { + /** FIELD_BEHAVIOR_UNSPECIFIED - Conventional default for enums. Do not use this. */ + FieldBehavior[FieldBehavior["FIELD_BEHAVIOR_UNSPECIFIED"] = 0] = "FIELD_BEHAVIOR_UNSPECIFIED"; + /** + * OPTIONAL - Specifically denotes a field as optional. + * While all fields in protocol buffers are optional, this may be specified + * for emphasis if appropriate. + */ + FieldBehavior[FieldBehavior["OPTIONAL"] = 1] = "OPTIONAL"; + /** + * REQUIRED - Denotes a field as required. + * This indicates that the field **must** be provided as part of the request, + * and failure to do so will cause an error (usually `INVALID_ARGUMENT`). + */ + FieldBehavior[FieldBehavior["REQUIRED"] = 2] = "REQUIRED"; + /** + * OUTPUT_ONLY - Denotes a field as output only. + * This indicates that the field is provided in responses, but including the + * field in a request does nothing (the server *must* ignore it and + * *must not* throw an error as a result of the field's presence). + */ + FieldBehavior[FieldBehavior["OUTPUT_ONLY"] = 3] = "OUTPUT_ONLY"; + /** + * INPUT_ONLY - Denotes a field as input only. 
+ * This indicates that the field is provided in requests, and the + * corresponding field is not included in output. + */ + FieldBehavior[FieldBehavior["INPUT_ONLY"] = 4] = "INPUT_ONLY"; + /** + * IMMUTABLE - Denotes a field as immutable. + * This indicates that the field may be set once in a request to create a + * resource, but may not be changed thereafter. + */ + FieldBehavior[FieldBehavior["IMMUTABLE"] = 5] = "IMMUTABLE"; + /** + * UNORDERED_LIST - Denotes that a (repeated) field is an unordered list. + * This indicates that the service may provide the elements of the list + * in any arbitrary order, rather than the order the user originally + * provided. Additionally, the list's order may or may not be stable. + */ + FieldBehavior[FieldBehavior["UNORDERED_LIST"] = 6] = "UNORDERED_LIST"; + /** + * NON_EMPTY_DEFAULT - Denotes that this field returns a non-empty default value if not set. + * This indicates that if the user provides the empty value in a request, + * a non-empty value will be returned. The user will not be aware of what + * non-empty value to expect. + */ + FieldBehavior[FieldBehavior["NON_EMPTY_DEFAULT"] = 7] = "NON_EMPTY_DEFAULT"; + /** + * IDENTIFIER - Denotes that the field in a resource (a message annotated with + * google.api.resource) is used in the resource name to uniquely identify the + * resource. For AIP-compliant APIs, this should only be applied to the + * `name` field on the resource. + * + * This behavior should not be applied to references to other resources within + * the message. + * + * The identifier field of resources often have different field behavior + * depending on the request it is embedded in (e.g. for Create methods name + * is optional and unused, while for Update methods it is required). Instead + * of method-specific annotations, only `IDENTIFIER` is required. 
+ */ + FieldBehavior[FieldBehavior["IDENTIFIER"] = 8] = "IDENTIFIER"; +})(FieldBehavior || (exports.FieldBehavior = FieldBehavior = {})); +function fieldBehaviorFromJSON(object) { + switch (object) { + case 0: + case "FIELD_BEHAVIOR_UNSPECIFIED": + return FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED; + case 1: + case "OPTIONAL": + return FieldBehavior.OPTIONAL; + case 2: + case "REQUIRED": + return FieldBehavior.REQUIRED; + case 3: + case "OUTPUT_ONLY": + return FieldBehavior.OUTPUT_ONLY; + case 4: + case "INPUT_ONLY": + return FieldBehavior.INPUT_ONLY; + case 5: + case "IMMUTABLE": + return FieldBehavior.IMMUTABLE; + case 6: + case "UNORDERED_LIST": + return FieldBehavior.UNORDERED_LIST; + case 7: + case "NON_EMPTY_DEFAULT": + return FieldBehavior.NON_EMPTY_DEFAULT; + case 8: + case "IDENTIFIER": + return FieldBehavior.IDENTIFIER; + default: + throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior"); + } +} +function fieldBehaviorToJSON(object) { + switch (object) { + case FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED: + return "FIELD_BEHAVIOR_UNSPECIFIED"; + case FieldBehavior.OPTIONAL: + return "OPTIONAL"; + case FieldBehavior.REQUIRED: + return "REQUIRED"; + case FieldBehavior.OUTPUT_ONLY: + return "OUTPUT_ONLY"; + case FieldBehavior.INPUT_ONLY: + return "INPUT_ONLY"; + case FieldBehavior.IMMUTABLE: + return "IMMUTABLE"; + case FieldBehavior.UNORDERED_LIST: + return "UNORDERED_LIST"; + case FieldBehavior.NON_EMPTY_DEFAULT: + return "NON_EMPTY_DEFAULT"; + case FieldBehavior.IDENTIFIER: + return "IDENTIFIER"; + default: + throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior"); + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js new file mode 100644 index 0000000000000000000000000000000000000000..f0c8aab773e4c9032818f4700f40010b1f38f363 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js @@ -0,0 +1,35 @@ +"use strict"; +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.7.5 +// protoc v6.30.2 +// source: google/protobuf/any.proto +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Any = void 0; +exports.Any = { + fromJSON(object) { + return { + typeUrl: isSet(object.typeUrl) ? globalThis.String(object.typeUrl) : "", + value: isSet(object.value) ? 
Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0), + }; + }, + toJSON(message) { + const obj = {}; + if (message.typeUrl !== "") { + obj.typeUrl = message.typeUrl; + } + if (message.value.length !== 0) { + obj.value = base64FromBytes(message.value); + } + return obj; + }, +}; +function bytesFromBase64(b64) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); +} +function base64FromBytes(arr) { + return globalThis.Buffer.from(arr).toString("base64"); +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js new file mode 100644 index 0000000000000000000000000000000000000000..d6f8ddddf799db0f02924359534e3156c5bcb6ac --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js @@ -0,0 +1,2042 @@ +"use strict"; +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.7.5 +// protoc v6.30.2 +// source: google/protobuf/descriptor.proto +Object.defineProperty(exports, "__esModule", { value: true }); +exports.GeneratedCodeInfo = exports.SourceCodeInfo_Location = exports.SourceCodeInfo = exports.FeatureSetDefaults_FeatureSetEditionDefault = exports.FeatureSetDefaults = exports.FeatureSet = exports.UninterpretedOption_NamePart = exports.UninterpretedOption = exports.MethodOptions = exports.ServiceOptions = exports.EnumValueOptions = exports.EnumOptions = exports.OneofOptions = exports.FieldOptions_FeatureSupport = exports.FieldOptions_EditionDefault = exports.FieldOptions = exports.MessageOptions = exports.FileOptions = exports.MethodDescriptorProto = exports.ServiceDescriptorProto = exports.EnumValueDescriptorProto = exports.EnumDescriptorProto_EnumReservedRange = exports.EnumDescriptorProto = exports.OneofDescriptorProto = exports.FieldDescriptorProto = exports.ExtensionRangeOptions_Declaration = exports.ExtensionRangeOptions = exports.DescriptorProto_ReservedRange = exports.DescriptorProto_ExtensionRange = exports.DescriptorProto = exports.FileDescriptorProto = exports.FileDescriptorSet = exports.GeneratedCodeInfo_Annotation_Semantic = exports.FeatureSet_EnforceNamingStyle = exports.FeatureSet_JsonFormat = exports.FeatureSet_MessageEncoding = exports.FeatureSet_Utf8Validation = exports.FeatureSet_RepeatedFieldEncoding = exports.FeatureSet_EnumType = exports.FeatureSet_FieldPresence = exports.MethodOptions_IdempotencyLevel = exports.FieldOptions_OptionTargetType = exports.FieldOptions_OptionRetention = exports.FieldOptions_JSType = exports.FieldOptions_CType = exports.FileOptions_OptimizeMode = exports.FieldDescriptorProto_Label = exports.FieldDescriptorProto_Type = exports.ExtensionRangeOptions_VerificationState = exports.Edition = void 0; +exports.GeneratedCodeInfo_Annotation = void 0; +exports.editionFromJSON = editionFromJSON; +exports.editionToJSON = editionToJSON; +exports.extensionRangeOptions_VerificationStateFromJSON = extensionRangeOptions_VerificationStateFromJSON; +exports.extensionRangeOptions_VerificationStateToJSON = extensionRangeOptions_VerificationStateToJSON; +exports.fieldDescriptorProto_TypeFromJSON = fieldDescriptorProto_TypeFromJSON; +exports.fieldDescriptorProto_TypeToJSON = 
fieldDescriptorProto_TypeToJSON; +exports.fieldDescriptorProto_LabelFromJSON = fieldDescriptorProto_LabelFromJSON; +exports.fieldDescriptorProto_LabelToJSON = fieldDescriptorProto_LabelToJSON; +exports.fileOptions_OptimizeModeFromJSON = fileOptions_OptimizeModeFromJSON; +exports.fileOptions_OptimizeModeToJSON = fileOptions_OptimizeModeToJSON; +exports.fieldOptions_CTypeFromJSON = fieldOptions_CTypeFromJSON; +exports.fieldOptions_CTypeToJSON = fieldOptions_CTypeToJSON; +exports.fieldOptions_JSTypeFromJSON = fieldOptions_JSTypeFromJSON; +exports.fieldOptions_JSTypeToJSON = fieldOptions_JSTypeToJSON; +exports.fieldOptions_OptionRetentionFromJSON = fieldOptions_OptionRetentionFromJSON; +exports.fieldOptions_OptionRetentionToJSON = fieldOptions_OptionRetentionToJSON; +exports.fieldOptions_OptionTargetTypeFromJSON = fieldOptions_OptionTargetTypeFromJSON; +exports.fieldOptions_OptionTargetTypeToJSON = fieldOptions_OptionTargetTypeToJSON; +exports.methodOptions_IdempotencyLevelFromJSON = methodOptions_IdempotencyLevelFromJSON; +exports.methodOptions_IdempotencyLevelToJSON = methodOptions_IdempotencyLevelToJSON; +exports.featureSet_FieldPresenceFromJSON = featureSet_FieldPresenceFromJSON; +exports.featureSet_FieldPresenceToJSON = featureSet_FieldPresenceToJSON; +exports.featureSet_EnumTypeFromJSON = featureSet_EnumTypeFromJSON; +exports.featureSet_EnumTypeToJSON = featureSet_EnumTypeToJSON; +exports.featureSet_RepeatedFieldEncodingFromJSON = featureSet_RepeatedFieldEncodingFromJSON; +exports.featureSet_RepeatedFieldEncodingToJSON = featureSet_RepeatedFieldEncodingToJSON; +exports.featureSet_Utf8ValidationFromJSON = featureSet_Utf8ValidationFromJSON; +exports.featureSet_Utf8ValidationToJSON = featureSet_Utf8ValidationToJSON; +exports.featureSet_MessageEncodingFromJSON = featureSet_MessageEncodingFromJSON; +exports.featureSet_MessageEncodingToJSON = featureSet_MessageEncodingToJSON; +exports.featureSet_JsonFormatFromJSON = featureSet_JsonFormatFromJSON; +exports.featureSet_JsonFormatToJSON = featureSet_JsonFormatToJSON; +exports.featureSet_EnforceNamingStyleFromJSON = featureSet_EnforceNamingStyleFromJSON; +exports.featureSet_EnforceNamingStyleToJSON = featureSet_EnforceNamingStyleToJSON; +exports.generatedCodeInfo_Annotation_SemanticFromJSON = generatedCodeInfo_Annotation_SemanticFromJSON; +exports.generatedCodeInfo_Annotation_SemanticToJSON = generatedCodeInfo_Annotation_SemanticToJSON; +/* eslint-disable */ +/** The full set of known editions. */ +var Edition; +(function (Edition) { + /** EDITION_UNKNOWN - A placeholder for an unknown edition value. */ + Edition[Edition["EDITION_UNKNOWN"] = 0] = "EDITION_UNKNOWN"; + /** + * EDITION_LEGACY - A placeholder edition for specifying default behaviors *before* a feature + * was first introduced. This is effectively an "infinite past". + */ + Edition[Edition["EDITION_LEGACY"] = 900] = "EDITION_LEGACY"; + /** + * EDITION_PROTO2 - Legacy syntax "editions". These pre-date editions, but behave much like + * distinct editions. These can't be used to specify the edition of proto + * files, but feature definitions must supply proto2/proto3 defaults for + * backwards compatibility. + */ + Edition[Edition["EDITION_PROTO2"] = 998] = "EDITION_PROTO2"; + Edition[Edition["EDITION_PROTO3"] = 999] = "EDITION_PROTO3"; + /** + * EDITION_2023 - Editions that have been released. The specific values are arbitrary and + * should not be depended on, but they will always be time-ordered for easy + * comparison. 
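+ *
+ * Illustrative sketch (standard editions syntax): a .proto file opts
+ * into a released edition with a top-level declaration, e.g.
+ *
+ *   edition = "2023";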
+ */ + Edition[Edition["EDITION_2023"] = 1000] = "EDITION_2023"; + Edition[Edition["EDITION_2024"] = 1001] = "EDITION_2024"; + /** + * EDITION_1_TEST_ONLY - Placeholder editions for testing feature resolution. These should not be + * used or relied on outside of tests. + */ + Edition[Edition["EDITION_1_TEST_ONLY"] = 1] = "EDITION_1_TEST_ONLY"; + Edition[Edition["EDITION_2_TEST_ONLY"] = 2] = "EDITION_2_TEST_ONLY"; + Edition[Edition["EDITION_99997_TEST_ONLY"] = 99997] = "EDITION_99997_TEST_ONLY"; + Edition[Edition["EDITION_99998_TEST_ONLY"] = 99998] = "EDITION_99998_TEST_ONLY"; + Edition[Edition["EDITION_99999_TEST_ONLY"] = 99999] = "EDITION_99999_TEST_ONLY"; + /** + * EDITION_MAX - Placeholder for specifying unbounded edition support. This should only + * ever be used by plugins that can expect to never require any changes to + * support a new edition. + */ + Edition[Edition["EDITION_MAX"] = 2147483647] = "EDITION_MAX"; +})(Edition || (exports.Edition = Edition = {})); +function editionFromJSON(object) { + switch (object) { + case 0: + case "EDITION_UNKNOWN": + return Edition.EDITION_UNKNOWN; + case 900: + case "EDITION_LEGACY": + return Edition.EDITION_LEGACY; + case 998: + case "EDITION_PROTO2": + return Edition.EDITION_PROTO2; + case 999: + case "EDITION_PROTO3": + return Edition.EDITION_PROTO3; + case 1000: + case "EDITION_2023": + return Edition.EDITION_2023; + case 1001: + case "EDITION_2024": + return Edition.EDITION_2024; + case 1: + case "EDITION_1_TEST_ONLY": + return Edition.EDITION_1_TEST_ONLY; + case 2: + case "EDITION_2_TEST_ONLY": + return Edition.EDITION_2_TEST_ONLY; + case 99997: + case "EDITION_99997_TEST_ONLY": + return Edition.EDITION_99997_TEST_ONLY; + case 99998: + case "EDITION_99998_TEST_ONLY": + return Edition.EDITION_99998_TEST_ONLY; + case 99999: + case "EDITION_99999_TEST_ONLY": + return Edition.EDITION_99999_TEST_ONLY; + case 2147483647: + case "EDITION_MAX": + return Edition.EDITION_MAX; + default: + throw new globalThis.Error("Unrecognized enum value " + object + " for enum Edition"); + } +} +function editionToJSON(object) { + switch (object) { + case Edition.EDITION_UNKNOWN: + return "EDITION_UNKNOWN"; + case Edition.EDITION_LEGACY: + return "EDITION_LEGACY"; + case Edition.EDITION_PROTO2: + return "EDITION_PROTO2"; + case Edition.EDITION_PROTO3: + return "EDITION_PROTO3"; + case Edition.EDITION_2023: + return "EDITION_2023"; + case Edition.EDITION_2024: + return "EDITION_2024"; + case Edition.EDITION_1_TEST_ONLY: + return "EDITION_1_TEST_ONLY"; + case Edition.EDITION_2_TEST_ONLY: + return "EDITION_2_TEST_ONLY"; + case Edition.EDITION_99997_TEST_ONLY: + return "EDITION_99997_TEST_ONLY"; + case Edition.EDITION_99998_TEST_ONLY: + return "EDITION_99998_TEST_ONLY"; + case Edition.EDITION_99999_TEST_ONLY: + return "EDITION_99999_TEST_ONLY"; + case Edition.EDITION_MAX: + return "EDITION_MAX"; + default: + throw new globalThis.Error("Unrecognized enum value " + object + " for enum Edition"); + } +} +/** The verification state of the extension range. */ +var ExtensionRangeOptions_VerificationState; +(function (ExtensionRangeOptions_VerificationState) { + /** DECLARATION - All the extensions of the range must be declared. 
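+ *
+ * Illustrative sketch (assuming the documented extension-declaration
+ * syntax): a verified range lists its extensions inline, e.g.
+ *
+ *   extensions 4 to 99 [declaration = {
+ *     number: 4, full_name: ".my.pkg.bar", type: ".my.pkg.Bar" }];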
*/
+ ExtensionRangeOptions_VerificationState[ExtensionRangeOptions_VerificationState["DECLARATION"] = 0] = "DECLARATION";
+ ExtensionRangeOptions_VerificationState[ExtensionRangeOptions_VerificationState["UNVERIFIED"] = 1] = "UNVERIFIED";
+})(ExtensionRangeOptions_VerificationState || (exports.ExtensionRangeOptions_VerificationState = ExtensionRangeOptions_VerificationState = {}));
+function extensionRangeOptions_VerificationStateFromJSON(object) {
+ switch (object) {
+ case 0:
+ case "DECLARATION":
+ return ExtensionRangeOptions_VerificationState.DECLARATION;
+ case 1:
+ case "UNVERIFIED":
+ return ExtensionRangeOptions_VerificationState.UNVERIFIED;
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum ExtensionRangeOptions_VerificationState");
+ }
+}
+function extensionRangeOptions_VerificationStateToJSON(object) {
+ switch (object) {
+ case ExtensionRangeOptions_VerificationState.DECLARATION:
+ return "DECLARATION";
+ case ExtensionRangeOptions_VerificationState.UNVERIFIED:
+ return "UNVERIFIED";
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum ExtensionRangeOptions_VerificationState");
+ }
+}
+var FieldDescriptorProto_Type;
+(function (FieldDescriptorProto_Type) {
+ /**
+ * TYPE_DOUBLE - 0 is reserved for errors.
+ * Order is weird for historical reasons.
+ */
+ FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_DOUBLE"] = 1] = "TYPE_DOUBLE";
+ FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FLOAT"] = 2] = "TYPE_FLOAT";
+ /**
+ * TYPE_INT64 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if
+ * negative values are likely.
+ */
+ FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT64"] = 3] = "TYPE_INT64";
+ FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT64"] = 4] = "TYPE_UINT64";
+ /**
+ * TYPE_INT32 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if
+ * negative values are likely.
+ */
+ FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT32"] = 5] = "TYPE_INT32";
+ FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED64"] = 6] = "TYPE_FIXED64";
+ FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED32"] = 7] = "TYPE_FIXED32";
+ FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BOOL"] = 8] = "TYPE_BOOL";
+ FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_STRING"] = 9] = "TYPE_STRING";
+ /**
+ * TYPE_GROUP - Tag-delimited aggregate.
+ * Group type is deprecated and not supported after proto2. However, Proto3
+ * implementations should still be able to parse the group wire format and
+ * treat group fields as unknown fields. In Editions, the group wire format
+ * can be enabled via the `message_encoding` feature.
+ */
+ FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_GROUP"] = 10] = "TYPE_GROUP";
+ /** TYPE_MESSAGE - Length-delimited aggregate. */
+ FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_MESSAGE"] = 11] = "TYPE_MESSAGE";
+ /** TYPE_BYTES - New in version 2.
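+ *
+ * (Worked example of the varint trade-off noted above: a plain int64
+ * varint spends 10 bytes on any negative value, while the sint types
+ * below use ZigZag, zigzag(n) = (n << 1) ^ (n >> 63), so 0 -> 0,
+ * -1 -> 1, 1 -> 2, -2 -> 3, keeping small magnitudes small on the wire.)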
*/ + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BYTES"] = 12] = "TYPE_BYTES"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT32"] = 13] = "TYPE_UINT32"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_ENUM"] = 14] = "TYPE_ENUM"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED32"] = 15] = "TYPE_SFIXED32"; + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED64"] = 16] = "TYPE_SFIXED64"; + /** TYPE_SINT32 - Uses ZigZag encoding. */ + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT32"] = 17] = "TYPE_SINT32"; + /** TYPE_SINT64 - Uses ZigZag encoding. */ + FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT64"] = 18] = "TYPE_SINT64"; +})(FieldDescriptorProto_Type || (exports.FieldDescriptorProto_Type = FieldDescriptorProto_Type = {})); +function fieldDescriptorProto_TypeFromJSON(object) { + switch (object) { + case 1: + case "TYPE_DOUBLE": + return FieldDescriptorProto_Type.TYPE_DOUBLE; + case 2: + case "TYPE_FLOAT": + return FieldDescriptorProto_Type.TYPE_FLOAT; + case 3: + case "TYPE_INT64": + return FieldDescriptorProto_Type.TYPE_INT64; + case 4: + case "TYPE_UINT64": + return FieldDescriptorProto_Type.TYPE_UINT64; + case 5: + case "TYPE_INT32": + return FieldDescriptorProto_Type.TYPE_INT32; + case 6: + case "TYPE_FIXED64": + return FieldDescriptorProto_Type.TYPE_FIXED64; + case 7: + case "TYPE_FIXED32": + return FieldDescriptorProto_Type.TYPE_FIXED32; + case 8: + case "TYPE_BOOL": + return FieldDescriptorProto_Type.TYPE_BOOL; + case 9: + case "TYPE_STRING": + return FieldDescriptorProto_Type.TYPE_STRING; + case 10: + case "TYPE_GROUP": + return FieldDescriptorProto_Type.TYPE_GROUP; + case 11: + case "TYPE_MESSAGE": + return FieldDescriptorProto_Type.TYPE_MESSAGE; + case 12: + case "TYPE_BYTES": + return FieldDescriptorProto_Type.TYPE_BYTES; + case 13: + case "TYPE_UINT32": + return FieldDescriptorProto_Type.TYPE_UINT32; + case 14: + case "TYPE_ENUM": + return FieldDescriptorProto_Type.TYPE_ENUM; + case 15: + case "TYPE_SFIXED32": + return FieldDescriptorProto_Type.TYPE_SFIXED32; + case 16: + case "TYPE_SFIXED64": + return FieldDescriptorProto_Type.TYPE_SFIXED64; + case 17: + case "TYPE_SINT32": + return FieldDescriptorProto_Type.TYPE_SINT32; + case 18: + case "TYPE_SINT64": + return FieldDescriptorProto_Type.TYPE_SINT64; + default: + throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type"); + } +} +function fieldDescriptorProto_TypeToJSON(object) { + switch (object) { + case FieldDescriptorProto_Type.TYPE_DOUBLE: + return "TYPE_DOUBLE"; + case FieldDescriptorProto_Type.TYPE_FLOAT: + return "TYPE_FLOAT"; + case FieldDescriptorProto_Type.TYPE_INT64: + return "TYPE_INT64"; + case FieldDescriptorProto_Type.TYPE_UINT64: + return "TYPE_UINT64"; + case FieldDescriptorProto_Type.TYPE_INT32: + return "TYPE_INT32"; + case FieldDescriptorProto_Type.TYPE_FIXED64: + return "TYPE_FIXED64"; + case FieldDescriptorProto_Type.TYPE_FIXED32: + return "TYPE_FIXED32"; + case FieldDescriptorProto_Type.TYPE_BOOL: + return "TYPE_BOOL"; + case FieldDescriptorProto_Type.TYPE_STRING: + return "TYPE_STRING"; + case FieldDescriptorProto_Type.TYPE_GROUP: + return "TYPE_GROUP"; + case FieldDescriptorProto_Type.TYPE_MESSAGE: + return "TYPE_MESSAGE"; + case FieldDescriptorProto_Type.TYPE_BYTES: + return "TYPE_BYTES"; + case FieldDescriptorProto_Type.TYPE_UINT32: + return "TYPE_UINT32"; + case FieldDescriptorProto_Type.TYPE_ENUM: + return "TYPE_ENUM"; + case 
FieldDescriptorProto_Type.TYPE_SFIXED32:
+ return "TYPE_SFIXED32";
+ case FieldDescriptorProto_Type.TYPE_SFIXED64:
+ return "TYPE_SFIXED64";
+ case FieldDescriptorProto_Type.TYPE_SINT32:
+ return "TYPE_SINT32";
+ case FieldDescriptorProto_Type.TYPE_SINT64:
+ return "TYPE_SINT64";
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
+ }
+}
+var FieldDescriptorProto_Label;
+(function (FieldDescriptorProto_Label) {
+ /** LABEL_OPTIONAL - 0 is reserved for errors */
+ FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_OPTIONAL"] = 1] = "LABEL_OPTIONAL";
+ FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REPEATED"] = 3] = "LABEL_REPEATED";
+ /**
+ * LABEL_REQUIRED - The required label is only allowed in proto2. In proto3 and Editions
+ * it's explicitly prohibited. In Editions, the `field_presence` feature
+ * can be used to get this behavior.
+ */
+ FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REQUIRED"] = 2] = "LABEL_REQUIRED";
+})(FieldDescriptorProto_Label || (exports.FieldDescriptorProto_Label = FieldDescriptorProto_Label = {}));
+function fieldDescriptorProto_LabelFromJSON(object) {
+ switch (object) {
+ case 1:
+ case "LABEL_OPTIONAL":
+ return FieldDescriptorProto_Label.LABEL_OPTIONAL;
+ case 3:
+ case "LABEL_REPEATED":
+ return FieldDescriptorProto_Label.LABEL_REPEATED;
+ case 2:
+ case "LABEL_REQUIRED":
+ return FieldDescriptorProto_Label.LABEL_REQUIRED;
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
+ }
+}
+function fieldDescriptorProto_LabelToJSON(object) {
+ switch (object) {
+ case FieldDescriptorProto_Label.LABEL_OPTIONAL:
+ return "LABEL_OPTIONAL";
+ case FieldDescriptorProto_Label.LABEL_REPEATED:
+ return "LABEL_REPEATED";
+ case FieldDescriptorProto_Label.LABEL_REQUIRED:
+ return "LABEL_REQUIRED";
+ default:
+ throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
+ }
+}
+/** Generated classes can be optimized for speed or code size. */
+var FileOptions_OptimizeMode;
+(function (FileOptions_OptimizeMode) {
+ /** SPEED - Generate complete code for parsing, serialization, etc. */
+ FileOptions_OptimizeMode[FileOptions_OptimizeMode["SPEED"] = 1] = "SPEED";
+ /** CODE_SIZE - Use ReflectionOps to implement these methods. */
+ FileOptions_OptimizeMode[FileOptions_OptimizeMode["CODE_SIZE"] = 2] = "CODE_SIZE";
+ /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime.
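+ *
+ * Illustrative sketch (standard file-option syntax): a .proto file
+ * selects a mode with
+ *
+ *   option optimize_for = LITE_RUNTIME;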
*/ + FileOptions_OptimizeMode[FileOptions_OptimizeMode["LITE_RUNTIME"] = 3] = "LITE_RUNTIME"; +})(FileOptions_OptimizeMode || (exports.FileOptions_OptimizeMode = FileOptions_OptimizeMode = {})); +function fileOptions_OptimizeModeFromJSON(object) { + switch (object) { + case 1: + case "SPEED": + return FileOptions_OptimizeMode.SPEED; + case 2: + case "CODE_SIZE": + return FileOptions_OptimizeMode.CODE_SIZE; + case 3: + case "LITE_RUNTIME": + return FileOptions_OptimizeMode.LITE_RUNTIME; + default: + throw new globalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode"); + } +} +function fileOptions_OptimizeModeToJSON(object) { + switch (object) { + case FileOptions_OptimizeMode.SPEED: + return "SPEED"; + case FileOptions_OptimizeMode.CODE_SIZE: + return "CODE_SIZE"; + case FileOptions_OptimizeMode.LITE_RUNTIME: + return "LITE_RUNTIME"; + default: + throw new globalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode"); + } +} +var FieldOptions_CType; +(function (FieldOptions_CType) { + /** STRING - Default mode. */ + FieldOptions_CType[FieldOptions_CType["STRING"] = 0] = "STRING"; + /** + * CORD - The option [ctype=CORD] may be applied to a non-repeated field of type + * "bytes". It indicates that in C++, the data should be stored in a Cord + * instead of a string. For very large strings, this may reduce memory + * fragmentation. It may also allow better performance when parsing from a + * Cord, or when parsing with aliasing enabled, as the parsed Cord may then + * alias the original buffer. + */ + FieldOptions_CType[FieldOptions_CType["CORD"] = 1] = "CORD"; + FieldOptions_CType[FieldOptions_CType["STRING_PIECE"] = 2] = "STRING_PIECE"; +})(FieldOptions_CType || (exports.FieldOptions_CType = FieldOptions_CType = {})); +function fieldOptions_CTypeFromJSON(object) { + switch (object) { + case 0: + case "STRING": + return FieldOptions_CType.STRING; + case 1: + case "CORD": + return FieldOptions_CType.CORD; + case 2: + case "STRING_PIECE": + return FieldOptions_CType.STRING_PIECE; + default: + throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType"); + } +} +function fieldOptions_CTypeToJSON(object) { + switch (object) { + case FieldOptions_CType.STRING: + return "STRING"; + case FieldOptions_CType.CORD: + return "CORD"; + case FieldOptions_CType.STRING_PIECE: + return "STRING_PIECE"; + default: + throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType"); + } +} +var FieldOptions_JSType; +(function (FieldOptions_JSType) { + /** JS_NORMAL - Use the default type. */ + FieldOptions_JSType[FieldOptions_JSType["JS_NORMAL"] = 0] = "JS_NORMAL"; + /** JS_STRING - Use JavaScript strings. */ + FieldOptions_JSType[FieldOptions_JSType["JS_STRING"] = 1] = "JS_STRING"; + /** JS_NUMBER - Use JavaScript numbers. 
*/ + FieldOptions_JSType[FieldOptions_JSType["JS_NUMBER"] = 2] = "JS_NUMBER"; +})(FieldOptions_JSType || (exports.FieldOptions_JSType = FieldOptions_JSType = {})); +function fieldOptions_JSTypeFromJSON(object) { + switch (object) { + case 0: + case "JS_NORMAL": + return FieldOptions_JSType.JS_NORMAL; + case 1: + case "JS_STRING": + return FieldOptions_JSType.JS_STRING; + case 2: + case "JS_NUMBER": + return FieldOptions_JSType.JS_NUMBER; + default: + throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType"); + } +} +function fieldOptions_JSTypeToJSON(object) { + switch (object) { + case FieldOptions_JSType.JS_NORMAL: + return "JS_NORMAL"; + case FieldOptions_JSType.JS_STRING: + return "JS_STRING"; + case FieldOptions_JSType.JS_NUMBER: + return "JS_NUMBER"; + default: + throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType"); + } +} +/** If set to RETENTION_SOURCE, the option will be omitted from the binary. */ +var FieldOptions_OptionRetention; +(function (FieldOptions_OptionRetention) { + FieldOptions_OptionRetention[FieldOptions_OptionRetention["RETENTION_UNKNOWN"] = 0] = "RETENTION_UNKNOWN"; + FieldOptions_OptionRetention[FieldOptions_OptionRetention["RETENTION_RUNTIME"] = 1] = "RETENTION_RUNTIME"; + FieldOptions_OptionRetention[FieldOptions_OptionRetention["RETENTION_SOURCE"] = 2] = "RETENTION_SOURCE"; +})(FieldOptions_OptionRetention || (exports.FieldOptions_OptionRetention = FieldOptions_OptionRetention = {})); +function fieldOptions_OptionRetentionFromJSON(object) { + switch (object) { + case 0: + case "RETENTION_UNKNOWN": + return FieldOptions_OptionRetention.RETENTION_UNKNOWN; + case 1: + case "RETENTION_RUNTIME": + return FieldOptions_OptionRetention.RETENTION_RUNTIME; + case 2: + case "RETENTION_SOURCE": + return FieldOptions_OptionRetention.RETENTION_SOURCE; + default: + throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionRetention"); + } +} +function fieldOptions_OptionRetentionToJSON(object) { + switch (object) { + case FieldOptions_OptionRetention.RETENTION_UNKNOWN: + return "RETENTION_UNKNOWN"; + case FieldOptions_OptionRetention.RETENTION_RUNTIME: + return "RETENTION_RUNTIME"; + case FieldOptions_OptionRetention.RETENTION_SOURCE: + return "RETENTION_SOURCE"; + default: + throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionRetention"); + } +} +/** + * This indicates the types of entities that the field may apply to when used + * as an option. If it is unset, then the field may be freely used as an + * option on any kind of entity. 
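+ *
+ * Illustrative sketch (assuming the documented `targets` option): a
+ * custom option restricted to files and messages would be declared as
+ *
+ *   optional string my_opt = 50000
+ *       [targets = TARGET_TYPE_FILE, targets = TARGET_TYPE_MESSAGE];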
+ */ +var FieldOptions_OptionTargetType; +(function (FieldOptions_OptionTargetType) { + FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_UNKNOWN"] = 0] = "TARGET_TYPE_UNKNOWN"; + FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_FILE"] = 1] = "TARGET_TYPE_FILE"; + FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_EXTENSION_RANGE"] = 2] = "TARGET_TYPE_EXTENSION_RANGE"; + FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_MESSAGE"] = 3] = "TARGET_TYPE_MESSAGE"; + FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_FIELD"] = 4] = "TARGET_TYPE_FIELD"; + FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_ONEOF"] = 5] = "TARGET_TYPE_ONEOF"; + FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_ENUM"] = 6] = "TARGET_TYPE_ENUM"; + FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_ENUM_ENTRY"] = 7] = "TARGET_TYPE_ENUM_ENTRY"; + FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_SERVICE"] = 8] = "TARGET_TYPE_SERVICE"; + FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_METHOD"] = 9] = "TARGET_TYPE_METHOD"; +})(FieldOptions_OptionTargetType || (exports.FieldOptions_OptionTargetType = FieldOptions_OptionTargetType = {})); +function fieldOptions_OptionTargetTypeFromJSON(object) { + switch (object) { + case 0: + case "TARGET_TYPE_UNKNOWN": + return FieldOptions_OptionTargetType.TARGET_TYPE_UNKNOWN; + case 1: + case "TARGET_TYPE_FILE": + return FieldOptions_OptionTargetType.TARGET_TYPE_FILE; + case 2: + case "TARGET_TYPE_EXTENSION_RANGE": + return FieldOptions_OptionTargetType.TARGET_TYPE_EXTENSION_RANGE; + case 3: + case "TARGET_TYPE_MESSAGE": + return FieldOptions_OptionTargetType.TARGET_TYPE_MESSAGE; + case 4: + case "TARGET_TYPE_FIELD": + return FieldOptions_OptionTargetType.TARGET_TYPE_FIELD; + case 5: + case "TARGET_TYPE_ONEOF": + return FieldOptions_OptionTargetType.TARGET_TYPE_ONEOF; + case 6: + case "TARGET_TYPE_ENUM": + return FieldOptions_OptionTargetType.TARGET_TYPE_ENUM; + case 7: + case "TARGET_TYPE_ENUM_ENTRY": + return FieldOptions_OptionTargetType.TARGET_TYPE_ENUM_ENTRY; + case 8: + case "TARGET_TYPE_SERVICE": + return FieldOptions_OptionTargetType.TARGET_TYPE_SERVICE; + case 9: + case "TARGET_TYPE_METHOD": + return FieldOptions_OptionTargetType.TARGET_TYPE_METHOD; + default: + throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionTargetType"); + } +} +function fieldOptions_OptionTargetTypeToJSON(object) { + switch (object) { + case FieldOptions_OptionTargetType.TARGET_TYPE_UNKNOWN: + return "TARGET_TYPE_UNKNOWN"; + case FieldOptions_OptionTargetType.TARGET_TYPE_FILE: + return "TARGET_TYPE_FILE"; + case FieldOptions_OptionTargetType.TARGET_TYPE_EXTENSION_RANGE: + return "TARGET_TYPE_EXTENSION_RANGE"; + case FieldOptions_OptionTargetType.TARGET_TYPE_MESSAGE: + return "TARGET_TYPE_MESSAGE"; + case FieldOptions_OptionTargetType.TARGET_TYPE_FIELD: + return "TARGET_TYPE_FIELD"; + case FieldOptions_OptionTargetType.TARGET_TYPE_ONEOF: + return "TARGET_TYPE_ONEOF"; + case FieldOptions_OptionTargetType.TARGET_TYPE_ENUM: + return "TARGET_TYPE_ENUM"; + case FieldOptions_OptionTargetType.TARGET_TYPE_ENUM_ENTRY: + return "TARGET_TYPE_ENUM_ENTRY"; + case FieldOptions_OptionTargetType.TARGET_TYPE_SERVICE: + return "TARGET_TYPE_SERVICE"; + case FieldOptions_OptionTargetType.TARGET_TYPE_METHOD: + return "TARGET_TYPE_METHOD"; + default: + throw 
new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionTargetType"); + } +} +/** + * Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + * or neither? HTTP based RPC implementation may choose GET verb for safe + * methods, and PUT verb for idempotent methods instead of the default POST. + */ +var MethodOptions_IdempotencyLevel; +(function (MethodOptions_IdempotencyLevel) { + MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENCY_UNKNOWN"] = 0] = "IDEMPOTENCY_UNKNOWN"; + /** NO_SIDE_EFFECTS - implies idempotent */ + MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["NO_SIDE_EFFECTS"] = 1] = "NO_SIDE_EFFECTS"; + /** IDEMPOTENT - idempotent, but may have side effects */ + MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENT"] = 2] = "IDEMPOTENT"; +})(MethodOptions_IdempotencyLevel || (exports.MethodOptions_IdempotencyLevel = MethodOptions_IdempotencyLevel = {})); +function methodOptions_IdempotencyLevelFromJSON(object) { + switch (object) { + case 0: + case "IDEMPOTENCY_UNKNOWN": + return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN; + case 1: + case "NO_SIDE_EFFECTS": + return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS; + case 2: + case "IDEMPOTENT": + return MethodOptions_IdempotencyLevel.IDEMPOTENT; + default: + throw new globalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel"); + } +} +function methodOptions_IdempotencyLevelToJSON(object) { + switch (object) { + case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN: + return "IDEMPOTENCY_UNKNOWN"; + case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS: + return "NO_SIDE_EFFECTS"; + case MethodOptions_IdempotencyLevel.IDEMPOTENT: + return "IDEMPOTENT"; + default: + throw new globalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel"); + } +} +var FeatureSet_FieldPresence; +(function (FeatureSet_FieldPresence) { + FeatureSet_FieldPresence[FeatureSet_FieldPresence["FIELD_PRESENCE_UNKNOWN"] = 0] = "FIELD_PRESENCE_UNKNOWN"; + FeatureSet_FieldPresence[FeatureSet_FieldPresence["EXPLICIT"] = 1] = "EXPLICIT"; + FeatureSet_FieldPresence[FeatureSet_FieldPresence["IMPLICIT"] = 2] = "IMPLICIT"; + FeatureSet_FieldPresence[FeatureSet_FieldPresence["LEGACY_REQUIRED"] = 3] = "LEGACY_REQUIRED"; +})(FeatureSet_FieldPresence || (exports.FeatureSet_FieldPresence = FeatureSet_FieldPresence = {})); +function featureSet_FieldPresenceFromJSON(object) { + switch (object) { + case 0: + case "FIELD_PRESENCE_UNKNOWN": + return FeatureSet_FieldPresence.FIELD_PRESENCE_UNKNOWN; + case 1: + case "EXPLICIT": + return FeatureSet_FieldPresence.EXPLICIT; + case 2: + case "IMPLICIT": + return FeatureSet_FieldPresence.IMPLICIT; + case 3: + case "LEGACY_REQUIRED": + return FeatureSet_FieldPresence.LEGACY_REQUIRED; + default: + throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_FieldPresence"); + } +} +function featureSet_FieldPresenceToJSON(object) { + switch (object) { + case FeatureSet_FieldPresence.FIELD_PRESENCE_UNKNOWN: + return "FIELD_PRESENCE_UNKNOWN"; + case FeatureSet_FieldPresence.EXPLICIT: + return "EXPLICIT"; + case FeatureSet_FieldPresence.IMPLICIT: + return "IMPLICIT"; + case FeatureSet_FieldPresence.LEGACY_REQUIRED: + return "LEGACY_REQUIRED"; + default: + throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_FieldPresence"); + } +} +var FeatureSet_EnumType; +(function (FeatureSet_EnumType) { + 
FeatureSet_EnumType[FeatureSet_EnumType["ENUM_TYPE_UNKNOWN"] = 0] = "ENUM_TYPE_UNKNOWN"; + FeatureSet_EnumType[FeatureSet_EnumType["OPEN"] = 1] = "OPEN"; + FeatureSet_EnumType[FeatureSet_EnumType["CLOSED"] = 2] = "CLOSED"; +})(FeatureSet_EnumType || (exports.FeatureSet_EnumType = FeatureSet_EnumType = {})); +function featureSet_EnumTypeFromJSON(object) { + switch (object) { + case 0: + case "ENUM_TYPE_UNKNOWN": + return FeatureSet_EnumType.ENUM_TYPE_UNKNOWN; + case 1: + case "OPEN": + return FeatureSet_EnumType.OPEN; + case 2: + case "CLOSED": + return FeatureSet_EnumType.CLOSED; + default: + throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnumType"); + } +} +function featureSet_EnumTypeToJSON(object) { + switch (object) { + case FeatureSet_EnumType.ENUM_TYPE_UNKNOWN: + return "ENUM_TYPE_UNKNOWN"; + case FeatureSet_EnumType.OPEN: + return "OPEN"; + case FeatureSet_EnumType.CLOSED: + return "CLOSED"; + default: + throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnumType"); + } +} +var FeatureSet_RepeatedFieldEncoding; +(function (FeatureSet_RepeatedFieldEncoding) { + FeatureSet_RepeatedFieldEncoding[FeatureSet_RepeatedFieldEncoding["REPEATED_FIELD_ENCODING_UNKNOWN"] = 0] = "REPEATED_FIELD_ENCODING_UNKNOWN"; + FeatureSet_RepeatedFieldEncoding[FeatureSet_RepeatedFieldEncoding["PACKED"] = 1] = "PACKED"; + FeatureSet_RepeatedFieldEncoding[FeatureSet_RepeatedFieldEncoding["EXPANDED"] = 2] = "EXPANDED"; +})(FeatureSet_RepeatedFieldEncoding || (exports.FeatureSet_RepeatedFieldEncoding = FeatureSet_RepeatedFieldEncoding = {})); +function featureSet_RepeatedFieldEncodingFromJSON(object) { + switch (object) { + case 0: + case "REPEATED_FIELD_ENCODING_UNKNOWN": + return FeatureSet_RepeatedFieldEncoding.REPEATED_FIELD_ENCODING_UNKNOWN; + case 1: + case "PACKED": + return FeatureSet_RepeatedFieldEncoding.PACKED; + case 2: + case "EXPANDED": + return FeatureSet_RepeatedFieldEncoding.EXPANDED; + default: + throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_RepeatedFieldEncoding"); + } +} +function featureSet_RepeatedFieldEncodingToJSON(object) { + switch (object) { + case FeatureSet_RepeatedFieldEncoding.REPEATED_FIELD_ENCODING_UNKNOWN: + return "REPEATED_FIELD_ENCODING_UNKNOWN"; + case FeatureSet_RepeatedFieldEncoding.PACKED: + return "PACKED"; + case FeatureSet_RepeatedFieldEncoding.EXPANDED: + return "EXPANDED"; + default: + throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_RepeatedFieldEncoding"); + } +} +var FeatureSet_Utf8Validation; +(function (FeatureSet_Utf8Validation) { + FeatureSet_Utf8Validation[FeatureSet_Utf8Validation["UTF8_VALIDATION_UNKNOWN"] = 0] = "UTF8_VALIDATION_UNKNOWN"; + FeatureSet_Utf8Validation[FeatureSet_Utf8Validation["VERIFY"] = 2] = "VERIFY"; + FeatureSet_Utf8Validation[FeatureSet_Utf8Validation["NONE"] = 3] = "NONE"; +})(FeatureSet_Utf8Validation || (exports.FeatureSet_Utf8Validation = FeatureSet_Utf8Validation = {})); +function featureSet_Utf8ValidationFromJSON(object) { + switch (object) { + case 0: + case "UTF8_VALIDATION_UNKNOWN": + return FeatureSet_Utf8Validation.UTF8_VALIDATION_UNKNOWN; + case 2: + case "VERIFY": + return FeatureSet_Utf8Validation.VERIFY; + case 3: + case "NONE": + return FeatureSet_Utf8Validation.NONE; + default: + throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_Utf8Validation"); + } +} +function featureSet_Utf8ValidationToJSON(object) { + switch (object) { + case 
FeatureSet_Utf8Validation.UTF8_VALIDATION_UNKNOWN: + return "UTF8_VALIDATION_UNKNOWN"; + case FeatureSet_Utf8Validation.VERIFY: + return "VERIFY"; + case FeatureSet_Utf8Validation.NONE: + return "NONE"; + default: + throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_Utf8Validation"); + } +} +var FeatureSet_MessageEncoding; +(function (FeatureSet_MessageEncoding) { + FeatureSet_MessageEncoding[FeatureSet_MessageEncoding["MESSAGE_ENCODING_UNKNOWN"] = 0] = "MESSAGE_ENCODING_UNKNOWN"; + FeatureSet_MessageEncoding[FeatureSet_MessageEncoding["LENGTH_PREFIXED"] = 1] = "LENGTH_PREFIXED"; + FeatureSet_MessageEncoding[FeatureSet_MessageEncoding["DELIMITED"] = 2] = "DELIMITED"; +})(FeatureSet_MessageEncoding || (exports.FeatureSet_MessageEncoding = FeatureSet_MessageEncoding = {})); +function featureSet_MessageEncodingFromJSON(object) { + switch (object) { + case 0: + case "MESSAGE_ENCODING_UNKNOWN": + return FeatureSet_MessageEncoding.MESSAGE_ENCODING_UNKNOWN; + case 1: + case "LENGTH_PREFIXED": + return FeatureSet_MessageEncoding.LENGTH_PREFIXED; + case 2: + case "DELIMITED": + return FeatureSet_MessageEncoding.DELIMITED; + default: + throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_MessageEncoding"); + } +} +function featureSet_MessageEncodingToJSON(object) { + switch (object) { + case FeatureSet_MessageEncoding.MESSAGE_ENCODING_UNKNOWN: + return "MESSAGE_ENCODING_UNKNOWN"; + case FeatureSet_MessageEncoding.LENGTH_PREFIXED: + return "LENGTH_PREFIXED"; + case FeatureSet_MessageEncoding.DELIMITED: + return "DELIMITED"; + default: + throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_MessageEncoding"); + } +} +var FeatureSet_JsonFormat; +(function (FeatureSet_JsonFormat) { + FeatureSet_JsonFormat[FeatureSet_JsonFormat["JSON_FORMAT_UNKNOWN"] = 0] = "JSON_FORMAT_UNKNOWN"; + FeatureSet_JsonFormat[FeatureSet_JsonFormat["ALLOW"] = 1] = "ALLOW"; + FeatureSet_JsonFormat[FeatureSet_JsonFormat["LEGACY_BEST_EFFORT"] = 2] = "LEGACY_BEST_EFFORT"; +})(FeatureSet_JsonFormat || (exports.FeatureSet_JsonFormat = FeatureSet_JsonFormat = {})); +function featureSet_JsonFormatFromJSON(object) { + switch (object) { + case 0: + case "JSON_FORMAT_UNKNOWN": + return FeatureSet_JsonFormat.JSON_FORMAT_UNKNOWN; + case 1: + case "ALLOW": + return FeatureSet_JsonFormat.ALLOW; + case 2: + case "LEGACY_BEST_EFFORT": + return FeatureSet_JsonFormat.LEGACY_BEST_EFFORT; + default: + throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_JsonFormat"); + } +} +function featureSet_JsonFormatToJSON(object) { + switch (object) { + case FeatureSet_JsonFormat.JSON_FORMAT_UNKNOWN: + return "JSON_FORMAT_UNKNOWN"; + case FeatureSet_JsonFormat.ALLOW: + return "ALLOW"; + case FeatureSet_JsonFormat.LEGACY_BEST_EFFORT: + return "LEGACY_BEST_EFFORT"; + default: + throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_JsonFormat"); + } +} +var FeatureSet_EnforceNamingStyle; +(function (FeatureSet_EnforceNamingStyle) { + FeatureSet_EnforceNamingStyle[FeatureSet_EnforceNamingStyle["ENFORCE_NAMING_STYLE_UNKNOWN"] = 0] = "ENFORCE_NAMING_STYLE_UNKNOWN"; + FeatureSet_EnforceNamingStyle[FeatureSet_EnforceNamingStyle["STYLE2024"] = 1] = "STYLE2024"; + FeatureSet_EnforceNamingStyle[FeatureSet_EnforceNamingStyle["STYLE_LEGACY"] = 2] = "STYLE_LEGACY"; +})(FeatureSet_EnforceNamingStyle || (exports.FeatureSet_EnforceNamingStyle = FeatureSet_EnforceNamingStyle = {})); +function 
featureSet_EnforceNamingStyleFromJSON(object) { + switch (object) { + case 0: + case "ENFORCE_NAMING_STYLE_UNKNOWN": + return FeatureSet_EnforceNamingStyle.ENFORCE_NAMING_STYLE_UNKNOWN; + case 1: + case "STYLE2024": + return FeatureSet_EnforceNamingStyle.STYLE2024; + case 2: + case "STYLE_LEGACY": + return FeatureSet_EnforceNamingStyle.STYLE_LEGACY; + default: + throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnforceNamingStyle"); + } +} +function featureSet_EnforceNamingStyleToJSON(object) { + switch (object) { + case FeatureSet_EnforceNamingStyle.ENFORCE_NAMING_STYLE_UNKNOWN: + return "ENFORCE_NAMING_STYLE_UNKNOWN"; + case FeatureSet_EnforceNamingStyle.STYLE2024: + return "STYLE2024"; + case FeatureSet_EnforceNamingStyle.STYLE_LEGACY: + return "STYLE_LEGACY"; + default: + throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnforceNamingStyle"); + } +} +/** + * Represents the identified object's effect on the element in the original + * .proto file. + */ +var GeneratedCodeInfo_Annotation_Semantic; +(function (GeneratedCodeInfo_Annotation_Semantic) { + /** NONE - There is no effect or the effect is indescribable. */ + GeneratedCodeInfo_Annotation_Semantic[GeneratedCodeInfo_Annotation_Semantic["NONE"] = 0] = "NONE"; + /** SET - The element is set or otherwise mutated. */ + GeneratedCodeInfo_Annotation_Semantic[GeneratedCodeInfo_Annotation_Semantic["SET"] = 1] = "SET"; + /** ALIAS - An alias to the element is returned. */ + GeneratedCodeInfo_Annotation_Semantic[GeneratedCodeInfo_Annotation_Semantic["ALIAS"] = 2] = "ALIAS"; +})(GeneratedCodeInfo_Annotation_Semantic || (exports.GeneratedCodeInfo_Annotation_Semantic = GeneratedCodeInfo_Annotation_Semantic = {})); +function generatedCodeInfo_Annotation_SemanticFromJSON(object) { + switch (object) { + case 0: + case "NONE": + return GeneratedCodeInfo_Annotation_Semantic.NONE; + case 1: + case "SET": + return GeneratedCodeInfo_Annotation_Semantic.SET; + case 2: + case "ALIAS": + return GeneratedCodeInfo_Annotation_Semantic.ALIAS; + default: + throw new globalThis.Error("Unrecognized enum value " + object + " for enum GeneratedCodeInfo_Annotation_Semantic"); + } +} +function generatedCodeInfo_Annotation_SemanticToJSON(object) { + switch (object) { + case GeneratedCodeInfo_Annotation_Semantic.NONE: + return "NONE"; + case GeneratedCodeInfo_Annotation_Semantic.SET: + return "SET"; + case GeneratedCodeInfo_Annotation_Semantic.ALIAS: + return "ALIAS"; + default: + throw new globalThis.Error("Unrecognized enum value " + object + " for enum GeneratedCodeInfo_Annotation_Semantic"); + } +} +exports.FileDescriptorSet = { + fromJSON(object) { + return { + file: globalThis.Array.isArray(object?.file) ? object.file.map((e) => exports.FileDescriptorProto.fromJSON(e)) : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.file?.length) { + obj.file = message.file.map((e) => exports.FileDescriptorProto.toJSON(e)); + } + return obj; + }, +}; +exports.FileDescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + package: isSet(object.package) ? globalThis.String(object.package) : "", + dependency: globalThis.Array.isArray(object?.dependency) + ? object.dependency.map((e) => globalThis.String(e)) + : [], + publicDependency: globalThis.Array.isArray(object?.publicDependency) + ? object.publicDependency.map((e) => globalThis.Number(e)) + : [], + weakDependency: globalThis.Array.isArray(object?.weakDependency) + ? 
object.weakDependency.map((e) => globalThis.Number(e)) + : [], + messageType: globalThis.Array.isArray(object?.messageType) + ? object.messageType.map((e) => exports.DescriptorProto.fromJSON(e)) + : [], + enumType: globalThis.Array.isArray(object?.enumType) + ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e)) + : [], + service: globalThis.Array.isArray(object?.service) + ? object.service.map((e) => exports.ServiceDescriptorProto.fromJSON(e)) + : [], + extension: globalThis.Array.isArray(object?.extension) + ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? exports.FileOptions.fromJSON(object.options) : undefined, + sourceCodeInfo: isSet(object.sourceCodeInfo) ? exports.SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined, + syntax: isSet(object.syntax) ? globalThis.String(object.syntax) : "", + edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0, + }; + }, + toJSON(message) { + const obj = {}; + if (message.name !== undefined && message.name !== "") { + obj.name = message.name; + } + if (message.package !== undefined && message.package !== "") { + obj.package = message.package; + } + if (message.dependency?.length) { + obj.dependency = message.dependency; + } + if (message.publicDependency?.length) { + obj.publicDependency = message.publicDependency.map((e) => Math.round(e)); + } + if (message.weakDependency?.length) { + obj.weakDependency = message.weakDependency.map((e) => Math.round(e)); + } + if (message.messageType?.length) { + obj.messageType = message.messageType.map((e) => exports.DescriptorProto.toJSON(e)); + } + if (message.enumType?.length) { + obj.enumType = message.enumType.map((e) => exports.EnumDescriptorProto.toJSON(e)); + } + if (message.service?.length) { + obj.service = message.service.map((e) => exports.ServiceDescriptorProto.toJSON(e)); + } + if (message.extension?.length) { + obj.extension = message.extension.map((e) => exports.FieldDescriptorProto.toJSON(e)); + } + if (message.options !== undefined) { + obj.options = exports.FileOptions.toJSON(message.options); + } + if (message.sourceCodeInfo !== undefined) { + obj.sourceCodeInfo = exports.SourceCodeInfo.toJSON(message.sourceCodeInfo); + } + if (message.syntax !== undefined && message.syntax !== "") { + obj.syntax = message.syntax; + } + if (message.edition !== undefined && message.edition !== 0) { + obj.edition = editionToJSON(message.edition); + } + return obj; + }, +}; +exports.DescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + field: globalThis.Array.isArray(object?.field) + ? object.field.map((e) => exports.FieldDescriptorProto.fromJSON(e)) + : [], + extension: globalThis.Array.isArray(object?.extension) + ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e)) + : [], + nestedType: globalThis.Array.isArray(object?.nestedType) + ? object.nestedType.map((e) => exports.DescriptorProto.fromJSON(e)) + : [], + enumType: globalThis.Array.isArray(object?.enumType) + ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e)) + : [], + extensionRange: globalThis.Array.isArray(object?.extensionRange) + ? object.extensionRange.map((e) => exports.DescriptorProto_ExtensionRange.fromJSON(e)) + : [], + oneofDecl: globalThis.Array.isArray(object?.oneofDecl) + ? object.oneofDecl.map((e) => exports.OneofDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? 
exports.MessageOptions.fromJSON(object.options) : undefined, + reservedRange: globalThis.Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e) => exports.DescriptorProto_ReservedRange.fromJSON(e)) + : [], + reservedName: globalThis.Array.isArray(object?.reservedName) + ? object.reservedName.map((e) => globalThis.String(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.name !== undefined && message.name !== "") { + obj.name = message.name; + } + if (message.field?.length) { + obj.field = message.field.map((e) => exports.FieldDescriptorProto.toJSON(e)); + } + if (message.extension?.length) { + obj.extension = message.extension.map((e) => exports.FieldDescriptorProto.toJSON(e)); + } + if (message.nestedType?.length) { + obj.nestedType = message.nestedType.map((e) => exports.DescriptorProto.toJSON(e)); + } + if (message.enumType?.length) { + obj.enumType = message.enumType.map((e) => exports.EnumDescriptorProto.toJSON(e)); + } + if (message.extensionRange?.length) { + obj.extensionRange = message.extensionRange.map((e) => exports.DescriptorProto_ExtensionRange.toJSON(e)); + } + if (message.oneofDecl?.length) { + obj.oneofDecl = message.oneofDecl.map((e) => exports.OneofDescriptorProto.toJSON(e)); + } + if (message.options !== undefined) { + obj.options = exports.MessageOptions.toJSON(message.options); + } + if (message.reservedRange?.length) { + obj.reservedRange = message.reservedRange.map((e) => exports.DescriptorProto_ReservedRange.toJSON(e)); + } + if (message.reservedName?.length) { + obj.reservedName = message.reservedName; + } + return obj; + }, +}; +exports.DescriptorProto_ExtensionRange = { + fromJSON(object) { + return { + start: isSet(object.start) ? globalThis.Number(object.start) : 0, + end: isSet(object.end) ? globalThis.Number(object.end) : 0, + options: isSet(object.options) ? exports.ExtensionRangeOptions.fromJSON(object.options) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + if (message.start !== undefined && message.start !== 0) { + obj.start = Math.round(message.start); + } + if (message.end !== undefined && message.end !== 0) { + obj.end = Math.round(message.end); + } + if (message.options !== undefined) { + obj.options = exports.ExtensionRangeOptions.toJSON(message.options); + } + return obj; + }, +}; +exports.DescriptorProto_ReservedRange = { + fromJSON(object) { + return { + start: isSet(object.start) ? globalThis.Number(object.start) : 0, + end: isSet(object.end) ? globalThis.Number(object.end) : 0, + }; + }, + toJSON(message) { + const obj = {}; + if (message.start !== undefined && message.start !== 0) { + obj.start = Math.round(message.start); + } + if (message.end !== undefined && message.end !== 0) { + obj.end = Math.round(message.end); + } + return obj; + }, +}; +exports.ExtensionRangeOptions = { + fromJSON(object) { + return { + uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + declaration: globalThis.Array.isArray(object?.declaration) + ? object.declaration.map((e) => exports.ExtensionRangeOptions_Declaration.fromJSON(e)) + : [], + features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined, + verification: isSet(object.verification) + ? 
extensionRangeOptions_VerificationStateFromJSON(object.verification) + : 1, + }; + }, + toJSON(message) { + const obj = {}; + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e)); + } + if (message.declaration?.length) { + obj.declaration = message.declaration.map((e) => exports.ExtensionRangeOptions_Declaration.toJSON(e)); + } + if (message.features !== undefined) { + obj.features = exports.FeatureSet.toJSON(message.features); + } + if (message.verification !== undefined && message.verification !== 1) { + obj.verification = extensionRangeOptions_VerificationStateToJSON(message.verification); + } + return obj; + }, +}; +exports.ExtensionRangeOptions_Declaration = { + fromJSON(object) { + return { + number: isSet(object.number) ? globalThis.Number(object.number) : 0, + fullName: isSet(object.fullName) ? globalThis.String(object.fullName) : "", + type: isSet(object.type) ? globalThis.String(object.type) : "", + reserved: isSet(object.reserved) ? globalThis.Boolean(object.reserved) : false, + repeated: isSet(object.repeated) ? globalThis.Boolean(object.repeated) : false, + }; + }, + toJSON(message) { + const obj = {}; + if (message.number !== undefined && message.number !== 0) { + obj.number = Math.round(message.number); + } + if (message.fullName !== undefined && message.fullName !== "") { + obj.fullName = message.fullName; + } + if (message.type !== undefined && message.type !== "") { + obj.type = message.type; + } + if (message.reserved !== undefined && message.reserved !== false) { + obj.reserved = message.reserved; + } + if (message.repeated !== undefined && message.repeated !== false) { + obj.repeated = message.repeated; + } + return obj; + }, +}; +exports.FieldDescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + number: isSet(object.number) ? globalThis.Number(object.number) : 0, + label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1, + type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1, + typeName: isSet(object.typeName) ? globalThis.String(object.typeName) : "", + extendee: isSet(object.extendee) ? globalThis.String(object.extendee) : "", + defaultValue: isSet(object.defaultValue) ? globalThis.String(object.defaultValue) : "", + oneofIndex: isSet(object.oneofIndex) ? globalThis.Number(object.oneofIndex) : 0, + jsonName: isSet(object.jsonName) ? globalThis.String(object.jsonName) : "", + options: isSet(object.options) ? exports.FieldOptions.fromJSON(object.options) : undefined, + proto3Optional: isSet(object.proto3Optional) ? 
globalThis.Boolean(object.proto3Optional) : false, + }; + }, + toJSON(message) { + const obj = {}; + if (message.name !== undefined && message.name !== "") { + obj.name = message.name; + } + if (message.number !== undefined && message.number !== 0) { + obj.number = Math.round(message.number); + } + if (message.label !== undefined && message.label !== 1) { + obj.label = fieldDescriptorProto_LabelToJSON(message.label); + } + if (message.type !== undefined && message.type !== 1) { + obj.type = fieldDescriptorProto_TypeToJSON(message.type); + } + if (message.typeName !== undefined && message.typeName !== "") { + obj.typeName = message.typeName; + } + if (message.extendee !== undefined && message.extendee !== "") { + obj.extendee = message.extendee; + } + if (message.defaultValue !== undefined && message.defaultValue !== "") { + obj.defaultValue = message.defaultValue; + } + if (message.oneofIndex !== undefined && message.oneofIndex !== 0) { + obj.oneofIndex = Math.round(message.oneofIndex); + } + if (message.jsonName !== undefined && message.jsonName !== "") { + obj.jsonName = message.jsonName; + } + if (message.options !== undefined) { + obj.options = exports.FieldOptions.toJSON(message.options); + } + if (message.proto3Optional !== undefined && message.proto3Optional !== false) { + obj.proto3Optional = message.proto3Optional; + } + return obj; + }, +}; +exports.OneofDescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + options: isSet(object.options) ? exports.OneofOptions.fromJSON(object.options) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + if (message.name !== undefined && message.name !== "") { + obj.name = message.name; + } + if (message.options !== undefined) { + obj.options = exports.OneofOptions.toJSON(message.options); + } + return obj; + }, +}; +exports.EnumDescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + value: globalThis.Array.isArray(object?.value) + ? object.value.map((e) => exports.EnumValueDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? exports.EnumOptions.fromJSON(object.options) : undefined, + reservedRange: globalThis.Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e) => exports.EnumDescriptorProto_EnumReservedRange.fromJSON(e)) + : [], + reservedName: globalThis.Array.isArray(object?.reservedName) + ? object.reservedName.map((e) => globalThis.String(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.name !== undefined && message.name !== "") { + obj.name = message.name; + } + if (message.value?.length) { + obj.value = message.value.map((e) => exports.EnumValueDescriptorProto.toJSON(e)); + } + if (message.options !== undefined) { + obj.options = exports.EnumOptions.toJSON(message.options); + } + if (message.reservedRange?.length) { + obj.reservedRange = message.reservedRange.map((e) => exports.EnumDescriptorProto_EnumReservedRange.toJSON(e)); + } + if (message.reservedName?.length) { + obj.reservedName = message.reservedName; + } + return obj; + }, +}; +exports.EnumDescriptorProto_EnumReservedRange = { + fromJSON(object) { + return { + start: isSet(object.start) ? globalThis.Number(object.start) : 0, + end: isSet(object.end) ? 
globalThis.Number(object.end) : 0, + }; + }, + toJSON(message) { + const obj = {}; + if (message.start !== undefined && message.start !== 0) { + obj.start = Math.round(message.start); + } + if (message.end !== undefined && message.end !== 0) { + obj.end = Math.round(message.end); + } + return obj; + }, +}; +exports.EnumValueDescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + number: isSet(object.number) ? globalThis.Number(object.number) : 0, + options: isSet(object.options) ? exports.EnumValueOptions.fromJSON(object.options) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + if (message.name !== undefined && message.name !== "") { + obj.name = message.name; + } + if (message.number !== undefined && message.number !== 0) { + obj.number = Math.round(message.number); + } + if (message.options !== undefined) { + obj.options = exports.EnumValueOptions.toJSON(message.options); + } + return obj; + }, +}; +exports.ServiceDescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + method: globalThis.Array.isArray(object?.method) + ? object.method.map((e) => exports.MethodDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) ? exports.ServiceOptions.fromJSON(object.options) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + if (message.name !== undefined && message.name !== "") { + obj.name = message.name; + } + if (message.method?.length) { + obj.method = message.method.map((e) => exports.MethodDescriptorProto.toJSON(e)); + } + if (message.options !== undefined) { + obj.options = exports.ServiceOptions.toJSON(message.options); + } + return obj; + }, +}; +exports.MethodDescriptorProto = { + fromJSON(object) { + return { + name: isSet(object.name) ? globalThis.String(object.name) : "", + inputType: isSet(object.inputType) ? globalThis.String(object.inputType) : "", + outputType: isSet(object.outputType) ? globalThis.String(object.outputType) : "", + options: isSet(object.options) ? exports.MethodOptions.fromJSON(object.options) : undefined, + clientStreaming: isSet(object.clientStreaming) ? globalThis.Boolean(object.clientStreaming) : false, + serverStreaming: isSet(object.serverStreaming) ? globalThis.Boolean(object.serverStreaming) : false, + }; + }, + toJSON(message) { + const obj = {}; + if (message.name !== undefined && message.name !== "") { + obj.name = message.name; + } + if (message.inputType !== undefined && message.inputType !== "") { + obj.inputType = message.inputType; + } + if (message.outputType !== undefined && message.outputType !== "") { + obj.outputType = message.outputType; + } + if (message.options !== undefined) { + obj.options = exports.MethodOptions.toJSON(message.options); + } + if (message.clientStreaming !== undefined && message.clientStreaming !== false) { + obj.clientStreaming = message.clientStreaming; + } + if (message.serverStreaming !== undefined && message.serverStreaming !== false) { + obj.serverStreaming = message.serverStreaming; + } + return obj; + }, +}; +exports.FileOptions = { + fromJSON(object) { + return { + javaPackage: isSet(object.javaPackage) ? globalThis.String(object.javaPackage) : "", + javaOuterClassname: isSet(object.javaOuterClassname) ? globalThis.String(object.javaOuterClassname) : "", + javaMultipleFiles: isSet(object.javaMultipleFiles) ? globalThis.Boolean(object.javaMultipleFiles) : false, + javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash) + ? 
globalThis.Boolean(object.javaGenerateEqualsAndHash) + : false, + javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? globalThis.Boolean(object.javaStringCheckUtf8) : false, + optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1, + goPackage: isSet(object.goPackage) ? globalThis.String(object.goPackage) : "", + ccGenericServices: isSet(object.ccGenericServices) ? globalThis.Boolean(object.ccGenericServices) : false, + javaGenericServices: isSet(object.javaGenericServices) ? globalThis.Boolean(object.javaGenericServices) : false, + pyGenericServices: isSet(object.pyGenericServices) ? globalThis.Boolean(object.pyGenericServices) : false, + deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false, + ccEnableArenas: isSet(object.ccEnableArenas) ? globalThis.Boolean(object.ccEnableArenas) : true, + objcClassPrefix: isSet(object.objcClassPrefix) ? globalThis.String(object.objcClassPrefix) : "", + csharpNamespace: isSet(object.csharpNamespace) ? globalThis.String(object.csharpNamespace) : "", + swiftPrefix: isSet(object.swiftPrefix) ? globalThis.String(object.swiftPrefix) : "", + phpClassPrefix: isSet(object.phpClassPrefix) ? globalThis.String(object.phpClassPrefix) : "", + phpNamespace: isSet(object.phpNamespace) ? globalThis.String(object.phpNamespace) : "", + phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? globalThis.String(object.phpMetadataNamespace) : "", + rubyPackage: isSet(object.rubyPackage) ? globalThis.String(object.rubyPackage) : "", + features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined, + uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.javaPackage !== undefined && message.javaPackage !== "") { + obj.javaPackage = message.javaPackage; + } + if (message.javaOuterClassname !== undefined && message.javaOuterClassname !== "") { + obj.javaOuterClassname = message.javaOuterClassname; + } + if (message.javaMultipleFiles !== undefined && message.javaMultipleFiles !== false) { + obj.javaMultipleFiles = message.javaMultipleFiles; + } + if (message.javaGenerateEqualsAndHash !== undefined && message.javaGenerateEqualsAndHash !== false) { + obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash; + } + if (message.javaStringCheckUtf8 !== undefined && message.javaStringCheckUtf8 !== false) { + obj.javaStringCheckUtf8 = message.javaStringCheckUtf8; + } + if (message.optimizeFor !== undefined && message.optimizeFor !== 1) { + obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor); + } + if (message.goPackage !== undefined && message.goPackage !== "") { + obj.goPackage = message.goPackage; + } + if (message.ccGenericServices !== undefined && message.ccGenericServices !== false) { + obj.ccGenericServices = message.ccGenericServices; + } + if (message.javaGenericServices !== undefined && message.javaGenericServices !== false) { + obj.javaGenericServices = message.javaGenericServices; + } + if (message.pyGenericServices !== undefined && message.pyGenericServices !== false) { + obj.pyGenericServices = message.pyGenericServices; + } + if (message.deprecated !== undefined && message.deprecated !== false) { + obj.deprecated = message.deprecated; + } + if (message.ccEnableArenas !== undefined && message.ccEnableArenas !== true) { + obj.ccEnableArenas = message.ccEnableArenas; + } 
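+        // Editorial note (sketch, not generated output): ccEnableArenas is the
+        // one boolean file option whose proto3 default is true (see fromJSON
+        // above), so it survives serialization only when explicitly disabled:
+        //   exports.FileOptions.toJSON(exports.FileOptions.fromJSON({}))
+        //   // => {}
+        //   exports.FileOptions.toJSON(exports.FileOptions.fromJSON({ ccEnableArenas: false }))
+        //   // => { ccEnableArenas: false }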
+ if (message.objcClassPrefix !== undefined && message.objcClassPrefix !== "") { + obj.objcClassPrefix = message.objcClassPrefix; + } + if (message.csharpNamespace !== undefined && message.csharpNamespace !== "") { + obj.csharpNamespace = message.csharpNamespace; + } + if (message.swiftPrefix !== undefined && message.swiftPrefix !== "") { + obj.swiftPrefix = message.swiftPrefix; + } + if (message.phpClassPrefix !== undefined && message.phpClassPrefix !== "") { + obj.phpClassPrefix = message.phpClassPrefix; + } + if (message.phpNamespace !== undefined && message.phpNamespace !== "") { + obj.phpNamespace = message.phpNamespace; + } + if (message.phpMetadataNamespace !== undefined && message.phpMetadataNamespace !== "") { + obj.phpMetadataNamespace = message.phpMetadataNamespace; + } + if (message.rubyPackage !== undefined && message.rubyPackage !== "") { + obj.rubyPackage = message.rubyPackage; + } + if (message.features !== undefined) { + obj.features = exports.FeatureSet.toJSON(message.features); + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e)); + } + return obj; + }, +}; +exports.MessageOptions = { + fromJSON(object) { + return { + messageSetWireFormat: isSet(object.messageSetWireFormat) + ? globalThis.Boolean(object.messageSetWireFormat) + : false, + noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor) + ? globalThis.Boolean(object.noStandardDescriptorAccessor) + : false, + deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false, + mapEntry: isSet(object.mapEntry) ? globalThis.Boolean(object.mapEntry) : false, + deprecatedLegacyJsonFieldConflicts: isSet(object.deprecatedLegacyJsonFieldConflicts) + ? globalThis.Boolean(object.deprecatedLegacyJsonFieldConflicts) + : false, + features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined, + uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.messageSetWireFormat !== undefined && message.messageSetWireFormat !== false) { + obj.messageSetWireFormat = message.messageSetWireFormat; + } + if (message.noStandardDescriptorAccessor !== undefined && message.noStandardDescriptorAccessor !== false) { + obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor; + } + if (message.deprecated !== undefined && message.deprecated !== false) { + obj.deprecated = message.deprecated; + } + if (message.mapEntry !== undefined && message.mapEntry !== false) { + obj.mapEntry = message.mapEntry; + } + if (message.deprecatedLegacyJsonFieldConflicts !== undefined && message.deprecatedLegacyJsonFieldConflicts !== false) { + obj.deprecatedLegacyJsonFieldConflicts = message.deprecatedLegacyJsonFieldConflicts; + } + if (message.features !== undefined) { + obj.features = exports.FeatureSet.toJSON(message.features); + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e)); + } + return obj; + }, +}; +exports.FieldOptions = { + fromJSON(object) { + return { + ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0, + packed: isSet(object.packed) ? globalThis.Boolean(object.packed) : false, + jstype: isSet(object.jstype) ? 
fieldOptions_JSTypeFromJSON(object.jstype) : 0, + lazy: isSet(object.lazy) ? globalThis.Boolean(object.lazy) : false, + unverifiedLazy: isSet(object.unverifiedLazy) ? globalThis.Boolean(object.unverifiedLazy) : false, + deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false, + weak: isSet(object.weak) ? globalThis.Boolean(object.weak) : false, + debugRedact: isSet(object.debugRedact) ? globalThis.Boolean(object.debugRedact) : false, + retention: isSet(object.retention) ? fieldOptions_OptionRetentionFromJSON(object.retention) : 0, + targets: globalThis.Array.isArray(object?.targets) + ? object.targets.map((e) => fieldOptions_OptionTargetTypeFromJSON(e)) + : [], + editionDefaults: globalThis.Array.isArray(object?.editionDefaults) + ? object.editionDefaults.map((e) => exports.FieldOptions_EditionDefault.fromJSON(e)) + : [], + features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined, + featureSupport: isSet(object.featureSupport) + ? exports.FieldOptions_FeatureSupport.fromJSON(object.featureSupport) + : undefined, + uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.ctype !== undefined && message.ctype !== 0) { + obj.ctype = fieldOptions_CTypeToJSON(message.ctype); + } + if (message.packed !== undefined && message.packed !== false) { + obj.packed = message.packed; + } + if (message.jstype !== undefined && message.jstype !== 0) { + obj.jstype = fieldOptions_JSTypeToJSON(message.jstype); + } + if (message.lazy !== undefined && message.lazy !== false) { + obj.lazy = message.lazy; + } + if (message.unverifiedLazy !== undefined && message.unverifiedLazy !== false) { + obj.unverifiedLazy = message.unverifiedLazy; + } + if (message.deprecated !== undefined && message.deprecated !== false) { + obj.deprecated = message.deprecated; + } + if (message.weak !== undefined && message.weak !== false) { + obj.weak = message.weak; + } + if (message.debugRedact !== undefined && message.debugRedact !== false) { + obj.debugRedact = message.debugRedact; + } + if (message.retention !== undefined && message.retention !== 0) { + obj.retention = fieldOptions_OptionRetentionToJSON(message.retention); + } + if (message.targets?.length) { + obj.targets = message.targets.map((e) => fieldOptions_OptionTargetTypeToJSON(e)); + } + if (message.editionDefaults?.length) { + obj.editionDefaults = message.editionDefaults.map((e) => exports.FieldOptions_EditionDefault.toJSON(e)); + } + if (message.features !== undefined) { + obj.features = exports.FeatureSet.toJSON(message.features); + } + if (message.featureSupport !== undefined) { + obj.featureSupport = exports.FieldOptions_FeatureSupport.toJSON(message.featureSupport); + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e)); + } + return obj; + }, +}; +exports.FieldOptions_EditionDefault = { + fromJSON(object) { + return { + edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0, + value: isSet(object.value) ? 
globalThis.String(object.value) : "", + }; + }, + toJSON(message) { + const obj = {}; + if (message.edition !== undefined && message.edition !== 0) { + obj.edition = editionToJSON(message.edition); + } + if (message.value !== undefined && message.value !== "") { + obj.value = message.value; + } + return obj; + }, +}; +exports.FieldOptions_FeatureSupport = { + fromJSON(object) { + return { + editionIntroduced: isSet(object.editionIntroduced) ? editionFromJSON(object.editionIntroduced) : 0, + editionDeprecated: isSet(object.editionDeprecated) ? editionFromJSON(object.editionDeprecated) : 0, + deprecationWarning: isSet(object.deprecationWarning) ? globalThis.String(object.deprecationWarning) : "", + editionRemoved: isSet(object.editionRemoved) ? editionFromJSON(object.editionRemoved) : 0, + }; + }, + toJSON(message) { + const obj = {}; + if (message.editionIntroduced !== undefined && message.editionIntroduced !== 0) { + obj.editionIntroduced = editionToJSON(message.editionIntroduced); + } + if (message.editionDeprecated !== undefined && message.editionDeprecated !== 0) { + obj.editionDeprecated = editionToJSON(message.editionDeprecated); + } + if (message.deprecationWarning !== undefined && message.deprecationWarning !== "") { + obj.deprecationWarning = message.deprecationWarning; + } + if (message.editionRemoved !== undefined && message.editionRemoved !== 0) { + obj.editionRemoved = editionToJSON(message.editionRemoved); + } + return obj; + }, +}; +exports.OneofOptions = { + fromJSON(object) { + return { + features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined, + uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.features !== undefined) { + obj.features = exports.FeatureSet.toJSON(message.features); + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e)); + } + return obj; + }, +}; +exports.EnumOptions = { + fromJSON(object) { + return { + allowAlias: isSet(object.allowAlias) ? globalThis.Boolean(object.allowAlias) : false, + deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false, + deprecatedLegacyJsonFieldConflicts: isSet(object.deprecatedLegacyJsonFieldConflicts) + ? globalThis.Boolean(object.deprecatedLegacyJsonFieldConflicts) + : false, + features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined, + uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.allowAlias !== undefined && message.allowAlias !== false) { + obj.allowAlias = message.allowAlias; + } + if (message.deprecated !== undefined && message.deprecated !== false) { + obj.deprecated = message.deprecated; + } + if (message.deprecatedLegacyJsonFieldConflicts !== undefined && message.deprecatedLegacyJsonFieldConflicts !== false) { + obj.deprecatedLegacyJsonFieldConflicts = message.deprecatedLegacyJsonFieldConflicts; + } + if (message.features !== undefined) { + obj.features = exports.FeatureSet.toJSON(message.features); + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e)); + } + return obj; + }, +}; +exports.EnumValueOptions = { + fromJSON(object) { + return { + deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false, + features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined, + debugRedact: isSet(object.debugRedact) ? globalThis.Boolean(object.debugRedact) : false, + featureSupport: isSet(object.featureSupport) + ? exports.FieldOptions_FeatureSupport.fromJSON(object.featureSupport) + : undefined, + uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.deprecated !== undefined && message.deprecated !== false) { + obj.deprecated = message.deprecated; + } + if (message.features !== undefined) { + obj.features = exports.FeatureSet.toJSON(message.features); + } + if (message.debugRedact !== undefined && message.debugRedact !== false) { + obj.debugRedact = message.debugRedact; + } + if (message.featureSupport !== undefined) { + obj.featureSupport = exports.FieldOptions_FeatureSupport.toJSON(message.featureSupport); + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e)); + } + return obj; + }, +}; +exports.ServiceOptions = { + fromJSON(object) { + return { + features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined, + deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false, + uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.features !== undefined) { + obj.features = exports.FeatureSet.toJSON(message.features); + } + if (message.deprecated !== undefined && message.deprecated !== false) { + obj.deprecated = message.deprecated; + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e)); + } + return obj; + }, +}; +exports.MethodOptions = { + fromJSON(object) { + return { + deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false, + idempotencyLevel: isSet(object.idempotencyLevel) + ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel) + : 0, + features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined, + uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.deprecated !== undefined && message.deprecated !== false) { + obj.deprecated = message.deprecated; + } + if (message.idempotencyLevel !== undefined && message.idempotencyLevel !== 0) { + obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel); + } + if (message.features !== undefined) { + obj.features = exports.FeatureSet.toJSON(message.features); + } + if (message.uninterpretedOption?.length) { + obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e)); + } + return obj; + }, +}; +exports.UninterpretedOption = { + fromJSON(object) { + return { + name: globalThis.Array.isArray(object?.name) + ? object.name.map((e) => exports.UninterpretedOption_NamePart.fromJSON(e)) + : [], + identifierValue: isSet(object.identifierValue) ? globalThis.String(object.identifierValue) : "", + positiveIntValue: isSet(object.positiveIntValue) ? globalThis.String(object.positiveIntValue) : "0", + negativeIntValue: isSet(object.negativeIntValue) ? globalThis.String(object.negativeIntValue) : "0", + doubleValue: isSet(object.doubleValue) ? globalThis.Number(object.doubleValue) : 0, + stringValue: isSet(object.stringValue) ? Buffer.from(bytesFromBase64(object.stringValue)) : Buffer.alloc(0), + aggregateValue: isSet(object.aggregateValue) ? globalThis.String(object.aggregateValue) : "", + }; + }, + toJSON(message) { + const obj = {}; + if (message.name?.length) { + obj.name = message.name.map((e) => exports.UninterpretedOption_NamePart.toJSON(e)); + } + if (message.identifierValue !== undefined && message.identifierValue !== "") { + obj.identifierValue = message.identifierValue; + } + if (message.positiveIntValue !== undefined && message.positiveIntValue !== "0") { + obj.positiveIntValue = message.positiveIntValue; + } + if (message.negativeIntValue !== undefined && message.negativeIntValue !== "0") { + obj.negativeIntValue = message.negativeIntValue; + } + if (message.doubleValue !== undefined && message.doubleValue !== 0) { + obj.doubleValue = message.doubleValue; + } + if (message.stringValue !== undefined && message.stringValue.length !== 0) { + obj.stringValue = base64FromBytes(message.stringValue); + } + if (message.aggregateValue !== undefined && message.aggregateValue !== "") { + obj.aggregateValue = message.aggregateValue; + } + return obj; + }, +}; +exports.UninterpretedOption_NamePart = { + fromJSON(object) { + return { + namePart: isSet(object.namePart) ? globalThis.String(object.namePart) : "", + isExtension: isSet(object.isExtension) ? globalThis.Boolean(object.isExtension) : false, + }; + }, + toJSON(message) { + const obj = {}; + if (message.namePart !== "") { + obj.namePart = message.namePart; + } + if (message.isExtension !== false) { + obj.isExtension = message.isExtension; + } + return obj; + }, +}; +exports.FeatureSet = { + fromJSON(object) { + return { + fieldPresence: isSet(object.fieldPresence) ? featureSet_FieldPresenceFromJSON(object.fieldPresence) : 0, + enumType: isSet(object.enumType) ? featureSet_EnumTypeFromJSON(object.enumType) : 0, + repeatedFieldEncoding: isSet(object.repeatedFieldEncoding) + ? featureSet_RepeatedFieldEncodingFromJSON(object.repeatedFieldEncoding) + : 0, + utf8Validation: isSet(object.utf8Validation) ? featureSet_Utf8ValidationFromJSON(object.utf8Validation) : 0, + messageEncoding: isSet(object.messageEncoding) ? 
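+            /* Editorial note: every FeatureSet field is an enum; as throughout
+               this file, absent JSON fields fall back to 0 (the UNSPECIFIED
+               value), inputs may be either the enum name or its number, and
+               toJSON below omits any field still equal to 0. */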
featureSet_MessageEncodingFromJSON(object.messageEncoding) : 0, + jsonFormat: isSet(object.jsonFormat) ? featureSet_JsonFormatFromJSON(object.jsonFormat) : 0, + enforceNamingStyle: isSet(object.enforceNamingStyle) + ? featureSet_EnforceNamingStyleFromJSON(object.enforceNamingStyle) + : 0, + }; + }, + toJSON(message) { + const obj = {}; + if (message.fieldPresence !== undefined && message.fieldPresence !== 0) { + obj.fieldPresence = featureSet_FieldPresenceToJSON(message.fieldPresence); + } + if (message.enumType !== undefined && message.enumType !== 0) { + obj.enumType = featureSet_EnumTypeToJSON(message.enumType); + } + if (message.repeatedFieldEncoding !== undefined && message.repeatedFieldEncoding !== 0) { + obj.repeatedFieldEncoding = featureSet_RepeatedFieldEncodingToJSON(message.repeatedFieldEncoding); + } + if (message.utf8Validation !== undefined && message.utf8Validation !== 0) { + obj.utf8Validation = featureSet_Utf8ValidationToJSON(message.utf8Validation); + } + if (message.messageEncoding !== undefined && message.messageEncoding !== 0) { + obj.messageEncoding = featureSet_MessageEncodingToJSON(message.messageEncoding); + } + if (message.jsonFormat !== undefined && message.jsonFormat !== 0) { + obj.jsonFormat = featureSet_JsonFormatToJSON(message.jsonFormat); + } + if (message.enforceNamingStyle !== undefined && message.enforceNamingStyle !== 0) { + obj.enforceNamingStyle = featureSet_EnforceNamingStyleToJSON(message.enforceNamingStyle); + } + return obj; + }, +}; +exports.FeatureSetDefaults = { + fromJSON(object) { + return { + defaults: globalThis.Array.isArray(object?.defaults) + ? object.defaults.map((e) => exports.FeatureSetDefaults_FeatureSetEditionDefault.fromJSON(e)) + : [], + minimumEdition: isSet(object.minimumEdition) ? editionFromJSON(object.minimumEdition) : 0, + maximumEdition: isSet(object.maximumEdition) ? editionFromJSON(object.maximumEdition) : 0, + }; + }, + toJSON(message) { + const obj = {}; + if (message.defaults?.length) { + obj.defaults = message.defaults.map((e) => exports.FeatureSetDefaults_FeatureSetEditionDefault.toJSON(e)); + } + if (message.minimumEdition !== undefined && message.minimumEdition !== 0) { + obj.minimumEdition = editionToJSON(message.minimumEdition); + } + if (message.maximumEdition !== undefined && message.maximumEdition !== 0) { + obj.maximumEdition = editionToJSON(message.maximumEdition); + } + return obj; + }, +}; +exports.FeatureSetDefaults_FeatureSetEditionDefault = { + fromJSON(object) { + return { + edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0, + overridableFeatures: isSet(object.overridableFeatures) + ? exports.FeatureSet.fromJSON(object.overridableFeatures) + : undefined, + fixedFeatures: isSet(object.fixedFeatures) ? exports.FeatureSet.fromJSON(object.fixedFeatures) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + if (message.edition !== undefined && message.edition !== 0) { + obj.edition = editionToJSON(message.edition); + } + if (message.overridableFeatures !== undefined) { + obj.overridableFeatures = exports.FeatureSet.toJSON(message.overridableFeatures); + } + if (message.fixedFeatures !== undefined) { + obj.fixedFeatures = exports.FeatureSet.toJSON(message.fixedFeatures); + } + return obj; + }, +}; +exports.SourceCodeInfo = { + fromJSON(object) { + return { + location: globalThis.Array.isArray(object?.location) + ? 
object.location.map((e) => exports.SourceCodeInfo_Location.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.location?.length) { + obj.location = message.location.map((e) => exports.SourceCodeInfo_Location.toJSON(e)); + } + return obj; + }, +}; +exports.SourceCodeInfo_Location = { + fromJSON(object) { + return { + path: globalThis.Array.isArray(object?.path) + ? object.path.map((e) => globalThis.Number(e)) + : [], + span: globalThis.Array.isArray(object?.span) ? object.span.map((e) => globalThis.Number(e)) : [], + leadingComments: isSet(object.leadingComments) ? globalThis.String(object.leadingComments) : "", + trailingComments: isSet(object.trailingComments) ? globalThis.String(object.trailingComments) : "", + leadingDetachedComments: globalThis.Array.isArray(object?.leadingDetachedComments) + ? object.leadingDetachedComments.map((e) => globalThis.String(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.path?.length) { + obj.path = message.path.map((e) => Math.round(e)); + } + if (message.span?.length) { + obj.span = message.span.map((e) => Math.round(e)); + } + if (message.leadingComments !== undefined && message.leadingComments !== "") { + obj.leadingComments = message.leadingComments; + } + if (message.trailingComments !== undefined && message.trailingComments !== "") { + obj.trailingComments = message.trailingComments; + } + if (message.leadingDetachedComments?.length) { + obj.leadingDetachedComments = message.leadingDetachedComments; + } + return obj; + }, +}; +exports.GeneratedCodeInfo = { + fromJSON(object) { + return { + annotation: globalThis.Array.isArray(object?.annotation) + ? object.annotation.map((e) => exports.GeneratedCodeInfo_Annotation.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.annotation?.length) { + obj.annotation = message.annotation.map((e) => exports.GeneratedCodeInfo_Annotation.toJSON(e)); + } + return obj; + }, +}; +exports.GeneratedCodeInfo_Annotation = { + fromJSON(object) { + return { + path: globalThis.Array.isArray(object?.path) + ? object.path.map((e) => globalThis.Number(e)) + : [], + sourceFile: isSet(object.sourceFile) ? globalThis.String(object.sourceFile) : "", + begin: isSet(object.begin) ? globalThis.Number(object.begin) : 0, + end: isSet(object.end) ? globalThis.Number(object.end) : 0, + semantic: isSet(object.semantic) ? 
generatedCodeInfo_Annotation_SemanticFromJSON(object.semantic) : 0, + }; + }, + toJSON(message) { + const obj = {}; + if (message.path?.length) { + obj.path = message.path.map((e) => Math.round(e)); + } + if (message.sourceFile !== undefined && message.sourceFile !== "") { + obj.sourceFile = message.sourceFile; + } + if (message.begin !== undefined && message.begin !== 0) { + obj.begin = Math.round(message.begin); + } + if (message.end !== undefined && message.end !== 0) { + obj.end = Math.round(message.end); + } + if (message.semantic !== undefined && message.semantic !== 0) { + obj.semantic = generatedCodeInfo_Annotation_SemanticToJSON(message.semantic); + } + return obj; + }, +}; +function bytesFromBase64(b64) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); +} +function base64FromBytes(arr) { + return globalThis.Buffer.from(arr).toString("base64"); +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js new file mode 100644 index 0000000000000000000000000000000000000000..9d24cbba10de9f724256160eb3f622fa8634c4f8 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js @@ -0,0 +1,29 @@ +"use strict"; +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.7.5 +// protoc v6.30.2 +// source: google/protobuf/timestamp.proto +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Timestamp = void 0; +exports.Timestamp = { + fromJSON(object) { + return { + seconds: isSet(object.seconds) ? globalThis.String(object.seconds) : "0", + nanos: isSet(object.nanos) ? globalThis.Number(object.nanos) : 0, + }; + }, + toJSON(message) { + const obj = {}; + if (message.seconds !== "0") { + obj.seconds = message.seconds; + } + if (message.nanos !== 0) { + obj.nanos = Math.round(message.nanos); + } + return obj; + }, +}; +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js new file mode 100644 index 0000000000000000000000000000000000000000..abc766bed3b88133d7fc043ec5283ec4e1ce2c95 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js @@ -0,0 +1,55 @@ +"use strict"; +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.7.5 +// protoc v6.30.2 +// source: rekor/v2/dsse.proto +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DSSELogEntryV002 = exports.DSSERequestV002 = void 0; +/* eslint-disable */ +const envelope_1 = require("../../envelope"); +const sigstore_common_1 = require("../../sigstore_common"); +const verifier_1 = require("./verifier"); +exports.DSSERequestV002 = { + fromJSON(object) { + return { + envelope: isSet(object.envelope) ? 
envelope_1.Envelope.fromJSON(object.envelope) : undefined, + verifiers: globalThis.Array.isArray(object?.verifiers) + ? object.verifiers.map((e) => verifier_1.Verifier.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.envelope !== undefined) { + obj.envelope = envelope_1.Envelope.toJSON(message.envelope); + } + if (message.verifiers?.length) { + obj.verifiers = message.verifiers.map((e) => verifier_1.Verifier.toJSON(e)); + } + return obj; + }, +}; +exports.DSSELogEntryV002 = { + fromJSON(object) { + return { + payloadHash: isSet(object.payloadHash) ? sigstore_common_1.HashOutput.fromJSON(object.payloadHash) : undefined, + signatures: globalThis.Array.isArray(object?.signatures) + ? object.signatures.map((e) => verifier_1.Signature.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.payloadHash !== undefined) { + obj.payloadHash = sigstore_common_1.HashOutput.toJSON(message.payloadHash); + } + if (message.signatures?.length) { + obj.signatures = message.signatures.map((e) => verifier_1.Signature.toJSON(e)); + } + return obj; + }, +}; +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js new file mode 100644 index 0000000000000000000000000000000000000000..c5eccb10e0a6808d9fbe3cc1b6f592ebf560c4e1 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js @@ -0,0 +1,81 @@ +"use strict"; +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.7.5 +// protoc v6.30.2 +// source: rekor/v2/entry.proto +Object.defineProperty(exports, "__esModule", { value: true }); +exports.CreateEntryRequest = exports.Spec = exports.Entry = void 0; +/* eslint-disable */ +const dsse_1 = require("./dsse"); +const hashedrekord_1 = require("./hashedrekord"); +exports.Entry = { + fromJSON(object) { + return { + kind: isSet(object.kind) ? globalThis.String(object.kind) : "", + apiVersion: isSet(object.apiVersion) ? globalThis.String(object.apiVersion) : "", + spec: isSet(object.spec) ? exports.Spec.fromJSON(object.spec) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + if (message.kind !== "") { + obj.kind = message.kind; + } + if (message.apiVersion !== "") { + obj.apiVersion = message.apiVersion; + } + if (message.spec !== undefined) { + obj.spec = exports.Spec.toJSON(message.spec); + } + return obj; + }, +}; +exports.Spec = { + fromJSON(object) { + return { + spec: isSet(object.hashedRekordV002) + ? { $case: "hashedRekordV002", hashedRekordV002: hashedrekord_1.HashedRekordLogEntryV002.fromJSON(object.hashedRekordV002) } + : isSet(object.dsseV002) + ? { $case: "dsseV002", dsseV002: dsse_1.DSSELogEntryV002.fromJSON(object.dsseV002) } + : undefined, + }; + }, + toJSON(message) { + const obj = {}; + if (message.spec?.$case === "hashedRekordV002") { + obj.hashedRekordV002 = hashedrekord_1.HashedRekordLogEntryV002.toJSON(message.spec.hashedRekordV002); + } + else if (message.spec?.$case === "dsseV002") { + obj.dsseV002 = dsse_1.DSSELogEntryV002.toJSON(message.spec.dsseV002); + } + return obj; + }, +}; +exports.CreateEntryRequest = { + fromJSON(object) { + return { + spec: isSet(object.hashedRekordRequestV002) + ? 
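+            /* Editorial note (sketch, not generated output): oneof fields decode
+               into a tagged union { $case, <field> }, keyed by whichever JSON
+               member is present, e.g.:
+                 exports.CreateEntryRequest.fromJSON({ dsseRequestV002: {} }).spec.$case
+                 // => "dsseRequestV002"
+            */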
{ + $case: "hashedRekordRequestV002", + hashedRekordRequestV002: hashedrekord_1.HashedRekordRequestV002.fromJSON(object.hashedRekordRequestV002), + } + : isSet(object.dsseRequestV002) + ? { $case: "dsseRequestV002", dsseRequestV002: dsse_1.DSSERequestV002.fromJSON(object.dsseRequestV002) } + : undefined, + }; + }, + toJSON(message) { + const obj = {}; + if (message.spec?.$case === "hashedRekordRequestV002") { + obj.hashedRekordRequestV002 = hashedrekord_1.HashedRekordRequestV002.toJSON(message.spec.hashedRekordRequestV002); + } + else if (message.spec?.$case === "dsseRequestV002") { + obj.dsseRequestV002 = dsse_1.DSSERequestV002.toJSON(message.spec.dsseRequestV002); + } + return obj; + }, +}; +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js new file mode 100644 index 0000000000000000000000000000000000000000..d3fd1af2483d1837e2f018f9db2336d3497a0b4f --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js @@ -0,0 +1,56 @@ +"use strict"; +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.7.5 +// protoc v6.30.2 +// source: rekor/v2/hashedrekord.proto +Object.defineProperty(exports, "__esModule", { value: true }); +exports.HashedRekordLogEntryV002 = exports.HashedRekordRequestV002 = void 0; +/* eslint-disable */ +const sigstore_common_1 = require("../../sigstore_common"); +const verifier_1 = require("./verifier"); +exports.HashedRekordRequestV002 = { + fromJSON(object) { + return { + digest: isSet(object.digest) ? Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0), + signature: isSet(object.signature) ? verifier_1.Signature.fromJSON(object.signature) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + if (message.digest.length !== 0) { + obj.digest = base64FromBytes(message.digest); + } + if (message.signature !== undefined) { + obj.signature = verifier_1.Signature.toJSON(message.signature); + } + return obj; + }, +}; +exports.HashedRekordLogEntryV002 = { + fromJSON(object) { + return { + data: isSet(object.data) ? sigstore_common_1.HashOutput.fromJSON(object.data) : undefined, + signature: isSet(object.signature) ? 
verifier_1.Signature.fromJSON(object.signature) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + if (message.data !== undefined) { + obj.data = sigstore_common_1.HashOutput.toJSON(message.data); + } + if (message.signature !== undefined) { + obj.signature = verifier_1.Signature.toJSON(message.signature); + } + return obj; + }, +}; +function bytesFromBase64(b64) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); +} +function base64FromBytes(arr) { + return globalThis.Buffer.from(arr).toString("base64"); +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js new file mode 100644 index 0000000000000000000000000000000000000000..c437d5053a3cb6b9cb71f64d4d700560dc9c60a1 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js @@ -0,0 +1,74 @@ +"use strict"; +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.7.5 +// protoc v6.30.2 +// source: rekor/v2/verifier.proto +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Signature = exports.Verifier = exports.PublicKey = void 0; +/* eslint-disable */ +const sigstore_common_1 = require("../../sigstore_common"); +exports.PublicKey = { + fromJSON(object) { + return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) }; + }, + toJSON(message) { + const obj = {}; + if (message.rawBytes.length !== 0) { + obj.rawBytes = base64FromBytes(message.rawBytes); + } + return obj; + }, +}; +exports.Verifier = { + fromJSON(object) { + return { + verifier: isSet(object.publicKey) + ? { $case: "publicKey", publicKey: exports.PublicKey.fromJSON(object.publicKey) } + : isSet(object.x509Certificate) + ? { $case: "x509Certificate", x509Certificate: sigstore_common_1.X509Certificate.fromJSON(object.x509Certificate) } + : undefined, + keyDetails: isSet(object.keyDetails) ? (0, sigstore_common_1.publicKeyDetailsFromJSON)(object.keyDetails) : 0, + }; + }, + toJSON(message) { + const obj = {}; + if (message.verifier?.$case === "publicKey") { + obj.publicKey = exports.PublicKey.toJSON(message.verifier.publicKey); + } + else if (message.verifier?.$case === "x509Certificate") { + obj.x509Certificate = sigstore_common_1.X509Certificate.toJSON(message.verifier.x509Certificate); + } + if (message.keyDetails !== 0) { + obj.keyDetails = (0, sigstore_common_1.publicKeyDetailsToJSON)(message.keyDetails); + } + return obj; + }, +}; +exports.Signature = { + fromJSON(object) { + return { + content: isSet(object.content) ? Buffer.from(bytesFromBase64(object.content)) : Buffer.alloc(0), + verifier: isSet(object.verifier) ? 
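+            /* Editorial note (sketch, not generated output): bytes fields travel
+               as base64 strings in JSON; "c2ln" is base64 for "sig":
+                 exports.Signature.fromJSON({ content: "c2ln" }).content.toString()
+                 // => "sig" (verifier stays undefined when absent)
+            */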
exports.Verifier.fromJSON(object.verifier) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + if (message.content.length !== 0) { + obj.content = base64FromBytes(message.content); + } + if (message.verifier !== undefined) { + obj.verifier = exports.Verifier.toJSON(message.verifier); + } + return obj; + }, +}; +function bytesFromBase64(b64) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); +} +function base64FromBytes(arr) { + return globalThis.Buffer.from(arr).toString("base64"); +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js new file mode 100644 index 0000000000000000000000000000000000000000..aed636f00e7cf62184550c9be3018a119a46927e --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js @@ -0,0 +1,103 @@ +"use strict"; +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.7.5 +// protoc v6.30.2 +// source: sigstore_bundle.proto +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Bundle = exports.VerificationMaterial = exports.TimestampVerificationData = void 0; +/* eslint-disable */ +const envelope_1 = require("./envelope"); +const sigstore_common_1 = require("./sigstore_common"); +const sigstore_rekor_1 = require("./sigstore_rekor"); +exports.TimestampVerificationData = { + fromJSON(object) { + return { + rfc3161Timestamps: globalThis.Array.isArray(object?.rfc3161Timestamps) + ? object.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.rfc3161Timestamps?.length) { + obj.rfc3161Timestamps = message.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.toJSON(e)); + } + return obj; + }, +}; +exports.VerificationMaterial = { + fromJSON(object) { + return { + content: isSet(object.publicKey) + ? { $case: "publicKey", publicKey: sigstore_common_1.PublicKeyIdentifier.fromJSON(object.publicKey) } + : isSet(object.x509CertificateChain) + ? { + $case: "x509CertificateChain", + x509CertificateChain: sigstore_common_1.X509CertificateChain.fromJSON(object.x509CertificateChain), + } + : isSet(object.certificate) + ? { $case: "certificate", certificate: sigstore_common_1.X509Certificate.fromJSON(object.certificate) } + : undefined, + tlogEntries: globalThis.Array.isArray(object?.tlogEntries) + ? object.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.fromJSON(e)) + : [], + timestampVerificationData: isSet(object.timestampVerificationData) + ? 
exports.TimestampVerificationData.fromJSON(object.timestampVerificationData) + : undefined, + }; + }, + toJSON(message) { + const obj = {}; + if (message.content?.$case === "publicKey") { + obj.publicKey = sigstore_common_1.PublicKeyIdentifier.toJSON(message.content.publicKey); + } + else if (message.content?.$case === "x509CertificateChain") { + obj.x509CertificateChain = sigstore_common_1.X509CertificateChain.toJSON(message.content.x509CertificateChain); + } + else if (message.content?.$case === "certificate") { + obj.certificate = sigstore_common_1.X509Certificate.toJSON(message.content.certificate); + } + if (message.tlogEntries?.length) { + obj.tlogEntries = message.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.toJSON(e)); + } + if (message.timestampVerificationData !== undefined) { + obj.timestampVerificationData = exports.TimestampVerificationData.toJSON(message.timestampVerificationData); + } + return obj; + }, +}; +exports.Bundle = { + fromJSON(object) { + return { + mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "", + verificationMaterial: isSet(object.verificationMaterial) + ? exports.VerificationMaterial.fromJSON(object.verificationMaterial) + : undefined, + content: isSet(object.messageSignature) + ? { $case: "messageSignature", messageSignature: sigstore_common_1.MessageSignature.fromJSON(object.messageSignature) } + : isSet(object.dsseEnvelope) + ? { $case: "dsseEnvelope", dsseEnvelope: envelope_1.Envelope.fromJSON(object.dsseEnvelope) } + : undefined, + }; + }, + toJSON(message) { + const obj = {}; + if (message.mediaType !== "") { + obj.mediaType = message.mediaType; + } + if (message.verificationMaterial !== undefined) { + obj.verificationMaterial = exports.VerificationMaterial.toJSON(message.verificationMaterial); + } + if (message.content?.$case === "messageSignature") { + obj.messageSignature = sigstore_common_1.MessageSignature.toJSON(message.content.messageSignature); + } + else if (message.content?.$case === "dsseEnvelope") { + obj.dsseEnvelope = envelope_1.Envelope.toJSON(message.content.dsseEnvelope); + } + return obj; + }, +}; +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js new file mode 100644 index 0000000000000000000000000000000000000000..b900516ed3b55758e41b0b944b60d498d354012d --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js @@ -0,0 +1,596 @@ +"use strict"; +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. 
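+// Editorial note (not generated output): each message below is exported as a
+// plain { fromJSON, toJSON } pair following the proto3 JSON conventions:
+// fromJSON substitutes proto3 defaults for absent fields, and toJSON omits
+// fields still holding their default. A hypothetical sketch, assuming this
+// module is required as `common`:
+//   const out = common.HashOutput.fromJSON({ algorithm: "SHA2_256", digest: "3q2+7w==" });
+//   common.hashAlgorithmToJSON(out.algorithm); // => "SHA2_256"
+//   common.HashOutput.toJSON(out);             // => { algorithm: "SHA2_256", digest: "3q2+7w==" }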
+// versions:
+// protoc-gen-ts_proto v2.7.5
+// protoc v6.30.2
+// source: sigstore_common.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TimeRange = exports.X509CertificateChain = exports.SubjectAlternativeName = exports.X509Certificate = exports.DistinguishedName = exports.ObjectIdentifierValuePair = exports.ObjectIdentifier = exports.PublicKeyIdentifier = exports.PublicKey = exports.RFC3161SignedTimestamp = exports.LogId = exports.MessageSignature = exports.HashOutput = exports.SubjectAlternativeNameType = exports.PublicKeyDetails = exports.HashAlgorithm = void 0;
+exports.hashAlgorithmFromJSON = hashAlgorithmFromJSON;
+exports.hashAlgorithmToJSON = hashAlgorithmToJSON;
+exports.publicKeyDetailsFromJSON = publicKeyDetailsFromJSON;
+exports.publicKeyDetailsToJSON = publicKeyDetailsToJSON;
+exports.subjectAlternativeNameTypeFromJSON = subjectAlternativeNameTypeFromJSON;
+exports.subjectAlternativeNameTypeToJSON = subjectAlternativeNameTypeToJSON;
+/* eslint-disable */
+const timestamp_1 = require("./google/protobuf/timestamp");
+/**
+ * Only a subset of the secure hash standard algorithms are supported.
+ * See for more details.
+ * UNSPECIFIED SHOULD not be used; the primary reason for its inclusion is to
+ * force any proto JSON serialization to emit the used hash algorithm, as the
+ * default option is to *omit* the default value of an enum (which is the
+ * first value, represented by '0').
+ */
+var HashAlgorithm;
+(function (HashAlgorithm) {
+    HashAlgorithm[HashAlgorithm["HASH_ALGORITHM_UNSPECIFIED"] = 0] = "HASH_ALGORITHM_UNSPECIFIED";
+    HashAlgorithm[HashAlgorithm["SHA2_256"] = 1] = "SHA2_256";
+    HashAlgorithm[HashAlgorithm["SHA2_384"] = 2] = "SHA2_384";
+    HashAlgorithm[HashAlgorithm["SHA2_512"] = 3] = "SHA2_512";
+    HashAlgorithm[HashAlgorithm["SHA3_256"] = 4] = "SHA3_256";
+    HashAlgorithm[HashAlgorithm["SHA3_384"] = 5] = "SHA3_384";
+})(HashAlgorithm || (exports.HashAlgorithm = HashAlgorithm = {}));
+function hashAlgorithmFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "HASH_ALGORITHM_UNSPECIFIED":
+            return HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED;
+        case 1:
+        case "SHA2_256":
+            return HashAlgorithm.SHA2_256;
+        case 2:
+        case "SHA2_384":
+            return HashAlgorithm.SHA2_384;
+        case 3:
+        case "SHA2_512":
+            return HashAlgorithm.SHA2_512;
+        case 4:
+        case "SHA3_256":
+            return HashAlgorithm.SHA3_256;
+        case 5:
+        case "SHA3_384":
+            return HashAlgorithm.SHA3_384;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
+    }
+}
+function hashAlgorithmToJSON(object) {
+    switch (object) {
+        case HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED:
+            return "HASH_ALGORITHM_UNSPECIFIED";
+        case HashAlgorithm.SHA2_256:
+            return "SHA2_256";
+        case HashAlgorithm.SHA2_384:
+            return "SHA2_384";
+        case HashAlgorithm.SHA2_512:
+            return "SHA2_512";
+        case HashAlgorithm.SHA3_256:
+            return "SHA3_256";
+        case HashAlgorithm.SHA3_384:
+            return "SHA3_384";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
+    }
+}
+/**
+ * Details of a specific public key, capturing the key encoding method
+ * and signature algorithm.
+ *
+ * PublicKeyDetails captures the public key/hash algorithm combinations
+ * recommended in the Sigstore ecosystem.
+ *
+ * This is modelled as a linear set as we want to provide a small number of
+ * opinionated options instead of allowing every possible permutation.
+ *
+ * Any changes to this enum MUST be reflected in the algorithm registry.
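+ *
+ * Editorial example (sketch, not generated output): the helpers below accept
+ * either the enum name or its number on input and always emit the name:
+ *   publicKeyDetailsFromJSON("PKIX_ED25519") // => 7 (PublicKeyDetails.PKIX_ED25519)
+ *   publicKeyDetailsToJSON(7)                // => "PKIX_ED25519"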
+ *
+ * See:
+ *
+ * To avoid the possibility of contradicting formats such as PKCS1 with
+ * ED25519 the valid permutations are listed as a linear set instead of a
+ * cartesian set (i.e. one combined variable instead of two, one for encoding
+ * and one for the signature algorithm).
+ */
+var PublicKeyDetails;
+(function (PublicKeyDetails) {
+    PublicKeyDetails[PublicKeyDetails["PUBLIC_KEY_DETAILS_UNSPECIFIED"] = 0] = "PUBLIC_KEY_DETAILS_UNSPECIFIED";
+    /**
+     * PKCS1_RSA_PKCS1V5 - RSA
+     *
+     * @deprecated
+     */
+    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PKCS1V5"] = 1] = "PKCS1_RSA_PKCS1V5";
+    /**
+     * PKCS1_RSA_PSS - See RFC8017
+     *
+     * @deprecated
+     */
+    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PSS"] = 2] = "PKCS1_RSA_PSS";
+    /** @deprecated */
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V5"] = 3] = "PKIX_RSA_PKCS1V5";
+    /** @deprecated */
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS"] = 4] = "PKIX_RSA_PSS";
+    /** PKIX_RSA_PKCS1V15_2048_SHA256 - RSA public key in PKIX format, PKCS#1v1.5 signature */
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V15_2048_SHA256"] = 9] = "PKIX_RSA_PKCS1V15_2048_SHA256";
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V15_3072_SHA256"] = 10] = "PKIX_RSA_PKCS1V15_3072_SHA256";
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V15_4096_SHA256"] = 11] = "PKIX_RSA_PKCS1V15_4096_SHA256";
+    /** PKIX_RSA_PSS_2048_SHA256 - RSA public key in PKIX format, RSASSA-PSS signature */
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS_2048_SHA256"] = 16] = "PKIX_RSA_PSS_2048_SHA256";
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS_3072_SHA256"] = 17] = "PKIX_RSA_PSS_3072_SHA256";
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS_4096_SHA256"] = 18] = "PKIX_RSA_PSS_4096_SHA256";
+    /**
+     * PKIX_ECDSA_P256_HMAC_SHA_256 - ECDSA
+     *
+     * @deprecated
+     */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_HMAC_SHA_256"] = 6] = "PKIX_ECDSA_P256_HMAC_SHA_256";
+    /** PKIX_ECDSA_P256_SHA_256 - See NIST FIPS 186-4 */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_SHA_256"] = 5] = "PKIX_ECDSA_P256_SHA_256";
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P384_SHA_384"] = 12] = "PKIX_ECDSA_P384_SHA_384";
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P521_SHA_512"] = 13] = "PKIX_ECDSA_P521_SHA_512";
+    /** PKIX_ED25519 - Ed25519 */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ED25519"] = 7] = "PKIX_ED25519";
+    PublicKeyDetails[PublicKeyDetails["PKIX_ED25519_PH"] = 8] = "PKIX_ED25519_PH";
+    /**
+     * PKIX_ECDSA_P384_SHA_256 - These algorithms are deprecated and should not be used, but they
+     * were/are being used by most Sigstore client implementations.
+     *
+     * @deprecated
+     */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P384_SHA_256"] = 19] = "PKIX_ECDSA_P384_SHA_256";
+    /** @deprecated */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P521_SHA_256"] = 20] = "PKIX_ECDSA_P521_SHA_256";
+    /**
+     * LMS_SHA256 - LMS and LM-OTS
+     *
+     * These algorithms are deprecated and should not be used.
+     * Keys and signatures MAY be used by private Sigstore
+     * deployments, but will not be supported by the public
+     * good instance.
+     *
+     * USER WARNING: LMS and LM-OTS are both stateful signature schemes.
+     * Using them correctly requires discretion and careful consideration
+     * to ensure that individual secret keys are not used more than once.
+     * In addition, LM-OTS is a single-use scheme, meaning that it
+     * MUST NOT be used for more than one signature per LM-OTS key.
+     * If you cannot maintain these invariants, you MUST NOT use these
+     * schemes.
+     *
+     * @deprecated
+     */
+    PublicKeyDetails[PublicKeyDetails["LMS_SHA256"] = 14] = "LMS_SHA256";
+    /** @deprecated */
+    PublicKeyDetails[PublicKeyDetails["LMOTS_SHA256"] = 15] = "LMOTS_SHA256";
+    /**
+     * ML_DSA_65 - ML-DSA
+     *
+     * These ML_DSA_65 and ML_DSA_87 algorithms are the pure variants that
+     * take data to sign rather than the prehash variants (HashML-DSA), which
+     * take digests. While considered quantum-resistant, their usage
+     * involves tradeoffs in that signatures and keys are much larger, and
+     * this makes deployments more costly.
+     *
+     * USER WARNING: ML_DSA_65 and ML_DSA_87 are experimental algorithms.
+     * In the future they MAY be used by private Sigstore deployments, but
+     * they are not yet fully functional. This warning will be removed when
+     * these algorithms are widely supported by Sigstore clients and servers,
+     * but care should still be taken for production environments.
+     */
+    PublicKeyDetails[PublicKeyDetails["ML_DSA_65"] = 21] = "ML_DSA_65";
+    PublicKeyDetails[PublicKeyDetails["ML_DSA_87"] = 22] = "ML_DSA_87";
+})(PublicKeyDetails || (exports.PublicKeyDetails = PublicKeyDetails = {}));
+function publicKeyDetailsFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "PUBLIC_KEY_DETAILS_UNSPECIFIED":
+            return PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED;
+        case 1:
+        case "PKCS1_RSA_PKCS1V5":
+            return PublicKeyDetails.PKCS1_RSA_PKCS1V5;
+        case 2:
+        case "PKCS1_RSA_PSS":
+            return PublicKeyDetails.PKCS1_RSA_PSS;
+        case 3:
+        case "PKIX_RSA_PKCS1V5":
+            return PublicKeyDetails.PKIX_RSA_PKCS1V5;
+        case 4:
+        case "PKIX_RSA_PSS":
+            return PublicKeyDetails.PKIX_RSA_PSS;
+        case 9:
+        case "PKIX_RSA_PKCS1V15_2048_SHA256":
+            return PublicKeyDetails.PKIX_RSA_PKCS1V15_2048_SHA256;
+        case 10:
+        case "PKIX_RSA_PKCS1V15_3072_SHA256":
+            return PublicKeyDetails.PKIX_RSA_PKCS1V15_3072_SHA256;
+        case 11:
+        case "PKIX_RSA_PKCS1V15_4096_SHA256":
+            return PublicKeyDetails.PKIX_RSA_PKCS1V15_4096_SHA256;
+        case 16:
+        case "PKIX_RSA_PSS_2048_SHA256":
+            return PublicKeyDetails.PKIX_RSA_PSS_2048_SHA256;
+        case 17:
+        case "PKIX_RSA_PSS_3072_SHA256":
+            return PublicKeyDetails.PKIX_RSA_PSS_3072_SHA256;
+        case 18:
+        case "PKIX_RSA_PSS_4096_SHA256":
+            return PublicKeyDetails.PKIX_RSA_PSS_4096_SHA256;
+        case 6:
+        case "PKIX_ECDSA_P256_HMAC_SHA_256":
+            return PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256;
+        case 5:
+        case "PKIX_ECDSA_P256_SHA_256":
+            return PublicKeyDetails.PKIX_ECDSA_P256_SHA_256;
+        case 12:
+        case "PKIX_ECDSA_P384_SHA_384":
+            return PublicKeyDetails.PKIX_ECDSA_P384_SHA_384;
+        case 13:
+        case "PKIX_ECDSA_P521_SHA_512":
+            return PublicKeyDetails.PKIX_ECDSA_P521_SHA_512;
+        case 7:
+        case "PKIX_ED25519":
+            return PublicKeyDetails.PKIX_ED25519;
+        case 8:
+        case "PKIX_ED25519_PH":
+            return PublicKeyDetails.PKIX_ED25519_PH;
+        case 19:
+        case "PKIX_ECDSA_P384_SHA_256":
+            return PublicKeyDetails.PKIX_ECDSA_P384_SHA_256;
+        case 20:
+        case "PKIX_ECDSA_P521_SHA_256":
+            return PublicKeyDetails.PKIX_ECDSA_P521_SHA_256;
+        case 14:
+        case "LMS_SHA256":
+            return PublicKeyDetails.LMS_SHA256;
+        case 15:
+        case "LMOTS_SHA256":
+            return PublicKeyDetails.LMOTS_SHA256;
+        case 21:
+        case "ML_DSA_65":
+            return PublicKeyDetails.ML_DSA_65;
+        case 22:
+        case "ML_DSA_87":
+            return PublicKeyDetails.ML_DSA_87;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
+    }
+}
+function publicKeyDetailsToJSON(object) {
+    switch (object) {
+        case PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED:
+            return "PUBLIC_KEY_DETAILS_UNSPECIFIED";
+        case
PublicKeyDetails.PKCS1_RSA_PKCS1V5: + return "PKCS1_RSA_PKCS1V5"; + case PublicKeyDetails.PKCS1_RSA_PSS: + return "PKCS1_RSA_PSS"; + case PublicKeyDetails.PKIX_RSA_PKCS1V5: + return "PKIX_RSA_PKCS1V5"; + case PublicKeyDetails.PKIX_RSA_PSS: + return "PKIX_RSA_PSS"; + case PublicKeyDetails.PKIX_RSA_PKCS1V15_2048_SHA256: + return "PKIX_RSA_PKCS1V15_2048_SHA256"; + case PublicKeyDetails.PKIX_RSA_PKCS1V15_3072_SHA256: + return "PKIX_RSA_PKCS1V15_3072_SHA256"; + case PublicKeyDetails.PKIX_RSA_PKCS1V15_4096_SHA256: + return "PKIX_RSA_PKCS1V15_4096_SHA256"; + case PublicKeyDetails.PKIX_RSA_PSS_2048_SHA256: + return "PKIX_RSA_PSS_2048_SHA256"; + case PublicKeyDetails.PKIX_RSA_PSS_3072_SHA256: + return "PKIX_RSA_PSS_3072_SHA256"; + case PublicKeyDetails.PKIX_RSA_PSS_4096_SHA256: + return "PKIX_RSA_PSS_4096_SHA256"; + case PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256: + return "PKIX_ECDSA_P256_HMAC_SHA_256"; + case PublicKeyDetails.PKIX_ECDSA_P256_SHA_256: + return "PKIX_ECDSA_P256_SHA_256"; + case PublicKeyDetails.PKIX_ECDSA_P384_SHA_384: + return "PKIX_ECDSA_P384_SHA_384"; + case PublicKeyDetails.PKIX_ECDSA_P521_SHA_512: + return "PKIX_ECDSA_P521_SHA_512"; + case PublicKeyDetails.PKIX_ED25519: + return "PKIX_ED25519"; + case PublicKeyDetails.PKIX_ED25519_PH: + return "PKIX_ED25519_PH"; + case PublicKeyDetails.PKIX_ECDSA_P384_SHA_256: + return "PKIX_ECDSA_P384_SHA_256"; + case PublicKeyDetails.PKIX_ECDSA_P521_SHA_256: + return "PKIX_ECDSA_P521_SHA_256"; + case PublicKeyDetails.LMS_SHA256: + return "LMS_SHA256"; + case PublicKeyDetails.LMOTS_SHA256: + return "LMOTS_SHA256"; + case PublicKeyDetails.ML_DSA_65: + return "ML_DSA_65"; + case PublicKeyDetails.ML_DSA_87: + return "ML_DSA_87"; + default: + throw new globalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails"); + } +} +var SubjectAlternativeNameType; +(function (SubjectAlternativeNameType) { + SubjectAlternativeNameType[SubjectAlternativeNameType["SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED"] = 0] = "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED"; + SubjectAlternativeNameType[SubjectAlternativeNameType["EMAIL"] = 1] = "EMAIL"; + SubjectAlternativeNameType[SubjectAlternativeNameType["URI"] = 2] = "URI"; + /** + * OTHER_NAME - OID 1.3.6.1.4.1.57264.1.7 + * See https://github.com/sigstore/fulcio/blob/main/docs/oid-info.md#1361415726417--othername-san + * for more details. 
+ */ + SubjectAlternativeNameType[SubjectAlternativeNameType["OTHER_NAME"] = 3] = "OTHER_NAME"; +})(SubjectAlternativeNameType || (exports.SubjectAlternativeNameType = SubjectAlternativeNameType = {})); +function subjectAlternativeNameTypeFromJSON(object) { + switch (object) { + case 0: + case "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED": + return SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED; + case 1: + case "EMAIL": + return SubjectAlternativeNameType.EMAIL; + case 2: + case "URI": + return SubjectAlternativeNameType.URI; + case 3: + case "OTHER_NAME": + return SubjectAlternativeNameType.OTHER_NAME; + default: + throw new globalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType"); + } +} +function subjectAlternativeNameTypeToJSON(object) { + switch (object) { + case SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED: + return "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED"; + case SubjectAlternativeNameType.EMAIL: + return "EMAIL"; + case SubjectAlternativeNameType.URI: + return "URI"; + case SubjectAlternativeNameType.OTHER_NAME: + return "OTHER_NAME"; + default: + throw new globalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType"); + } +} +exports.HashOutput = { + fromJSON(object) { + return { + algorithm: isSet(object.algorithm) ? hashAlgorithmFromJSON(object.algorithm) : 0, + digest: isSet(object.digest) ? Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0), + }; + }, + toJSON(message) { + const obj = {}; + if (message.algorithm !== 0) { + obj.algorithm = hashAlgorithmToJSON(message.algorithm); + } + if (message.digest.length !== 0) { + obj.digest = base64FromBytes(message.digest); + } + return obj; + }, +}; +exports.MessageSignature = { + fromJSON(object) { + return { + messageDigest: isSet(object.messageDigest) ? exports.HashOutput.fromJSON(object.messageDigest) : undefined, + signature: isSet(object.signature) ? Buffer.from(bytesFromBase64(object.signature)) : Buffer.alloc(0), + }; + }, + toJSON(message) { + const obj = {}; + if (message.messageDigest !== undefined) { + obj.messageDigest = exports.HashOutput.toJSON(message.messageDigest); + } + if (message.signature.length !== 0) { + obj.signature = base64FromBytes(message.signature); + } + return obj; + }, +}; +exports.LogId = { + fromJSON(object) { + return { keyId: isSet(object.keyId) ? Buffer.from(bytesFromBase64(object.keyId)) : Buffer.alloc(0) }; + }, + toJSON(message) { + const obj = {}; + if (message.keyId.length !== 0) { + obj.keyId = base64FromBytes(message.keyId); + } + return obj; + }, +}; +exports.RFC3161SignedTimestamp = { + fromJSON(object) { + return { + signedTimestamp: isSet(object.signedTimestamp) + ? Buffer.from(bytesFromBase64(object.signedTimestamp)) + : Buffer.alloc(0), + }; + }, + toJSON(message) { + const obj = {}; + if (message.signedTimestamp.length !== 0) { + obj.signedTimestamp = base64FromBytes(message.signedTimestamp); + } + return obj; + }, +}; +exports.PublicKey = { + fromJSON(object) { + return { + rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : undefined, + keyDetails: isSet(object.keyDetails) ? publicKeyDetailsFromJSON(object.keyDetails) : 0, + validFor: isSet(object.validFor) ? 
exports.TimeRange.fromJSON(object.validFor) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + if (message.rawBytes !== undefined) { + obj.rawBytes = base64FromBytes(message.rawBytes); + } + if (message.keyDetails !== 0) { + obj.keyDetails = publicKeyDetailsToJSON(message.keyDetails); + } + if (message.validFor !== undefined) { + obj.validFor = exports.TimeRange.toJSON(message.validFor); + } + return obj; + }, +}; +exports.PublicKeyIdentifier = { + fromJSON(object) { + return { hint: isSet(object.hint) ? globalThis.String(object.hint) : "" }; + }, + toJSON(message) { + const obj = {}; + if (message.hint !== "") { + obj.hint = message.hint; + } + return obj; + }, +}; +exports.ObjectIdentifier = { + fromJSON(object) { + return { id: globalThis.Array.isArray(object?.id) ? object.id.map((e) => globalThis.Number(e)) : [] }; + }, + toJSON(message) { + const obj = {}; + if (message.id?.length) { + obj.id = message.id.map((e) => Math.round(e)); + } + return obj; + }, +}; +exports.ObjectIdentifierValuePair = { + fromJSON(object) { + return { + oid: isSet(object.oid) ? exports.ObjectIdentifier.fromJSON(object.oid) : undefined, + value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0), + }; + }, + toJSON(message) { + const obj = {}; + if (message.oid !== undefined) { + obj.oid = exports.ObjectIdentifier.toJSON(message.oid); + } + if (message.value.length !== 0) { + obj.value = base64FromBytes(message.value); + } + return obj; + }, +}; +exports.DistinguishedName = { + fromJSON(object) { + return { + organization: isSet(object.organization) ? globalThis.String(object.organization) : "", + commonName: isSet(object.commonName) ? globalThis.String(object.commonName) : "", + }; + }, + toJSON(message) { + const obj = {}; + if (message.organization !== "") { + obj.organization = message.organization; + } + if (message.commonName !== "") { + obj.commonName = message.commonName; + } + return obj; + }, +}; +exports.X509Certificate = { + fromJSON(object) { + return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) }; + }, + toJSON(message) { + const obj = {}; + if (message.rawBytes.length !== 0) { + obj.rawBytes = base64FromBytes(message.rawBytes); + } + return obj; + }, +}; +exports.SubjectAlternativeName = { + fromJSON(object) { + return { + type: isSet(object.type) ? subjectAlternativeNameTypeFromJSON(object.type) : 0, + identity: isSet(object.regexp) + ? { $case: "regexp", regexp: globalThis.String(object.regexp) } + : isSet(object.value) + ? { $case: "value", value: globalThis.String(object.value) } + : undefined, + }; + }, + toJSON(message) { + const obj = {}; + if (message.type !== 0) { + obj.type = subjectAlternativeNameTypeToJSON(message.type); + } + if (message.identity?.$case === "regexp") { + obj.regexp = message.identity.regexp; + } + else if (message.identity?.$case === "value") { + obj.value = message.identity.value; + } + return obj; + }, +}; +exports.X509CertificateChain = { + fromJSON(object) { + return { + certificates: globalThis.Array.isArray(object?.certificates) + ? object.certificates.map((e) => exports.X509Certificate.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.certificates?.length) { + obj.certificates = message.certificates.map((e) => exports.X509Certificate.toJSON(e)); + } + return obj; + }, +}; +exports.TimeRange = { + fromJSON(object) { + return { + start: isSet(object.start) ? fromJsonTimestamp(object.start) : undefined, + end: isSet(object.end) ? 
fromJsonTimestamp(object.end) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + if (message.start !== undefined) { + obj.start = message.start.toISOString(); + } + if (message.end !== undefined) { + obj.end = message.end.toISOString(); + } + return obj; + }, +}; +function bytesFromBase64(b64) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); +} +function base64FromBytes(arr) { + return globalThis.Buffer.from(arr).toString("base64"); +} +function fromTimestamp(t) { + let millis = (globalThis.Number(t.seconds) || 0) * 1_000; + millis += (t.nanos || 0) / 1_000_000; + return new globalThis.Date(millis); +} +function fromJsonTimestamp(o) { + if (o instanceof globalThis.Date) { + return o; + } + else if (typeof o === "string") { + return new globalThis.Date(o); + } + else { + return fromTimestamp(timestamp_1.Timestamp.fromJSON(o)); + } +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js new file mode 100644 index 0000000000000000000000000000000000000000..fd8ea8384664d49face1b0de1e086e9500aacbfd --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js @@ -0,0 +1,137 @@ +"use strict"; +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.7.5 +// protoc v6.30.2 +// source: sigstore_rekor.proto +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TransparencyLogEntry = exports.InclusionPromise = exports.InclusionProof = exports.Checkpoint = exports.KindVersion = void 0; +/* eslint-disable */ +const sigstore_common_1 = require("./sigstore_common"); +exports.KindVersion = { + fromJSON(object) { + return { + kind: isSet(object.kind) ? globalThis.String(object.kind) : "", + version: isSet(object.version) ? globalThis.String(object.version) : "", + }; + }, + toJSON(message) { + const obj = {}; + if (message.kind !== "") { + obj.kind = message.kind; + } + if (message.version !== "") { + obj.version = message.version; + } + return obj; + }, +}; +exports.Checkpoint = { + fromJSON(object) { + return { envelope: isSet(object.envelope) ? globalThis.String(object.envelope) : "" }; + }, + toJSON(message) { + const obj = {}; + if (message.envelope !== "") { + obj.envelope = message.envelope; + } + return obj; + }, +}; +exports.InclusionProof = { + fromJSON(object) { + return { + logIndex: isSet(object.logIndex) ? globalThis.String(object.logIndex) : "0", + rootHash: isSet(object.rootHash) ? Buffer.from(bytesFromBase64(object.rootHash)) : Buffer.alloc(0), + treeSize: isSet(object.treeSize) ? globalThis.String(object.treeSize) : "0", + hashes: globalThis.Array.isArray(object?.hashes) + ? object.hashes.map((e) => Buffer.from(bytesFromBase64(e))) + : [], + checkpoint: isSet(object.checkpoint) ? 
exports.Checkpoint.fromJSON(object.checkpoint) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + if (message.logIndex !== "0") { + obj.logIndex = message.logIndex; + } + if (message.rootHash.length !== 0) { + obj.rootHash = base64FromBytes(message.rootHash); + } + if (message.treeSize !== "0") { + obj.treeSize = message.treeSize; + } + if (message.hashes?.length) { + obj.hashes = message.hashes.map((e) => base64FromBytes(e)); + } + if (message.checkpoint !== undefined) { + obj.checkpoint = exports.Checkpoint.toJSON(message.checkpoint); + } + return obj; + }, +}; +exports.InclusionPromise = { + fromJSON(object) { + return { + signedEntryTimestamp: isSet(object.signedEntryTimestamp) + ? Buffer.from(bytesFromBase64(object.signedEntryTimestamp)) + : Buffer.alloc(0), + }; + }, + toJSON(message) { + const obj = {}; + if (message.signedEntryTimestamp.length !== 0) { + obj.signedEntryTimestamp = base64FromBytes(message.signedEntryTimestamp); + } + return obj; + }, +}; +exports.TransparencyLogEntry = { + fromJSON(object) { + return { + logIndex: isSet(object.logIndex) ? globalThis.String(object.logIndex) : "0", + logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined, + kindVersion: isSet(object.kindVersion) ? exports.KindVersion.fromJSON(object.kindVersion) : undefined, + integratedTime: isSet(object.integratedTime) ? globalThis.String(object.integratedTime) : "0", + inclusionPromise: isSet(object.inclusionPromise) ? exports.InclusionPromise.fromJSON(object.inclusionPromise) : undefined, + inclusionProof: isSet(object.inclusionProof) ? exports.InclusionProof.fromJSON(object.inclusionProof) : undefined, + canonicalizedBody: isSet(object.canonicalizedBody) + ? Buffer.from(bytesFromBase64(object.canonicalizedBody)) + : Buffer.alloc(0), + }; + }, + toJSON(message) { + const obj = {}; + if (message.logIndex !== "0") { + obj.logIndex = message.logIndex; + } + if (message.logId !== undefined) { + obj.logId = sigstore_common_1.LogId.toJSON(message.logId); + } + if (message.kindVersion !== undefined) { + obj.kindVersion = exports.KindVersion.toJSON(message.kindVersion); + } + if (message.integratedTime !== "0") { + obj.integratedTime = message.integratedTime; + } + if (message.inclusionPromise !== undefined) { + obj.inclusionPromise = exports.InclusionPromise.toJSON(message.inclusionPromise); + } + if (message.inclusionProof !== undefined) { + obj.inclusionProof = exports.InclusionProof.toJSON(message.inclusionProof); + } + if (message.canonicalizedBody.length !== 0) { + obj.canonicalizedBody = base64FromBytes(message.canonicalizedBody); + } + return obj; + }, +}; +function bytesFromBase64(b64) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); +} +function base64FromBytes(arr) { + return globalThis.Buffer.from(arr).toString("base64"); +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js new file mode 100644 index 0000000000000000000000000000000000000000..1b5492fb1a77e3d6fb9dc6d89478bd363ec7738e --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js @@ -0,0 +1,284 @@ +"use strict"; +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. 
+// versions: +// protoc-gen-ts_proto v2.7.5 +// protoc v6.30.2 +// source: sigstore_trustroot.proto +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ClientTrustConfig = exports.ServiceConfiguration = exports.Service = exports.SigningConfig = exports.TrustedRoot = exports.CertificateAuthority = exports.TransparencyLogInstance = exports.ServiceSelector = void 0; +exports.serviceSelectorFromJSON = serviceSelectorFromJSON; +exports.serviceSelectorToJSON = serviceSelectorToJSON; +/* eslint-disable */ +const sigstore_common_1 = require("./sigstore_common"); +/** + * ServiceSelector specifies how a client SHOULD select a set of + * Services to connect to. A client SHOULD throw an error if + * the value is SERVICE_SELECTOR_UNDEFINED. + */ +var ServiceSelector; +(function (ServiceSelector) { + ServiceSelector[ServiceSelector["SERVICE_SELECTOR_UNDEFINED"] = 0] = "SERVICE_SELECTOR_UNDEFINED"; + /** + * ALL - Clients SHOULD select all Services based on supported API version + * and validity window. + */ + ServiceSelector[ServiceSelector["ALL"] = 1] = "ALL"; + /** + * ANY - Clients SHOULD select one Service based on supported API version + * and validity window. It is up to the client implementation to + * decide how to select the Service, e.g. random or round-robin. + */ + ServiceSelector[ServiceSelector["ANY"] = 2] = "ANY"; + /** + * EXACT - Clients SHOULD select a specific number of Services based on + * supported API version and validity window, using the provided + * `count`. It is up to the client implementation to decide how to + * select the Service, e.g. random or round-robin. + */ + ServiceSelector[ServiceSelector["EXACT"] = 3] = "EXACT"; +})(ServiceSelector || (exports.ServiceSelector = ServiceSelector = {})); +function serviceSelectorFromJSON(object) { + switch (object) { + case 0: + case "SERVICE_SELECTOR_UNDEFINED": + return ServiceSelector.SERVICE_SELECTOR_UNDEFINED; + case 1: + case "ALL": + return ServiceSelector.ALL; + case 2: + case "ANY": + return ServiceSelector.ANY; + case 3: + case "EXACT": + return ServiceSelector.EXACT; + default: + throw new globalThis.Error("Unrecognized enum value " + object + " for enum ServiceSelector"); + } +} +function serviceSelectorToJSON(object) { + switch (object) { + case ServiceSelector.SERVICE_SELECTOR_UNDEFINED: + return "SERVICE_SELECTOR_UNDEFINED"; + case ServiceSelector.ALL: + return "ALL"; + case ServiceSelector.ANY: + return "ANY"; + case ServiceSelector.EXACT: + return "EXACT"; + default: + throw new globalThis.Error("Unrecognized enum value " + object + " for enum ServiceSelector"); + } +} +exports.TransparencyLogInstance = { + fromJSON(object) { + return { + baseUrl: isSet(object.baseUrl) ? globalThis.String(object.baseUrl) : "", + hashAlgorithm: isSet(object.hashAlgorithm) ? (0, sigstore_common_1.hashAlgorithmFromJSON)(object.hashAlgorithm) : 0, + publicKey: isSet(object.publicKey) ? sigstore_common_1.PublicKey.fromJSON(object.publicKey) : undefined, + logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined, + checkpointKeyId: isSet(object.checkpointKeyId) ? sigstore_common_1.LogId.fromJSON(object.checkpointKeyId) : undefined, + operator: isSet(object.operator) ? 
globalThis.String(object.operator) : "", + }; + }, + toJSON(message) { + const obj = {}; + if (message.baseUrl !== "") { + obj.baseUrl = message.baseUrl; + } + if (message.hashAlgorithm !== 0) { + obj.hashAlgorithm = (0, sigstore_common_1.hashAlgorithmToJSON)(message.hashAlgorithm); + } + if (message.publicKey !== undefined) { + obj.publicKey = sigstore_common_1.PublicKey.toJSON(message.publicKey); + } + if (message.logId !== undefined) { + obj.logId = sigstore_common_1.LogId.toJSON(message.logId); + } + if (message.checkpointKeyId !== undefined) { + obj.checkpointKeyId = sigstore_common_1.LogId.toJSON(message.checkpointKeyId); + } + if (message.operator !== "") { + obj.operator = message.operator; + } + return obj; + }, +}; +exports.CertificateAuthority = { + fromJSON(object) { + return { + subject: isSet(object.subject) ? sigstore_common_1.DistinguishedName.fromJSON(object.subject) : undefined, + uri: isSet(object.uri) ? globalThis.String(object.uri) : "", + certChain: isSet(object.certChain) ? sigstore_common_1.X509CertificateChain.fromJSON(object.certChain) : undefined, + validFor: isSet(object.validFor) ? sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined, + operator: isSet(object.operator) ? globalThis.String(object.operator) : "", + }; + }, + toJSON(message) { + const obj = {}; + if (message.subject !== undefined) { + obj.subject = sigstore_common_1.DistinguishedName.toJSON(message.subject); + } + if (message.uri !== "") { + obj.uri = message.uri; + } + if (message.certChain !== undefined) { + obj.certChain = sigstore_common_1.X509CertificateChain.toJSON(message.certChain); + } + if (message.validFor !== undefined) { + obj.validFor = sigstore_common_1.TimeRange.toJSON(message.validFor); + } + if (message.operator !== "") { + obj.operator = message.operator; + } + return obj; + }, +}; +exports.TrustedRoot = { + fromJSON(object) { + return { + mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "", + tlogs: globalThis.Array.isArray(object?.tlogs) + ? object.tlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e)) + : [], + certificateAuthorities: globalThis.Array.isArray(object?.certificateAuthorities) + ? object.certificateAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e)) + : [], + ctlogs: globalThis.Array.isArray(object?.ctlogs) + ? object.ctlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e)) + : [], + timestampAuthorities: globalThis.Array.isArray(object?.timestampAuthorities) + ? object.timestampAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.mediaType !== "") { + obj.mediaType = message.mediaType; + } + if (message.tlogs?.length) { + obj.tlogs = message.tlogs.map((e) => exports.TransparencyLogInstance.toJSON(e)); + } + if (message.certificateAuthorities?.length) { + obj.certificateAuthorities = message.certificateAuthorities.map((e) => exports.CertificateAuthority.toJSON(e)); + } + if (message.ctlogs?.length) { + obj.ctlogs = message.ctlogs.map((e) => exports.TransparencyLogInstance.toJSON(e)); + } + if (message.timestampAuthorities?.length) { + obj.timestampAuthorities = message.timestampAuthorities.map((e) => exports.CertificateAuthority.toJSON(e)); + } + return obj; + }, +}; +exports.SigningConfig = { + fromJSON(object) { + return { + mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "", + caUrls: globalThis.Array.isArray(object?.caUrls) ? 
object.caUrls.map((e) => exports.Service.fromJSON(e)) : [], + oidcUrls: globalThis.Array.isArray(object?.oidcUrls) ? object.oidcUrls.map((e) => exports.Service.fromJSON(e)) : [], + rekorTlogUrls: globalThis.Array.isArray(object?.rekorTlogUrls) + ? object.rekorTlogUrls.map((e) => exports.Service.fromJSON(e)) + : [], + rekorTlogConfig: isSet(object.rekorTlogConfig) + ? exports.ServiceConfiguration.fromJSON(object.rekorTlogConfig) + : undefined, + tsaUrls: globalThis.Array.isArray(object?.tsaUrls) ? object.tsaUrls.map((e) => exports.Service.fromJSON(e)) : [], + tsaConfig: isSet(object.tsaConfig) ? exports.ServiceConfiguration.fromJSON(object.tsaConfig) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + if (message.mediaType !== "") { + obj.mediaType = message.mediaType; + } + if (message.caUrls?.length) { + obj.caUrls = message.caUrls.map((e) => exports.Service.toJSON(e)); + } + if (message.oidcUrls?.length) { + obj.oidcUrls = message.oidcUrls.map((e) => exports.Service.toJSON(e)); + } + if (message.rekorTlogUrls?.length) { + obj.rekorTlogUrls = message.rekorTlogUrls.map((e) => exports.Service.toJSON(e)); + } + if (message.rekorTlogConfig !== undefined) { + obj.rekorTlogConfig = exports.ServiceConfiguration.toJSON(message.rekorTlogConfig); + } + if (message.tsaUrls?.length) { + obj.tsaUrls = message.tsaUrls.map((e) => exports.Service.toJSON(e)); + } + if (message.tsaConfig !== undefined) { + obj.tsaConfig = exports.ServiceConfiguration.toJSON(message.tsaConfig); + } + return obj; + }, +}; +exports.Service = { + fromJSON(object) { + return { + url: isSet(object.url) ? globalThis.String(object.url) : "", + majorApiVersion: isSet(object.majorApiVersion) ? globalThis.Number(object.majorApiVersion) : 0, + validFor: isSet(object.validFor) ? sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined, + operator: isSet(object.operator) ? globalThis.String(object.operator) : "", + }; + }, + toJSON(message) { + const obj = {}; + if (message.url !== "") { + obj.url = message.url; + } + if (message.majorApiVersion !== 0) { + obj.majorApiVersion = Math.round(message.majorApiVersion); + } + if (message.validFor !== undefined) { + obj.validFor = sigstore_common_1.TimeRange.toJSON(message.validFor); + } + if (message.operator !== "") { + obj.operator = message.operator; + } + return obj; + }, +}; +exports.ServiceConfiguration = { + fromJSON(object) { + return { + selector: isSet(object.selector) ? serviceSelectorFromJSON(object.selector) : 0, + count: isSet(object.count) ? globalThis.Number(object.count) : 0, + }; + }, + toJSON(message) { + const obj = {}; + if (message.selector !== 0) { + obj.selector = serviceSelectorToJSON(message.selector); + } + if (message.count !== 0) { + obj.count = Math.round(message.count); + } + return obj; + }, +}; +exports.ClientTrustConfig = { + fromJSON(object) { + return { + mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "", + trustedRoot: isSet(object.trustedRoot) ? exports.TrustedRoot.fromJSON(object.trustedRoot) : undefined, + signingConfig: isSet(object.signingConfig) ? 
exports.SigningConfig.fromJSON(object.signingConfig) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + if (message.mediaType !== "") { + obj.mediaType = message.mediaType; + } + if (message.trustedRoot !== undefined) { + obj.trustedRoot = exports.TrustedRoot.toJSON(message.trustedRoot); + } + if (message.signingConfig !== undefined) { + obj.signingConfig = exports.SigningConfig.toJSON(message.signingConfig); + } + return obj; + }, +}; +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js new file mode 100644 index 0000000000000000000000000000000000000000..876fe9cc1db1d14b2dfeaab1b202c8b0d21b67cd --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js @@ -0,0 +1,281 @@ +"use strict"; +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v2.7.5 +// protoc v6.30.2 +// source: sigstore_verification.proto +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Input = exports.Artifact = exports.ArtifactVerificationOptions_ObserverTimestampOptions = exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions = exports.ArtifactVerificationOptions_TimestampAuthorityOptions = exports.ArtifactVerificationOptions_CtlogOptions = exports.ArtifactVerificationOptions_TlogOptions = exports.ArtifactVerificationOptions = exports.PublicKeyIdentities = exports.CertificateIdentities = exports.CertificateIdentity = void 0; +/* eslint-disable */ +const sigstore_bundle_1 = require("./sigstore_bundle"); +const sigstore_common_1 = require("./sigstore_common"); +const sigstore_trustroot_1 = require("./sigstore_trustroot"); +exports.CertificateIdentity = { + fromJSON(object) { + return { + issuer: isSet(object.issuer) ? globalThis.String(object.issuer) : "", + san: isSet(object.san) ? sigstore_common_1.SubjectAlternativeName.fromJSON(object.san) : undefined, + oids: globalThis.Array.isArray(object?.oids) + ? object.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.issuer !== "") { + obj.issuer = message.issuer; + } + if (message.san !== undefined) { + obj.san = sigstore_common_1.SubjectAlternativeName.toJSON(message.san); + } + if (message.oids?.length) { + obj.oids = message.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.toJSON(e)); + } + return obj; + }, +}; +exports.CertificateIdentities = { + fromJSON(object) { + return { + identities: globalThis.Array.isArray(object?.identities) + ? object.identities.map((e) => exports.CertificateIdentity.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.identities?.length) { + obj.identities = message.identities.map((e) => exports.CertificateIdentity.toJSON(e)); + } + return obj; + }, +}; +exports.PublicKeyIdentities = { + fromJSON(object) { + return { + publicKeys: globalThis.Array.isArray(object?.publicKeys) + ? 
object.publicKeys.map((e) => sigstore_common_1.PublicKey.fromJSON(e)) + : [], + }; + }, + toJSON(message) { + const obj = {}; + if (message.publicKeys?.length) { + obj.publicKeys = message.publicKeys.map((e) => sigstore_common_1.PublicKey.toJSON(e)); + } + return obj; + }, +}; +exports.ArtifactVerificationOptions = { + fromJSON(object) { + return { + signers: isSet(object.certificateIdentities) + ? { + $case: "certificateIdentities", + certificateIdentities: exports.CertificateIdentities.fromJSON(object.certificateIdentities), + } + : isSet(object.publicKeys) + ? { $case: "publicKeys", publicKeys: exports.PublicKeyIdentities.fromJSON(object.publicKeys) } + : undefined, + tlogOptions: isSet(object.tlogOptions) + ? exports.ArtifactVerificationOptions_TlogOptions.fromJSON(object.tlogOptions) + : undefined, + ctlogOptions: isSet(object.ctlogOptions) + ? exports.ArtifactVerificationOptions_CtlogOptions.fromJSON(object.ctlogOptions) + : undefined, + tsaOptions: isSet(object.tsaOptions) + ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.fromJSON(object.tsaOptions) + : undefined, + integratedTsOptions: isSet(object.integratedTsOptions) + ? exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions.fromJSON(object.integratedTsOptions) + : undefined, + observerOptions: isSet(object.observerOptions) + ? exports.ArtifactVerificationOptions_ObserverTimestampOptions.fromJSON(object.observerOptions) + : undefined, + }; + }, + toJSON(message) { + const obj = {}; + if (message.signers?.$case === "certificateIdentities") { + obj.certificateIdentities = exports.CertificateIdentities.toJSON(message.signers.certificateIdentities); + } + else if (message.signers?.$case === "publicKeys") { + obj.publicKeys = exports.PublicKeyIdentities.toJSON(message.signers.publicKeys); + } + if (message.tlogOptions !== undefined) { + obj.tlogOptions = exports.ArtifactVerificationOptions_TlogOptions.toJSON(message.tlogOptions); + } + if (message.ctlogOptions !== undefined) { + obj.ctlogOptions = exports.ArtifactVerificationOptions_CtlogOptions.toJSON(message.ctlogOptions); + } + if (message.tsaOptions !== undefined) { + obj.tsaOptions = exports.ArtifactVerificationOptions_TimestampAuthorityOptions.toJSON(message.tsaOptions); + } + if (message.integratedTsOptions !== undefined) { + obj.integratedTsOptions = exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions.toJSON(message.integratedTsOptions); + } + if (message.observerOptions !== undefined) { + obj.observerOptions = exports.ArtifactVerificationOptions_ObserverTimestampOptions.toJSON(message.observerOptions); + } + return obj; + }, +}; +exports.ArtifactVerificationOptions_TlogOptions = { + fromJSON(object) { + return { + threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0, + performOnlineVerification: isSet(object.performOnlineVerification) + ? globalThis.Boolean(object.performOnlineVerification) + : false, + disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false, + }; + }, + toJSON(message) { + const obj = {}; + if (message.threshold !== 0) { + obj.threshold = Math.round(message.threshold); + } + if (message.performOnlineVerification !== false) { + obj.performOnlineVerification = message.performOnlineVerification; + } + if (message.disable !== false) { + obj.disable = message.disable; + } + return obj; + }, +}; +exports.ArtifactVerificationOptions_CtlogOptions = { + fromJSON(object) { + return { + threshold: isSet(object.threshold) ? 
globalThis.Number(object.threshold) : 0, + disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false, + }; + }, + toJSON(message) { + const obj = {}; + if (message.threshold !== 0) { + obj.threshold = Math.round(message.threshold); + } + if (message.disable !== false) { + obj.disable = message.disable; + } + return obj; + }, +}; +exports.ArtifactVerificationOptions_TimestampAuthorityOptions = { + fromJSON(object) { + return { + threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0, + disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false, + }; + }, + toJSON(message) { + const obj = {}; + if (message.threshold !== 0) { + obj.threshold = Math.round(message.threshold); + } + if (message.disable !== false) { + obj.disable = message.disable; + } + return obj; + }, +}; +exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions = { + fromJSON(object) { + return { + threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0, + disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false, + }; + }, + toJSON(message) { + const obj = {}; + if (message.threshold !== 0) { + obj.threshold = Math.round(message.threshold); + } + if (message.disable !== false) { + obj.disable = message.disable; + } + return obj; + }, +}; +exports.ArtifactVerificationOptions_ObserverTimestampOptions = { + fromJSON(object) { + return { + threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0, + disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false, + }; + }, + toJSON(message) { + const obj = {}; + if (message.threshold !== 0) { + obj.threshold = Math.round(message.threshold); + } + if (message.disable !== false) { + obj.disable = message.disable; + } + return obj; + }, +}; +exports.Artifact = { + fromJSON(object) { + return { + data: isSet(object.artifactUri) + ? { $case: "artifactUri", artifactUri: globalThis.String(object.artifactUri) } + : isSet(object.artifact) + ? { $case: "artifact", artifact: Buffer.from(bytesFromBase64(object.artifact)) } + : isSet(object.artifactDigest) + ? { $case: "artifactDigest", artifactDigest: sigstore_common_1.HashOutput.fromJSON(object.artifactDigest) } + : undefined, + }; + }, + toJSON(message) { + const obj = {}; + if (message.data?.$case === "artifactUri") { + obj.artifactUri = message.data.artifactUri; + } + else if (message.data?.$case === "artifact") { + obj.artifact = base64FromBytes(message.data.artifact); + } + else if (message.data?.$case === "artifactDigest") { + obj.artifactDigest = sigstore_common_1.HashOutput.toJSON(message.data.artifactDigest); + } + return obj; + }, +}; +exports.Input = { + fromJSON(object) { + return { + artifactTrustRoot: isSet(object.artifactTrustRoot) ? sigstore_trustroot_1.TrustedRoot.fromJSON(object.artifactTrustRoot) : undefined, + artifactVerificationOptions: isSet(object.artifactVerificationOptions) + ? exports.ArtifactVerificationOptions.fromJSON(object.artifactVerificationOptions) + : undefined, + bundle: isSet(object.bundle) ? sigstore_bundle_1.Bundle.fromJSON(object.bundle) : undefined, + artifact: isSet(object.artifact) ? 
exports.Artifact.fromJSON(object.artifact) : undefined, + }; + }, + toJSON(message) { + const obj = {}; + if (message.artifactTrustRoot !== undefined) { + obj.artifactTrustRoot = sigstore_trustroot_1.TrustedRoot.toJSON(message.artifactTrustRoot); + } + if (message.artifactVerificationOptions !== undefined) { + obj.artifactVerificationOptions = exports.ArtifactVerificationOptions.toJSON(message.artifactVerificationOptions); + } + if (message.bundle !== undefined) { + obj.bundle = sigstore_bundle_1.Bundle.toJSON(message.bundle); + } + if (message.artifact !== undefined) { + obj.artifact = exports.Artifact.toJSON(message.artifact); + } + return obj; + }, +}; +function bytesFromBase64(b64) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); +} +function base64FromBytes(arr) { + return globalThis.Buffer.from(arr).toString("base64"); +} +function isSet(value) { + return value !== null && value !== undefined; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/index.js new file mode 100644 index 0000000000000000000000000000000000000000..eafb768c48fcaa69df1bbd5583e61cdd77b4b70b --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/index.js @@ -0,0 +1,37 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +__exportStar(require("./__generated__/envelope"), exports); +__exportStar(require("./__generated__/sigstore_bundle"), exports); +__exportStar(require("./__generated__/sigstore_common"), exports); +__exportStar(require("./__generated__/sigstore_rekor"), exports); +__exportStar(require("./__generated__/sigstore_trustroot"), exports); +__exportStar(require("./__generated__/sigstore_verification"), exports); diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js new file mode 100644 index 0000000000000000000000000000000000000000..10745efc39a1f8af5ceb774752383d32d9fa0d41 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js @@ -0,0 +1,35 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +/* +Copyright 2025 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +__exportStar(require("../../__generated__/rekor/v2/dsse"), exports); +__exportStar(require("../../__generated__/rekor/v2/entry"), exports); +__exportStar(require("../../__generated__/rekor/v2/hashedrekord"), exports); +__exportStar(require("../../__generated__/rekor/v2/verifier"), exports); diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/package.json new file mode 100644 index 0000000000000000000000000000000000000000..f87b2540fbf986c78ff073f201ff8ecba2c90f25 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/protobuf-specs/package.json @@ -0,0 +1,35 @@ +{ + "name": "@sigstore/protobuf-specs", + "version": "0.5.0", + "description": "code-signing for npm packages", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "exports": { + ".": "./dist/index.js", + "./rekor/v2": "./dist/rekor/v2/index.js" + }, + "scripts": { + "build": "tsc" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/sigstore/protobuf-specs.git" + }, + "files": [ + "dist" + ], + "author": "bdehamer@github.com", + "license": "Apache-2.0", + "bugs": { + "url": "https://github.com/sigstore/protobuf-specs/issues" + }, + "homepage": "https://github.com/sigstore/protobuf-specs#readme", + "devDependencies": { + "@tsconfig/node18": "^18.2.4", + "@types/node": "^18.14.0", + "typescript": "^5.7.2" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..e9e7c1679a09dfcb0793682d99f5129e206a8abd --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2023 The Sigstore Authors
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/bundler/base.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/bundler/base.js
new file mode 100644
index 0000000000000000000000000000000000000000..61d5eba4568a35c0a1e698c249911d0b470c278e
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/bundler/base.js
@@ -0,0 +1,50 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.BaseBundleBuilder = void 0;
+// BaseBundleBuilder is a base class for BundleBuilder implementations. It
+// provides the basic workflow for signing and witnessing an artifact.
+// Subclasses must implement the `package` method to assemble a valid bundle
+// with the generated signature and verification material.
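+//
+// A minimal usage sketch, assuming hypothetical `signer` and `witness`
+// objects that satisfy the interfaces this class relies on: `signer.sign(blob)`
+// resolves to a signature whose `key` is tagged either
+// { $case: 'publicKey', publicKey, hint } or
+// { $case: 'x509Certificate', certificate }, and
+// `witness.testify(content, publicKey)` resolves to an object that may carry
+// `tlogEntries` and/or `rfc3161Timestamps` (run inside an async context):
+//
+//   const { DSSEBundleBuilder } = require('./dsse');
+//   const bundler = new DSSEBundleBuilder({ signer, witnesses: [witness] });
+//   const bundle = await bundler.create({
+//     data: Buffer.from('{"hello":"world"}'),
+//     type: 'application/json',
+//   });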
+class BaseBundleBuilder { + constructor(options) { + this.signer = options.signer; + this.witnesses = options.witnesses; + } + // Executes the signing/witnessing process for the given artifact. + async create(artifact) { + const signature = await this.prepare(artifact).then((blob) => this.signer.sign(blob)); + const bundle = await this.package(artifact, signature); + // Invoke all of the witnesses in parallel + const verificationMaterials = await Promise.all(this.witnesses.map((witness) => witness.testify(bundle.content, publicKey(signature.key)))); + // Collect the verification material from all of the witnesses + const tlogEntryList = []; + const timestampList = []; + verificationMaterials.forEach(({ tlogEntries, rfc3161Timestamps }) => { + tlogEntryList.push(...(tlogEntries ?? [])); + timestampList.push(...(rfc3161Timestamps ?? [])); + }); + // Merge the collected verification material into the bundle + bundle.verificationMaterial.tlogEntries = tlogEntryList; + bundle.verificationMaterial.timestampVerificationData = { + rfc3161Timestamps: timestampList, + }; + return bundle; + } + // Override this function to apply any pre-signing transformations to the + // artifact. The returned buffer will be signed by the signer. The default + // implementation simply returns the artifact data. + async prepare(artifact) { + return artifact.data; + } +} +exports.BaseBundleBuilder = BaseBundleBuilder; +// Extracts the public key from a KeyMaterial. Returns either the public key +// or the certificate, depending on the type of key material. +function publicKey(key) { + switch (key.$case) { + case 'publicKey': + return key.publicKey; + case 'x509Certificate': + return key.certificate; + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/bundler/bundle.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/bundler/bundle.js new file mode 100644 index 0000000000000000000000000000000000000000..34b1d12f2b44c8db2600ef791da1db65160ab469 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/bundler/bundle.js @@ -0,0 +1,81 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toMessageSignatureBundle = toMessageSignatureBundle; +exports.toDSSEBundle = toDSSEBundle; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const sigstore = __importStar(require("@sigstore/bundle")); +const util_1 = require("../util"); +// Helper functions for assembling the parts of a Sigstore bundle +// Message signature bundle - $case: 'messageSignature' +function toMessageSignatureBundle(artifact, signature) { + const digest = util_1.crypto.digest('sha256', artifact.data); + return sigstore.toMessageSignatureBundle({ + digest, + signature: signature.signature, + certificate: signature.key.$case === 'x509Certificate' + ? util_1.pem.toDER(signature.key.certificate) + : undefined, + keyHint: signature.key.$case === 'publicKey' ? signature.key.hint : undefined, + certificateChain: true, + }); +} +// DSSE envelope bundle - $case: 'dsseEnvelope' +function toDSSEBundle(artifact, signature, certificateChain) { + return sigstore.toDSSEBundle({ + artifact: artifact.data, + artifactType: artifact.type, + signature: signature.signature, + certificate: signature.key.$case === 'x509Certificate' + ? util_1.pem.toDER(signature.key.certificate) + : undefined, + keyHint: signature.key.$case === 'publicKey' ? signature.key.hint : undefined, + certificateChain, + }); +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/bundler/dsse.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/bundler/dsse.js new file mode 100644 index 0000000000000000000000000000000000000000..86046ba8f3013ba3e31c40b0c227c528bd0bf733 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/bundler/dsse.js @@ -0,0 +1,46 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DSSEBundleBuilder = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const util_1 = require("../util"); +const base_1 = require("./base"); +const bundle_1 = require("./bundle"); +// BundleBuilder implementation for DSSE wrapped attestations +class DSSEBundleBuilder extends base_1.BaseBundleBuilder { + constructor(options) { + super(options); + this.certificateChain = options.certificateChain ?? false; + } + // DSSE requires the artifact to be pre-encoded with the payload type + // before the signature is generated. + async prepare(artifact) { + const a = artifactDefaults(artifact); + return util_1.dsse.preAuthEncoding(a.type, a.data); + } + // Packages the artifact and signature into a DSSE bundle + async package(artifact, signature) { + return (0, bundle_1.toDSSEBundle)(artifactDefaults(artifact), signature, this.certificateChain); + } +} +exports.DSSEBundleBuilder = DSSEBundleBuilder; +// Defaults the artifact type to an empty string if not provided +function artifactDefaults(artifact) { + return { + ...artifact, + type: artifact.type ?? '', + }; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/bundler/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/bundler/index.js new file mode 100644 index 0000000000000000000000000000000000000000..d67c8c324a4f04c7b9191daf5c1fdbb8378317b9 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/bundler/index.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.MessageSignatureBundleBuilder = exports.DSSEBundleBuilder = void 0; +var dsse_1 = require("./dsse"); +Object.defineProperty(exports, "DSSEBundleBuilder", { enumerable: true, get: function () { return dsse_1.DSSEBundleBuilder; } }); +var message_1 = require("./message"); +Object.defineProperty(exports, "MessageSignatureBundleBuilder", { enumerable: true, get: function () { return message_1.MessageSignatureBundleBuilder; } }); diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/bundler/message.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/bundler/message.js new file mode 100644 index 0000000000000000000000000000000000000000..e3991f42bab939fcec0886248f4274a4d6c4f691 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/bundler/message.js @@ -0,0 +1,30 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.MessageSignatureBundleBuilder = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +*/ +const base_1 = require("./base"); +const bundle_1 = require("./bundle"); +// BundleBuilder implementation for raw message signatures +class MessageSignatureBundleBuilder extends base_1.BaseBundleBuilder { + constructor(options) { + super(options); + } + async package(artifact, signature) { + return (0, bundle_1.toMessageSignatureBundle)(artifact, signature); + } +} +exports.MessageSignatureBundleBuilder = MessageSignatureBundleBuilder; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/error.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/error.js new file mode 100644 index 0000000000000000000000000000000000000000..d28f1913cc77e94437a8e950c12c0731f3f14e58 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/error.js @@ -0,0 +1,39 @@ +"use strict"; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.InternalError = void 0; +exports.internalError = internalError; +const error_1 = require("./external/error"); +class InternalError extends Error { + constructor({ code, message, cause, }) { + super(message); + this.name = this.constructor.name; + this.cause = cause; + this.code = code; + } +} +exports.InternalError = InternalError; +function internalError(err, code, message) { + if (err instanceof error_1.HTTPError) { + message += ` - ${err.message}`; + } + throw new InternalError({ + code: code, + message: message, + cause: err, + }); +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/external/error.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/external/error.js new file mode 100644 index 0000000000000000000000000000000000000000..a6a65adebb17670e264258dc0a89897729466d8a --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/external/error.js @@ -0,0 +1,26 @@ +"use strict"; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.HTTPError = void 0; +class HTTPError extends Error { + constructor({ status, message, location, }) { + super(`(${status}) ${message}`); + this.statusCode = status; + this.location = location; + } +} +exports.HTTPError = HTTPError; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/external/fetch.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/external/fetch.js new file mode 100644 index 0000000000000000000000000000000000000000..116090f3c641ef9404ea107efbca0081adfd9d57 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/external/fetch.js @@ -0,0 +1,98 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fetchWithRetry = fetchWithRetry; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const http2_1 = require("http2"); +const make_fetch_happen_1 = __importDefault(require("make-fetch-happen")); +const proc_log_1 = require("proc-log"); +const promise_retry_1 = __importDefault(require("promise-retry")); +const util_1 = require("../util"); +const error_1 = require("./error"); +const { HTTP2_HEADER_LOCATION, HTTP2_HEADER_CONTENT_TYPE, HTTP2_HEADER_USER_AGENT, HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_TOO_MANY_REQUESTS, HTTP_STATUS_REQUEST_TIMEOUT, } = http2_1.constants; +async function fetchWithRetry(url, options) { + return (0, promise_retry_1.default)(async (retry, attemptNum) => { + const method = options.method || 'POST'; + const headers = { + [HTTP2_HEADER_USER_AGENT]: util_1.ua.getUserAgent(), + ...options.headers, + }; + const response = await (0, make_fetch_happen_1.default)(url, { + method, + headers, + body: options.body, + timeout: options.timeout, + retry: false, // We're handling retries ourselves + }).catch((reason) => { + proc_log_1.log.http('fetch', `${method} ${url} attempt ${attemptNum} failed with ${reason}`); + return retry(reason); + }); + if (response.ok) { + return response; + } + else { + const error = await errorFromResponse(response); + proc_log_1.log.http('fetch', `${method} ${url} attempt ${attemptNum} failed with ${response.status}`); + if (retryable(response.status)) { + return retry(error); + } + else { + throw error; + } + } + }, retryOpts(options.retry)); +} +// Translate a Response into an HTTPError instance. This will attempt to parse +// the response body for a message, but will default to the statusText if none +// is found. 
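+// (Editor's note) A minimal usage sketch for fetchWithRetry above; the URL is
+// hypothetical, and `retry` may be a boolean, a number, or a promise-retry
+// options object (see retryOpts below). Requests default to POST:
+//
+//   const response = await fetchWithRetry('https://example.dev/api', {
+//     headers: { 'Content-Type': 'application/json' },
+//     body: JSON.stringify({ hello: 'world' }),
+//     retry: 2,      // up to 2 retries for 408/429/5xx responses
+//     timeout: 5000, // per-attempt timeout in milliseconds
+//   });
+//   const data = await response.json();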
+const errorFromResponse = async (response) => { + let message = response.statusText; + const location = response.headers.get(HTTP2_HEADER_LOCATION) || undefined; + const contentType = response.headers.get(HTTP2_HEADER_CONTENT_TYPE); + // If response type is JSON, try to parse the body for a message + if (contentType?.includes('application/json')) { + try { + const body = await response.json(); + message = body.message || message; + } + catch (e) { + // ignore + } + } + return new error_1.HTTPError({ + status: response.status, + message: message, + location: location, + }); +}; +// Determine if a status code is retryable. This includes 5xx errors, 408, and +// 429. +const retryable = (status) => [HTTP_STATUS_REQUEST_TIMEOUT, HTTP_STATUS_TOO_MANY_REQUESTS].includes(status) || status >= HTTP_STATUS_INTERNAL_SERVER_ERROR; +// Normalize the retry options to the format expected by promise-retry +const retryOpts = (retry) => { + if (typeof retry === 'boolean') { + return { retries: retry ? 1 : 0 }; + } + else if (typeof retry === 'number') { + return { retries: retry }; + } + else { + return { retries: 0, ...retry }; + } +}; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/external/fulcio.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/external/fulcio.js new file mode 100644 index 0000000000000000000000000000000000000000..de6a1ad9f9e797b4f2bdb317ae15653a13cc8435 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/external/fulcio.js @@ -0,0 +1,41 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Fulcio = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const fetch_1 = require("./fetch"); +/** + * Fulcio API client. + */ +class Fulcio { + constructor(options) { + this.options = options; + } + async createSigningCertificate(request) { + const { baseURL, retry, timeout } = this.options; + const url = `${baseURL}/api/v2/signingCert`; + const response = await (0, fetch_1.fetchWithRetry)(url, { + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify(request), + timeout, + retry, + }); + return response.json(); + } +} +exports.Fulcio = Fulcio; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/external/rekor.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/external/rekor.js new file mode 100644 index 0000000000000000000000000000000000000000..bb59a126e032fb5a4261a9b5eef9ccd348e8e264 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/external/rekor.js @@ -0,0 +1,80 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Rekor = void 0; +/* +Copyright 2023 The Sigstore Authors. 
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const fetch_1 = require("./fetch");
+/**
+ * Rekor API client.
+ */
+class Rekor {
+    constructor(options) {
+        this.options = options;
+    }
+    /**
+     * Create a new entry in the Rekor log.
+     * @param proposedEntry {ProposedEntry} Data to create a new entry
+     * @returns {Promise<Entry>} The created entry
+     */
+    async createEntry(proposedEntry) {
+        const { baseURL, timeout, retry } = this.options;
+        const url = `${baseURL}/api/v1/log/entries`;
+        const response = await (0, fetch_1.fetchWithRetry)(url, {
+            headers: {
+                'Content-Type': 'application/json',
+                Accept: 'application/json',
+            },
+            body: JSON.stringify(proposedEntry),
+            timeout,
+            retry,
+        });
+        const data = await response.json();
+        return entryFromResponse(data);
+    }
+    /**
+     * Get an entry from the Rekor log.
+     * @param uuid {string} The UUID of the entry to retrieve
+     * @returns {Promise<Entry>} The retrieved entry
+     */
+    async getEntry(uuid) {
+        const { baseURL, timeout, retry } = this.options;
+        const url = `${baseURL}/api/v1/log/entries/${uuid}`;
+        const response = await (0, fetch_1.fetchWithRetry)(url, {
+            method: 'GET',
+            headers: {
+                Accept: 'application/json',
+            },
+            timeout,
+            retry,
+        });
+        const data = await response.json();
+        return entryFromResponse(data);
+    }
+}
+exports.Rekor = Rekor;
+// Unpack the response from the Rekor API into a more convenient format.
+function entryFromResponse(data) {
+    const entries = Object.entries(data);
+    if (entries.length !== 1) {
+        throw new Error('Received multiple entries in Rekor response');
+    }
+    // Grab UUID and entry data from the response
+    const [uuid, entry] = entries[0];
+    return {
+        ...entry,
+        uuid,
+    };
+}
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/external/tsa.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/external/tsa.js
new file mode 100644
index 0000000000000000000000000000000000000000..a948ba9cca2c72ca15dbc167d961c91db9dd3027
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/external/tsa.js
@@ -0,0 +1,38 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TimestampAuthority = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/ +const fetch_1 = require("./fetch"); +class TimestampAuthority { + constructor(options) { + this.options = options; + } + async createTimestamp(request) { + const { baseURL, timeout, retry } = this.options; + const url = `${baseURL}/api/v1/timestamp`; + const response = await (0, fetch_1.fetchWithRetry)(url, { + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify(request), + timeout, + retry, + }); + return response.buffer(); + } +} +exports.TimestampAuthority = TimestampAuthority; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/identity/ci.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/identity/ci.js new file mode 100644 index 0000000000000000000000000000000000000000..d79133952b605ba964e46e610c45225c20067bc0 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/identity/ci.js @@ -0,0 +1,73 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.CIContextProvider = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const make_fetch_happen_1 = __importDefault(require("make-fetch-happen")); +// Collection of all the CI-specific providers we have implemented +const providers = [getGHAToken, getEnv]; +/** + * CIContextProvider is a composite identity provider which will iterate + * over all of the CI-specific providers and return the token from the first + * one that resolves. + */ +class CIContextProvider { + /* istanbul ignore next */ + constructor(audience = 'sigstore') { + this.audience = audience; + } + // Invoke all registered ProviderFuncs and return the value of whichever one + // resolves first. + async getToken() { + return Promise.any(providers.map((getToken) => getToken(this.audience))).catch(() => Promise.reject('CI: no tokens available')); + } +} +exports.CIContextProvider = CIContextProvider; +/** + * getGHAToken can retrieve an OIDC token when running in a GitHub Actions + * workflow + */ +async function getGHAToken(audience) { + // Check to see if we're running in GitHub Actions + if (!process.env.ACTIONS_ID_TOKEN_REQUEST_URL || + !process.env.ACTIONS_ID_TOKEN_REQUEST_TOKEN) { + return Promise.reject('no token available'); + } + // Construct URL to request token w/ appropriate audience + const url = new URL(process.env.ACTIONS_ID_TOKEN_REQUEST_URL); + url.searchParams.append('audience', audience); + const response = await (0, make_fetch_happen_1.default)(url.href, { + retry: 2, + headers: { + Accept: 'application/json', + Authorization: `Bearer ${process.env.ACTIONS_ID_TOKEN_REQUEST_TOKEN}`, + }, + }); + return response.json().then((data) => data.value); +} +/** + * getEnv can retrieve an OIDC token from an environment variable. 
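+ * (Editor's note: a hedged example) exporting `SIGSTORE_ID_TOKEN=<oidc-jwt>`
+ * before signing makes this provider resolve with the raw token; when the
+ * variable is unset it rejects, and the Promise.any race above falls through
+ * to the remaining providers.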
+ * This matches the behavior of https://github.com/sigstore/cosign/tree/main/pkg/providers/envvar + */ +async function getEnv() { + if (!process.env.SIGSTORE_ID_TOKEN) { + return Promise.reject('no token available'); + } + return process.env.SIGSTORE_ID_TOKEN; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/identity/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/identity/index.js new file mode 100644 index 0000000000000000000000000000000000000000..1c1223b443fab69b1e25e7ad19cafec297f9fba1 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/identity/index.js @@ -0,0 +1,20 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.CIContextProvider = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +var ci_1 = require("./ci"); +Object.defineProperty(exports, "CIContextProvider", { enumerable: true, get: function () { return ci_1.CIContextProvider; } }); diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/identity/provider.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/identity/provider.js new file mode 100644 index 0000000000000000000000000000000000000000..c8ad2e549bdc6801e0d1c80b0308d4b9bd4985ce --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/identity/provider.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/index.js new file mode 100644 index 0000000000000000000000000000000000000000..383b76083361b9f8afcea56bda581a994d6668d4 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/index.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TSAWitness = exports.RekorWitness = exports.DEFAULT_REKOR_URL = exports.FulcioSigner = exports.DEFAULT_FULCIO_URL = exports.CIContextProvider = exports.InternalError = exports.MessageSignatureBundleBuilder = exports.DSSEBundleBuilder = void 0; +var bundler_1 = require("./bundler"); +Object.defineProperty(exports, "DSSEBundleBuilder", { enumerable: true, get: function () { return bundler_1.DSSEBundleBuilder; } }); +Object.defineProperty(exports, "MessageSignatureBundleBuilder", { enumerable: true, get: function () { return bundler_1.MessageSignatureBundleBuilder; } }); +var error_1 = require("./error"); +Object.defineProperty(exports, "InternalError", { enumerable: true, get: function () { return error_1.InternalError; } }); +var identity_1 = require("./identity"); +Object.defineProperty(exports, 
"CIContextProvider", { enumerable: true, get: function () { return identity_1.CIContextProvider; } }); +var signer_1 = require("./signer"); +Object.defineProperty(exports, "DEFAULT_FULCIO_URL", { enumerable: true, get: function () { return signer_1.DEFAULT_FULCIO_URL; } }); +Object.defineProperty(exports, "FulcioSigner", { enumerable: true, get: function () { return signer_1.FulcioSigner; } }); +var witness_1 = require("./witness"); +Object.defineProperty(exports, "DEFAULT_REKOR_URL", { enumerable: true, get: function () { return witness_1.DEFAULT_REKOR_URL; } }); +Object.defineProperty(exports, "RekorWitness", { enumerable: true, get: function () { return witness_1.RekorWitness; } }); +Object.defineProperty(exports, "TSAWitness", { enumerable: true, get: function () { return witness_1.TSAWitness; } }); diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js new file mode 100644 index 0000000000000000000000000000000000000000..f01703cfab56455e9750452829f76493841debbb --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js @@ -0,0 +1,59 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.CAClient = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const error_1 = require("../../error"); +const fulcio_1 = require("../../external/fulcio"); +class CAClient { + constructor(options) { + this.fulcio = new fulcio_1.Fulcio({ + baseURL: options.fulcioBaseURL, + retry: options.retry, + timeout: options.timeout, + }); + } + async createSigningCertificate(identityToken, publicKey, challenge) { + const request = toCertificateRequest(identityToken, publicKey, challenge); + try { + const resp = await this.fulcio.createSigningCertificate(request); + // Account for the fact that the response may contain either a + // signedCertificateEmbeddedSct or a signedCertificateDetachedSct. + const cert = resp.signedCertificateEmbeddedSct + ? 
resp.signedCertificateEmbeddedSct + : resp.signedCertificateDetachedSct; + return cert.chain.certificates; + } + catch (err) { + (0, error_1.internalError)(err, 'CA_CREATE_SIGNING_CERTIFICATE_ERROR', 'error creating signing certificate'); + } + } +} +exports.CAClient = CAClient; +function toCertificateRequest(identityToken, publicKey, challenge) { + return { + credentials: { + oidcIdentityToken: identityToken, + }, + publicKeyRequest: { + publicKey: { + algorithm: 'ECDSA', + content: publicKey, + }, + proofOfPossession: challenge.toString('base64'), + }, + }; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js new file mode 100644 index 0000000000000000000000000000000000000000..481aa5c3579a2788063e8710c74f0e6c3038909c --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js @@ -0,0 +1,45 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.EphemeralSigner = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const crypto_1 = __importDefault(require("crypto")); +const EC_KEYPAIR_TYPE = 'ec'; +const P256_CURVE = 'P-256'; +// Signer implementation which uses an ephemeral keypair to sign artifacts. +// The private key lives only in memory and is tied to the lifetime of the +// EphemeralSigner instance. +class EphemeralSigner { + constructor() { + this.keypair = crypto_1.default.generateKeyPairSync(EC_KEYPAIR_TYPE, { + namedCurve: P256_CURVE, + }); + } + async sign(data) { + const signature = crypto_1.default.sign(null, data, this.keypair.privateKey); + const publicKey = this.keypair.publicKey + .export({ format: 'pem', type: 'spki' }) + .toString('ascii'); + return { + signature: signature, + key: { $case: 'publicKey', publicKey }, + }; + } +} +exports.EphemeralSigner = EphemeralSigner; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/signer/fulcio/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/signer/fulcio/index.js new file mode 100644 index 0000000000000000000000000000000000000000..89a432548d2b42cbc850d75df7ad63f1373c47e7 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/signer/fulcio/index.js @@ -0,0 +1,87 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.FulcioSigner = exports.DEFAULT_FULCIO_URL = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const error_1 = require("../../error"); +const util_1 = require("../../util"); +const ca_1 = require("./ca"); +const ephemeral_1 = require("./ephemeral"); +exports.DEFAULT_FULCIO_URL = 'https://fulcio.sigstore.dev'; +// Signer implementation which can be used to decorate another signer +// with a Fulcio-issued signing certificate for the signer's public key. +// Must be instantiated with an identity provider which can provide a JWT +// which represents the identity to be bound to the signing certificate. +class FulcioSigner { + constructor(options) { + this.ca = new ca_1.CAClient({ + ...options, + fulcioBaseURL: options.fulcioBaseURL || /* istanbul ignore next */ exports.DEFAULT_FULCIO_URL, + }); + this.identityProvider = options.identityProvider; + this.keyHolder = options.keyHolder || new ephemeral_1.EphemeralSigner(); + } + async sign(data) { + // Retrieve identity token from the supplied identity provider + const identityToken = await this.getIdentityToken(); + // Extract challenge claim from OIDC token + let subject; + try { + subject = util_1.oidc.extractJWTSubject(identityToken); + } + catch (err) { + throw new error_1.InternalError({ + code: 'IDENTITY_TOKEN_PARSE_ERROR', + message: `invalid identity token: ${identityToken}`, + cause: err, + }); + } + // Construct challenge value by signing the subject claim + const challenge = await this.keyHolder.sign(Buffer.from(subject)); + if (challenge.key.$case !== 'publicKey') { + throw new error_1.InternalError({ + code: 'CA_CREATE_SIGNING_CERTIFICATE_ERROR', + message: 'unexpected format for signing key', + }); + } + // Create signing certificate + const certificates = await this.ca.createSigningCertificate(identityToken, challenge.key.publicKey, challenge.signature); + // Generate artifact signature + const signature = await this.keyHolder.sign(data); + // Specifically returning only the first certificate in the chain + // as the key. + return { + signature: signature.signature, + key: { + $case: 'x509Certificate', + certificate: certificates[0], + }, + }; + } + async getIdentityToken() { + try { + return await this.identityProvider.getToken(); + } + catch (err) { + throw new error_1.InternalError({ + code: 'IDENTITY_TOKEN_READ_ERROR', + message: 'error retrieving identity token', + cause: err, + }); + } + } +} +exports.FulcioSigner = FulcioSigner; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/signer/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/signer/index.js new file mode 100644 index 0000000000000000000000000000000000000000..e2087767b81c19adce7e65fe939ecf6af5e7a39f --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/signer/index.js @@ -0,0 +1,22 @@ +"use strict"; +/* istanbul ignore file */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.FulcioSigner = exports.DEFAULT_FULCIO_URL = void 0; +/* +Copyright 2023 The Sigstore Authors. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +var fulcio_1 = require("./fulcio"); +Object.defineProperty(exports, "DEFAULT_FULCIO_URL", { enumerable: true, get: function () { return fulcio_1.DEFAULT_FULCIO_URL; } }); +Object.defineProperty(exports, "FulcioSigner", { enumerable: true, get: function () { return fulcio_1.FulcioSigner; } }); diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/signer/signer.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/signer/signer.js new file mode 100644 index 0000000000000000000000000000000000000000..b92c54183375d95315e504765f6b5217f94a8d4c --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/signer/signer.js @@ -0,0 +1,17 @@ +"use strict"; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/types/fetch.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/types/fetch.js new file mode 100644 index 0000000000000000000000000000000000000000..c8ad2e549bdc6801e0d1c80b0308d4b9bd4985ce --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/types/fetch.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/util/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/util/index.js new file mode 100644 index 0000000000000000000000000000000000000000..436630cfbbf196a33d4c17dabca5b5d676a63a59 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/util/index.js @@ -0,0 +1,59 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
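+// (Editor's note) A hedged wiring sketch for the signer exports above,
+// assuming a CI runtime that can mint an OIDC token:
+//
+//   const { FulcioSigner, CIContextProvider } = require('@sigstore/sign');
+//   const signer = new FulcioSigner({
+//     identityProvider: new CIContextProvider('sigstore'),
+//   });
+//   const sig = await signer.sign(Buffer.from('artifact bytes'));
+//   // sig.key.$case === 'x509Certificate' -- the Fulcio-issued leaf cert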
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ua = exports.oidc = exports.pem = exports.json = exports.encoding = exports.dsse = exports.crypto = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +var core_1 = require("@sigstore/core"); +Object.defineProperty(exports, "crypto", { enumerable: true, get: function () { return core_1.crypto; } }); +Object.defineProperty(exports, "dsse", { enumerable: true, get: function () { return core_1.dsse; } }); +Object.defineProperty(exports, "encoding", { enumerable: true, get: function () { return core_1.encoding; } }); +Object.defineProperty(exports, "json", { enumerable: true, get: function () { return core_1.json; } }); +Object.defineProperty(exports, "pem", { enumerable: true, get: function () { return core_1.pem; } }); +exports.oidc = __importStar(require("./oidc")); +exports.ua = __importStar(require("./ua")); diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/util/oidc.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/util/oidc.js new file mode 100644 index 0000000000000000000000000000000000000000..a9a3b10d3f61adc54542a8117577aaf150ec08be --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/util/oidc.js @@ -0,0 +1,35 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.extractJWTSubject = extractJWTSubject; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const core_1 = require("@sigstore/core");
+function extractJWTSubject(jwt) {
+    const parts = jwt.split('.', 3);
+    const payload = JSON.parse(core_1.encoding.base64Decode(parts[1]));
+    if (payload.email) {
+        if (!payload.email_verified) {
+            throw new Error('JWT email not verified by issuer');
+        }
+        return payload.email;
+    }
+    if (payload.sub) {
+        return payload.sub;
+    }
+    else {
+        throw new Error('JWT subject not found');
+    }
+}
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/util/ua.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/util/ua.js
new file mode 100644
index 0000000000000000000000000000000000000000..b15ff2070fb9fc60689a116e3130b60f87224cec
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/util/ua.js
@@ -0,0 +1,32 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getUserAgent = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const os_1 = __importDefault(require("os"));
+// Format User-Agent: <product>/<product-version> (<system-information>)
+// source: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/User-Agent
+const getUserAgent = () => {
+    const packageVersion = require('../../package.json').version;
+    const nodeVersion = process.version;
+    const platformName = os_1.default.platform();
+    const archName = os_1.default.arch();
+    return `sigstore-js/${packageVersion} (Node ${nodeVersion}) (${platformName}/${archName})`;
+};
+exports.getUserAgent = getUserAgent;
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/witness/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/witness/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..72677c399caa7fb128702bb9c5329e48592380b3
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/witness/index.js
@@ -0,0 +1,24 @@
+"use strict";
+/* istanbul ignore file */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TSAWitness = exports.RekorWitness = exports.DEFAULT_REKOR_URL = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and +limitations under the License. +*/ +var tlog_1 = require("./tlog"); +Object.defineProperty(exports, "DEFAULT_REKOR_URL", { enumerable: true, get: function () { return tlog_1.DEFAULT_REKOR_URL; } }); +Object.defineProperty(exports, "RekorWitness", { enumerable: true, get: function () { return tlog_1.RekorWitness; } }); +var tsa_1 = require("./tsa"); +Object.defineProperty(exports, "TSAWitness", { enumerable: true, get: function () { return tsa_1.TSAWitness; } }); diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/witness/tlog/client.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/witness/tlog/client.js new file mode 100644 index 0000000000000000000000000000000000000000..22c895f2ca7edd9aef31f2d7882ce80739d83412 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/witness/tlog/client.js @@ -0,0 +1,61 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TLogClient = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const error_1 = require("../../error"); +const error_2 = require("../../external/error"); +const rekor_1 = require("../../external/rekor"); +class TLogClient { + constructor(options) { + this.fetchOnConflict = options.fetchOnConflict ?? 
false; + this.rekor = new rekor_1.Rekor({ + baseURL: options.rekorBaseURL, + retry: options.retry, + timeout: options.timeout, + }); + } + async createEntry(proposedEntry) { + let entry; + try { + entry = await this.rekor.createEntry(proposedEntry); + } + catch (err) { + // If the entry already exists, fetch it (if enabled) + if (entryExistsError(err) && this.fetchOnConflict) { + // Grab the UUID of the existing entry from the location header + /* istanbul ignore next */ + const uuid = err.location.split('/').pop() || ''; + try { + entry = await this.rekor.getEntry(uuid); + } + catch (err) { + (0, error_1.internalError)(err, 'TLOG_FETCH_ENTRY_ERROR', 'error fetching tlog entry'); + } + } + else { + (0, error_1.internalError)(err, 'TLOG_CREATE_ENTRY_ERROR', 'error creating tlog entry'); + } + } + return entry; + } +} +exports.TLogClient = TLogClient; +function entryExistsError(value) { + return (value instanceof error_2.HTTPError && + value.statusCode === 409 && + value.location !== undefined); +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/witness/tlog/entry.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/witness/tlog/entry.js new file mode 100644 index 0000000000000000000000000000000000000000..69a3b477e54429c7edeb7ded8a1c9a1a48b4f660 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/witness/tlog/entry.js @@ -0,0 +1,140 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toProposedEntry = toProposedEntry; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/
+const bundle_1 = require("@sigstore/bundle");
+const util_1 = require("../../util");
+const SHA256_ALGORITHM = 'sha256';
+function toProposedEntry(content, publicKey, 
+// TODO: Remove this parameter once we have completely switched to 'dsse' entries
+entryType = 'dsse') {
+    switch (content.$case) {
+        case 'dsseEnvelope':
+            // TODO: Remove this conditional once we have completely ditched "intoto" entries
+            if (entryType === 'intoto') {
+                return toProposedIntotoEntry(content.dsseEnvelope, publicKey);
+            }
+            return toProposedDSSEEntry(content.dsseEnvelope, publicKey);
+        case 'messageSignature':
+            return toProposedHashedRekordEntry(content.messageSignature, publicKey);
+    }
+}
+// Returns a properly formatted Rekor "hashedrekord" entry for the given digest
+// and signature
+function toProposedHashedRekordEntry(messageSignature, publicKey) {
+    const hexDigest = messageSignature.messageDigest.digest.toString('hex');
+    const b64Signature = messageSignature.signature.toString('base64');
+    const b64Key = util_1.encoding.base64Encode(publicKey);
+    return {
+        apiVersion: '0.0.1',
+        kind: 'hashedrekord',
+        spec: {
+            data: {
+                hash: {
+                    algorithm: SHA256_ALGORITHM,
+                    value: hexDigest,
+                },
+            },
+            signature: {
+                content: b64Signature,
+                publicKey: {
+                    content: b64Key,
+                },
+            },
+        },
+    };
+}
+// Returns a properly formatted Rekor "dsse" entry for the given DSSE envelope
+// and signature
+function toProposedDSSEEntry(envelope, publicKey) {
+    const envelopeJSON = JSON.stringify((0, bundle_1.envelopeToJSON)(envelope));
+    const encodedKey = util_1.encoding.base64Encode(publicKey);
+    return {
+        apiVersion: '0.0.1',
+        kind: 'dsse',
+        spec: {
+            proposedContent: {
+                envelope: envelopeJSON,
+                verifiers: [encodedKey],
+            },
+        },
+    };
+}
+// Returns a properly formatted Rekor "intoto" entry for the given DSSE
+// envelope and signature
+function toProposedIntotoEntry(envelope, publicKey) {
+    // Calculate the value for the payloadHash field in the Rekor entry
+    const payloadHash = util_1.crypto
+        .digest(SHA256_ALGORITHM, envelope.payload)
+        .toString('hex');
+    // Calculate the value for the hash field in the Rekor entry
+    const envelopeHash = calculateDSSEHash(envelope, publicKey);
+    // Collect values for re-creating the DSSE envelope.
+    // Double-encode payload and signature because that's what Rekor expects
+    const payload = util_1.encoding.base64Encode(envelope.payload.toString('base64'));
+    const sig = util_1.encoding.base64Encode(envelope.signatures[0].sig.toString('base64'));
+    const keyid = envelope.signatures[0].keyid;
+    const encodedKey = util_1.encoding.base64Encode(publicKey);
+    // Create the envelope portion of the entry. Note the inclusion of the
+    // publicKey in the signature struct is not a standard part of a DSSE
+    // envelope, but is required by Rekor.
+    const dsse = {
+        payloadType: envelope.payloadType,
+        payload: payload,
+        signatures: [{ sig, publicKey: encodedKey }],
+    };
+    // If the keyid is an empty string, Rekor seems to remove it altogether. We
+    // need to do the same here so that we can properly recreate the entry for
+    // verification.
+    if (keyid.length > 0) {
+        dsse.signatures[0].keyid = keyid;
+    }
+    return {
+        apiVersion: '0.0.2',
+        kind: 'intoto',
+        spec: {
+            content: {
+                envelope: dsse,
+                hash: { algorithm: SHA256_ALGORITHM, value: envelopeHash },
+                payloadHash: { algorithm: SHA256_ALGORITHM, value: payloadHash },
+            },
+        },
+    };
+}
+// Calculates the hash of a DSSE envelope for inclusion in a Rekor entry.
+// There is no standard way to do this, so the scheme we're using is as
+// follows:
+//  * payload is base64 encoded
+//  * signature is base64 encoded (only the first signature is used)
+//  * keyid is included ONLY if it is NOT an empty string
+//  * The resulting JSON is canonicalized and hashed to a hex string
+function calculateDSSEHash(envelope, publicKey) {
+    const dsse = {
+        payloadType: envelope.payloadType,
+        payload: envelope.payload.toString('base64'),
+        signatures: [
+            { sig: envelope.signatures[0].sig.toString('base64'), publicKey },
+        ],
+    };
+    // If the keyid is an empty string, Rekor seems to remove it altogether.
+    if (envelope.signatures[0].keyid.length > 0) {
+        dsse.signatures[0].keyid = envelope.signatures[0].keyid;
+    }
+    return util_1.crypto
+        .digest(SHA256_ALGORITHM, util_1.json.canonicalize(dsse))
+        .toString('hex');
+}
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/witness/tlog/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/witness/tlog/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..6197b09d4cdd9a5f10824de15c1c0fb62be5f680
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/witness/tlog/index.js
@@ -0,0 +1,82 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.RekorWitness = exports.DEFAULT_REKOR_URL = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const util_1 = require("../../util");
+const client_1 = require("./client");
+const entry_1 = require("./entry");
+exports.DEFAULT_REKOR_URL = 'https://rekor.sigstore.dev';
+class RekorWitness {
+    constructor(options) {
+        this.entryType = options.entryType;
+        this.tlog = new client_1.TLogClient({
+            ...options,
+            rekorBaseURL: options.rekorBaseURL || /* istanbul ignore next */ exports.DEFAULT_REKOR_URL,
+        });
+    }
+    async testify(content, publicKey) {
+        const proposedEntry = (0, entry_1.toProposedEntry)(content, publicKey, this.entryType);
+        const entry = await this.tlog.createEntry(proposedEntry);
+        return toTransparencyLogEntry(entry);
+    }
+}
+exports.RekorWitness = RekorWitness;
+function toTransparencyLogEntry(entry) {
+    const logID = Buffer.from(entry.logID, 'hex');
+    // Parse entry body so we can extract the kind and version.
+    const bodyJSON = util_1.encoding.base64Decode(entry.body);
+    const entryBody = JSON.parse(bodyJSON);
+    const promise = entry?.verification?.signedEntryTimestamp
+        ? inclusionPromise(entry.verification.signedEntryTimestamp)
+        : undefined;
+    const proof = entry?.verification?.inclusionProof
+        ?
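+// (Editor's note) Both verification fields on a Rekor entry are optional
+// here: the signed entry timestamp (the inclusion promise) is typically
+// returned immediately, while the Merkle inclusion proof may be absent until
+// the entry has been integrated into the log, so each is mapped only when
+// present.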
inclusionProof(entry.verification.inclusionProof) + : undefined; + const tlogEntry = { + logIndex: entry.logIndex.toString(), + logId: { + keyId: logID, + }, + integratedTime: entry.integratedTime.toString(), + kindVersion: { + kind: entryBody.kind, + version: entryBody.apiVersion, + }, + inclusionPromise: promise, + inclusionProof: proof, + canonicalizedBody: Buffer.from(entry.body, 'base64'), + }; + return { + tlogEntries: [tlogEntry], + }; +} +function inclusionPromise(promise) { + return { + signedEntryTimestamp: Buffer.from(promise, 'base64'), + }; +} +function inclusionProof(proof) { + return { + logIndex: proof.logIndex.toString(), + treeSize: proof.treeSize.toString(), + rootHash: Buffer.from(proof.rootHash, 'hex'), + hashes: proof.hashes.map((h) => Buffer.from(h, 'hex')), + checkpoint: { + envelope: proof.checkpoint, + }, + }; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/witness/tsa/client.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/witness/tsa/client.js new file mode 100644 index 0000000000000000000000000000000000000000..754de3748dbb3696c1c28f8f923b9ed493ed32ba --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/witness/tsa/client.js @@ -0,0 +1,46 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TSAClient = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const error_1 = require("../../error"); +const tsa_1 = require("../../external/tsa"); +const util_1 = require("../../util"); +const SHA256_ALGORITHM = 'sha256'; +class TSAClient { + constructor(options) { + this.tsa = new tsa_1.TimestampAuthority({ + baseURL: options.tsaBaseURL, + retry: options.retry, + timeout: options.timeout, + }); + } + async createTimestamp(signature) { + const request = { + artifactHash: util_1.crypto + .digest(SHA256_ALGORITHM, signature) + .toString('base64'), + hashAlgorithm: SHA256_ALGORITHM, + }; + try { + return await this.tsa.createTimestamp(request); + } + catch (err) { + (0, error_1.internalError)(err, 'TSA_CREATE_TIMESTAMP_ERROR', 'error creating timestamp'); + } + } +} +exports.TSAClient = TSAClient; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/witness/tsa/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/witness/tsa/index.js new file mode 100644 index 0000000000000000000000000000000000000000..d4f5c7c859d1068cdf9f2793ab64dabc92efe7a9 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/witness/tsa/index.js @@ -0,0 +1,44 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TSAWitness = void 0; +/* +Copyright 2023 The Sigstore Authors. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const client_1 = require("./client"); +class TSAWitness { + constructor(options) { + this.tsa = new client_1.TSAClient({ + tsaBaseURL: options.tsaBaseURL, + retry: options.retry, + timeout: options.timeout, + }); + } + async testify(content) { + const signature = extractSignature(content); + const timestamp = await this.tsa.createTimestamp(signature); + return { + rfc3161Timestamps: [{ signedTimestamp: timestamp }], + }; + } +} +exports.TSAWitness = TSAWitness; +function extractSignature(content) { + switch (content.$case) { + case 'dsseEnvelope': + return content.dsseEnvelope.signatures[0].sig; + case 'messageSignature': + return content.messageSignature.signature; + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/witness/witness.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/witness/witness.js new file mode 100644 index 0000000000000000000000000000000000000000..c8ad2e549bdc6801e0d1c80b0308d4b9bd4985ce --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/dist/witness/witness.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/package.json new file mode 100644 index 0000000000000000000000000000000000000000..a24f8e87ff349436935cd78a39c8db7ed1ed8b64 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/sign/package.json @@ -0,0 +1,46 @@ +{ + "name": "@sigstore/sign", + "version": "4.0.1", + "description": "Sigstore signing library", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "clean": "shx rm -rf dist *.tsbuildinfo", + "build": "tsc --build", + "test": "jest" + }, + "files": [ + "dist" + ], + "author": "bdehamer@github.com", + "license": "Apache-2.0", + "repository": { + "type": "git", + "url": "git+https://github.com/sigstore/sigstore-js.git" + }, + "bugs": { + "url": "https://github.com/sigstore/sigstore-js/issues" + }, + "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/sign#readme", + "publishConfig": { + "provenance": true + }, + "devDependencies": { + "@sigstore/jest": "^0.0.0", + "@sigstore/mock": "^0.11.0", + "@sigstore/rekor-types": "^4.0.0", + "@types/make-fetch-happen": "^10.0.4", + "@types/promise-retry": "^1.1.6" + }, + "dependencies": { + "@sigstore/bundle": "^4.0.0", + "@sigstore/core": "^3.0.0", + "@sigstore/protobuf-specs": "^0.5.0", + "make-fetch-happen": "^15.0.2", + "proc-log": "^5.0.0", + "promise-retry": "^2.0.1" + }, + "engines": { + "node": "^20.17.0 || >=22.9.0" + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/tuf/LICENSE 
b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/tuf/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..e9e7c1679a09dfcb0793682d99f5129e206a8abd --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/tuf/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2023 The Sigstore Authors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/tuf/dist/appdata.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/tuf/dist/appdata.js new file mode 100644 index 0000000000000000000000000000000000000000..06a8143e70da2fb45da2fadc1a41df8d54461570 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/tuf/dist/appdata.js @@ -0,0 +1,43 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.appDataPath = appDataPath; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const os_1 = __importDefault(require("os")); +const path_1 = __importDefault(require("path")); +function appDataPath(name) { + const homedir = os_1.default.homedir(); + switch (process.platform) { + /* istanbul ignore next */ + case 'darwin': { + const appSupport = path_1.default.join(homedir, 'Library', 'Application Support'); + return path_1.default.join(appSupport, name); + } + /* istanbul ignore next */ + case 'win32': { + const localAppData = process.env.LOCALAPPDATA || path_1.default.join(homedir, 'AppData', 'Local'); + return path_1.default.join(localAppData, name, 'Data'); + } + /* istanbul ignore next */ + default: { + const localData = process.env.XDG_DATA_HOME || path_1.default.join(homedir, '.local', 'share'); + return path_1.default.join(localData, name); + } + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/tuf/dist/client.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/tuf/dist/client.js new file mode 100644 index 0000000000000000000000000000000000000000..2931a0a6b3ab50011933e83593283adea5f32d62 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/tuf/dist/client.js @@ -0,0 +1,113 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TUFClient = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const fs_1 = __importDefault(require("fs")); +const path_1 = __importDefault(require("path")); +const tuf_js_1 = require("tuf-js"); +const _1 = require("."); +const target_1 = require("./target"); +const TARGETS_DIR_NAME = 'targets'; +class TUFClient { + constructor(options) { + const url = new URL(options.mirrorURL); + const repoName = encodeURIComponent(url.host + url.pathname.replace(/\/$/, '')); + const cachePath = path_1.default.join(options.cachePath, repoName); + initTufCache(cachePath); + seedCache({ + cachePath, + mirrorURL: options.mirrorURL, + tufRootPath: options.rootPath, + forceInit: options.forceInit, + }); + this.updater = initClient({ + mirrorURL: options.mirrorURL, + cachePath, + forceCache: options.forceCache, + retry: options.retry, + timeout: options.timeout, + }); + } + async refresh() { + return this.updater.refresh(); + } + getTarget(targetName) { + return (0, target_1.readTarget)(this.updater, targetName); + } +} +exports.TUFClient = TUFClient; +// Initializes the TUF cache directory structure including the initial +// root.json file. If the cache directory does not exist, it will be +// created. If the targets directory does not exist, it will be created. +// If the root.json file does not exist, it will be copied from the +// rootPath argument. +function initTufCache(cachePath) { + const targetsPath = path_1.default.join(cachePath, TARGETS_DIR_NAME); + if (!fs_1.default.existsSync(cachePath)) { + fs_1.default.mkdirSync(cachePath, { recursive: true }); + } + /* istanbul ignore else */ + if (!fs_1.default.existsSync(targetsPath)) { + fs_1.default.mkdirSync(targetsPath); + } +} +// Populates the TUF cache with the initial root.json file. If the root.json +// file does not exist (or we're forcing re-initialization), copy it from either +// the rootPath argument or from one of the repo seeds. +function seedCache({ cachePath, mirrorURL, tufRootPath, forceInit, }) { + const cachedRootPath = path_1.default.join(cachePath, 'root.json'); + // If the root.json file does not exist (or we're forcing re-initialization), + // populate it either from the supplied rootPath or from one of the repo seeds. 
+ /* istanbul ignore else */ + if (!fs_1.default.existsSync(cachedRootPath) || forceInit) { + if (tufRootPath) { + fs_1.default.copyFileSync(tufRootPath, cachedRootPath); + } + else { + const seeds = require('../seeds.json'); + const repoSeed = seeds[mirrorURL]; + if (!repoSeed) { + throw new _1.TUFError({ + code: 'TUF_INIT_CACHE_ERROR', + message: `No root.json found for mirror: ${mirrorURL}`, + }); + } + fs_1.default.writeFileSync(cachedRootPath, Buffer.from(repoSeed['root.json'], 'base64')); + // Copy any seed targets into the cache + Object.entries(repoSeed.targets).forEach(([targetName, target]) => { + fs_1.default.writeFileSync(path_1.default.join(cachePath, TARGETS_DIR_NAME, targetName), Buffer.from(target, 'base64')); + }); + } + } +} +function initClient(options) { + const config = { + fetchTimeout: options.timeout, + fetchRetry: options.retry, + }; + return new tuf_js_1.Updater({ + metadataBaseUrl: options.mirrorURL, + targetBaseUrl: `${options.mirrorURL}/targets`, + metadataDir: options.cachePath, + targetDir: path_1.default.join(options.cachePath, TARGETS_DIR_NAME), + forceCache: options.forceCache, + config, + }); +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/tuf/dist/error.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/tuf/dist/error.js new file mode 100644 index 0000000000000000000000000000000000000000..e13971b289ff2eff3707e5f27ed6a78b9189323a --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/tuf/dist/error.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TUFError = void 0; +class TUFError extends Error { + constructor({ code, message, cause, }) { + super(message); + this.code = code; + this.cause = cause; + this.name = this.constructor.name; + } +} +exports.TUFError = TUFError; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/tuf/dist/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/tuf/dist/index.js new file mode 100644 index 0000000000000000000000000000000000000000..2af5de93ec5d2f7d4b58db686ce8e32054b41f4f --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/tuf/dist/index.js @@ -0,0 +1,56 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TUFError = exports.DEFAULT_MIRROR_URL = void 0; +exports.getTrustedRoot = getTrustedRoot; +exports.initTUF = initTUF; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +const protobuf_specs_1 = require("@sigstore/protobuf-specs"); +const appdata_1 = require("./appdata"); +const client_1 = require("./client"); +exports.DEFAULT_MIRROR_URL = 'https://tuf-repo-cdn.sigstore.dev'; +const DEFAULT_CACHE_DIR = 'sigstore-js'; +const DEFAULT_RETRY = { retries: 2 }; +const DEFAULT_TIMEOUT = 5000; +const TRUSTED_ROOT_TARGET = 'trusted_root.json'; +async function getTrustedRoot( +/* istanbul ignore next */ +options = {}) { + const client = createClient(options); + const trustedRoot = await client.getTarget(TRUSTED_ROOT_TARGET); + return protobuf_specs_1.TrustedRoot.fromJSON(JSON.parse(trustedRoot)); +} +async function initTUF( +/* istanbul ignore next */ +options = {}) { + const client = createClient(options); + return client.refresh().then(() => client); +} +// Create a TUF client with default options +function createClient(options) { + /* istanbul ignore next */ + return new client_1.TUFClient({ + cachePath: options.cachePath || (0, appdata_1.appDataPath)(DEFAULT_CACHE_DIR), + rootPath: options.rootPath, + mirrorURL: options.mirrorURL || exports.DEFAULT_MIRROR_URL, + retry: options.retry ?? DEFAULT_RETRY, + timeout: options.timeout ?? DEFAULT_TIMEOUT, + forceCache: options.forceCache ?? false, + forceInit: options.forceInit ?? options.force ?? false, + }); +} +var error_1 = require("./error"); +Object.defineProperty(exports, "TUFError", { enumerable: true, get: function () { return error_1.TUFError; } }); diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/tuf/dist/target.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/tuf/dist/target.js new file mode 100644 index 0000000000000000000000000000000000000000..5c6675bdfbf5fe5ff117b7f0e4b2d951a9dddc0d --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/tuf/dist/target.js @@ -0,0 +1,79 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.readTarget = readTarget; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const fs_1 = __importDefault(require("fs")); +const error_1 = require("./error"); +// Downloads and returns the specified target from the provided TUF Updater. +async function readTarget(tuf, targetPath) { + const path = await getTargetPath(tuf, targetPath); + return new Promise((resolve, reject) => { + fs_1.default.readFile(path, 'utf-8', (err, data) => { + if (err) { + reject(new error_1.TUFError({ + code: 'TUF_READ_TARGET_ERROR', + message: `error reading target ${path}`, + cause: err, + })); + } + else { + resolve(data); + } + }); + }); +} +// Returns the local path to the specified target. If the target is not yet +// cached locally, the provided TUF Updater will be used to download and +// cache the target. 
+async function getTargetPath(tuf, target) {
+    let targetInfo;
+    try {
+        targetInfo = await tuf.getTargetInfo(target);
+    }
+    catch (err) {
+        throw new error_1.TUFError({
+            code: 'TUF_REFRESH_METADATA_ERROR',
+            message: 'error refreshing TUF metadata',
+            cause: err,
+        });
+    }
+    if (!targetInfo) {
+        throw new error_1.TUFError({
+            code: 'TUF_FIND_TARGET_ERROR',
+            message: `target ${target} not found`,
+        });
+    }
+    let path = await tuf.findCachedTarget(targetInfo);
+    // An empty path here means the target has not been cached locally, or is
+    // out of date. In either case, we need to download it.
+    if (!path) {
+        try {
+            path = await tuf.downloadTarget(targetInfo);
+        }
+        catch (err) {
+            throw new error_1.TUFError({
+                code: 'TUF_DOWNLOAD_TARGET_ERROR',
+                message: `error downloading target ${target}`,
+                cause: err,
+            });
+        }
+    }
+    return path;
+}
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/tuf/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/tuf/package.json
new file mode 100644
index 0000000000000000000000000000000000000000..42dad938c28084c115546422548e31daf0e7db3c
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/tuf/package.json
@@ -0,0 +1,41 @@
+{
+  "name": "@sigstore/tuf",
+  "version": "4.0.0",
+  "description": "Client for the Sigstore TUF repository",
+  "main": "dist/index.js",
+  "types": "dist/index.d.ts",
+  "scripts": {
+    "clean": "shx rm -rf dist *.tsbuildinfo",
+    "build": "tsc --build",
+    "test": "jest"
+  },
+  "files": [
+    "dist",
+    "seeds.json"
+  ],
+  "author": "bdehamer@github.com",
+  "license": "Apache-2.0",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/sigstore/sigstore-js.git"
+  },
+  "bugs": {
+    "url": "https://github.com/sigstore/sigstore-js/issues"
+  },
+  "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/tuf#readme",
+  "publishConfig": {
+    "provenance": true
+  },
+  "devDependencies": {
+    "@sigstore/jest": "^0.0.0",
+    "@tufjs/repo-mock": "^3.0.1",
+    "@types/make-fetch-happen": "^10.0.4"
+  },
+  "dependencies": {
+    "@sigstore/protobuf-specs": "^0.5.0",
+    "tuf-js": "^4.0.0"
+  },
+  "engines": {
+    "node": "^20.17.0 || >=22.9.0"
+  }
+}
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/tuf/seeds.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/tuf/seeds.json
new file mode 100644
index 0000000000000000000000000000000000000000..6d48f33afe7003291ec86bcae1c63517166778af
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/tuf/seeds.json
@@ -0,0 +1 @@
+{"https://tuf-repo-cdn.sigstore.dev":{"root.json":"ewogInNpZ25hdHVyZXMiOiBbCiAgewogICAia2V5aWQiOiAiNmYyNjAwODlkNTkyM2RhZjIwMTY2Y2E2NTdjNTQzYWY2MTgzNDZhYjk3MTg4NGE5OTk2MmIwMTk4OGJiZTBjMyIsCiAgICJzaWciOiAiIgogIH0sCiAgewogICAia2V5aWQiOiAiZTcxYTU0ZDU0MzgzNWJhODZhZGFkOTQ2MDM3OWM3NjQxZmI4NzI2ZDE2NGVhNzY2ODAxYTFjNTIyYWJhN2VhMiIsCiAgICJzaWciOiAiMzA0NTAyMjEwMGJiZGRkNDY0ZjgwNjZjZWI4OGJhNzg3Mzc1YzEyY2Q2MzMwNjgwZTA4YzI5MTA3MDNlNjUzOGM3MWNjNzlhZDIwMjIwNTE5MGIwNmU0NTM3ZmU5NjFiM2VmODFmZTY4ZWRjZDAwODljMTlmOTE5YWZlZDQyM2I5YWFmZDcwMDY0MTE1MyIKICB9LAogIHsKICAgImtleWlkIjogIjIyZjRjYWVjNmQ4ZTZmOTU1NWFmNjZiM2Q0YzNjYjA2YTNiYjIzZmRjN2UzOWM5MTZjNjFmNDYyZTZmNTJiMDYiLAogICAic2lnIjogIjMwNDQwMjIwNjkzMDZjZDUyNTdmNzMyYTc0MGMxYWZlNjBhOGU0MzNjNWRlNThlYWZlYWRiZTk5YzMzNmM5YzcxZDE5OGNmODAyMjAwZDc3Mzk1M2FlN2RiYzQ4ZDNlNWJhZDlhNmY2NGJhZmZmMTk2YjdlMmFkNGE1MmExOTUxOTM2N2Q0N2RjMDQyIgogIH0sCiAgewogICAia2V5aWQiOiAiNjE2NDM4MzgxMjViNDQwYjQwZGI2OTQyZjVjYjVhMzFjMGRjMDQzNjgzMTZlYjJhYWE1OGI5NTkwNGE1ODIyMiIsCiAgICJzaWciOiAiMzA0NDAyMjA0ZDIxYTJlYzgwZGY2NmU2MWY2ZmUyOTEyOTUxZGM0N2RmODM2MDM2ZjhjMGFiMTA4MTZkMzc1ZTcxZGJmNzllMDIyMDU0N2FkY2UxYWZkZjA0ZTY3OTRlZmEyMDNkZDUyNjRjNmY3ZTBlZjc4ZTU3ZmU5MzRiMGQyNmNiOTk0ZWVjNzYiCiAgfSwKICB7CiAgICJrZXlpZCI6ICJhNjg3ZTViZjRmYWI4MmIwZWU1OGQ0NmUwNWM5NTM1MTQ1YTJjOWFmYjQ1OGY0M2Q0MmI0NWNhMGZkY2UyYTcwIiwKICAgInNpZyI6ICIzMDQ1MDIyMDYwODI2NDk2NTU3MTQ0ZWIxNjQ5ODkzZWQ1ZjZmNGVhNTQ1MzZmZWIwY2E4MmY4Yjg5YWU2NDFiZTM5NzQzZTUwMjIxMDBhZDcxMThiNWU5ZDQ4MzczMjYyMDZlNDEyZmM2ZGEyOTk5OTI1ZDExMDMyOGE3YzE2NmIwNmM2MjQzMzZjOTNmIgogIH0sCiAgewogICAia2V5aWQiOiAiMTgzZTY0ZjM3NjcwZGMxM2NhMGQyODk5NWEzMDUzZjM3NDA5NTRkZGNlNDQzMjFhNDFlNDY1MzRjZjQ0ZTYzMiIsCiAgICJzaWciOiAiMzA0NjAyMjEwMGQ4MTc5NDM5YzJlNzNlYjBjMTczM2FiZWU3ZmFmODMyZGNhZWE3MjYzZWRjYjQ5MTk4OTFjM2EyNDdmMDU5MjMwMjIxMDBlMWE0MzdlMDc5N2U4MDNmOWI3MmRjOWQyZDkyMTU1YjBhMjI3MGMyNGVmZGQ1ZjRiM2E1ZDhmMGIwZjQzMWE3IgogIH0KIF0sCiAic2lnbmVkIjogewogICJfdHlwZSI6ICJyb290IiwKICAiY29uc2lzdGVudF9zbmFwc2hvdCI6IHRydWUsCiAgImV4cGlyZXMiOiAiMjAyNi0wMS0yMlQxMzowNTo1OVoiLAogICJrZXlzIjogewogICAiMGM4NzQzMmMzYmYwOWZkOTkxODlmZGMzMmZhNWVhZWRmNGU0YTVmYWM3YmFiNzNmYTA0YTJlMGZjNjRhZjZmNSI6IHsKICAgICJrZXlpZF9oYXNoX2FsZ29yaXRobXMiOiBbCiAgICAgInNoYTI1NiIsCiAgICAgInNoYTUxMiIKICAgIF0sCiAgICAia2V5dHlwZSI6ICJlY2RzYSIsCiAgICAia2V5dmFsIjogewogICAgICJwdWJsaWMiOiAiLS0tLS1CRUdJTiBQVUJMSUMgS0VZLS0tLS1cbk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRVdSaUdyNStqKzNKNVNzSCtadHI1bkUySDJ3TzdcbkJWK25PM3M5M2dMY2ExOHFUT3pIWTFvV3lBR0R5a01Tc0dUVUJTdDlEK0FuMEtmS3NEMm1mU000MlE9PVxuLS0tLS1FTkQgUFVCTElDIEtFWS0tLS0tXG4iCiAgICB9LAogICAgInNjaGVtZSI6ICJlY2RzYS1zaGEyLW5pc3RwMjU2IiwKICAgICJ4LXR1Zi1vbi1jaS1vbmxpbmUtdXJpIjogImdjcGttczpwcm9qZWN0cy9zaWdzdG9yZS1yb290LXNpZ25pbmcvbG9jYXRpb25zL2dsb2JhbC9rZXlSaW5ncy9yb290L2NyeXB0b0tleXMvdGltZXN0YW1wL2NyeXB0b0tleVZlcnNpb25zLzEiCiAgIH0sCiAgICIxODNlNjRmMzc2NzBkYzEzY2EwZDI4OTk1YTMwNTNmMzc0MDk1NGRkY2U0NDMyMWE0MWU0NjUzNGNmNDRlNjMyIjogewogICAgImtleXR5cGUiOiAiZWNkc2EiLAogICAgImtleXZhbCI6IHsKICAgICAicHVibGljIjogIi0tLS0tQkVHSU4gUFVCTElDIEtFWS0tLS0tXG5NRmt3RXdZSEtvWkl6ajBDQVFZSUtvWkl6ajBEQVFjRFFnQUVNeHBQT0pDSVo1b3RHNDEwNmZHSnNlRVFpM1Y5XG5wa01ZUTR1eVY5VGoxTTdXSFhJeUxHK2prZnZ1RzBnbFExSlpiUlpaQlYzZ0FSNHNvamRHSElTZW93PT1cbi0tLS0tRU5EIFBVQkxJQyBLRVktLS0tLVxuIgogICAgfSwKICAgICJzY2hlbWUiOiAiZWNkc2Etc2hhMi1uaXN0cDI1NiIsCiAgICAieC10dWYtb24tY2kta2V5b3duZXIiOiAiQGxhbmNlIgogICB9LAogICAiMjJmNGNhZWM2ZDhlNmY5NTU1YWY2NmIzZDRjM2NiMDZhM2JiMjNmZGM3ZTM5YzkxNmM2MWY0NjJlNmY1MmIwNiI6IHsKICAgICJrZXlpZF9oYXNoX2FsZ29yaXRobXMiOiBbCiAgICAgInNoYTI1NiIsCiAgICAgInNoYTUxMiIKICAgIF0sCiAgICAia2V5dHlwZSI6ICJlY2RzYSIsCiAgICAia2V5dmFsIjogewogICAgICJwdWJsaWMiOiAiLS0tLS1
CRUdJTiBQVUJMSUMgS0VZLS0tLS1cbk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRXpCelZPbUhDUG9qTVZMU0kzNjRXaWlWOE5QckRcbjZJZ1J4Vmxpc2t6L3YreTNKRVI1bWNWR2NPTmxpRGNXTUM1SjJsZkhtalBOUGhiNEg3eG04THpmU0E9PVxuLS0tLS1FTkQgUFVCTElDIEtFWS0tLS0tXG4iCiAgICB9LAogICAgInNjaGVtZSI6ICJlY2RzYS1zaGEyLW5pc3RwMjU2IiwKICAgICJ4LXR1Zi1vbi1jaS1rZXlvd25lciI6ICJAc2FudGlhZ290b3JyZXMiCiAgIH0sCiAgICI2MTY0MzgzODEyNWI0NDBiNDBkYjY5NDJmNWNiNWEzMWMwZGMwNDM2ODMxNmViMmFhYTU4Yjk1OTA0YTU4MjIyIjogewogICAgImtleWlkX2hhc2hfYWxnb3JpdGhtcyI6IFsKICAgICAic2hhMjU2IiwKICAgICAic2hhNTEyIgogICAgXSwKICAgICJrZXl0eXBlIjogImVjZHNhIiwKICAgICJrZXl2YWwiOiB7CiAgICAgInB1YmxpYyI6ICItLS0tLUJFR0lOIFBVQkxJQyBLRVktLS0tLVxuTUZrd0V3WUhLb1pJemowQ0FRWUlLb1pJemowREFRY0RRZ0FFaW5pa1NzQVFtWWtOZUg1ZVlxL0NuSXpMYWFjT1xueGxTYWF3UURPd3FLeS90Q3F4cTV4eFBTSmMyMUs0V0loczlHeU9rS2Z6dWVZM0dJTHpjTUpaNGNXdz09XG4tLS0tLUVORCBQVUJMSUMgS0VZLS0tLS1cbiIKICAgIH0sCiAgICAic2NoZW1lIjogImVjZHNhLXNoYTItbmlzdHAyNTYiLAogICAgIngtdHVmLW9uLWNpLWtleW93bmVyIjogIkBib2JjYWxsYXdheSIKICAgfSwKICAgImE2ODdlNWJmNGZhYjgyYjBlZTU4ZDQ2ZTA1Yzk1MzUxNDVhMmM5YWZiNDU4ZjQzZDQyYjQ1Y2EwZmRjZTJhNzAiOiB7CiAgICAia2V5aWRfaGFzaF9hbGdvcml0aG1zIjogWwogICAgICJzaGEyNTYiLAogICAgICJzaGE1MTIiCiAgICBdLAogICAgImtleXR5cGUiOiAiZWNkc2EiLAogICAgImtleXZhbCI6IHsKICAgICAicHVibGljIjogIi0tLS0tQkVHSU4gUFVCTElDIEtFWS0tLS0tXG5NRmt3RXdZSEtvWkl6ajBDQVFZSUtvWkl6ajBEQVFjRFFnQUUwZ2hyaDkyTHcxWXIzaWRHVjVXcUN0TURCOEN4XG4rRDhoZEM0dzJaTE5JcGxWUm9WR0xza1lhM2doZU15T2ppSjhrUGkxNWFRMi8vN1Arb2o3VXZKUEd3PT1cbi0tLS0tRU5EIFBVQkxJQyBLRVktLS0tLVxuIgogICAgfSwKICAgICJzY2hlbWUiOiAiZWNkc2Etc2hhMi1uaXN0cDI1NiIsCiAgICAieC10dWYtb24tY2kta2V5b3duZXIiOiAiQGpvc2h1YWdsIgogICB9LAogICAiZTcxYTU0ZDU0MzgzNWJhODZhZGFkOTQ2MDM3OWM3NjQxZmI4NzI2ZDE2NGVhNzY2ODAxYTFjNTIyYWJhN2VhMiI6IHsKICAgICJrZXlpZF9oYXNoX2FsZ29yaXRobXMiOiBbCiAgICAgInNoYTI1NiIsCiAgICAgInNoYTUxMiIKICAgIF0sCiAgICAia2V5dHlwZSI6ICJlY2RzYSIsCiAgICAia2V5dmFsIjogewogICAgICJwdWJsaWMiOiAiLS0tLS1CRUdJTiBQVUJMSUMgS0VZLS0tLS1cbk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRUVYc3ozU1pYRmI4ak1WNDJqNnBKbHlqYmpSOEtcbk4zQndvY2V4cTZMTUliNXFzV0tPUXZMTjE2TlVlZkxjNEhzd09vdW1Sc1ZWYWFqU3BRUzZmb2JrUnc9PVxuLS0tLS1FTkQgUFVCTElDIEtFWS0tLS0tXG4iCiAgICB9LAogICAgInNjaGVtZSI6ICJlY2RzYS1zaGEyLW5pc3RwMjU2IiwKICAgICJ4LXR1Zi1vbi1jaS1rZXlvd25lciI6ICJAbW5tNjc4IgogICB9CiAgfSwKICAicm9sZXMiOiB7CiAgICJyb290IjogewogICAgImtleWlkcyI6IFsKICAgICAiZTcxYTU0ZDU0MzgzNWJhODZhZGFkOTQ2MDM3OWM3NjQxZmI4NzI2ZDE2NGVhNzY2ODAxYTFjNTIyYWJhN2VhMiIsCiAgICAgIjIyZjRjYWVjNmQ4ZTZmOTU1NWFmNjZiM2Q0YzNjYjA2YTNiYjIzZmRjN2UzOWM5MTZjNjFmNDYyZTZmNTJiMDYiLAogICAgICI2MTY0MzgzODEyNWI0NDBiNDBkYjY5NDJmNWNiNWEzMWMwZGMwNDM2ODMxNmViMmFhYTU4Yjk1OTA0YTU4MjIyIiwKICAgICAiYTY4N2U1YmY0ZmFiODJiMGVlNThkNDZlMDVjOTUzNTE0NWEyYzlhZmI0NThmNDNkNDJiNDVjYTBmZGNlMmE3MCIsCiAgICAgIjE4M2U2NGYzNzY3MGRjMTNjYTBkMjg5OTVhMzA1M2YzNzQwOTU0ZGRjZTQ0MzIxYTQxZTQ2NTM0Y2Y0NGU2MzIiCiAgICBdLAogICAgInRocmVzaG9sZCI6IDMKICAgfSwKICAgInNuYXBzaG90IjogewogICAgImtleWlkcyI6IFsKICAgICAiMGM4NzQzMmMzYmYwOWZkOTkxODlmZGMzMmZhNWVhZWRmNGU0YTVmYWM3YmFiNzNmYTA0YTJlMGZjNjRhZjZmNSIKICAgIF0sCiAgICAidGhyZXNob2xkIjogMSwKICAgICJ4LXR1Zi1vbi1jaS1leHBpcnktcGVyaW9kIjogMzY1MCwKICAgICJ4LXR1Zi1vbi1jaS1zaWduaW5nLXBlcmlvZCI6IDM2NQogICB9LAogICAidGFyZ2V0cyI6IHsKICAgICJrZXlpZHMiOiBbCiAgICAgImU3MWE1NGQ1NDM4MzViYTg2YWRhZDk0NjAzNzljNzY0MWZiODcyNmQxNjRlYTc2NjgwMWExYzUyMmFiYTdlYTIiLAogICAgICIyMmY0Y2FlYzZkOGU2Zjk1NTVhZjY2YjNkNGMzY2IwNmEzYmIyM2ZkYzdlMzljOTE2YzYxZjQ2MmU2ZjUyYjA2IiwKICAgICAiNjE2NDM4MzgxMjViNDQwYjQwZGI2OTQyZjVjYjVhMzFjMGRjMDQzNjgzMTZlYjJhYWE1OGI5NTkwNGE1ODIyMiIsCiAgICAgImE2ODdlNWJmNGZhYjgyYjBlZTU4ZDQ2ZTA1Yzk1MzUxNDVhMmM5YWZiNDU4ZjQzZDQyYjQ1Y2EwZmRjZTJhNzAiLA
ogICAgICIxODNlNjRmMzc2NzBkYzEzY2EwZDI4OTk1YTMwNTNmMzc0MDk1NGRkY2U0NDMyMWE0MWU0NjUzNGNmNDRlNjMyIgogICAgXSwKICAgICJ0aHJlc2hvbGQiOiAzCiAgIH0sCiAgICJ0aW1lc3RhbXAiOiB7CiAgICAia2V5aWRzIjogWwogICAgICIwYzg3NDMyYzNiZjA5ZmQ5OTE4OWZkYzMyZmE1ZWFlZGY0ZTRhNWZhYzdiYWI3M2ZhMDRhMmUwZmM2NGFmNmY1IgogICAgXSwKICAgICJ0aHJlc2hvbGQiOiAxLAogICAgIngtdHVmLW9uLWNpLWV4cGlyeS1wZXJpb2QiOiA3LAogICAgIngtdHVmLW9uLWNpLXNpZ25pbmctcGVyaW9kIjogNgogICB9CiAgfSwKICAic3BlY192ZXJzaW9uIjogIjEuMCIsCiAgInZlcnNpb24iOiAxMywKICAieC10dWYtb24tY2ktZXhwaXJ5LXBlcmlvZCI6IDE5NywKICAieC10dWYtb24tY2ktc2lnbmluZy1wZXJpb2QiOiA0NgogfQp9","targets":{"trusted_root.json":"{
  "mediaType": "application/vnd.dev.sigstore.trustedroot+json;version=0.1",
  "tlogs": [
    {
      "baseUrl": "https://rekor.sigstore.dev",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE2G2Y+2tabdTV5BcGiBIx0a9fAFwrkBbmLSGtks4L3qX6yYY0zufBnhC8Ur/iy55GhWP/9A/bY2LhC30M9+RYtw==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2021-01-12T11:53:27Z"
        }
      },
      "logId": {
        "keyId": "wNI9atQGlz+VWfO6LRygH4QUfY/8W4RFwiT5i5WRgB0="
      }
    }
  ],
  "certificateAuthorities": [
    {
      "subject": {
        "organization": "sigstore.dev",
        "commonName": "sigstore"
      },
      "uri": "https://fulcio.sigstore.dev",
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIIB+DCCAX6gAwIBAgITNVkDZoCiofPDsy7dfm6geLbuhzAKBggqhkjOPQQDAzAqMRUwEwYDVQQKEwxzaWdzdG9yZS5kZXYxETAPBgNVBAMTCHNpZ3N0b3JlMB4XDTIxMDMwNzAzMjAyOVoXDTMxMDIyMzAzMjAyOVowKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTB2MBAGByqGSM49AgEGBSuBBAAiA2IABLSyA7Ii5k+pNO8ZEWY0ylemWDowOkNa3kL+GZE5Z5GWehL9/A9bRNA3RbrsZ5i0JcastaRL7Sp5fp/jD5dxqc/UdTVnlvS16an+2Yfswe/QuLolRUCrcOE2+2iA5+tzd6NmMGQwDgYDVR0PAQH/BAQDAgEGMBIGA1UdEwEB/wQIMAYBAf8CAQEwHQYDVR0OBBYEFMjFHQBBmiQpMlEk6w2uSu1KBtPsMB8GA1UdIwQYMBaAFMjFHQBBmiQpMlEk6w2uSu1KBtPsMAoGCCqGSM49BAMDA2gAMGUCMH8liWJfMui6vXXBhjDgY4MwslmN/TJxVe/83WrFomwmNf056y1X48F9c4m3a3ozXAIxAKjRay5/aj/jsKKGIkmQatjI8uupHr/+CxFvaJWmpYqNkLDGRU+9orzh5hI2RrcuaQ=="
          }
        ]
      },
      "validFor": {
        "start": "2021-03-07T03:20:29Z",
        "end": "2022-12-31T23:59:59.999Z"
      }
    },
    {
      "subject": {
        "organization": "sigstore.dev",
        "commonName": "sigstore"
      },
      "uri": "https://fulcio.sigstore.dev",
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIICGjCCAaGgAwIBAgIUALnViVfnU0brJasmRkHrn/UnfaQwCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMjA0MTMyMDA2MTVaFw0zMTEwMDUxMzU2NThaMDcxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjEeMBwGA1UEAxMVc2lnc3RvcmUtaW50ZXJtZWRpYXRlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE8RVS/ysH+NOvuDZyPIZtilgUF9NlarYpAd9HP1vBBH1U5CV77LSS7s0ZiH4nE7Hv7ptS6LvvR/STk798LVgMzLlJ4HeIfF3tHSaexLcYpSASr1kS0N/RgBJz/9jWCiXno3sweTAOBgNVHQ8BAf8EBAMCAQYwEwYDVR0lBAwwCgYIKwYBBQUHAwMwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQU39Ppz1YkEZb5qNjpKFWixi4YZD8wHwYDVR0jBBgwFoAUWMAeX5FFpWapesyQoZMi0CrFxfowCgYIKoZIzj0EAwMDZwAwZAIwPCsQK4DYiZYDPIaDi5HFKnfxXx6ASSVmERfsynYBiX2X6SJRnZU84/9DZdnFvvxmAjBOt6QpBlc4J/0DxvkTCqpclvziL6BCCPnjdlIB3Pu3BxsPmygUY7Ii2zbdCdliiow="
          },
          {
            "rawBytes": "MIIB9zCCAXygAwIBAgIUALZNAPFdxHPwjeDloDwyYChAO/4wCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMTEwMDcxMzU2NTlaFw0zMTEwMDUxMzU2NThaMCoxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjERMA8GA1UEAxMIc2lnc3RvcmUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAT7XeFT4rb3PQGwS4IajtLk3/OlnpgangaBclYpsYBr5i+4ynB07ceb3LP0OIOZdxexX69c5iVuyJRQ+Hz05yi+UF3uBWAlHpiS5sh0+H2GHE7SXrk1EC5m1Tr19L9gg92jYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRYwB5fkUWlZql6zJChkyLQKsXF+jAfBgNVHSMEGDAWgBRYwB5fkUWlZql6zJChkyLQKsXF+jAKBggqhkjOPQQDAwNpADBmAjEAj1nHeXZp+13NWBNa+EDsDP8G1WWg1tCMWP/WHPqpaVo0jhsweNFZgSs0eE7wYI4qAjEA2WB9ot98sIkoF3vZYdd3/VtWB5b9TNMea7Ix/stJ5TfcLLeABLE4BNJOsQ4vnBHJ"
          }
        ]
      },
      "validFor": {
        "start": "2022-04-13T20:06:15Z"
      }
    }
  ],
  "ctlogs": [
    {
      "baseUrl": "https://ctfe.sigstore.dev/test",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEbfwR+RJudXscgRBRpKX1XFDy3PyudDxz/SfnRi1fT8ekpfBd2O1uoz7jr3Z8nKzxA69EUQ+eFCFI3zeubPWU7w==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2021-03-14T00:00:00Z",
          "end": "2022-10-31T23:59:59.999Z"
        }
      },
      "logId": {
        "keyId": "CGCS8ChS/2hF0dFrJ4ScRWcYrBY9wzjSbea8IgY2b3I="
      }
    },
    {
      "baseUrl": "https://ctfe.sigstore.dev/2022",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEiPSlFi0CmFTfEjCUqF9HuCEcYXNKAaYalIJmBZ8yyezPjTqhxrKBpMnaocVtLJBI1eM3uXnQzQGAJdJ4gs9Fyw==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2022-10-20T00:00:00Z"
        }
      },
      "logId": {
        "keyId": "3T0wasbHETJjGR4cmWc3AqJKXrjePK3/h4pygC8p7o4="
      }
    }
  ],
  "timestampAuthorities": [
    {
      "subject": {
        "organization": "sigstore.dev",
        "commonName": "sigstore-tsa-selfsigned"
      },
      "uri": "https://timestamp.sigstore.dev/api/v1/timestamp",
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIICEDCCAZagAwIBAgIUOhNULwyQYe68wUMvy4qOiyojiwwwCgYIKoZIzj0EAwMwOTEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MSAwHgYDVQQDExdzaWdzdG9yZS10c2Etc2VsZnNpZ25lZDAeFw0yNTA0MDgwNjU5NDNaFw0zNTA0MDYwNjU5NDNaMC4xFTATBgNVBAoTDHNpZ3N0b3JlLmRldjEVMBMGA1UEAxMMc2lnc3RvcmUtdHNhMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE4ra2Z8hKNig2T9kFjCAToGG30jky+WQv3BzL+mKvh1SKNR/UwuwsfNCg4sryoYAd8E6isovVA3M4aoNdm9QDi50Z8nTEyvqgfDPtTIwXItfiW/AFf1V7uwkbkAoj0xxco2owaDAOBgNVHQ8BAf8EBAMCB4AwHQYDVR0OBBYEFIn9eUOHz9BlRsMCRscsc1t9tOsDMB8GA1UdIwQYMBaAFJjsAe9/u1H/1JUeb4qImFMHic6/MBYGA1UdJQEB/wQMMAoGCCsGAQUFBwMIMAoGCCqGSM49BAMDA2gAMGUCMDtpsV/6KaO0qyF/UMsX2aSUXKQFdoGTptQGc0ftq1csulHPGG6dsmyMNd3JB+G3EQIxAOajvBcjpJmKb4Nv+2Taoj8Uc5+b6ih6FXCCKraSqupe07zqswMcXJTe1cExvHvvlw=="
          },
          {
            "rawBytes": "MIIB9zCCAXygAwIBAgIUV7f0GLDOoEzIh8LXSW80OJiUp14wCgYIKoZIzj0EAwMwOTEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MSAwHgYDVQQDExdzaWdzdG9yZS10c2Etc2VsZnNpZ25lZDAeFw0yNTA0MDgwNjU5NDNaFw0zNTA0MDYwNjU5NDNaMDkxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjEgMB4GA1UEAxMXc2lnc3RvcmUtdHNhLXNlbGZzaWduZWQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQUQNtfRT/ou3YATa6wB/kKTe70cfJwyRIBovMnt8RcJph/COE82uyS6FmppLLL1VBPGcPfpQPYJNXzWwi8icwhKQ6W/Qe2h3oebBb2FHpwNJDqo+TMaC/tdfkv/ElJB72jRTBDMA4GA1UdDwEB/wQEAwIBBjASBgNVHRMBAf8ECDAGAQH/AgEAMB0GA1UdDgQWBBSY7AHvf7tR/9SVHm+KiJhTB4nOvzAKBggqhkjOPQQDAwNpADBmAjEAwGEGrfGZR1cen1R8/DTVMI943LssZmJRtDp/i7SfGHmGRP6gRbuj9vOK3b67Z0QQAjEAuT2H673LQEaHTcyQSZrkp4mX7WwkmF+sVbkYY5mXN+RMH13KUEHHOqASaemYWK/E"
          }
        ]
      },
      "validFor": {
        "start": "2025-07-04T00:00:00Z"
      }
    }
  ]
}
","registry.npmjs.org%2Fkeys.json":"ewogICAgImtleXMiOiBbCiAgICAgICAgewogICAgICAgICAgICAia2V5SWQiOiAiU0hBMjU2OmpsM2J3c3d1ODBQampva0NnaDBvMnc1YzJVNExoUUFFNTdnajljejFrekEiLAogICAgICAgICAgICAia2V5VXNhZ2UiOiAibnBtOnNpZ25hdHVyZXMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRTFPbGIzek1BRkZ4WEtIaUlrUU81Y0ozWWhsNWk2VVBwK0lodXRlQkpidUhjQTVVb2dLbzBFV3RsV3dXNktTYUtvVE5FWUw3SmxDUWlWbmtoQmt0VWdnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIxOTk5LTAxLTAxVDAwOjAwOjAwLjAwMFoiLAogICAgICAgICAgICAgICAgICAgICJlbmQiOiAiMjAyNS0wMS0yOVQwMDowMDowMC4wMDBaIgogICAgICAgICAgICAgICAgfQogICAgICAgICAgICB9CiAgICAgICAgfSwKICAgICAgICB7CiAgICAgICAgICAgICJrZXlJZCI6ICJTSEEyNTY6amwzYndzd3U4MFBqam9rQ2doMG8ydzVjMlU0TGhRQUU1N2dqOWN6MWt6QSIsCiAgICAgICAgICAgICJrZXlVc2FnZSI6ICJucG06YXR0ZXN0YXRpb25zIiwKICAgICAgICAgICAgInB1YmxpY0tleSI6IHsKICAgICAgICAgICAgICAgICJyYXdCeXRlcyI6ICJNRmt3RXdZSEtvWkl6ajBDQVFZSUtvWkl6ajBEQVFjRFFnQUUxT2xiM3pNQUZGeFhLSGlJa1FPNWNKM1lobDVpNlVQcCtJaHV0ZUJKYnVIY0E1VW9nS28wRVd0bFd3VzZLU2FLb1RORVlMN0psQ1FpVm5raEJrdFVnZz09IiwKICAgICAgICAgICAgICAgICJrZXlEZXRhaWxzIjogIlBLSVhfRUNEU0FfUDI1Nl9TSEFfMjU2IiwKICAgICAgICAgICAgICAgICJ2YWxpZEZvciI6IHsKICAgICAgICAgICAgICAgICAgICAic3RhcnQiOiAiMjAyMi0xMi0wMVQwMDowMDowMC4wMDBaIiwKICAgICAgICAgICAgICAgICAgICAiZW5kIjogIjIwMjUtMDEtMjlUMDA6MDA6MDAuMDAwWiIKICAgICAgICAgICAgICAgIH0KICAgICAgICAgICAgfQogICAgICAgIH0sCiAgICAgICAgewogICAgICAgICAgICAia2V5SWQiOiAiU0hBMjU2OkRoUTh3UjVBUEJ2RkhMRi8rVGMrQVl2UE9kVHBjSURxT2h4c0JIUndDN1UiLAogICAgICAgICAgICAia2V5VXNhZ2UiOiAibnBtOnNpZ25hdHVyZXMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRVk2WWE3VysrN2FVUHp2TVRyZXpINlljeDNjK0hPS1lDY05HeWJKWlNDSnEvZmQ3UWE4dXVBS3RkSWtVUXRRaUVLRVJoQW1FNWxNTUpoUDhPa0RPYTJnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIyMDI1LTAxLTEzVDAwOjAwOjAwLjAwMFoiCiAgICAgICAgICAgICAgICB9CiAgICAgICAgICAgIH0KICAgICAgICB9LAogICAgICAgIHsKICAgICAgICAgICAgImtleUlkIjogIlNIQTI1NjpEaFE4d1I1QVBCdkZITEYvK1RjK0FZdlBPZFRwY0lEcU9oeHNCSFJ3QzdVIiwKICAgICAgICAgICAgImtleVVzYWdlIjogIm5wbTphdHRlc3RhdGlvbnMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRVk2WWE3VysrN2FVUHp2TVRyZXpINlljeDNjK0hPS1lDY05HeWJKWlNDSnEvZmQ3UWE4dXVBS3RkSWtVUXRRaUVLRVJoQW1FNWxNTUpoUDhPa0RPYTJnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIyMDI1LTAxLTEzVDAwOjAwOjAwLjAwMFoiCiAgICAgICAgICAgICAgICB9CiAgICAgICAgICAgIH0KICAgICAgICB9CiAgICBdCn0K"}}} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/bundle/dsse.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/bundle/dsse.js new file mode 100644 index 0000000000000000000000000000000000000000..1033fc422aba0971ba2c22f4e806ee6faff3d197 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/bundle/dsse.js @@ -0,0 +1,43 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DSSESignatureContent = void 0; +/* +Copyright 2023 The Sigstore Authors. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const core_1 = require("@sigstore/core"); +class DSSESignatureContent { + constructor(env) { + this.env = env; + } + compareDigest(digest) { + return core_1.crypto.bufferEqual(digest, core_1.crypto.digest('sha256', this.env.payload)); + } + compareSignature(signature) { + return core_1.crypto.bufferEqual(signature, this.signature); + } + verifySignature(key) { + return core_1.crypto.verify(this.preAuthEncoding, key, this.signature); + } + get signature() { + return this.env.signatures.length > 0 + ? this.env.signatures[0].sig + : Buffer.from(''); + } + // DSSE Pre-Authentication Encoding + get preAuthEncoding() { + return core_1.dsse.preAuthEncoding(this.env.payloadType, this.env.payload); + } +} +exports.DSSESignatureContent = DSSESignatureContent; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/bundle/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/bundle/index.js new file mode 100644 index 0000000000000000000000000000000000000000..4287d8032b75f00569ee02f623f521bdb1489ef5 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/bundle/index.js @@ -0,0 +1,57 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toSignedEntity = toSignedEntity; +exports.signatureContent = signatureContent; +const core_1 = require("@sigstore/core"); +const dsse_1 = require("./dsse"); +const message_1 = require("./message"); +function toSignedEntity(bundle, artifact) { + const { tlogEntries, timestampVerificationData } = bundle.verificationMaterial; + const timestamps = []; + for (const entry of tlogEntries) { + timestamps.push({ + $case: 'transparency-log', + tlogEntry: entry, + }); + } + for (const ts of timestampVerificationData?.rfc3161Timestamps ?? 
[]) { + timestamps.push({ + $case: 'timestamp-authority', + timestamp: core_1.RFC3161Timestamp.parse(ts.signedTimestamp), + }); + } + return { + signature: signatureContent(bundle, artifact), + key: key(bundle), + tlogEntries, + timestamps, + }; +} +function signatureContent(bundle, artifact) { + switch (bundle.content.$case) { + case 'dsseEnvelope': + return new dsse_1.DSSESignatureContent(bundle.content.dsseEnvelope); + case 'messageSignature': + return new message_1.MessageSignatureContent(bundle.content.messageSignature, artifact); + } +} +function key(bundle) { + switch (bundle.verificationMaterial.content.$case) { + case 'publicKey': + return { + $case: 'public-key', + hint: bundle.verificationMaterial.content.publicKey.hint, + }; + case 'x509CertificateChain': + return { + $case: 'certificate', + certificate: core_1.X509Certificate.parse(bundle.verificationMaterial.content.x509CertificateChain + .certificates[0].rawBytes), + }; + case 'certificate': + return { + $case: 'certificate', + certificate: core_1.X509Certificate.parse(bundle.verificationMaterial.content.certificate.rawBytes), + }; + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/bundle/message.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/bundle/message.js new file mode 100644 index 0000000000000000000000000000000000000000..836148c68a8b661d2e050992abe0f5af925ed6ec --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/bundle/message.js @@ -0,0 +1,36 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.MessageSignatureContent = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +const core_1 = require("@sigstore/core"); +class MessageSignatureContent { + constructor(messageSignature, artifact) { + this.signature = messageSignature.signature; + this.messageDigest = messageSignature.messageDigest.digest; + this.artifact = artifact; + } + compareSignature(signature) { + return core_1.crypto.bufferEqual(signature, this.signature); + } + compareDigest(digest) { + return core_1.crypto.bufferEqual(digest, this.messageDigest); + } + verifySignature(key) { + return core_1.crypto.verify(this.artifact, key, this.signature); + } +} +exports.MessageSignatureContent = MessageSignatureContent; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/error.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/error.js new file mode 100644 index 0000000000000000000000000000000000000000..6cb1cd41213435f8d4e1a166e26f4a6c353d18dc --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/error.js @@ -0,0 +1,32 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.PolicyError = exports.VerificationError = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +class BaseError extends Error { + constructor({ code, message, cause, }) { + super(message); + this.code = code; + this.cause = cause; + this.name = this.constructor.name; + } +} +class VerificationError extends BaseError { +} +exports.VerificationError = VerificationError; +class PolicyError extends BaseError { +} +exports.PolicyError = PolicyError; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/index.js new file mode 100644 index 0000000000000000000000000000000000000000..3222876fcd68b7d7edc5f18e3112c4ae31867f20 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/index.js @@ -0,0 +1,28 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Verifier = exports.toTrustMaterial = exports.VerificationError = exports.PolicyError = exports.toSignedEntity = void 0; +/* istanbul ignore file */ +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +var bundle_1 = require("./bundle"); +Object.defineProperty(exports, "toSignedEntity", { enumerable: true, get: function () { return bundle_1.toSignedEntity; } }); +var error_1 = require("./error"); +Object.defineProperty(exports, "PolicyError", { enumerable: true, get: function () { return error_1.PolicyError; } }); +Object.defineProperty(exports, "VerificationError", { enumerable: true, get: function () { return error_1.VerificationError; } }); +var trust_1 = require("./trust"); +Object.defineProperty(exports, "toTrustMaterial", { enumerable: true, get: function () { return trust_1.toTrustMaterial; } }); +var verifier_1 = require("./verifier"); +Object.defineProperty(exports, "Verifier", { enumerable: true, get: function () { return verifier_1.Verifier; } }); diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/key/certificate.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/key/certificate.js new file mode 100644 index 0000000000000000000000000000000000000000..35ad947f0bafc655e349f110842fa8719438dc23 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/key/certificate.js @@ -0,0 +1,212 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.CertificateChainVerifier = void 0; +exports.verifyCertificateChain = verifyCertificateChain; +const error_1 = require("../error"); +const trust_1 = require("../trust"); +function verifyCertificateChain(timestamp, leaf, certificateAuthorities) { + // Filter list of trusted CAs to those which are valid for the given + // timestamp + const cas = (0, trust_1.filterCertAuthorities)(certificateAuthorities, timestamp); + /* eslint-disable-next-line @typescript-eslint/no-explicit-any */ + let error; + for (const ca of cas) { + try { + const verifier = new CertificateChainVerifier({ + trustedCerts: ca.certChain, + untrustedCert: leaf, + timestamp, + }); + return verifier.verify(); + } + catch (err) { + error = err; + } + } + // If we failed to verify the certificate chain for all of the trusted + // CAs, throw the last error we encountered. 
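+    // Editor's illustrative note (not upstream code): a caller might invoke
+    // this function as, for example,
+    //   verifyCertificateChain(timestamp, leafCert, trustedRoot.certificateAuthorities)
+    // where `timestamp` is a Date established by a verified tlog entry or
+    // RFC 3161 timestamp, `leafCert` is the parsed signing certificate, and
+    // the CA list comes from the trusted root material; the names used here
+    // are hypothetical.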
+ throw new error_1.VerificationError({ + code: 'CERTIFICATE_ERROR', + message: 'Failed to verify certificate chain', + cause: error, + }); +} +class CertificateChainVerifier { + constructor(opts) { + this.untrustedCert = opts.untrustedCert; + this.trustedCerts = opts.trustedCerts; + this.localCerts = dedupeCertificates([ + ...opts.trustedCerts, + opts.untrustedCert, + ]); + this.timestamp = opts.timestamp; + } + verify() { + // Construct certificate path from leaf to root + const certificatePath = this.sort(); + // Perform validation checks on each certificate in the path + this.checkPath(certificatePath); + const validForDate = certificatePath.every((cert) => cert.validForDate(this.timestamp)); + if (!validForDate) { + throw new error_1.VerificationError({ + code: 'CERTIFICATE_ERROR', + message: 'certificate is not valid or expired at the specified date', + }); + } + // Return verified certificate path + return certificatePath; + } + sort() { + const leafCert = this.untrustedCert; + // Construct all possible paths from the leaf + let paths = this.buildPaths(leafCert); + // Filter for paths which contain a trusted certificate + paths = paths.filter((path) => path.some((cert) => this.trustedCerts.includes(cert))); + if (paths.length === 0) { + throw new error_1.VerificationError({ + code: 'CERTIFICATE_ERROR', + message: 'no trusted certificate path found', + }); + } + // Find the shortest of possible paths + /* istanbul ignore next */ + const path = paths.reduce((prev, curr) => prev.length < curr.length ? prev : curr); + // Construct chain from shortest path + // Removes the last certificate in the path, which will be a second copy + // of the root certificate given that the root is self-signed. + return [leafCert, ...path].slice(0, -1); + } + // Recursively build all possible paths from the leaf to the root + buildPaths(certificate) { + const paths = []; + const issuers = this.findIssuer(certificate); + if (issuers.length === 0) { + throw new error_1.VerificationError({ + code: 'CERTIFICATE_ERROR', + message: 'no valid certificate path found', + }); + } + for (let i = 0; i < issuers.length; i++) { + const issuer = issuers[i]; + // Base case - issuer is self + if (issuer.equals(certificate)) { + paths.push([certificate]); + continue; + } + // Recursively build path for the issuer + const subPaths = this.buildPaths(issuer); + // Construct paths by appending the issuer to each subpath + for (let j = 0; j < subPaths.length; j++) { + paths.push([issuer, ...subPaths[j]]); + } + } + return paths; + } + // Return all possible issuers for the given certificate + findIssuer(certificate) { + let issuers = []; + let keyIdentifier; + // Exit early if the certificate is self-signed + if (certificate.subject.equals(certificate.issuer)) { + if (certificate.verify()) { + return [certificate]; + } + } + // If the certificate has an authority key identifier, use that + // to find the issuer + if (certificate.extAuthorityKeyID) { + keyIdentifier = certificate.extAuthorityKeyID.keyIdentifier; + // TODO: Add support for authorityCertIssuer/authorityCertSerialNumber + // though Fulcio doesn't appear to use these + } + // Find possible issuers by comparing the authorityKeyID/subjectKeyID + // or issuer/subject. Potential issuers are added to the result array. 
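// (Commented-out sketch of the issuer-matching rule described above; names
// are hypothetical, and certificates are assumed to expose subject/issuer
// values plus optional key-identifier extensions, as in this module.)
//
//   function isLikelyIssuer(cert, candidate) {
//     const aki = cert.extAuthorityKeyID?.keyIdentifier;
//     if (aki && candidate.extSubjectKeyID) {
//       // Prefer the authorityKeyID -> subjectKeyID linkage when present
//       return candidate.extSubjectKeyID.keyIdentifier.equals(aki);
//     }
//     // Otherwise fall back to issuer/subject name chaining
//     return candidate.subject.equals(cert.issuer);
//   }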
+ this.localCerts.forEach((possibleIssuer) => { + if (keyIdentifier) { + /* istanbul ignore else */ + if (possibleIssuer.extSubjectKeyID) { + if (possibleIssuer.extSubjectKeyID.keyIdentifier.equals(keyIdentifier)) { + issuers.push(possibleIssuer); + } + return; + } + } + // Fallback to comparing certificate issuer and subject if + // subjectKey/authorityKey extensions are not present + if (possibleIssuer.subject.equals(certificate.issuer)) { + issuers.push(possibleIssuer); + } + }); + // Remove any issuers which fail to verify the certificate + issuers = issuers.filter((issuer) => { + try { + return certificate.verify(issuer); + } + catch (ex) { + /* istanbul ignore next - should never error */ + return false; + } + }); + return issuers; + } + checkPath(path) { + /* istanbul ignore if */ + if (path.length < 1) { + throw new error_1.VerificationError({ + code: 'CERTIFICATE_ERROR', + message: 'certificate chain must contain at least one certificate', + }); + } + // Ensure that all certificates beyond the leaf are CAs + const validCAs = path.slice(1).every((cert) => cert.isCA); + if (!validCAs) { + throw new error_1.VerificationError({ + code: 'CERTIFICATE_ERROR', + message: 'intermediate certificate is not a CA', + }); + } + // Certificate's issuer must match the subject of the next certificate + // in the chain + for (let i = path.length - 2; i >= 0; i--) { + /* istanbul ignore if */ + if (!path[i].issuer.equals(path[i + 1].subject)) { + throw new error_1.VerificationError({ + code: 'CERTIFICATE_ERROR', + message: 'incorrect certificate name chaining', + }); + } + } + // Check pathlength constraints + for (let i = 0; i < path.length; i++) { + const cert = path[i]; + // If the certificate is a CA, check the path length + if (cert.extBasicConstraints?.isCA) { + const pathLength = cert.extBasicConstraints.pathLenConstraint; + // The path length, if set, indicates how many intermediate + // certificates (NOT including the leaf) are allowed to follow. The + // pathLength constraint of any intermediate CA certificate MUST be + // greater than or equal to its own depth in the chain (with an + // adjustment for the leaf certificate) + if (pathLength !== undefined && pathLength < i - 1) { + throw new error_1.VerificationError({ + code: 'CERTIFICATE_ERROR', + message: 'path length constraint exceeded', + }); + } + } + } + } +} +exports.CertificateChainVerifier = CertificateChainVerifier; +// Remove duplicate certificates from the array +function dedupeCertificates(certs) { + for (let i = 0; i < certs.length; i++) { + for (let j = i + 1; j < certs.length; j++) { + if (certs[i].equals(certs[j])) { + certs.splice(j, 1); + j--; + } + } + } + return certs; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/key/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/key/index.js new file mode 100644 index 0000000000000000000000000000000000000000..c966ccb1e925efdd89ca3bc5e3b0ec08a4ce9a16 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/key/index.js @@ -0,0 +1,67 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifyPublicKey = verifyPublicKey; +exports.verifyCertificate = verifyCertificate; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License.
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const core_1 = require("@sigstore/core"); +const error_1 = require("../error"); +const certificate_1 = require("./certificate"); +const sct_1 = require("./sct"); +const OID_FULCIO_ISSUER_V1 = '1.3.6.1.4.1.57264.1.1'; +const OID_FULCIO_ISSUER_V2 = '1.3.6.1.4.1.57264.1.8'; +function verifyPublicKey(hint, timestamps, trustMaterial) { + const key = trustMaterial.publicKey(hint); + timestamps.forEach((timestamp) => { + if (!key.validFor(timestamp)) { + throw new error_1.VerificationError({ + code: 'PUBLIC_KEY_ERROR', + message: `Public key is not valid for timestamp: ${timestamp.toISOString()}`, + }); + } + }); + return { key: key.publicKey }; +} +function verifyCertificate(leaf, timestamps, trustMaterial) { + // Check that leaf certificate chains to a trusted CA + let path = []; + timestamps.forEach((timestamp) => { + path = (0, certificate_1.verifyCertificateChain)(timestamp, leaf, trustMaterial.certificateAuthorities); + }); + return { + scts: (0, sct_1.verifySCTs)(path[0], path[1], trustMaterial.ctlogs), + signer: getSigner(path[0]), + }; +} +function getSigner(cert) { + let issuer; + const issuerExtension = cert.extension(OID_FULCIO_ISSUER_V2); + /* istanbul ignore next */ + if (issuerExtension) { + issuer = issuerExtension.valueObj.subs?.[0]?.value.toString('ascii'); + } + else { + issuer = cert.extension(OID_FULCIO_ISSUER_V1)?.value.toString('ascii'); + } + const identity = { + extensions: { issuer }, + subjectAlternativeName: cert.subjectAltName, + }; + return { + key: core_1.crypto.createPublicKey(cert.publicKey), + identity, + }; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/key/sct.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/key/sct.js new file mode 100644 index 0000000000000000000000000000000000000000..8eca48738096ee75670e5599a07da1854495256f --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/key/sct.js @@ -0,0 +1,78 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifySCTs = verifySCTs; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const core_1 = require("@sigstore/core"); +const error_1 = require("../error"); +const trust_1 = require("../trust"); +function verifySCTs(cert, issuer, ctlogs) { + let extSCT; + // Verifying the SCT requires that we remove the SCT extension and + // re-encode the TBS structure to DER -- this value is part of the data + // over which the signature is calculated. 
Since this is a destructive action + // we create a copy of the certificate so we can remove the SCT extension + // without affecting the original certificate. + const clone = cert.clone(); + // Intentionally not using the findExtension method here because we want to + // remove the SCT extension from the certificate before calculating the + // PreCert structure + for (let i = 0; i < clone.extensions.length; i++) { + const ext = clone.extensions[i]; + if (ext.subs[0].toOID() === core_1.EXTENSION_OID_SCT) { + extSCT = new core_1.X509SCTExtension(ext); + // Remove the extension from the certificate + clone.extensions.splice(i, 1); + break; + } + } + // No SCT extension found to verify + if (!extSCT) { + return []; + } + // Found an SCT extension but it has no SCTs + /* istanbul ignore if -- too difficult to fabricate test case for this */ + if (extSCT.signedCertificateTimestamps.length === 0) { + return []; + } + // Construct the PreCert structure + // https://www.rfc-editor.org/rfc/rfc6962#section-3.2 + const preCert = new core_1.ByteStream(); + // Calculate hash of the issuer's public key + const issuerId = core_1.crypto.digest('sha256', issuer.publicKey); + preCert.appendView(issuerId); + // Re-encodes the certificate to DER after removing the SCT extension + const tbs = clone.tbsCertificate.toDER(); + preCert.appendUint24(tbs.length); + preCert.appendView(tbs); + // Calculate and return the verification results for each SCT + return extSCT.signedCertificateTimestamps.map((sct) => { + // Find the ctlog instance that corresponds to the SCT's logID + const validCTLogs = (0, trust_1.filterTLogAuthorities)(ctlogs, { + logID: sct.logID, + targetDate: sct.datetime, + }); + // See if the SCT is valid for any of the CT logs + const verified = validCTLogs.some((log) => sct.verify(preCert.buffer, log.publicKey)); + if (!verified) { + throw new error_1.VerificationError({ + code: 'CERTIFICATE_ERROR', + message: 'SCT verification failed', + }); + } + return sct.logID; + }); +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/policy.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/policy.js new file mode 100644 index 0000000000000000000000000000000000000000..f5960cf047b84b0e84ae2fa6804a2d888cf2b0c0 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/policy.js @@ -0,0 +1,24 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifySubjectAlternativeName = verifySubjectAlternativeName; +exports.verifyExtensions = verifyExtensions; +const error_1 = require("./error"); +function verifySubjectAlternativeName(policyIdentity, signerIdentity) { + if (signerIdentity === undefined || !signerIdentity.match(policyIdentity)) { + throw new error_1.PolicyError({ + code: 'UNTRUSTED_SIGNER_ERROR', + message: `certificate identity error - expected ${policyIdentity}, got ${signerIdentity}`, + }); + } +} +function verifyExtensions(policyExtensions, signerExtensions = {}) { + let key; + for (key in policyExtensions) { + if (signerExtensions[key] !== policyExtensions[key]) { + throw new error_1.PolicyError({ + code: 'UNTRUSTED_SIGNER_ERROR', + message: `invalid certificate extension - expected ${key}=${policyExtensions[key]}, got ${key}=${signerExtensions[key]}`, + }); + } + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/shared.types.js
b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/shared.types.js new file mode 100644 index 0000000000000000000000000000000000000000..c8ad2e549bdc6801e0d1c80b0308d4b9bd4985ce --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/shared.types.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js new file mode 100644 index 0000000000000000000000000000000000000000..46619b675f8863af80a9b2b67a6d096583a5f8f2 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js @@ -0,0 +1,157 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifyCheckpoint = verifyCheckpoint; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const core_1 = require("@sigstore/core"); +const error_1 = require("../error"); +const trust_1 = require("../trust"); +// Separator between the note and the signatures in a checkpoint +const CHECKPOINT_SEPARATOR = '\n\n'; +// Checkpoint signatures are of the following form: +// "– <identity> <key hint + signature>\n" +// where: +// - the prefix is an emdash (U+2014). +// - <identity> gives a human-readable representation of the signing ID. +// - <key hint + signature> is the first 4 bytes of the SHA256 hash of the +// associated public key followed by the signature bytes. +const SIGNATURE_REGEX = /\u2014 (\S+) (\S+)\n/g; +// Verifies the checkpoint value in the given tlog entry. There are two steps +// to the verification: +// 1. Verify that all signatures in the checkpoint can be verified against a +// trusted public key +// 2.
Verify that the root hash in the +// inclusion proof +// See: https://github.com/transparency-dev/formats/blob/main/log/README.md +function verifyCheckpoint(entry, tlogs) { + // Filter tlog instances to just those which were valid at the time of the + // entry + const validTLogs = (0, trust_1.filterTLogAuthorities)(tlogs, { + targetDate: new Date(Number(entry.integratedTime) * 1000), + }); + const inclusionProof = entry.inclusionProof; + const signedNote = SignedNote.fromString(inclusionProof.checkpoint.envelope); + const checkpoint = LogCheckpoint.fromString(signedNote.note); + // Verify that the signatures in the checkpoint are all valid + if (!verifySignedNote(signedNote, validTLogs)) { + throw new error_1.VerificationError({ + code: 'TLOG_INCLUSION_PROOF_ERROR', + message: 'invalid checkpoint signature', + }); + } + // Verify that the root hash from the checkpoint matches the root hash in the + // inclusion proof + if (!core_1.crypto.bufferEqual(checkpoint.logHash, inclusionProof.rootHash)) { + throw new error_1.VerificationError({ + code: 'TLOG_INCLUSION_PROOF_ERROR', + message: 'root hash mismatch', + }); + } +} +// Verifies the signatures in the SignedNote. For each signature, the +// corresponding transparency log is looked up by the key hint and the +// signature is verified against the public key in the transparency log. +// Returns false if any of the signatures cannot be verified. +function verifySignedNote(signedNote, tlogs) { + const data = Buffer.from(signedNote.note, 'utf-8'); + return signedNote.signatures.every((signature) => { + // Find the transparency log instance with the matching key hint + const tlog = tlogs.find((tlog) => core_1.crypto.bufferEqual(tlog.logID.subarray(0, 4), signature.keyHint)); + if (!tlog) { + return false; + } + return core_1.crypto.verify(data, tlog.publicKey, signature.signature); + }); +} +// SignedNote represents a signed note from a transparency log checkpoint. Consists +// of a body (or note) and one or more signatures calculated over the body. See +// https://github.com/transparency-dev/formats/blob/main/log/README.md#signed-envelope +class SignedNote { + constructor(note, signatures) { + this.note = note; + this.signatures = signatures; + } + // Deserialize a SignedNote from a string + static fromString(envelope) { + if (!envelope.includes(CHECKPOINT_SEPARATOR)) { + throw new error_1.VerificationError({ + code: 'TLOG_INCLUSION_PROOF_ERROR', + message: 'missing checkpoint separator', + }); + } + // Split the note into the header and the data portions at the separator + const split = envelope.indexOf(CHECKPOINT_SEPARATOR); + const header = envelope.slice(0, split + 1); + const data = envelope.slice(split + CHECKPOINT_SEPARATOR.length); + // Find all the signature lines in the data portion + const matches = data.matchAll(SIGNATURE_REGEX); + // Parse each of the matched signature lines into the name and signature. + // The first four bytes of the signature are the key hint (should match the + // first four bytes of the log ID), and the rest is the signature itself.
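// (For orientation, a checkpoint envelope looks roughly like the following;
// every value shown is illustrative, not taken from a real log.)
//
//   rekor.sigstore.dev - 2605736670972794746      <- origin (note line 1)
//   13398036                                      <- tree size (note line 2)
//   5FKlDas45BMQQEyLW8Tp01yHp70xDF0dbMtSJtamSLY=  <- root hash (note line 3)
//                                                 <- blank separator line
//   — rekor.sigstore.dev wNI9ajBEAiB...           <- signature line(s)
//
// Each signature value decodes to 4 key-hint bytes followed by the raw
// signature bytes, which the subarray() calls below split apart.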
+ const signatures = Array.from(matches, (match) => { + const [, name, signature] = match; + const sigBytes = Buffer.from(signature, 'base64'); + if (sigBytes.length < 5) { + throw new error_1.VerificationError({ + code: 'TLOG_INCLUSION_PROOF_ERROR', + message: 'malformed checkpoint signature', + }); + } + return { + name, + keyHint: sigBytes.subarray(0, 4), + signature: sigBytes.subarray(4), + }; + }); + if (signatures.length === 0) { + throw new error_1.VerificationError({ + code: 'TLOG_INCLUSION_PROOF_ERROR', + message: 'no signatures found in checkpoint', + }); + } + return new SignedNote(header, signatures); + } +} +// LogCheckpoint represents a transparency log checkpoint. Consists of the +// following: +// - origin: the name of the transparency log +// - logSize: the size of the log at the time of the checkpoint +// - logHash: the root hash of the log at the time of the checkpoint +// - rest: the rest of the checkpoint body, which is a list of log entries +// See: +// https://github.com/transparency-dev/formats/blob/main/log/README.md#checkpoint-body +class LogCheckpoint { + constructor(origin, logSize, logHash, rest) { + this.origin = origin; + this.logSize = logSize; + this.logHash = logHash; + this.rest = rest; + } + static fromString(note) { + const lines = note.trimEnd().split('\n'); + if (lines.length < 3) { + throw new error_1.VerificationError({ + code: 'TLOG_INCLUSION_PROOF_ERROR', + message: 'too few lines in checkpoint header', + }); + } + const origin = lines[0]; + const logSize = BigInt(lines[1]); + const rootHash = Buffer.from(lines[2], 'base64'); + const rest = lines.slice(3); + return new LogCheckpoint(origin, logSize, rootHash, rest); + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/timestamp/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/timestamp/index.js new file mode 100644 index 0000000000000000000000000000000000000000..56e948de19338d3319621156885bc98503e5180b --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/timestamp/index.js @@ -0,0 +1,46 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifyTSATimestamp = verifyTSATimestamp; +exports.verifyTLogTimestamp = verifyTLogTimestamp; +const error_1 = require("../error"); +const checkpoint_1 = require("./checkpoint"); +const merkle_1 = require("./merkle"); +const set_1 = require("./set"); +const tsa_1 = require("./tsa"); +function verifyTSATimestamp(timestamp, data, timestampAuthorities) { + (0, tsa_1.verifyRFC3161Timestamp)(timestamp, data, timestampAuthorities); + return { + type: 'timestamp-authority', + logID: timestamp.signerSerialNumber, + timestamp: timestamp.signingTime, + }; +} +function verifyTLogTimestamp(entry, tlogAuthorities) { + let inclusionVerified = false; + if (isTLogEntryWithInclusionPromise(entry)) { + (0, set_1.verifyTLogSET)(entry, tlogAuthorities); + inclusionVerified = true; + } + if (isTLogEntryWithInclusionProof(entry)) { + (0, merkle_1.verifyMerkleInclusion)(entry); + (0, checkpoint_1.verifyCheckpoint)(entry, tlogAuthorities); + inclusionVerified = true; + } + if (!inclusionVerified) { + throw new error_1.VerificationError({ + code: 'TLOG_MISSING_INCLUSION_ERROR', + message: 'inclusion could not be verified', + }); + } + return { + type: 'transparency-log', + logID: entry.logId.keyId, + timestamp: new Date(Number(entry.integratedTime) * 1000), + }; +} +function 
isTLogEntryWithInclusionPromise(entry) { + return entry.inclusionPromise !== undefined; +} +function isTLogEntryWithInclusionProof(entry) { + return entry.inclusionProof !== undefined; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/timestamp/merkle.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/timestamp/merkle.js new file mode 100644 index 0000000000000000000000000000000000000000..f57cae42002bd01991839351c50d9fc58ce36075 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/timestamp/merkle.js @@ -0,0 +1,104 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifyMerkleInclusion = verifyMerkleInclusion; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const core_1 = require("@sigstore/core"); +const error_1 = require("../error"); +const RFC6962_LEAF_HASH_PREFIX = Buffer.from([0x00]); +const RFC6962_NODE_HASH_PREFIX = Buffer.from([0x01]); +function verifyMerkleInclusion(entry) { + const inclusionProof = entry.inclusionProof; + const logIndex = BigInt(inclusionProof.logIndex); + const treeSize = BigInt(inclusionProof.treeSize); + if (logIndex < 0n || logIndex >= treeSize) { + throw new error_1.VerificationError({ + code: 'TLOG_INCLUSION_PROOF_ERROR', + message: `invalid index: ${logIndex}`, + }); + } + // Figure out which subset of hashes corresponds to the inner and border + // nodes + const { inner, border } = decompInclProof(logIndex, treeSize); + if (inclusionProof.hashes.length !== inner + border) { + throw new error_1.VerificationError({ + code: 'TLOG_INCLUSION_PROOF_ERROR', + message: 'invalid hash count', + }); + } + const innerHashes = inclusionProof.hashes.slice(0, inner); + const borderHashes = inclusionProof.hashes.slice(inner); + // The entry's hash is the leaf hash + const leafHash = hashLeaf(entry.canonicalizedBody); + // Chain the hashes belonging to the inner and border portions + const calculatedHash = chainBorderRight(chainInner(leafHash, innerHashes, logIndex), borderHashes); + // Calculated hash should match the root hash in the inclusion proof + if (!core_1.crypto.bufferEqual(calculatedHash, inclusionProof.rootHash)) { + throw new error_1.VerificationError({ + code: 'TLOG_INCLUSION_PROOF_ERROR', + message: 'calculated root hash does not match inclusion proof', + }); + } +} +// Breaks down inclusion proof for a leaf at the specified index in a tree of +// the specified size. The split point is where paths to the index leaf and +// the (size - 1) leaf diverge. Returns lengths of the bottom and upper proof +// parts. +function decompInclProof(index, size) { + const inner = innerProofSize(index, size); + const border = onesCount(index >> BigInt(inner)); + return { inner, border }; +} +// Computes a subtree hash for a node on or below the tree's right border. 
+// Assumes the provided proof hashes are ordered from lower to higher levels +// and seed is the initial hash of the node specified by the index. +function chainInner(seed, hashes, index) { + return hashes.reduce((acc, h, i) => { + if ((index >> BigInt(i)) & BigInt(1)) { + return hashChildren(h, acc); + } + else { + return hashChildren(acc, h); + } + }, seed); +} +// Computes a subtree hash for nodes along the tree's right border. +function chainBorderRight(seed, hashes) { + return hashes.reduce((acc, h) => hashChildren(h, acc), seed); +} +function innerProofSize(index, size) { + return bitLength(index ^ (size - BigInt(1))); +} +// Counts the number of ones in the binary representation of the given number. +// https://en.wikipedia.org/wiki/Hamming_weight +function onesCount(num) { + return num.toString(2).split('1').length - 1; +} +// Returns the number of bits necessary to represent an integer in binary. +function bitLength(n) { + if (n === 0n) { + return 0; + } + return n.toString(2).length; +} +// Hashing logic according to RFC6962. +// https://datatracker.ietf.org/doc/html/rfc6962#section-2 +function hashChildren(left, right) { + return core_1.crypto.digest('sha256', RFC6962_NODE_HASH_PREFIX, left, right); +} +function hashLeaf(leaf) { + return core_1.crypto.digest('sha256', RFC6962_LEAF_HASH_PREFIX, leaf); +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/timestamp/set.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/timestamp/set.js new file mode 100644 index 0000000000000000000000000000000000000000..5d3f47bb88746addedd419c3a1fbdc1f2ca85bec --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/timestamp/set.js @@ -0,0 +1,60 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifyTLogSET = verifyTLogSET; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const core_1 = require("@sigstore/core"); +const error_1 = require("../error"); +const trust_1 = require("../trust"); +// Verifies the SET for the given entry against the list of trusted +// transparency logs. Returns true if the SET can be verified against at least +// one of the trusted logs; otherwise, returns false. 
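// (For context, a commented-out sketch of the payload that the SET actually
// signs; field values are illustrative. The canonical JSON form sorts keys,
// so the verifier can rebuild the exact signed bytes from the entry alone.)
//
//   {"body":"eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoi...",
//    "integratedTime":1700000000,
//    "logID":"c0d23d6ad406973f9559f3ba2d1ca01f84147d8f...",
//    "logIndex":42}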
+function verifyTLogSET(entry, tlogs) { + // Filter the list of tlog instances to only those which might be able to + // verify the SET + const validTLogs = (0, trust_1.filterTLogAuthorities)(tlogs, { + logID: entry.logId.keyId, + targetDate: new Date(Number(entry.integratedTime) * 1000), + }); + // Check to see if we can verify the SET against any of the valid tlogs + const verified = validTLogs.some((tlog) => { + // Re-create the original Rekor verification payload + const payload = toVerificationPayload(entry); + // Canonicalize the payload and turn into a buffer for verification + const data = Buffer.from(core_1.json.canonicalize(payload), 'utf8'); + // Extract the SET from the tlog entry + const signature = entry.inclusionPromise.signedEntryTimestamp; + return core_1.crypto.verify(data, tlog.publicKey, signature); + }); + if (!verified) { + throw new error_1.VerificationError({ + code: 'TLOG_INCLUSION_PROMISE_ERROR', + message: 'inclusion promise could not be verified', + }); + } +} +// Returns a properly formatted "VerificationPayload" for one of the +// transaction log entries in the given bundle which can be used for SET +// verification. +function toVerificationPayload(entry) { + const { integratedTime, logIndex, logId, canonicalizedBody } = entry; + return { + body: canonicalizedBody.toString('base64'), + integratedTime: Number(integratedTime), + logIndex: Number(logIndex), + logID: logId.keyId.toString('hex'), + }; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/timestamp/tsa.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/timestamp/tsa.js new file mode 100644 index 0000000000000000000000000000000000000000..0da4a3de8247fdb6f78c71e724c3042d71842c05 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/timestamp/tsa.js @@ -0,0 +1,63 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifyRFC3161Timestamp = verifyRFC3161Timestamp; +const core_1 = require("@sigstore/core"); +const error_1 = require("../error"); +const certificate_1 = require("../key/certificate"); +const trust_1 = require("../trust"); +function verifyRFC3161Timestamp(timestamp, data, timestampAuthorities) { + const signingTime = timestamp.signingTime; + // Filter for CAs which were valid at the time of signing + timestampAuthorities = (0, trust_1.filterCertAuthorities)(timestampAuthorities, signingTime); + // Filter for CAs which match serial and issuer embedded in the timestamp + timestampAuthorities = filterCAsBySerialAndIssuer(timestampAuthorities, { + serialNumber: timestamp.signerSerialNumber, + issuer: timestamp.signerIssuer, + }); + // Check that we can verify the timestamp with AT LEAST ONE of the remaining + // CAs + const verified = timestampAuthorities.some((ca) => { + try { + verifyTimestampForCA(timestamp, data, ca); + return true; + } + catch (e) { + return false; + } + }); + if (!verified) { + throw new error_1.VerificationError({ + code: 'TIMESTAMP_ERROR', + message: 'timestamp could not be verified', + }); + } +} +function verifyTimestampForCA(timestamp, data, ca) { + const [leaf, ...cas] = ca.certChain; + const signingKey = core_1.crypto.createPublicKey(leaf.publicKey); + const signingTime = timestamp.signingTime; + // Verify the certificate chain for the provided CA + try { + new certificate_1.CertificateChainVerifier({ + untrustedCert: leaf, + trustedCerts: cas, + timestamp: signingTime,
}).verify(); + } + catch (e) { + throw new error_1.VerificationError({ + code: 'TIMESTAMP_ERROR', + message: 'invalid certificate chain', + }); + } + // Check that the signing certificate's key can be used to verify the + // timestamp signature. + timestamp.verify(data, signingKey); +} +// Filters the list of CAs to those which have a leaf signing certificate which +// matches the given serial number and issuer. +function filterCAsBySerialAndIssuer(timestampAuthorities, criteria) { + return timestampAuthorities.filter((ca) => ca.certChain.length > 0 && + core_1.crypto.bufferEqual(ca.certChain[0].serialNumber, criteria.serialNumber) && + core_1.crypto.bufferEqual(ca.certChain[0].issuer, criteria.issuer)); +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/tlog/dsse.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/tlog/dsse.js new file mode 100644 index 0000000000000000000000000000000000000000..d71ed8c6e7ad9a98cbca423aca0760317115d3f7 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/tlog/dsse.js @@ -0,0 +1,57 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifyDSSETLogBody = verifyDSSETLogBody; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const error_1 = require("../error"); +// Compare the given intoto tlog entry to the given bundle +function verifyDSSETLogBody(tlogEntry, content) { + switch (tlogEntry.apiVersion) { + case '0.0.1': + return verifyDSSE001TLogBody(tlogEntry, content); + default: + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: `unsupported dsse version: ${tlogEntry.apiVersion}`, + }); + } +} +// Compare the given dsse v0.0.1 tlog entry to the given DSSE envelope. 
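// (A dsse v0.0.1 entry body has roughly the following shape; fields are
// abridged and values illustrative. The checks below read
// spec.signatures[0].signature as base64 and spec.payloadHash.value as hex.)
//
//   {
//     "apiVersion": "0.0.1",
//     "kind": "dsse",
//     "spec": {
//       "payloadHash": { "algorithm": "sha256", "value": "9f86d081..." },
//       "signatures": [{ "signature": "MEUCIQDx..." }]
//     }
//   }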
+function verifyDSSE001TLogBody(tlogEntry, content) { + // Ensure the bundle's DSSE only contains a single signature + if (tlogEntry.spec.signatures?.length !== 1) { + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: 'signature count mismatch', + }); + } + const tlogSig = tlogEntry.spec.signatures[0].signature; + // Ensure that the signature in the bundle's DSSE matches tlog entry + if (!content.compareSignature(Buffer.from(tlogSig, 'base64'))) + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: 'tlog entry signature mismatch', + }); + // Ensure the digest of the bundle's DSSE payload matches the digest in the + // tlog entry + const tlogHash = tlogEntry.spec.payloadHash?.value || ''; + if (!content.compareDigest(Buffer.from(tlogHash, 'hex'))) { + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: 'DSSE payload hash mismatch', + }); + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js new file mode 100644 index 0000000000000000000000000000000000000000..c4aa345b57ba7afd41888f99ecaf70216c3f812d --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js @@ -0,0 +1,51 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifyHashedRekordTLogBody = verifyHashedRekordTLogBody; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +const error_1 = require("../error"); +// Compare the given hashedrekord tlog entry to the given bundle +function verifyHashedRekordTLogBody(tlogEntry, content) { + switch (tlogEntry.apiVersion) { + case '0.0.1': + return verifyHashedrekord001TLogBody(tlogEntry, content); + default: + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: `unsupported hashedrekord version: ${tlogEntry.apiVersion}`, + }); + } +} +// Compare the given hashedrekord v0.0.1 tlog entry to the given message +// signature +function verifyHashedrekord001TLogBody(tlogEntry, content) { + // Ensure that the bundle's message signature matches the tlog entry + const tlogSig = tlogEntry.spec.signature.content || ''; + if (!content.compareSignature(Buffer.from(tlogSig, 'base64'))) { + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: 'signature mismatch', + }); + } + // Ensure that the bundle's message digest matches the tlog entry + const tlogDigest = tlogEntry.spec.data.hash?.value || ''; + if (!content.compareDigest(Buffer.from(tlogDigest, 'hex'))) { + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: 'digest mismatch', + }); + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/tlog/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/tlog/index.js new file mode 100644 index 0000000000000000000000000000000000000000..da235360c594a85b88f76bf9ec07462a1c380bf9 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/tlog/index.js @@ -0,0 +1,47 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifyTLogBody = verifyTLogBody; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const error_1 = require("../error"); +const dsse_1 = require("./dsse"); +const hashedrekord_1 = require("./hashedrekord"); +const intoto_1 = require("./intoto"); +// Verifies that the given tlog entry matches the supplied signature content.
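// (A commented-out usage sketch with a hypothetical entry value, showing how
// the dispatcher below is driven purely by kindVersion plus the JSON parsed
// from canonicalizedBody.)
//
//   const entry = {
//     kindVersion: { kind: 'hashedrekord', version: '0.0.1' },
//     canonicalizedBody: Buffer.from(JSON.stringify({
//       apiVersion: '0.0.1',
//       kind: 'hashedrekord',
//       spec: { data: { hash: { value: '...' } }, signature: { content: '...' } },
//     })),
//   };
//   verifyTLogBody(entry, sigContent); // throws a VerificationError on mismatch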
+function verifyTLogBody(entry, sigContent) { + const { kind, version } = entry.kindVersion; + const body = JSON.parse(entry.canonicalizedBody.toString('utf8')); + if (kind !== body.kind || version !== body.apiVersion) { + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: `kind/version mismatch - expected: ${kind}/${version}, received: ${body.kind}/${body.apiVersion}`, + }); + } + switch (body.kind) { + case 'dsse': + return (0, dsse_1.verifyDSSETLogBody)(body, sigContent); + case 'intoto': + return (0, intoto_1.verifyIntotoTLogBody)(body, sigContent); + case 'hashedrekord': + return (0, hashedrekord_1.verifyHashedRekordTLogBody)(body, sigContent); + /* istanbul ignore next */ + default: + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: `unsupported kind: ${kind}`, + }); + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/tlog/intoto.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/tlog/intoto.js new file mode 100644 index 0000000000000000000000000000000000000000..9096ae9418cc307adb419c9daefd8099e0a26094 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/tlog/intoto.js @@ -0,0 +1,62 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifyIntotoTLogBody = verifyIntotoTLogBody; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const error_1 = require("../error"); +// Compare the given intoto tlog entry to the given bundle +function verifyIntotoTLogBody(tlogEntry, content) { + switch (tlogEntry.apiVersion) { + case '0.0.2': + return verifyIntoto002TLogBody(tlogEntry, content); + default: + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: `unsupported intoto version: ${tlogEntry.apiVersion}`, + }); + } +} +// Compare the given intoto v0.0.2 tlog entry to the given DSSE envelope. 
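// (Note the double encoding handled below: an intoto v0.0.2 entry stores
// base64(base64(raw signature)). A quick commented-out sketch of the
// unwrapping, with an illustrative value:)
//
//   const outer = 'TUVVQ0lRRC4uLg==';                            // as stored in the tlog entry
//   const inner = Buffer.from(outer, 'base64').toString('utf-8'); // 'MEUCIQD...'
//   const raw = Buffer.from(inner, 'base64');                     // the actual signature bytes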
+function verifyIntoto002TLogBody(tlogEntry, content) { + // Ensure the bundle's DSSE contains a single signature + if (tlogEntry.spec.content.envelope.signatures?.length !== 1) { + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: 'signature count mismatch', + }); + } + // Signature is double-base64-encoded in the tlog entry + const tlogSig = base64Decode(tlogEntry.spec.content.envelope.signatures[0].sig); + // Ensure that the signature in the bundle's DSSE matches tlog entry + if (!content.compareSignature(Buffer.from(tlogSig, 'base64'))) { + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: 'tlog entry signature mismatch', + }); + } + // Ensure the digest of the bundle's DSSE payload matches the digest in the + // tlog entry + const tlogHash = tlogEntry.spec.content.payloadHash?.value || ''; + if (!content.compareDigest(Buffer.from(tlogHash, 'hex'))) { + throw new error_1.VerificationError({ + code: 'TLOG_BODY_ERROR', + message: 'DSSE payload hash mismatch', + }); + } +} +function base64Decode(str) { + return Buffer.from(str, 'base64').toString('utf-8'); +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/trust/filter.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/trust/filter.js new file mode 100644 index 0000000000000000000000000000000000000000..98bd25cd70e591f58998abff2f224a2e10403113 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/trust/filter.js @@ -0,0 +1,23 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.filterCertAuthorities = filterCertAuthorities; +exports.filterTLogAuthorities = filterTLogAuthorities; +function filterCertAuthorities(certAuthorities, timestamp) { + return certAuthorities.filter((ca) => { + return ca.validFor.start <= timestamp && ca.validFor.end >= timestamp; + }); +} +// Filter the list of tlog instances to only those which match the given log +// ID and have public keys which are valid for the given integrated time. +function filterTLogAuthorities(tlogAuthorities, criteria) { + return tlogAuthorities.filter((tlog) => { + // If we're filtering by log ID and the log IDs don't match, we can't use + // this tlog + if (criteria.logID && !tlog.logID.equals(criteria.logID)) { + return false; + } + // Check that the integrated time is within the validFor range + return (tlog.validFor.start <= criteria.targetDate && + criteria.targetDate <= tlog.validFor.end); + }); +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/trust/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/trust/index.js new file mode 100644 index 0000000000000000000000000000000000000000..bfab2eb4f9975a379afa6b8cc7d87e180abbead0 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/trust/index.js @@ -0,0 +1,86 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.filterTLogAuthorities = exports.filterCertAuthorities = void 0; +exports.toTrustMaterial = toTrustMaterial; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const core_1 = require("@sigstore/core"); +const protobuf_specs_1 = require("@sigstore/protobuf-specs"); +const error_1 = require("../error"); +const BEGINNING_OF_TIME = new Date(0); +const END_OF_TIME = new Date(8640000000000000); +var filter_1 = require("./filter"); +Object.defineProperty(exports, "filterCertAuthorities", { enumerable: true, get: function () { return filter_1.filterCertAuthorities; } }); +Object.defineProperty(exports, "filterTLogAuthorities", { enumerable: true, get: function () { return filter_1.filterTLogAuthorities; } }); +function toTrustMaterial(root, keys) { + const keyFinder = typeof keys === 'function' ? keys : keyLocator(keys); + return { + certificateAuthorities: root.certificateAuthorities.map(createCertAuthority), + timestampAuthorities: root.timestampAuthorities.map(createCertAuthority), + tlogs: root.tlogs.map(createTLogAuthority), + ctlogs: root.ctlogs.map(createTLogAuthority), + publicKey: keyFinder, + }; +} +function createTLogAuthority(tlogInstance) { + const keyDetails = tlogInstance.publicKey.keyDetails; + const keyType = keyDetails === protobuf_specs_1.PublicKeyDetails.PKCS1_RSA_PKCS1V5 || + keyDetails === protobuf_specs_1.PublicKeyDetails.PKIX_RSA_PKCS1V5 || + keyDetails === protobuf_specs_1.PublicKeyDetails.PKIX_RSA_PKCS1V15_2048_SHA256 || + keyDetails === protobuf_specs_1.PublicKeyDetails.PKIX_RSA_PKCS1V15_3072_SHA256 || + keyDetails === protobuf_specs_1.PublicKeyDetails.PKIX_RSA_PKCS1V15_4096_SHA256 + ? 
'pkcs1' + : 'spki'; + return { + logID: tlogInstance.logId.keyId, + publicKey: core_1.crypto.createPublicKey(tlogInstance.publicKey.rawBytes, keyType), + validFor: { + start: tlogInstance.publicKey.validFor?.start || BEGINNING_OF_TIME, + end: tlogInstance.publicKey.validFor?.end || END_OF_TIME, + }, + }; +} +function createCertAuthority(ca) { + /* istanbul ignore next */ + return { + certChain: ca.certChain.certificates.map((cert) => { + return core_1.X509Certificate.parse(cert.rawBytes); + }), + validFor: { + start: ca.validFor?.start || BEGINNING_OF_TIME, + end: ca.validFor?.end || END_OF_TIME, + }, + }; +} +function keyLocator(keys) { + return (hint) => { + const key = (keys || {})[hint]; + if (!key) { + throw new error_1.VerificationError({ + code: 'PUBLIC_KEY_ERROR', + message: `key not found: ${hint}`, + }); + } + return { + publicKey: core_1.crypto.createPublicKey(key.rawBytes), + validFor: (date) => { + /* istanbul ignore next */ + return ((key.validFor?.start || BEGINNING_OF_TIME) <= date && + (key.validFor?.end || END_OF_TIME) >= date); + }, + }; + }; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/trust/trust.types.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/trust/trust.types.js new file mode 100644 index 0000000000000000000000000000000000000000..c8ad2e549bdc6801e0d1c80b0308d4b9bd4985ce --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/trust/trust.types.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/verifier.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/verifier.js new file mode 100644 index 0000000000000000000000000000000000000000..6a9d11a3b6f8f422a6c65a5435afa952b0411b14 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/dist/verifier.js @@ -0,0 +1,143 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Verifier = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const util_1 = require("util"); +const error_1 = require("./error"); +const key_1 = require("./key"); +const policy_1 = require("./policy"); +const timestamp_1 = require("./timestamp"); +const tlog_1 = require("./tlog"); +class Verifier { + constructor(trustMaterial, options = {}) { + this.trustMaterial = trustMaterial; + this.options = { + ctlogThreshold: options.ctlogThreshold ?? 1, + tlogThreshold: options.tlogThreshold ?? 1, + tsaThreshold: options.tsaThreshold ?? 
0, + }; + } + verify(entity, policy) { + const timestamps = this.verifyTimestamps(entity); + const signer = this.verifySigningKey(entity, timestamps); + this.verifyTLogs(entity); + this.verifySignature(entity, signer); + if (policy) { + this.verifyPolicy(policy, signer.identity || {}); + } + return signer; + } + // Checks that all of the timestamps in the entity are valid and returns them + verifyTimestamps(entity) { + let tlogCount = 0; + let tsaCount = 0; + const timestamps = entity.timestamps.map((timestamp) => { + switch (timestamp.$case) { + case 'timestamp-authority': + tsaCount++; + return (0, timestamp_1.verifyTSATimestamp)(timestamp.timestamp, entity.signature.signature, this.trustMaterial.timestampAuthorities); + case 'transparency-log': + tlogCount++; + return (0, timestamp_1.verifyTLogTimestamp)(timestamp.tlogEntry, this.trustMaterial.tlogs); + } + }); + // Check for duplicate timestamps + if (containsDupes(timestamps)) { + throw new error_1.VerificationError({ + code: 'TIMESTAMP_ERROR', + message: 'duplicate timestamp', + }); + } + if (tlogCount < this.options.tlogThreshold) { + throw new error_1.VerificationError({ + code: 'TIMESTAMP_ERROR', + message: `expected ${this.options.tlogThreshold} tlog timestamps, got ${tlogCount}`, + }); + } + if (tsaCount < this.options.tsaThreshold) { + throw new error_1.VerificationError({ + code: 'TIMESTAMP_ERROR', + message: `expected ${this.options.tsaThreshold} tsa timestamps, got ${tsaCount}`, + }); + } + return timestamps.map((t) => t.timestamp); + } + // Checks that the signing key is valid for all of the supplied timestamps + // and returns the signer. + verifySigningKey({ key }, timestamps) { + switch (key.$case) { + case 'public-key': { + return (0, key_1.verifyPublicKey)(key.hint, timestamps, this.trustMaterial); + } + case 'certificate': { + const result = (0, key_1.verifyCertificate)(key.certificate, timestamps, this.trustMaterial); + /* istanbul ignore next - no fixture */ + if (containsDupes(result.scts)) { + throw new error_1.VerificationError({ + code: 'CERTIFICATE_ERROR', + message: 'duplicate SCT', + }); + } + if (result.scts.length < this.options.ctlogThreshold) { + throw new error_1.VerificationError({ + code: 'CERTIFICATE_ERROR', + message: `expected ${this.options.ctlogThreshold} SCTs, got ${result.scts.length}`, + }); + } + return result.signer; + } + } + } + // Checks that the tlog entries are valid for the supplied content + verifyTLogs({ signature: content, tlogEntries }) { + tlogEntries.forEach((entry) => (0, tlog_1.verifyTLogBody)(entry, content)); + } + // Checks that the signature is valid for the supplied content + verifySignature(entity, signer) { + if (!entity.signature.verifySignature(signer.key)) { + throw new error_1.VerificationError({ + code: 'SIGNATURE_ERROR', + message: 'signature verification failed', + }); + } + } + verifyPolicy(policy, identity) { + // Check the subject alternative name of the signer matches the policy + /* istanbul ignore else */ + if (policy.subjectAlternativeName) { + (0, policy_1.verifySubjectAlternativeName)(policy.subjectAlternativeName, identity.subjectAlternativeName); + } + // Check that the extensions of the signer match the policy + /* istanbul ignore else */ + if (policy.extensions) { + (0, policy_1.verifyExtensions)(policy.extensions, identity.extensions); + } + } +} +exports.Verifier = Verifier; +// Checks for duplicate items in the array. Objects are compared using +// deep equality.
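// (End-to-end, the pieces in this file compose as follows; a sketch with
// hypothetical bundle/artifact/trustedRoot inputs, using the exports that
// appear in this package's index.js.)
//
//   const { toSignedEntity, toTrustMaterial, Verifier } = require('@sigstore/verify');
//
//   const trustMaterial = toTrustMaterial(trustedRoot); // e.g. from a TUF trusted_root.json
//   const verifier = new Verifier(trustMaterial, { tlogThreshold: 1, ctlogThreshold: 1 });
//   const signer = verifier.verify(toSignedEntity(bundle, artifact), {
//     subjectAlternativeName: 'https://github.com/foo/bar/.github/workflows/release.yml@refs/tags/v1.0.0',
//     extensions: { issuer: 'https://token.actions.githubusercontent.com' },
//   });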
+function containsDupes(arr) { + for (let i = 0; i < arr.length; i++) { + for (let j = i + 1; j < arr.length; j++) { + if ((0, util_1.isDeepStrictEqual)(arr[i], arr[j])) { + return true; + } + } + } + return false; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/package.json new file mode 100644 index 0000000000000000000000000000000000000000..eaf12376c90254e7356daa373a3370318e7fb071 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@sigstore/verify/package.json @@ -0,0 +1,36 @@ +{ + "name": "@sigstore/verify", + "version": "3.0.0", + "description": "Verification of Sigstore signatures", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "clean": "shx rm -rf dist *.tsbuildinfo", + "build": "tsc --build", + "test": "jest" + }, + "files": [ + "dist" + ], + "author": "bdehamer@github.com", + "license": "Apache-2.0", + "repository": { + "type": "git", + "url": "git+https://github.com/sigstore/sigstore-js.git" + }, + "bugs": { + "url": "https://github.com/sigstore/sigstore-js/issues" + }, + "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/verify#readme", + "publishConfig": { + "provenance": true + }, + "dependencies": { + "@sigstore/protobuf-specs": "^0.5.0", + "@sigstore/bundle": "^4.0.0", + "@sigstore/core": "^3.0.0" + }, + "engines": { + "node": "^20.17.0 || >=22.9.0" + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/canonical-json/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/canonical-json/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..420700f5d376596d46e8e2548ce51b1199384d9a --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/canonical-json/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 GitHub and the TUF Contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
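// (Ahead of the implementation that follows, a usage sketch of this package's
// canonicalize export; the output shown is derived by hand from the rules in
// lib/index.js.)
//
//   const { canonicalize } = require('@tufjs/canonical-json');
//
//   canonicalize({ b: 1, a: [true, null, 'x"y'] });
//   // => '{"a":[true,null,"x\"y"],"b":1}'
//
// Keys are sorted, no whitespace is emitted, only backslashes and double
// quotes are escaped, and non-integer numbers are rejected with a TypeError.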
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/canonical-json/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/canonical-json/lib/index.js new file mode 100644 index 0000000000000000000000000000000000000000..d480696de1f6c38d4bd81439a069d759652d3594 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/canonical-json/lib/index.js @@ -0,0 +1,64 @@ +const COMMA = ','; +const COLON = ':'; +const LEFT_SQUARE_BRACKET = '['; +const RIGHT_SQUARE_BRACKET = ']'; +const LEFT_CURLY_BRACKET = '{'; +const RIGHT_CURLY_BRACKET = '}'; + +// Recursively encodes the supplied object according to the canonical JSON form +// as specified at http://wiki.laptop.org/go/Canonical_JSON. It's a restricted +// dialect of JSON in which keys are lexically sorted, floats are not allowed, +// and only double quotes and backslashes are escaped. +function canonicalize(object) { + const buffer = []; + if (typeof object === 'string') { + buffer.push(canonicalizeString(object)); + } else if (typeof object === 'boolean') { + buffer.push(JSON.stringify(object)); + } else if (Number.isInteger(object)) { + buffer.push(JSON.stringify(object)); + } else if (object === null) { + buffer.push(JSON.stringify(object)); + } else if (Array.isArray(object)) { + buffer.push(LEFT_SQUARE_BRACKET); + let first = true; + object.forEach((element) => { + if (!first) { + buffer.push(COMMA); + } + first = false; + buffer.push(canonicalize(element)); + }); + buffer.push(RIGHT_SQUARE_BRACKET); + } else if (typeof object === 'object') { + buffer.push(LEFT_CURLY_BRACKET); + let first = true; + Object.keys(object) + .sort() + .forEach((property) => { + if (!first) { + buffer.push(COMMA); + } + first = false; + buffer.push(canonicalizeString(property)); + buffer.push(COLON); + buffer.push(canonicalize(object[property])); + }); + buffer.push(RIGHT_CURLY_BRACKET); + } else { + throw new TypeError('cannot encode ' + object.toString()); + } + + return buffer.join(''); +} + +// String canonicalization consists of escaping backslash (\) and double +// quote (") characters and wrapping the resulting string in double quotes. 
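+// For example (editor's illustration, not upstream code):
+//   canonicalize({ b: 2, a: 'say "hi"' })  -->  {"a":"say \"hi\"","b":2}
+// keys are sorted and only backslash/quote are escaped, while
+//   canonicalize(1.5) throws a TypeError, since floats have no canonical form.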
+function canonicalizeString(string) { + const escapedString = string.replace(/\\/g, '\\\\').replace(/"/g, '\\"'); + return '"' + escapedString + '"'; +} + +module.exports = { + canonicalize, +}; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/canonical-json/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/canonical-json/package.json new file mode 100644 index 0000000000000000000000000000000000000000..886c0c3969225adc8eee4d118be4a7bdff8dbf70 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/canonical-json/package.json @@ -0,0 +1,35 @@ +{ + "name": "@tufjs/canonical-json", + "version": "2.0.0", + "description": "OLPC JSON canonicalization", + "main": "lib/index.js", + "typings": "lib/index.d.ts", + "license": "MIT", + "keywords": [ + "json", + "canonical", + "canonicalize", + "canonicalization", + "crypto", + "signature", + "olpc" + ], + "author": "bdehamer@github.com", + "repository": { + "type": "git", + "url": "git+https://github.com/theupdateframework/tuf-js.git" + }, + "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/canonical-json#readme", + "bugs": { + "url": "https://github.com/theupdateframework/tuf-js/issues" + }, + "files": [ + "lib/" + ], + "scripts": { + "test": "jest" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..420700f5d376596d46e8e2548ce51b1199384d9a --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 GitHub and the TUF Contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/base.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/base.js new file mode 100644 index 0000000000000000000000000000000000000000..14f0024f8091a1dec75be696700476996da140fa --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/base.js @@ -0,0 +1,96 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Signed = exports.MetadataKind = void 0; +exports.isMetadataKind = isMetadataKind; +const util_1 = __importDefault(require("util")); +const error_1 = require("./error"); +const utils_1 = require("./utils"); +const SPECIFICATION_VERSION = ['1', '0', '31']; +var MetadataKind; +(function (MetadataKind) { + MetadataKind["Root"] = "root"; + MetadataKind["Timestamp"] = "timestamp"; + MetadataKind["Snapshot"] = "snapshot"; + MetadataKind["Targets"] = "targets"; +})(MetadataKind || (exports.MetadataKind = MetadataKind = {})); +function isMetadataKind(value) { + return (typeof value === 'string' && + Object.values(MetadataKind).includes(value)); +} +/*** + * A base class for the signed part of TUF metadata. + * + * Objects with base class Signed are usually included in a ``Metadata`` object + * on the signed attribute. This class provides attributes and methods that + * are common for all TUF metadata types (roles). + */ +class Signed { + specVersion; + expires; + version; + unrecognizedFields; + constructor(options) { + this.specVersion = options.specVersion || SPECIFICATION_VERSION.join('.'); + const specList = this.specVersion.split('.'); + if (!(specList.length === 2 || specList.length === 3) || + !specList.every((item) => isNumeric(item))) { + throw new error_1.ValueError('Failed to parse specVersion'); + } + // major version must match + if (specList[0] != SPECIFICATION_VERSION[0]) { + throw new error_1.ValueError('Unsupported specVersion'); + } + this.expires = options.expires; + this.version = options.version; + this.unrecognizedFields = options.unrecognizedFields || {}; + } + equals(other) { + if (!(other instanceof Signed)) { + return false; + } + return (this.specVersion === other.specVersion && + this.expires === other.expires && + this.version === other.version && + util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); + } + isExpired(referenceTime) { + if (!referenceTime) { + referenceTime = new Date(); + } + return referenceTime >= new Date(this.expires); + } + static commonFieldsFromJSON(data) { + const { spec_version, expires, version, ...rest } = data; + if (!utils_1.guard.isDefined(spec_version)) { + throw new error_1.ValueError('spec_version is not defined'); + } + else if (typeof spec_version !== 'string') { + throw new TypeError('spec_version must be a string'); + } + if (!utils_1.guard.isDefined(expires)) { + throw new error_1.ValueError('expires is not defined'); + } + else if (!(typeof expires === 'string')) { + throw new TypeError('expires must be a string'); + } + if (!utils_1.guard.isDefined(version)) { + throw new error_1.ValueError('version is not defined'); + } + else if (!(typeof version === 'number')) { + throw new TypeError('version must be a number'); + } + return { + specVersion: spec_version, + expires, + version, + unrecognizedFields: rest, + }; + } +} +exports.Signed = Signed; +function isNumeric(str) { + return !isNaN(Number(str)); +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/delegations.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/delegations.js new file mode 100644 index 0000000000000000000000000000000000000000..9ad8bf05f1c6b7359a3f3bd3a40b4293c435d2cf --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/delegations.js @@ -0,0 +1,119 @@ +"use strict"; +var 
__importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Delegations = void 0; +const util_1 = __importDefault(require("util")); +const error_1 = require("./error"); +const key_1 = require("./key"); +const role_1 = require("./role"); +const utils_1 = require("./utils"); +/** + * A container object storing information about all delegations. + * + * Targets roles that are trusted to provide signed metadata files + * describing targets with designated pathnames and/or further delegations. + */ +class Delegations { + keys; + roles; + unrecognizedFields; + succinctRoles; + constructor(options) { + this.keys = options.keys; + this.unrecognizedFields = options.unrecognizedFields || {}; + if (options.roles) { + if (Object.keys(options.roles).some((roleName) => role_1.TOP_LEVEL_ROLE_NAMES.includes(roleName))) { + throw new error_1.ValueError('Delegated role name conflicts with top-level role name'); + } + } + this.succinctRoles = options.succinctRoles; + this.roles = options.roles; + } + equals(other) { + if (!(other instanceof Delegations)) { + return false; + } + return (util_1.default.isDeepStrictEqual(this.keys, other.keys) && + util_1.default.isDeepStrictEqual(this.roles, other.roles) && + util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields) && + util_1.default.isDeepStrictEqual(this.succinctRoles, other.succinctRoles)); + } + *rolesForTarget(targetPath) { + if (this.roles) { + for (const role of Object.values(this.roles)) { + if (role.isDelegatedPath(targetPath)) { + yield { role: role.name, terminating: role.terminating }; + } + } + } + else if (this.succinctRoles) { + yield { + role: this.succinctRoles.getRoleForTarget(targetPath), + terminating: true, + }; + } + } + toJSON() { + const json = { + keys: keysToJSON(this.keys), + ...this.unrecognizedFields, + }; + if (this.roles) { + json.roles = rolesToJSON(this.roles); + } + else if (this.succinctRoles) { + json.succinct_roles = this.succinctRoles.toJSON(); + } + return json; + } + static fromJSON(data) { + const { keys, roles, succinct_roles, ...unrecognizedFields } = data; + let succinctRoles; + if (utils_1.guard.isObject(succinct_roles)) { + succinctRoles = role_1.SuccinctRoles.fromJSON(succinct_roles); + } + return new Delegations({ + keys: keysFromJSON(keys), + roles: rolesFromJSON(roles), + unrecognizedFields, + succinctRoles, + }); + } +} +exports.Delegations = Delegations; +function keysToJSON(keys) { + return Object.entries(keys).reduce((acc, [keyId, key]) => ({ + ...acc, + [keyId]: key.toJSON(), + }), {}); +} +function rolesToJSON(roles) { + return Object.values(roles).map((role) => role.toJSON()); +} +function keysFromJSON(data) { + if (!utils_1.guard.isObjectRecord(data)) { + throw new TypeError('keys is malformed'); + } + return Object.entries(data).reduce((acc, [keyID, keyData]) => ({ + ...acc, + [keyID]: key_1.Key.fromJSON(keyID, keyData), + }), {}); +} +function rolesFromJSON(data) { + let roleMap; + if (utils_1.guard.isDefined(data)) { + if (!utils_1.guard.isObjectArray(data)) { + throw new TypeError('roles is malformed'); + } + roleMap = data.reduce((acc, role) => { + const delegatedRole = role_1.DelegatedRole.fromJSON(role); + return { + ...acc, + [delegatedRole.name]: delegatedRole, + }; + }, {}); + } + return roleMap; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/error.js 
b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/error.js new file mode 100644 index 0000000000000000000000000000000000000000..ba80698747ba06a21c349ae95c3d3738b0379b58 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/error.js @@ -0,0 +1,27 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.UnsupportedAlgorithmError = exports.CryptoError = exports.LengthOrHashMismatchError = exports.UnsignedMetadataError = exports.RepositoryError = exports.ValueError = void 0; +// An error about insufficient values +class ValueError extends Error { +} +exports.ValueError = ValueError; +// An error with a repository's state, such as a missing file. +// It covers all exceptions that come from the repository side when +// looking from the perspective of users of metadata API or ngclient. +class RepositoryError extends Error { +} +exports.RepositoryError = RepositoryError; +// An error about metadata object with insufficient threshold of signatures. +class UnsignedMetadataError extends RepositoryError { +} +exports.UnsignedMetadataError = UnsignedMetadataError; +// An error while checking the length and hash values of an object. +class LengthOrHashMismatchError extends RepositoryError { +} +exports.LengthOrHashMismatchError = LengthOrHashMismatchError; +class CryptoError extends Error { +} +exports.CryptoError = CryptoError; +class UnsupportedAlgorithmError extends CryptoError { +} +exports.UnsupportedAlgorithmError = UnsupportedAlgorithmError; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/file.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/file.js new file mode 100644 index 0000000000000000000000000000000000000000..c8cdcb1c40271cb850de30d9a78ca26fe34a4be5 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/file.js @@ -0,0 +1,191 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TargetFile = exports.MetaFile = void 0; +const crypto_1 = __importDefault(require("crypto")); +const util_1 = __importDefault(require("util")); +const error_1 = require("./error"); +const utils_1 = require("./utils"); +// A container with information about a particular metadata file. +// +// This class is used for Timestamp and Snapshot metadata. +class MetaFile { + version; + length; + hashes; + unrecognizedFields; + constructor(opts) { + if (opts.version <= 0) { + throw new error_1.ValueError('Metafile version must be at least 1'); + } + if (opts.length !== undefined) { + validateLength(opts.length); + } + this.version = opts.version; + this.length = opts.length; + this.hashes = opts.hashes; + this.unrecognizedFields = opts.unrecognizedFields || {}; + } + equals(other) { + if (!(other instanceof MetaFile)) { + return false; + } + return (this.version === other.version && + this.length === other.length && + util_1.default.isDeepStrictEqual(this.hashes, other.hashes) && + util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); + } + verify(data) { + // Verifies that the given data matches the expected length. 
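+ // (Editor's illustration: a MetaFile built with { version: 1, length: 3,
+ // hashes: { sha256: <hex sha256 of 'abc'> } } accepts Buffer.from('abc')
+ // and throws LengthOrHashMismatchError for any other length or digest.)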
+ if (this.length !== undefined) { + if (data.length !== this.length) { + throw new error_1.LengthOrHashMismatchError(`Expected length ${this.length} but got ${data.length}`); + } + } + // Verifies that the given data matches the supplied hashes. + if (this.hashes) { + Object.entries(this.hashes).forEach(([key, value]) => { + let hash; + try { + hash = crypto_1.default.createHash(key); + } + catch (e) { + throw new error_1.LengthOrHashMismatchError(`Hash algorithm ${key} not supported`); + } + const observedHash = hash.update(data).digest('hex'); + if (observedHash !== value) { + throw new error_1.LengthOrHashMismatchError(`Expected hash ${value} but got ${observedHash}`); + } + }); + } + } + toJSON() { + const json = { + version: this.version, + ...this.unrecognizedFields, + }; + if (this.length !== undefined) { + json.length = this.length; + } + if (this.hashes) { + json.hashes = this.hashes; + } + return json; + } + static fromJSON(data) { + const { version, length, hashes, ...rest } = data; + if (typeof version !== 'number') { + throw new TypeError('version must be a number'); + } + if (utils_1.guard.isDefined(length) && typeof length !== 'number') { + throw new TypeError('length must be a number'); + } + if (utils_1.guard.isDefined(hashes) && !utils_1.guard.isStringRecord(hashes)) { + throw new TypeError('hashes must be string keys and values'); + } + return new MetaFile({ + version, + length, + hashes, + unrecognizedFields: rest, + }); + } +} +exports.MetaFile = MetaFile; +// Container for info about a particular target file. +// +// This class is used for Target metadata. +class TargetFile { + length; + path; + hashes; + unrecognizedFields; + constructor(opts) { + validateLength(opts.length); + this.length = opts.length; + this.path = opts.path; + this.hashes = opts.hashes; + this.unrecognizedFields = opts.unrecognizedFields || {}; + } + get custom() { + const custom = this.unrecognizedFields['custom']; + if (!custom || Array.isArray(custom) || !(typeof custom === 'object')) { + return {}; + } + return custom; + } + equals(other) { + if (!(other instanceof TargetFile)) { + return false; + } + return (this.length === other.length && + this.path === other.path && + util_1.default.isDeepStrictEqual(this.hashes, other.hashes) && + util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); + } + async verify(stream) { + let observedLength = 0; + // Create a digest for each hash algorithm + const digests = Object.keys(this.hashes).reduce((acc, key) => { + try { + acc[key] = crypto_1.default.createHash(key); + } + catch (e) { + throw new error_1.LengthOrHashMismatchError(`Hash algorithm ${key} not supported`); + } + return acc; + }, {}); + // Read stream chunk by chunk + for await (const chunk of stream) { + // Keep running tally of stream length + observedLength += chunk.length; + // Append chunk to each digest + Object.values(digests).forEach((digest) => { + digest.update(chunk); + }); + } + // Verify length matches expected value + if (observedLength !== this.length) { + throw new error_1.LengthOrHashMismatchError(`Expected length ${this.length} but got ${observedLength}`); + } + // Verify each digest matches expected value + Object.entries(digests).forEach(([key, value]) => { + const expected = this.hashes[key]; + const actual = value.digest('hex'); + if (actual !== expected) { + throw new error_1.LengthOrHashMismatchError(`Expected hash ${expected} but got ${actual}`); + } + }); + } + toJSON() { + return { + length: this.length, + hashes: this.hashes, + 
+ ...this.unrecognizedFields, + }; + } + static fromJSON(path, data) { + const { length, hashes, ...rest } = data; + if (typeof length !== 'number') { + throw new TypeError('length must be a number'); + } + if (!utils_1.guard.isStringRecord(hashes)) { + throw new TypeError('hashes must have string keys and values'); + } + return new TargetFile({ + length, + path, + hashes, + unrecognizedFields: rest, + }); + } +} +exports.TargetFile = TargetFile; +// Check that the supplied length is valid +function validateLength(length) { + if (length < 0) { + throw new error_1.ValueError('Length must be at least 0'); + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/index.js new file mode 100644 index 0000000000000000000000000000000000000000..a4dc783659f04557286110454f4a56ab61fcef1d --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/index.js @@ -0,0 +1,24 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Timestamp = exports.Targets = exports.Snapshot = exports.Signature = exports.Root = exports.Metadata = exports.Key = exports.TargetFile = exports.MetaFile = exports.ValueError = exports.MetadataKind = void 0; +var base_1 = require("./base"); +Object.defineProperty(exports, "MetadataKind", { enumerable: true, get: function () { return base_1.MetadataKind; } }); +var error_1 = require("./error"); +Object.defineProperty(exports, "ValueError", { enumerable: true, get: function () { return error_1.ValueError; } }); +var file_1 = require("./file"); +Object.defineProperty(exports, "MetaFile", { enumerable: true, get: function () { return file_1.MetaFile; } }); +Object.defineProperty(exports, "TargetFile", { enumerable: true, get: function () { return file_1.TargetFile; } }); +var key_1 = require("./key"); +Object.defineProperty(exports, "Key", { enumerable: true, get: function () { return key_1.Key; } }); +var metadata_1 = require("./metadata"); +Object.defineProperty(exports, "Metadata", { enumerable: true, get: function () { return metadata_1.Metadata; } }); +var root_1 = require("./root"); +Object.defineProperty(exports, "Root", { enumerable: true, get: function () { return root_1.Root; } }); +var signature_1 = require("./signature"); +Object.defineProperty(exports, "Signature", { enumerable: true, get: function () { return signature_1.Signature; } }); +var snapshot_1 = require("./snapshot"); +Object.defineProperty(exports, "Snapshot", { enumerable: true, get: function () { return snapshot_1.Snapshot; } }); +var targets_1 = require("./targets"); +Object.defineProperty(exports, "Targets", { enumerable: true, get: function () { return targets_1.Targets; } }); +var timestamp_1 = require("./timestamp"); +Object.defineProperty(exports, "Timestamp", { enumerable: true, get: function () { return timestamp_1.Timestamp; } }); diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/key.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/key.js new file mode 100644 index 0000000000000000000000000000000000000000..10bf2f4b66fc014ecde890a7f548014c504df37e --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/key.js @@ -0,0 +1,90 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return
(mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Key = void 0; +const util_1 = __importDefault(require("util")); +const error_1 = require("./error"); +const utils_1 = require("./utils"); +const key_1 = require("./utils/key"); +// A container class representing the public portion of a Key. +class Key { + keyID; + keyType; + scheme; + keyVal; + unrecognizedFields; + constructor(options) { + const { keyID, keyType, scheme, keyVal, unrecognizedFields } = options; + this.keyID = keyID; + this.keyType = keyType; + this.scheme = scheme; + this.keyVal = keyVal; + this.unrecognizedFields = unrecognizedFields || {}; + } + // Verifies that the metadata.signatures contains a signature made with + // this key and is correctly signed. + verifySignature(metadata) { + const signature = metadata.signatures[this.keyID]; + if (!signature) + throw new error_1.UnsignedMetadataError('no signature for key found in metadata'); + if (!this.keyVal.public) + throw new error_1.UnsignedMetadataError('no public key found'); + const publicKey = (0, key_1.getPublicKey)({ + keyType: this.keyType, + scheme: this.scheme, + keyVal: this.keyVal.public, + }); + const signedData = metadata.signed.toJSON(); + try { + if (!utils_1.crypto.verifySignature(signedData, publicKey, signature.sig)) { + throw new error_1.UnsignedMetadataError(`failed to verify ${this.keyID} signature`); + } + } + catch (error) { + if (error instanceof error_1.UnsignedMetadataError) { + throw error; + } + throw new error_1.UnsignedMetadataError(`failed to verify ${this.keyID} signature`); + } + } + equals(other) { + if (!(other instanceof Key)) { + return false; + } + return (this.keyID === other.keyID && + this.keyType === other.keyType && + this.scheme === other.scheme && + util_1.default.isDeepStrictEqual(this.keyVal, other.keyVal) && + util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); + } + toJSON() { + return { + keytype: this.keyType, + scheme: this.scheme, + keyval: this.keyVal, + ...this.unrecognizedFields, + }; + } + static fromJSON(keyID, data) { + const { keytype, scheme, keyval, ...rest } = data; + if (typeof keytype !== 'string') { + throw new TypeError('keytype must be a string'); + } + if (typeof scheme !== 'string') { + throw new TypeError('scheme must be a string'); + } + if (!utils_1.guard.isStringRecord(keyval)) { + throw new TypeError('keyval must be a string record'); + } + return new Key({ + keyID, + keyType: keytype, + scheme, + keyVal: keyval, + unrecognizedFields: rest, + }); + } +} +exports.Key = Key; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/metadata.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/metadata.js new file mode 100644 index 0000000000000000000000000000000000000000..1ae4b6829c0c74ba0e26ba9f139c530a6e39bcf7 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/metadata.js @@ -0,0 +1,165 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ?
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Metadata = void 0; +const canonical_json_1 = require("@tufjs/canonical-json"); +const util_1 = __importDefault(require("util")); +const base_1 = require("./base"); +const error_1 = require("./error"); +const root_1 = require("./root"); +const signature_1 = require("./signature"); +const snapshot_1 = require("./snapshot"); +const targets_1 = require("./targets"); +const timestamp_1 = require("./timestamp"); +const utils_1 = require("./utils"); +/*** + * A container for signed TUF metadata. + * + * Provides methods to convert to and from JSON, and to create and + * verify metadata signatures. + * + * ``Metadata[T]`` is a generic container type where T can be any one type of + * [``Root``, ``Timestamp``, ``Snapshot``, ``Targets``]. The purpose of this + * is to allow static type checking of the signed attribute in code using + * Metadata:: + * + * root_md = Metadata[Root].fromJSON("root.json") + * # root_md type is now Metadata[Root]. This means signed and its + * # attributes like consistent_snapshot are now statically typed and the + * # types can be verified by static type checkers and shown by IDEs + * + * Using a type constraint is not required but not doing so means T is not a + * specific type so static typing cannot happen. Note that the type constraint + * ``[Root]`` is not validated at runtime (as pure annotations are not available + * then). + * + * Apart from ``expires`` all of the arguments to the inner constructors have + * reasonable default values for new metadata. + */ +class Metadata { + signed; + signatures; + unrecognizedFields; + constructor(signed, signatures, unrecognizedFields) { + this.signed = signed; + this.signatures = signatures || {}; + this.unrecognizedFields = unrecognizedFields || {}; + } + sign(signer, append = true) { + const bytes = Buffer.from((0, canonical_json_1.canonicalize)(this.signed.toJSON())); + const signature = signer(bytes); + if (!append) { + this.signatures = {}; + } + this.signatures[signature.keyID] = signature; + } + verifyDelegate(delegatedRole, delegatedMetadata) { + let role; + let keys = {}; + switch (this.signed.type) { + case base_1.MetadataKind.Root: + keys = this.signed.keys; + role = this.signed.roles[delegatedRole]; + break; + case base_1.MetadataKind.Targets: + if (!this.signed.delegations) { + throw new error_1.ValueError(`No delegations found for ${delegatedRole}`); + } + keys = this.signed.delegations.keys; + if (this.signed.delegations.roles) { + role = this.signed.delegations.roles[delegatedRole]; + } + else if (this.signed.delegations.succinctRoles) { + if (this.signed.delegations.succinctRoles.isDelegatedRole(delegatedRole)) { + role = this.signed.delegations.succinctRoles; + } + } + break; + default: + throw new TypeError('invalid metadata type'); + } + if (!role) { + throw new error_1.ValueError(`no delegation found for ${delegatedRole}`); + } + const signingKeys = new Set(); + role.keyIDs.forEach((keyID) => { + const key = keys[keyID]; + // If we don't have the key, continue checking other keys + if (!key) { + return; + } + try { + key.verifySignature(delegatedMetadata); + signingKeys.add(key.keyID); + } + catch (error) { + // continue + } + }); + if (signingKeys.size < role.threshold) { + throw new error_1.UnsignedMetadataError(`${delegatedRole} was signed by ${signingKeys.size}/${role.threshold} keys`); + } + } + equals(other) { + if (!(other instanceof Metadata)) { + return false; + }
+ return ( + // eslint-disable-next-line @typescript-eslint/no-unsafe-argument + this.signed.equals(other.signed) && + util_1.default.isDeepStrictEqual(this.signatures, other.signatures) && + util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); + } + toJSON() { + const signatures = Object.values(this.signatures).map((signature) => { + return signature.toJSON(); + }); + return { + signatures, + signed: this.signed.toJSON(), + ...this.unrecognizedFields, + }; + } + static fromJSON(type, data) { + const { signed, signatures, ...rest } = data; + if (!utils_1.guard.isDefined(signed) || !utils_1.guard.isObject(signed)) { + throw new TypeError('signed is not defined'); + } + if (type !== signed._type) { + throw new error_1.ValueError(`expected '${type}', got ${signed['_type']}`); + } + if (!utils_1.guard.isObjectArray(signatures)) { + throw new TypeError('signatures is not an array'); + } + let signedObj; + switch (type) { + case base_1.MetadataKind.Root: + signedObj = root_1.Root.fromJSON(signed); + break; + case base_1.MetadataKind.Timestamp: + signedObj = timestamp_1.Timestamp.fromJSON(signed); + break; + case base_1.MetadataKind.Snapshot: + signedObj = snapshot_1.Snapshot.fromJSON(signed); + break; + case base_1.MetadataKind.Targets: + signedObj = targets_1.Targets.fromJSON(signed); + break; + default: + throw new TypeError('invalid metadata type'); + } + const sigMap = {}; + // Ensure that each signature is unique + signatures.forEach((sigData) => { + const sig = signature_1.Signature.fromJSON(sigData); + if (sigMap[sig.keyID]) { + throw new error_1.ValueError(`multiple signatures found for keyid: ${sig.keyID}`); + } + sigMap[sig.keyID] = sig; + }); + return new Metadata(signedObj, sigMap, rest); + } +} +exports.Metadata = Metadata; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/role.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/role.js new file mode 100644 index 0000000000000000000000000000000000000000..6c049e17c8dab96df7e79208a7c466380efaf773 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/role.js @@ -0,0 +1,310 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SuccinctRoles = exports.DelegatedRole = exports.Role = exports.TOP_LEVEL_ROLE_NAMES = void 0; +const crypto_1 = __importDefault(require("crypto")); +const minimatch_1 = require("minimatch"); +const util_1 = __importDefault(require("util")); +const error_1 = require("./error"); +const utils_1 = require("./utils"); +exports.TOP_LEVEL_ROLE_NAMES = [ + 'root', + 'targets', + 'snapshot', + 'timestamp', +]; +/** + * Container that defines which keys are required to sign roles metadata. + * + * Role defines how many keys are required to successfully sign the roles + * metadata, and which keys are accepted. 
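+ *
+ * For example (editor's illustration): a Role with keyids ['k1', 'k2'] and
+ * threshold 2 is satisfied only when both listed keys have produced valid
+ * signatures.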
+ */ +class Role { + keyIDs; + threshold; + unrecognizedFields; + constructor(options) { + const { keyIDs, threshold, unrecognizedFields } = options; + if (hasDuplicates(keyIDs)) { + throw new error_1.ValueError('duplicate key IDs found'); + } + if (threshold < 1) { + throw new error_1.ValueError('threshold must be at least 1'); + } + this.keyIDs = keyIDs; + this.threshold = threshold; + this.unrecognizedFields = unrecognizedFields || {}; + } + equals(other) { + if (!(other instanceof Role)) { + return false; + } + return (this.threshold === other.threshold && + util_1.default.isDeepStrictEqual(this.keyIDs, other.keyIDs) && + util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields)); + } + toJSON() { + return { + keyids: this.keyIDs, + threshold: this.threshold, + ...this.unrecognizedFields, + }; + } + static fromJSON(data) { + const { keyids, threshold, ...rest } = data; + if (!utils_1.guard.isStringArray(keyids)) { + throw new TypeError('keyids must be an array'); + } + if (typeof threshold !== 'number') { + throw new TypeError('threshold must be a number'); + } + return new Role({ + keyIDs: keyids, + threshold, + unrecognizedFields: rest, + }); + } +} +exports.Role = Role; +function hasDuplicates(array) { + return new Set(array).size !== array.length; +} +/** + * A container with information about a delegated role. + * + * A delegation can happen in two ways: + * - ``paths`` is set: delegates targets matching any path pattern in ``paths`` + * - ``pathHashPrefixes`` is set: delegates targets whose target path hash + * starts with any of the prefixes in ``pathHashPrefixes`` + * + * ``paths`` and ``pathHashPrefixes`` are mutually exclusive: both cannot be + * set, at least one of them must be set. + */ +class DelegatedRole extends Role { + name; + terminating; + paths; + pathHashPrefixes; + constructor(opts) { + super(opts); + const { name, terminating, paths, pathHashPrefixes } = opts; + this.name = name; + this.terminating = terminating; + if (opts.paths && opts.pathHashPrefixes) { + throw new error_1.ValueError('paths and pathHashPrefixes are mutually exclusive'); + } + this.paths = paths; + this.pathHashPrefixes = pathHashPrefixes; + } + equals(other) { + if (!(other instanceof DelegatedRole)) { + return false; + } + return (super.equals(other) && + this.name === other.name && + this.terminating === other.terminating && + util_1.default.isDeepStrictEqual(this.paths, other.paths) && + util_1.default.isDeepStrictEqual(this.pathHashPrefixes, other.pathHashPrefixes)); + } + isDelegatedPath(targetFilepath) { + if (this.paths) { + return this.paths.some((pathPattern) => isTargetInPathPattern(targetFilepath, pathPattern)); + } + if (this.pathHashPrefixes) { + const hasher = crypto_1.default.createHash('sha256'); + const pathHash = hasher.update(targetFilepath).digest('hex'); + return this.pathHashPrefixes.some((pathHashPrefix) => pathHash.startsWith(pathHashPrefix)); + } + return false; + } + toJSON() { + const json = { + ...super.toJSON(), + name: this.name, + terminating: this.terminating, + }; + if (this.paths) { + json.paths = this.paths; + } + if (this.pathHashPrefixes) { + json.path_hash_prefixes = this.pathHashPrefixes; + } + return json; + } + static fromJSON(data) { + const { keyids, threshold, name, terminating, paths, path_hash_prefixes, ...rest } = data; + if (!utils_1.guard.isStringArray(keyids)) { + throw new TypeError('keyids must be an array of strings'); + } + if (typeof threshold !== 'number') { + throw new TypeError('threshold must be a number'); 
+ } + if (typeof name !== 'string') { + throw new TypeError('name must be a string'); + } + if (typeof terminating !== 'boolean') { + throw new TypeError('terminating must be a boolean'); + } + if (utils_1.guard.isDefined(paths) && !utils_1.guard.isStringArray(paths)) { + throw new TypeError('paths must be an array of strings'); + } + if (utils_1.guard.isDefined(path_hash_prefixes) && + !utils_1.guard.isStringArray(path_hash_prefixes)) { + throw new TypeError('path_hash_prefixes must be an array of strings'); + } + return new DelegatedRole({ + keyIDs: keyids, + threshold, + name, + terminating, + paths, + pathHashPrefixes: path_hash_prefixes, + unrecognizedFields: rest, + }); + } +} +exports.DelegatedRole = DelegatedRole; +// JS version of Ruby's Array#zip +const zip = (a, b) => a.map((k, i) => [k, b[i]]); +function isTargetInPathPattern(target, pattern) { + const targetParts = target.split('/'); + const patternParts = pattern.split('/'); + if (patternParts.length != targetParts.length) { + return false; + } + return zip(targetParts, patternParts).every(([targetPart, patternPart]) => (0, minimatch_1.minimatch)(targetPart, patternPart)); +} +/** + * Succinctly defines a hash bin delegation graph. + * + * A ``SuccinctRoles`` object describes a delegation graph that covers all + * targets, distributing them uniformly over the delegated roles (i.e. bins) + * in the graph. + * + * The total number of bins is 2 to the power of the passed ``bit_length``. + * + * Bin names are the concatenation of the passed ``name_prefix`` and a + * zero-padded hex representation of the bin index separated by a hyphen. + * + * The passed ``keyids`` and ``threshold`` are used for each bin, and each bin + * is 'terminating'. + * + * For details: https://github.com/theupdateframework/taps/blob/master/tap15.md + */ +class SuccinctRoles extends Role { + bitLength; + namePrefix; + numberOfBins; + suffixLen; + constructor(opts) { + super(opts); + const { bitLength, namePrefix } = opts; + if (bitLength <= 0 || bitLength > 32) { + throw new error_1.ValueError('bitLength must be between 1 and 32'); + } + this.bitLength = bitLength; + this.namePrefix = namePrefix; + // Calculate the suffix_len value based on the total number of bins in + // hex. If bit_length = 10 then number_of_bins = 1024, so bin names will + // have a suffix between "000" and "3ff" in hex and suffix_len will be 3 + // meaning the third bin will have a suffix of "003". + this.numberOfBins = Math.pow(2, bitLength); + // suffix_len is calculated based on "number_of_bins - 1" as the name + // of the last bin contains the number "number_of_bins -1" as a suffix. + this.suffixLen = (this.numberOfBins - 1).toString(16).length; + } + equals(other) { + if (!(other instanceof SuccinctRoles)) { + return false; + } + return (super.equals(other) && + this.bitLength === other.bitLength && + this.namePrefix === other.namePrefix); + } + /*** + * Calculates the name of the delegated role responsible for 'target_filepath'. + * + * The target at path 'target_filepath' is assigned to a bin by casting + * the left-most 'bit_length' bits of the file path hash digest to + * int, using it as a bin index between 0 and '2**bit_length - 1'. + * + * Args: + * target_filepath: URL path to a target file, relative to a base + * targets URL. + */ + getRoleForTarget(targetFilepath) { + const hasher = crypto_1.default.createHash('sha256'); + const hasherBuffer = hasher.update(targetFilepath).digest(); + // can't ever need more than 4 bytes (32 bits).
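+ // (Editor's worked example: with bitLength = 10, shiftValue is 22; a
+ // digest starting 0xDEADBEEF gives 0xDEADBEEF >>> 22 = 890 = 0x37a, so
+ // with suffixLen 3 the bin name is `${this.namePrefix}-37a`.)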
+ const hashBytes = hasherBuffer.subarray(0, 4); + // Right shift hash bytes, so that we only have the leftmost + // bit_length bits that we care about. + const shiftValue = 32 - this.bitLength; + const binNumber = hashBytes.readUInt32BE() >>> shiftValue; + // Add zero padding if necessary and cast to hex the suffix. + const suffix = binNumber.toString(16).padStart(this.suffixLen, '0'); + return `${this.namePrefix}-${suffix}`; + } + *getRoles() { + for (let i = 0; i < this.numberOfBins; i++) { + const suffix = i.toString(16).padStart(this.suffixLen, '0'); + yield `${this.namePrefix}-${suffix}`; + } + } + /*** + * Determines whether the given ``role_name`` is in one of + * the delegated roles that ``SuccinctRoles`` represents. + * + * Args: + * role_name: The name of the role to check against. + */ + isDelegatedRole(roleName) { + const desiredPrefix = this.namePrefix + '-'; + if (!roleName.startsWith(desiredPrefix)) { + return false; + } + const suffix = roleName.slice(desiredPrefix.length, roleName.length); + if (suffix.length != this.suffixLen) { + return false; + } + // make sure the suffix is a hex string + if (!suffix.match(/^[0-9a-fA-F]+$/)) { + return false; + } + const num = parseInt(suffix, 16); + return 0 <= num && num < this.numberOfBins; + } + toJSON() { + const json = { + ...super.toJSON(), + bit_length: this.bitLength, + name_prefix: this.namePrefix, + }; + return json; + } + static fromJSON(data) { + const { keyids, threshold, bit_length, name_prefix, ...rest } = data; + if (!utils_1.guard.isStringArray(keyids)) { + throw new TypeError('keyids must be an array of strings'); + } + if (typeof threshold !== 'number') { + throw new TypeError('threshold must be a number'); + } + if (typeof bit_length !== 'number') { + throw new TypeError('bit_length must be a number'); + } + if (typeof name_prefix !== 'string') { + throw new TypeError('name_prefix must be a string'); + } + return new SuccinctRoles({ + keyIDs: keyids, + threshold, + bitLength: bit_length, + namePrefix: name_prefix, + unrecognizedFields: rest, + }); + } +} +exports.SuccinctRoles = SuccinctRoles; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/root.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/root.js new file mode 100644 index 0000000000000000000000000000000000000000..76d4e4039980e70ba2a002087f3d1e1ea9f18276 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/root.js @@ -0,0 +1,119 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Root = void 0; +const util_1 = __importDefault(require("util")); +const base_1 = require("./base"); +const error_1 = require("./error"); +const key_1 = require("./key"); +const role_1 = require("./role"); +const utils_1 = require("./utils"); +/** + * A container for the signed part of root metadata. + * + * The top-level role and metadata file signed by the root keys. + * This role specifies trusted keys for all other top-level roles, which may further delegate trust. + */ +class Root extends base_1.Signed { + type = base_1.MetadataKind.Root; + keys; + roles; + consistentSnapshot; + constructor(options) { + super(options); + this.keys = options.keys || {}; + this.consistentSnapshot = options.consistentSnapshot ?? 
true; + if (!options.roles) { + this.roles = role_1.TOP_LEVEL_ROLE_NAMES.reduce((acc, role) => ({ + ...acc, + [role]: new role_1.Role({ keyIDs: [], threshold: 1 }), + }), {}); + } + else { + const roleNames = new Set(Object.keys(options.roles)); + if (!role_1.TOP_LEVEL_ROLE_NAMES.every((role) => roleNames.has(role))) { + throw new error_1.ValueError('missing top-level role'); + } + this.roles = options.roles; + } + } + addKey(key, role) { + if (!this.roles[role]) { + throw new error_1.ValueError(`role ${role} does not exist`); + } + if (!this.roles[role].keyIDs.includes(key.keyID)) { + this.roles[role].keyIDs.push(key.keyID); + } + this.keys[key.keyID] = key; + } + equals(other) { + if (!(other instanceof Root)) { + return false; + } + return (super.equals(other) && + this.consistentSnapshot === other.consistentSnapshot && + util_1.default.isDeepStrictEqual(this.keys, other.keys) && + util_1.default.isDeepStrictEqual(this.roles, other.roles)); + } + toJSON() { + return { + _type: this.type, + spec_version: this.specVersion, + version: this.version, + expires: this.expires, + keys: keysToJSON(this.keys), + roles: rolesToJSON(this.roles), + consistent_snapshot: this.consistentSnapshot, + ...this.unrecognizedFields, + }; + } + static fromJSON(data) { + const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data); + const { keys, roles, consistent_snapshot, ...rest } = unrecognizedFields; + if (typeof consistent_snapshot !== 'boolean') { + throw new TypeError('consistent_snapshot must be a boolean'); + } + return new Root({ + ...commonFields, + keys: keysFromJSON(keys), + roles: rolesFromJSON(roles), + consistentSnapshot: consistent_snapshot, + unrecognizedFields: rest, + }); + } +} +exports.Root = Root; +function keysToJSON(keys) { + return Object.entries(keys).reduce((acc, [keyID, key]) => ({ ...acc, [keyID]: key.toJSON() }), {}); +} +function rolesToJSON(roles) { + return Object.entries(roles).reduce((acc, [roleName, role]) => ({ ...acc, [roleName]: role.toJSON() }), {}); +} +function keysFromJSON(data) { + let keys; + if (utils_1.guard.isDefined(data)) { + if (!utils_1.guard.isObjectRecord(data)) { + throw new TypeError('keys must be an object'); + } + keys = Object.entries(data).reduce((acc, [keyID, keyData]) => ({ + ...acc, + [keyID]: key_1.Key.fromJSON(keyID, keyData), + }), {}); + } + return keys; +} +function rolesFromJSON(data) { + let roles; + if (utils_1.guard.isDefined(data)) { + if (!utils_1.guard.isObjectRecord(data)) { + throw new TypeError('roles must be an object'); + } + roles = Object.entries(data).reduce((acc, [roleName, roleData]) => ({ + ...acc, + [roleName]: role_1.Role.fromJSON(roleData), + }), {}); + } + return roles; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/signature.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/signature.js new file mode 100644 index 0000000000000000000000000000000000000000..43c0bfe58c483ff3982e80c861aaaa6f98f47947 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/signature.js @@ -0,0 +1,40 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Signature = void 0; +/** + * A container class containing information about a signature. + * + * Contains a signature and the keyid uniquely identifying the key used + * to generate the signature. + * + * Provide a `fromJSON` method to create a Signature from a JSON object. 
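+ *
+ * For example (editor's illustration):
+ * Signature.fromJSON({ keyid: 'abc123', sig: 'deadbeef' }) yields a
+ * Signature with keyID 'abc123' and sig 'deadbeef'.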
+ */ +class Signature { + keyID; + sig; + constructor(options) { + const { keyID, sig } = options; + this.keyID = keyID; + this.sig = sig; + } + toJSON() { + return { + keyid: this.keyID, + sig: this.sig, + }; + } + static fromJSON(data) { + const { keyid, sig } = data; + if (typeof keyid !== 'string') { + throw new TypeError('keyid must be a string'); + } + if (typeof sig !== 'string') { + throw new TypeError('sig must be a string'); + } + return new Signature({ + keyID: keyid, + sig: sig, + }); + } +} +exports.Signature = Signature; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/snapshot.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/snapshot.js new file mode 100644 index 0000000000000000000000000000000000000000..bc9983c12e6691fe89176da92770fd0ea28a3da9 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/snapshot.js @@ -0,0 +1,72 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Snapshot = void 0; +const util_1 = __importDefault(require("util")); +const base_1 = require("./base"); +const file_1 = require("./file"); +const utils_1 = require("./utils"); +/** + * A container for the signed part of snapshot metadata. + * + * Snapshot contains information about all target Metadata files. + * A top-level role that specifies the latest versions of all targets metadata files, + * and hence the latest versions of all targets (including any dependencies between them) on the repository. + */ +class Snapshot extends base_1.Signed { + type = base_1.MetadataKind.Snapshot; + meta; + constructor(opts) { + super(opts); + this.meta = opts.meta || { 'targets.json': new file_1.MetaFile({ version: 1 }) }; + } + equals(other) { + if (!(other instanceof Snapshot)) { + return false; + } + return super.equals(other) && util_1.default.isDeepStrictEqual(this.meta, other.meta); + } + toJSON() { + return { + _type: this.type, + meta: metaToJSON(this.meta), + spec_version: this.specVersion, + version: this.version, + expires: this.expires, + ...this.unrecognizedFields, + }; + } + static fromJSON(data) { + const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data); + const { meta, ...rest } = unrecognizedFields; + return new Snapshot({ + ...commonFields, + meta: metaFromJSON(meta), + unrecognizedFields: rest, + }); + } +} +exports.Snapshot = Snapshot; +function metaToJSON(meta) { + return Object.entries(meta).reduce((acc, [path, metadata]) => ({ + ...acc, + [path]: metadata.toJSON(), + }), {}); +} +function metaFromJSON(data) { + let meta; + if (utils_1.guard.isDefined(data)) { + if (!utils_1.guard.isObjectRecord(data)) { + throw new TypeError('meta field is malformed'); + } + else { + meta = Object.entries(data).reduce((acc, [path, metadata]) => ({ + ...acc, + [path]: file_1.MetaFile.fromJSON(metadata), + }), {}); + } + } + return meta; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/targets.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/targets.js new file mode 100644 index 0000000000000000000000000000000000000000..e509722f94758d6104ec43c66fb4f237d3de22bf --- /dev/null +++ 
b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/targets.js @@ -0,0 +1,94 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Targets = void 0; +const util_1 = __importDefault(require("util")); +const base_1 = require("./base"); +const delegations_1 = require("./delegations"); +const file_1 = require("./file"); +const utils_1 = require("./utils"); +// Container for the signed part of targets metadata. +// +// Targets contains verifying information about target files and also delegates +// responsibility to other Targets roles. +class Targets extends base_1.Signed { + type = base_1.MetadataKind.Targets; + targets; + delegations; + constructor(options) { + super(options); + this.targets = options.targets || {}; + this.delegations = options.delegations; + } + addTarget(target) { + this.targets[target.path] = target; + } + equals(other) { + if (!(other instanceof Targets)) { + return false; + } + return (super.equals(other) && + util_1.default.isDeepStrictEqual(this.targets, other.targets) && + util_1.default.isDeepStrictEqual(this.delegations, other.delegations)); + } + toJSON() { + const json = { + _type: this.type, + spec_version: this.specVersion, + version: this.version, + expires: this.expires, + targets: targetsToJSON(this.targets), + ...this.unrecognizedFields, + }; + if (this.delegations) { + json.delegations = this.delegations.toJSON(); + } + return json; + } + static fromJSON(data) { + const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data); + const { targets, delegations, ...rest } = unrecognizedFields; + return new Targets({ + ...commonFields, + targets: targetsFromJSON(targets), + delegations: delegationsFromJSON(delegations), + unrecognizedFields: rest, + }); + } +} +exports.Targets = Targets; +function targetsToJSON(targets) { + return Object.entries(targets).reduce((acc, [path, target]) => ({ + ...acc, + [path]: target.toJSON(), + }), {}); +} +function targetsFromJSON(data) { + let targets; + if (utils_1.guard.isDefined(data)) { + if (!utils_1.guard.isObjectRecord(data)) { + throw new TypeError('targets must be an object'); + } + else { + targets = Object.entries(data).reduce((acc, [path, target]) => ({ + ...acc, + [path]: file_1.TargetFile.fromJSON(path, target), + }), {}); + } + } + return targets; +} +function delegationsFromJSON(data) { + let delegations; + if (utils_1.guard.isDefined(data)) { + if (!utils_1.guard.isObject(data)) { + throw new TypeError('delegations must be an object'); + } + else { + delegations = delegations_1.Delegations.fromJSON(data); + } + } + return delegations; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/timestamp.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/timestamp.js new file mode 100644 index 0000000000000000000000000000000000000000..d454b308f27e15f7b9d2be74e903f21c10a76ef9 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/timestamp.js @@ -0,0 +1,59 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Timestamp = void 0; +const base_1 = require("./base"); +const file_1 = require("./file"); +const utils_1 = require("./utils"); +/** + * A container for the signed part of timestamp metadata.
+ * + * A top-level role that specifies the latest version of the snapshot role metadata file, + * and hence the latest versions of all metadata and targets on the repository. + */ +class Timestamp extends base_1.Signed { + type = base_1.MetadataKind.Timestamp; + snapshotMeta; + constructor(options) { + super(options); + this.snapshotMeta = options.snapshotMeta || new file_1.MetaFile({ version: 1 }); + } + equals(other) { + if (!(other instanceof Timestamp)) { + return false; + } + return super.equals(other) && this.snapshotMeta.equals(other.snapshotMeta); + } + toJSON() { + return { + _type: this.type, + spec_version: this.specVersion, + version: this.version, + expires: this.expires, + meta: { 'snapshot.json': this.snapshotMeta.toJSON() }, + ...this.unrecognizedFields, + }; + } + static fromJSON(data) { + const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data); + const { meta, ...rest } = unrecognizedFields; + return new Timestamp({ + ...commonFields, + snapshotMeta: snapshotMetaFromJSON(meta), + unrecognizedFields: rest, + }); + } +} +exports.Timestamp = Timestamp; +function snapshotMetaFromJSON(data) { + let snapshotMeta; + if (utils_1.guard.isDefined(data)) { + const snapshotData = data['snapshot.json']; + if (!utils_1.guard.isDefined(snapshotData) || !utils_1.guard.isObject(snapshotData)) { + throw new TypeError('missing snapshot.json in meta'); + } + else { + snapshotMeta = file_1.MetaFile.fromJSON(snapshotData); + } + } + return snapshotMeta; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/utils/guard.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/utils/guard.js new file mode 100644 index 0000000000000000000000000000000000000000..911e8475986bbc20a8067f6d45250c49fac6d5fe --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/utils/guard.js @@ -0,0 +1,32 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isDefined = isDefined; +exports.isObject = isObject; +exports.isStringArray = isStringArray; +exports.isObjectArray = isObjectArray; +exports.isStringRecord = isStringRecord; +exports.isObjectRecord = isObjectRecord; +function isDefined(val) { + return val !== undefined; +} +function isObject(value) { + return typeof value === 'object' && value !== null; +} +function isStringArray(value) { + return Array.isArray(value) && value.every((v) => typeof v === 'string'); +} +function isObjectArray(value) { + return Array.isArray(value) && value.every(isObject); +} +function isStringRecord(value) { + return (typeof value === 'object' && + value !== null && + Object.keys(value).every((k) => typeof k === 'string') && + Object.values(value).every((v) => typeof v === 'string')); +} +function isObjectRecord(value) { + return (typeof value === 'object' && + value !== null && + Object.keys(value).every((k) => typeof k === 'string') && + Object.values(value).every((v) => typeof v === 'object' && v !== null)); +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/utils/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/utils/index.js new file mode 100644 index 0000000000000000000000000000000000000000..395cccc36cf927b7bd0cca9800cb7e7b6ce1ee15 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/utils/index.js @@ -0,0
+1,38 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +Object.defineProperty(exports, "__esModule", { value: true }); +exports.crypto = exports.guard = void 0; +exports.guard = __importStar(require("./guard")); +exports.crypto = __importStar(require("./verify")); diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/utils/key.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/utils/key.js new file mode 100644 index 0000000000000000000000000000000000000000..3c3ec07f1425a7164b6d916faae276761d4df99f --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/utils/key.js @@ -0,0 +1,142 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getPublicKey = getPublicKey; +const crypto_1 = __importDefault(require("crypto")); +const error_1 = require("../error"); +const oid_1 = require("./oid"); +const ASN1_TAG_SEQUENCE = 0x30; +const ANS1_TAG_BIT_STRING = 0x03; +const NULL_BYTE = 0x00; +const OID_EDDSA = '1.3.101.112'; +const OID_EC_PUBLIC_KEY = '1.2.840.10045.2.1'; +const OID_EC_CURVE_P256V1 = '1.2.840.10045.3.1.7'; +const PEM_HEADER = '-----BEGIN PUBLIC KEY-----'; +function getPublicKey(keyInfo) { + switch (keyInfo.keyType) { + case 'rsa': + return getRSAPublicKey(keyInfo); + case 'ed25519': + return getED25519PublicKey(keyInfo); + case 'ecdsa': + case 'ecdsa-sha2-nistp256': + case 'ecdsa-sha2-nistp384': + return getECDCSAPublicKey(keyInfo); + default: + throw new error_1.UnsupportedAlgorithmError(`Unsupported key type: ${keyInfo.keyType}`); + } +} +function getRSAPublicKey(keyInfo) { + // Only support PEM-encoded RSA keys + if (!keyInfo.keyVal.startsWith(PEM_HEADER)) { + throw new error_1.CryptoError('Invalid key format'); + } + const key = crypto_1.default.createPublicKey(keyInfo.keyVal); + switch (keyInfo.scheme) { + case 'rsassa-pss-sha256': + return { + key: key, + padding: crypto_1.default.constants.RSA_PKCS1_PSS_PADDING, + }; + default: + throw new error_1.UnsupportedAlgorithmError(`Unsupported RSA scheme: ${keyInfo.scheme}`); + } +} +function getED25519PublicKey(keyInfo) { + let key; + // If key is already PEM-encoded we can just parse it + if (keyInfo.keyVal.startsWith(PEM_HEADER)) { + key = crypto_1.default.createPublicKey(keyInfo.keyVal); + } + else { + // If key is not PEM-encoded it had better be hex + if (!isHex(keyInfo.keyVal)) { + throw new error_1.CryptoError('Invalid key format'); + } + key = crypto_1.default.createPublicKey({ + key: ed25519.hexToDER(keyInfo.keyVal), + format: 'der', + type: 'spki', + }); + } + return { key }; +} +function getECDCSAPublicKey(keyInfo) { + let key; + // If key is already PEM-encoded we can just parse it + if (keyInfo.keyVal.startsWith(PEM_HEADER)) { + key = crypto_1.default.createPublicKey(keyInfo.keyVal); + } + else { + // If key is not PEM-encoded it had better be hex + if (!isHex(keyInfo.keyVal)) { + throw new error_1.CryptoError('Invalid key format'); + } + key = crypto_1.default.createPublicKey({ + key: ecdsa.hexToDER(keyInfo.keyVal), + format: 'der', + type: 'spki', + }); + } + return { key }; +} +const ed25519 = { + // Translates a hex key into a crypto KeyObject + // https://keygen.sh/blog/how-to-use-hexadecimal-ed25519-keys-in-node/ + hexToDER: (hex) => { + const key = Buffer.from(hex, 'hex'); + const oid = (0, oid_1.encodeOIDString)(OID_EDDSA); + // Create a byte sequence containing the OID and key + const elements = Buffer.concat([ + Buffer.concat([ + Buffer.from([ASN1_TAG_SEQUENCE]), + Buffer.from([oid.length]), + oid, + ]), + Buffer.concat([ + Buffer.from([ANS1_TAG_BIT_STRING]), + Buffer.from([key.length + 1]), + Buffer.from([NULL_BYTE]), + key, + ]), + ]); + // Wrap up by creating a sequence of elements + const der = Buffer.concat([ + Buffer.from([ASN1_TAG_SEQUENCE]), + Buffer.from([elements.length]), + elements, + ]); + return der; + }, +}; +const ecdsa = { + hexToDER: (hex) => { + const key = Buffer.from(hex, 'hex'); + const bitString = Buffer.concat([ + Buffer.from([ANS1_TAG_BIT_STRING]), + Buffer.from([key.length + 1]), + Buffer.from([NULL_BYTE]), + key, + ]); + const oids = Buffer.concat([ + (0, oid_1.encodeOIDString)(OID_EC_PUBLIC_KEY), + (0, 
oid_1.encodeOIDString)(OID_EC_CURVE_P256V1), + ]); + const oidSequence = Buffer.concat([ + Buffer.from([ASN1_TAG_SEQUENCE]), + Buffer.from([oids.length]), + oids, + ]); + // Wrap up by creating a sequence of elements + const der = Buffer.concat([ + Buffer.from([ASN1_TAG_SEQUENCE]), + Buffer.from([oidSequence.length + bitString.length]), + oidSequence, + bitString, + ]); + return der; + }, +}; +const isHex = (key) => /^[0-9a-fA-F]+$/.test(key); diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/utils/oid.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/utils/oid.js new file mode 100644 index 0000000000000000000000000000000000000000..00b29c3030d1ec2684fc216c7de75744d4f9c88e --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/utils/oid.js @@ -0,0 +1,26 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.encodeOIDString = encodeOIDString; +const ANS1_TAG_OID = 0x06; +function encodeOIDString(oid) { + const parts = oid.split('.'); + // The first two subidentifiers are encoded into the first byte + const first = parseInt(parts[0], 10) * 40 + parseInt(parts[1], 10); + const rest = []; + parts.slice(2).forEach((part) => { + const bytes = encodeVariableLengthInteger(parseInt(part, 10)); + rest.push(...bytes); + }); + const der = Buffer.from([first, ...rest]); + return Buffer.from([ANS1_TAG_OID, der.length, ...der]); +} +function encodeVariableLengthInteger(value) { + const bytes = []; + let mask = 0x00; + while (value > 0) { + bytes.unshift((value & 0x7f) | mask); + value >>= 7; + mask = 0x80; + } + return bytes; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/utils/types.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/utils/types.js new file mode 100644 index 0000000000000000000000000000000000000000..c8ad2e549bdc6801e0d1c80b0308d4b9bd4985ce --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/utils/types.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/utils/verify.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/utils/verify.js new file mode 100644 index 0000000000000000000000000000000000000000..8232b6f6a97ab1713303f5e3696f4de253b8e22b --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/dist/utils/verify.js @@ -0,0 +1,13 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifySignature = void 0; +const canonical_json_1 = require("@tufjs/canonical-json"); +const crypto_1 = __importDefault(require("crypto")); +const verifySignature = (metaDataSignedData, key, signature) => { + const canonicalData = Buffer.from((0, canonical_json_1.canonicalize)(metaDataSignedData)); + return crypto_1.default.verify(undefined, canonicalData, key, Buffer.from(signature, 'hex')); +}; +exports.verifySignature = verifySignature; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/node_modules/minimatch/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/node_modules/minimatch/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..1493534e60dce4a4c73e60e0bb448a0ab2567c9d --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/node_modules/minimatch/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2011-2023 Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
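(Stepping back to the `oid.js` helper a few files up: it hand-rolls just enough DER to wrap raw hex keys in an SPKI envelope. The first two OID arcs pack into a single byte as 40*a + b, and every later arc is base-128 encoded with the continuation bit, 0x80, set on all bytes but the last. A self-contained sketch of the same math, written fresh here since the vendored utils are not part of the package's public surface:)

```js
// Minimal re-derivation of the OID encoding used above; 0x06 is the
// ASN.1 OBJECT IDENTIFIER tag. Unlike the vendored helper's `while`
// loop, the do/while here also emits a byte for a zero arc (an edge
// the vendored code never hits with its three fixed OIDs).
function encodeOID(oid) {
  const [a, b, ...rest] = oid.split('.').map(Number);
  const body = [40 * a + b];
  for (let value of rest) {
    const bytes = [];
    do {
      bytes.unshift(value & 0x7f);
      value >>= 7;
    } while (value > 0);
    for (let i = 0; i < bytes.length - 1; i++) {
      bytes[i] |= 0x80; // continuation bit on all but the last byte
    }
    body.push(...bytes);
  }
  return Buffer.from([0x06, body.length, ...body]);
}

// ed25519 OID 1.3.101.112: 40*1+3 = 0x2b, then 0x65, 0x70
console.log(encodeOID('1.3.101.112').toString('hex')); // '06032b6570'
// the multi-byte case: arc 10045 becomes 0xce 0x3d
console.log(encodeOID('1.2.840.10045.2.1').toString('hex')); // '06072a8648ce3d0201'
```

(The second value matches the `OID_EC_PUBLIC_KEY` bytes that `key.js` concatenates into its SPKI sequence.)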
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/assert-valid-pattern.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/assert-valid-pattern.js new file mode 100644 index 0000000000000000000000000000000000000000..7b534fc30200bb8e36fc6f85aebb49a8738f711d --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/assert-valid-pattern.js @@ -0,0 +1,10 @@ +const MAX_PATTERN_LENGTH = 1024 * 64; +export const assertValidPattern = (pattern) => { + if (typeof pattern !== 'string') { + throw new TypeError('invalid pattern'); + } + if (pattern.length > MAX_PATTERN_LENGTH) { + throw new TypeError('pattern is too long'); + } +}; +//# sourceMappingURL=assert-valid-pattern.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/ast.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/ast.js new file mode 100644 index 0000000000000000000000000000000000000000..02c6bda68427fcd80caaf07a43e1dc4b12c54ab1 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/ast.js @@ -0,0 +1,588 @@ +// parse a single path portion +import { parseClass } from './brace-expressions.js'; +import { unescape } from './unescape.js'; +const types = new Set(['!', '?', '+', '*', '@']); +const isExtglobType = (c) => types.has(c); +// Patterns that get prepended to bind to the start of either the +// entire string, or just a single path portion, to prevent dots +// and/or traversal patterns, when needed. +// Exts don't need the ^ or / bit, because the root binds that already. +const startNoTraversal = '(?!(?:^|/)\\.\\.?(?:$|/))'; +const startNoDot = '(?!\\.)'; +// characters that indicate a start of pattern needs the "no dots" bit, +// because a dot *might* be matched. ( is not in the list, because in +// the case of a child extglob, it will handle the prevention itself. +const addPatternStart = new Set(['[', '.']); +// cases where traversal is A-OK, no dot prevention needed +const justDots = new Set(['..', '.']); +const reSpecials = new Set('().*{}+?[]^$\\!'); +const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&'); +// any single thing other than / +const qmark = '[^/]'; +// * => any number of characters +const star = qmark + '*?'; +// use + when we need to ensure that *something* matches, because the * is +// the only thing in the path portion. +const starNoEmpty = qmark + '+?'; +// remove the \ chars that we added if we end up doing a nonmagic compare +// const deslash = (s: string) => s.replace(/\\(.)/g, '$1') +export class AST { + type; + #root; + #hasMagic; + #uflag = false; + #parts = []; + #parent; + #parentIndex; + #negs; + #filledNegs = false; + #options; + #toString; + // set to true if it's an extglob with no children + // (which really means one child of '') + #emptyExt = false; + constructor(type, parent, options = {}) { + this.type = type; + // extglobs are inherently magical + if (type) + this.#hasMagic = true; + this.#parent = parent; + this.#root = this.#parent ? this.#parent.#root : this; + this.#options = this.#root === this ? options : this.#root.#options; + this.#negs = this.#root === this ? 
[] : this.#root.#negs; + if (type === '!' && !this.#root.#filledNegs) + this.#negs.push(this); + this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0; + } + get hasMagic() { + /* c8 ignore start */ + if (this.#hasMagic !== undefined) + return this.#hasMagic; + /* c8 ignore stop */ + for (const p of this.#parts) { + if (typeof p === 'string') + continue; + if (p.type || p.hasMagic) + return (this.#hasMagic = true); + } + // note: will be undefined until we generate the regexp src and find out + return this.#hasMagic; + } + // reconstructs the pattern + toString() { + if (this.#toString !== undefined) + return this.#toString; + if (!this.type) { + return (this.#toString = this.#parts.map(p => String(p)).join('')); + } + else { + return (this.#toString = + this.type + '(' + this.#parts.map(p => String(p)).join('|') + ')'); + } + } + #fillNegs() { + /* c8 ignore start */ + if (this !== this.#root) + throw new Error('should only call on root'); + if (this.#filledNegs) + return this; + /* c8 ignore stop */ + // call toString() once to fill this out + this.toString(); + this.#filledNegs = true; + let n; + while ((n = this.#negs.pop())) { + if (n.type !== '!') + continue; + // walk up the tree, appending everything that comes AFTER parentIndex + let p = n; + let pp = p.#parent; + while (pp) { + for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) { + for (const part of n.#parts) { + /* c8 ignore start */ + if (typeof part === 'string') { + throw new Error('string part in extglob AST??'); + } + /* c8 ignore stop */ + part.copyIn(pp.#parts[i]); + } + } + p = pp; + pp = p.#parent; + } + } + return this; + } + push(...parts) { + for (const p of parts) { + if (p === '') + continue; + /* c8 ignore start */ + if (typeof p !== 'string' && !(p instanceof AST && p.#parent === this)) { + throw new Error('invalid part: ' + p); + } + /* c8 ignore stop */ + this.#parts.push(p); + } + } + toJSON() { + const ret = this.type === null + ? this.#parts.slice().map(p => (typeof p === 'string' ? p : p.toJSON())) + : [this.type, ...this.#parts.map(p => p.toJSON())]; + if (this.isStart() && !this.type) + ret.unshift([]); + if (this.isEnd() && + (this === this.#root || + (this.#root.#filledNegs && this.#parent?.type === '!'))) { + ret.push({}); + } + return ret; + } + isStart() { + if (this.#root === this) + return true; + // if (this.type) return !!this.#parent?.isStart() + if (!this.#parent?.isStart()) + return false; + if (this.#parentIndex === 0) + return true; + // if everything AHEAD of this is a negation, then it's still the "start" + const p = this.#parent; + for (let i = 0; i < this.#parentIndex; i++) { + const pp = p.#parts[i]; + if (!(pp instanceof AST && pp.type === '!')) { + return false; + } + } + return true; + } + isEnd() { + if (this.#root === this) + return true; + if (this.#parent?.type === '!') + return true; + if (!this.#parent?.isEnd()) + return false; + if (!this.type) + return this.#parent?.isEnd(); + // if not root, it'll always have a parent + /* c8 ignore start */ + const pl = this.#parent ?
this.#parent.#parts.length : 0; + /* c8 ignore stop */ + return this.#parentIndex === pl - 1; + } + copyIn(part) { + if (typeof part === 'string') + this.push(part); + else + this.push(part.clone(this)); + } + clone(parent) { + const c = new AST(this.type, parent); + for (const p of this.#parts) { + c.copyIn(p); + } + return c; + } + static #parseAST(str, ast, pos, opt) { + let escaping = false; + let inBrace = false; + let braceStart = -1; + let braceNeg = false; + if (ast.type === null) { + // outside of an extglob, append until we find a start + let i = pos; + let acc = ''; + while (i < str.length) { + const c = str.charAt(i++); + // still accumulate escapes at this point, but we do ignore + // starts that are escaped + if (escaping || c === '\\') { + escaping = !escaping; + acc += c; + continue; + } + if (inBrace) { + if (i === braceStart + 1) { + if (c === '^' || c === '!') { + braceNeg = true; + } + } + else if (c === ']' && !(i === braceStart + 2 && braceNeg)) { + inBrace = false; + } + acc += c; + continue; + } + else if (c === '[') { + inBrace = true; + braceStart = i; + braceNeg = false; + acc += c; + continue; + } + if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') { + ast.push(acc); + acc = ''; + const ext = new AST(c, ast); + i = AST.#parseAST(str, ext, i, opt); + ast.push(ext); + continue; + } + acc += c; + } + ast.push(acc); + return i; + } + // some kind of extglob, pos is at the ( + // find the next | or ) + let i = pos + 1; + let part = new AST(null, ast); + const parts = []; + let acc = ''; + while (i < str.length) { + const c = str.charAt(i++); + // still accumulate escapes at this point, but we do ignore + // starts that are escaped + if (escaping || c === '\\') { + escaping = !escaping; + acc += c; + continue; + } + if (inBrace) { + if (i === braceStart + 1) { + if (c === '^' || c === '!') { + braceNeg = true; + } + } + else if (c === ']' && !(i === braceStart + 2 && braceNeg)) { + inBrace = false; + } + acc += c; + continue; + } + else if (c === '[') { + inBrace = true; + braceStart = i; + braceNeg = false; + acc += c; + continue; + } + if (isExtglobType(c) && str.charAt(i) === '(') { + part.push(acc); + acc = ''; + const ext = new AST(c, part); + part.push(ext); + i = AST.#parseAST(str, ext, i, opt); + continue; + } + if (c === '|') { + part.push(acc); + acc = ''; + parts.push(part); + part = new AST(null, ast); + continue; + } + if (c === ')') { + if (acc === '' && ast.#parts.length === 0) { + ast.#emptyExt = true; + } + part.push(acc); + acc = ''; + ast.push(...parts, part); + return i; + } + acc += c; + } + // unfinished extglob + // if we got here, it was a malformed extglob! not an extglob, but + // maybe something else in there. + ast.type = null; + ast.#hasMagic = undefined; + ast.#parts = [str.substring(pos - 1)]; + return i; + } + static fromGlob(pattern, options = {}) { + const ast = new AST(null, undefined, options); + AST.#parseAST(pattern, ast, 0, options); + return ast; + } + // returns the regular expression if there's magic, or the unescaped + // string if not. + toMMPattern() { + // should only be called on root + /* c8 ignore start */ + if (this !== this.#root) + return this.#root.toMMPattern(); + /* c8 ignore stop */ + const glob = this.toString(); + const [re, body, hasMagic, uflag] = this.toRegExpSource(); + // if we're in nocase mode, and not nocaseMagicOnly, then we do + // still need a regular expression if we have to case-insensitively + // match capital/lowercase characters.
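(Aside: the dot and traversal guards described in these comments are observable straight from the public API. A quick sanity sketch against the vendored minimatch 9.x, with expected results taken from the semantics documented above:)

```js
const { minimatch } = require('minimatch');

// startNoDot: '*' never matches a leading dot unless dot:true
console.log(minimatch('.profile', '*'));                // false
console.log(minimatch('.profile', '*', { dot: true })); // true

// startNoTraversal: '.' and '..' only match a pattern that is
// literally '.' or '..', even with dot:true
console.log(minimatch('..', '*', { dot: true }));       // false

// extglobs carry their own dot prevention, per the comment above:
// +(*|.x*) can match '.xy' (via the '.x*' branch) but not '.yx'
console.log(minimatch('.xy', '+(*|.x*)'));              // true
console.log(minimatch('.yx', '+(*|.x*)'));              // false
```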
+ const anyMagic = hasMagic || + this.#hasMagic || + (this.#options.nocase && + !this.#options.nocaseMagicOnly && + glob.toUpperCase() !== glob.toLowerCase()); + if (!anyMagic) { + return body; + } + const flags = (this.#options.nocase ? 'i' : '') + (uflag ? 'u' : ''); + return Object.assign(new RegExp(`^${re}$`, flags), { + _src: re, + _glob: glob, + }); + } + get options() { + return this.#options; + } + // returns the string match, the regexp source, whether there's magic + // in the regexp (so a regular expression is required) and whether or + // not the uflag is needed for the regular expression (for posix classes) + // TODO: instead of injecting the start/end at this point, just return + // the BODY of the regexp, along with the start/end portions suitable + // for binding the start/end in either a joined full-path makeRe context + // (where we bind to (^|/), or a standalone matchPart context (where + // we bind to ^, and not /). Otherwise slashes get duped! + // + // In part-matching mode, the start is: + // - if not isStart: nothing + // - if traversal possible, but not allowed: ^(?!\.\.?$) + // - if dots allowed or not possible: ^ + // - if dots possible and not allowed: ^(?!\.) + // end is: + // - if not isEnd(): nothing + // - else: $ + // + // In full-path matching mode, we put the slash at the START of the + // pattern, so start is: + // - if first pattern: same as part-matching mode + // - if not isStart(): nothing + // - if traversal possible, but not allowed: /(?!\.\.?(?:$|/)) + // - if dots allowed or not possible: / + // - if dots possible and not allowed: /(?!\.) + // end is: + // - if last pattern, same as part-matching mode + // - else nothing + // + // Always put the (?:$|/) on negated tails, though, because that has to be + // there to bind the end of the negated pattern portion, and it's easier to + // just stick it in now rather than try to inject it later in the middle of + // the pattern. + // + // We can just always return the same end, and leave it up to the caller + // to know whether it's going to be used joined or in parts. + // And, if the start is adjusted slightly, can do the same there: + // - if not isStart: nothing + // - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$) + // - if dots allowed or not possible: (?:/|^) + // - if dots possible and not allowed: (?:/|^)(?!\.) + // + // But it's better to have a simpler binding without a conditional, for + // performance, so probably better to return both start options. + // + // Then the caller just ignores the end if it's not the first pattern, + // and the start always gets applied. + // + // But that's always going to be $ if it's the ending pattern, or nothing, + // so the caller can just attach $ at the end of the pattern when building. + // + // So the todo is: + // - better detect what kind of start is needed + // - return both flavors of starting pattern + // - attach $ at the end of the pattern when creating the actual RegExp + // + // Ah, but wait, no, that all only applies to the root when the first pattern + // is not an extglob. If the first pattern IS an extglob, then we need all + // that dot prevention biz to live in the extglob portions, because eg + // +(*|.x*) can match .xy but not .yx. + // + // So, return the two flavors if it's #root and the first child is not an + // AST, otherwise leave it to the child AST to handle it, and there, + // use the (?:^|/) style of start binding. + // + // Even simplified further: + // - Since the start for a join is eg /(?!\.) 
and the start for a part + // is ^(?!\.), we can just prepend (?!\.) to the pattern (either root + // or start or whatever) and prepend ^ or / at the Regexp construction. + toRegExpSource(allowDot) { + const dot = allowDot ?? !!this.#options.dot; + if (this.#root === this) + this.#fillNegs(); + if (!this.type) { + const noEmpty = this.isStart() && this.isEnd(); + const src = this.#parts + .map(p => { + const [re, _, hasMagic, uflag] = typeof p === 'string' + ? AST.#parseGlob(p, this.#hasMagic, noEmpty) + : p.toRegExpSource(allowDot); + this.#hasMagic = this.#hasMagic || hasMagic; + this.#uflag = this.#uflag || uflag; + return re; + }) + .join(''); + let start = ''; + if (this.isStart()) { + if (typeof this.#parts[0] === 'string') { + // this is the string that will match the start of the pattern, + // so we need to protect against dots and such. + // '.' and '..' cannot match unless the pattern is that exactly, + // even if it starts with . or dot:true is set. + const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]); + if (!dotTravAllowed) { + const aps = addPatternStart; + // check if we have a possibility of matching . or .., + // and prevent that. + const needNoTrav = + // dots are allowed, and the pattern starts with [ or . + (dot && aps.has(src.charAt(0))) || + // the pattern starts with \., and then [ or . + (src.startsWith('\\.') && aps.has(src.charAt(2))) || + // the pattern starts with \.\., and then [ or . + (src.startsWith('\\.\\.') && aps.has(src.charAt(4))); + // no need to prevent dots if it can't match a dot, or if a + // sub-pattern will be preventing it anyway. + const needNoDot = !dot && !allowDot && aps.has(src.charAt(0)); + start = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : ''; + } + } + } + // append the "end of path portion" pattern to negation tails + let end = ''; + if (this.isEnd() && + this.#root.#filledNegs && + this.#parent?.type === '!') { + end = '(?:$|\\/)'; + } + const final = start + src + end; + return [ + final, + unescape(src), + (this.#hasMagic = !!this.#hasMagic), + this.#uflag, + ]; + } + // We need to calculate the body *twice* if it's a repeat pattern + // at the start, once in nodot mode, then again in dot mode, so a + // pattern like *(?) can match 'x.y' + const repeated = this.type === '*' || this.type === '+'; + // some kind of extglob + const start = this.type === '!' ? '(?:(?!(?:' : '(?:'; + let body = this.#partsToRegExp(dot); + if (this.isStart() && this.isEnd() && !body && this.type !== '!') { + // invalid extglob, has to at least be *something* present, if it's + // the entire path portion. + const s = this.toString(); + this.#parts = [s]; + this.type = null; + this.#hasMagic = undefined; + return [s, unescape(this.toString()), false, false]; + } + // XXX abstract out this map method + let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot + ? '' + : this.#partsToRegExp(true); + if (bodyDotAllowed === body) { + bodyDotAllowed = ''; + } + if (bodyDotAllowed) { + body = `(?:${body})(?:${bodyDotAllowed})*?`; + } + // an empty !() is exactly equivalent to a starNoEmpty + let final = ''; + if (this.type === '!' && this.#emptyExt) { + final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty; + } + else { + const close = this.type === '!' + ? // !() must match something, but !(x) can match '' + '))' + + (this.isStart() && !dot && !allowDot ? startNoDot : '') + + star + + ')' + : this.type === '@' + ? ')' + : this.type === '?' + ? ')?' + : this.type === '+' && bodyDotAllowed + ?
')' + : this.type === '*' && bodyDotAllowed + ? `)?` + : `)${this.type}`; + final = start + body + close; + } + return [ + final, + unescape(body), + (this.#hasMagic = !!this.#hasMagic), + this.#uflag, + ]; + } + #partsToRegExp(dot) { + return this.#parts + .map(p => { + // extglob ASTs should only contain parent ASTs + /* c8 ignore start */ + if (typeof p === 'string') { + throw new Error('string type in extglob ast??'); + } + /* c8 ignore stop */ + // can ignore hasMagic, because extglobs are already always magic + const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot); + this.#uflag = this.#uflag || uflag; + return re; + }) + .filter(p => !(this.isStart() && this.isEnd()) || !!p) + .join('|'); + } + static #parseGlob(glob, hasMagic, noEmpty = false) { + let escaping = false; + let re = ''; + let uflag = false; + for (let i = 0; i < glob.length; i++) { + const c = glob.charAt(i); + if (escaping) { + escaping = false; + re += (reSpecials.has(c) ? '\\' : '') + c; + continue; + } + if (c === '\\') { + if (i === glob.length - 1) { + re += '\\\\'; + } + else { + escaping = true; + } + continue; + } + if (c === '[') { + const [src, needUflag, consumed, magic] = parseClass(glob, i); + if (consumed) { + re += src; + uflag = uflag || needUflag; + i += consumed - 1; + hasMagic = hasMagic || magic; + continue; + } + } + if (c === '*') { + if (noEmpty && glob === '*') + re += starNoEmpty; + else + re += star; + hasMagic = true; + continue; + } + if (c === '?') { + re += qmark; + hasMagic = true; + continue; + } + re += regExpEscape(c); + } + return [re, unescape(glob), !!hasMagic, uflag]; + } +} +//# sourceMappingURL=ast.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/escape.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/escape.js new file mode 100644 index 0000000000000000000000000000000000000000..16f7c8c7bdc64645a201065cb264cb561eac851c --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/escape.js @@ -0,0 +1,18 @@ +/** + * Escape all magic characters in a glob pattern. + * + * If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape} + * option is used, then characters are escaped by wrapping in `[]`, because + * a magic character wrapped in a character class can only be satisfied by + * that exact character. In this mode, `\` is _not_ escaped, because it is + * not interpreted as a magic character, but instead as a path separator. + */ +export const escape = (s, { windowsPathsNoEscape = false, } = {}) => { + // don't need to escape +@! because we escape the parens + // that make those magic, and escaping ! as [!] isn't valid, + // because [!]] is a valid glob class meaning not ']'. + return windowsPathsNoEscape + ? 
s.replace(/[?*()[\]]/g, '[$&]') + : s.replace(/[?*()[\]\\]/g, '\\$&'); +}; +//# sourceMappingURL=escape.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/node_modules/minimatch/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/node_modules/minimatch/package.json new file mode 100644 index 0000000000000000000000000000000000000000..01fc48ecfd6a9fc1c0ad503ab2185af2e35754e4 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/node_modules/minimatch/package.json @@ -0,0 +1,82 @@ +{ + "author": "Isaac Z. Schlueter (http://blog.izs.me)", + "name": "minimatch", + "description": "a glob matcher in javascript", + "version": "9.0.5", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/minimatch.git" + }, + "main": "./dist/commonjs/index.js", + "types": "./dist/commonjs/index.d.ts", + "exports": { + "./package.json": "./package.json", + ".": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "files": [ + "dist" + ], + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "prepare": "tshy", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "tap", + "snap": "tap", + "format": "prettier --write . --loglevel warn", + "benchmark": "node benchmark/index.js", + "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts" + }, + "prettier": { + "semi": false, + "printWidth": 80, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "devDependencies": { + "@types/brace-expansion": "^1.1.0", + "@types/node": "^18.15.11", + "@types/tap": "^15.0.8", + "eslint-config-prettier": "^8.6.0", + "mkdirp": "1", + "prettier": "^2.8.2", + "tap": "^18.7.2", + "ts-node": "^10.9.1", + "tshy": "^1.12.0", + "typedoc": "^0.23.21", + "typescript": "^4.9.3" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "license": "ISC", + "tshy": { + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + } + }, + "type": "module" +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/package.json new file mode 100644 index 0000000000000000000000000000000000000000..dfd60d248118cc4c4aa37d69ea014a310a205e0a --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/@tufjs/models/package.json @@ -0,0 +1,37 @@ +{ + "name": "@tufjs/models", + "version": "4.0.0", + "description": "TUF metadata models", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "files": [ + "dist" + ], + "scripts": { + "build": "tsc --build tsconfig.build.json", + "clean": "rm -rf dist && rm tsconfig.build.tsbuildinfo", + "test": "jest" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/theupdateframework/tuf-js.git" + }, + "keywords": [ + "tuf", + "security", + "update" + ], + "author": "bdehamer@github.com", + "license": "MIT", + "bugs": { + "url": 
"https://github.com/theupdateframework/tuf-js/issues" + }, + "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/models#readme", + "dependencies": { + "@tufjs/canonical-json": "2.0.0", + "minimatch": "^9.0.5" + }, + "engines": { + "node": "^20.17.0 || >=22.9.0" + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/abbrev/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/abbrev/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..9bcfa9d7d8d26ebb256fe6df302bf22c771dc386 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/abbrev/LICENSE @@ -0,0 +1,46 @@ +This software is dual-licensed under the ISC and MIT licenses. +You may use this software under EITHER of the following licenses. + +---------- + +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +---------- + +Copyright Isaac Z. Schlueter and Contributors +All rights reserved. + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/abbrev/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/abbrev/lib/index.js new file mode 100644 index 0000000000000000000000000000000000000000..f7bee0c6fc7ada54600b21fea238de5f3ee6f790 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/abbrev/lib/index.js @@ -0,0 +1,53 @@ +module.exports = abbrev + +function abbrev (...args) { + let list = args + if (args.length === 1 && (Array.isArray(args[0]) || typeof args[0] === 'string')) { + list = [].concat(args[0]) + } + + for (let i = 0, l = list.length; i < l; i++) { + list[i] = typeof list[i] === 'string' ? 
list[i] : String(list[i]) + } + + // sort them lexicographically, so that they're next to their nearest kin + list = list.sort(lexSort) + + // walk through each, seeing how much it has in common with the next and previous + const abbrevs = {} + let prev = '' + for (let ii = 0, ll = list.length; ii < ll; ii++) { + const current = list[ii] + const next = list[ii + 1] || '' + let nextMatches = true + let prevMatches = true + if (current === next) { + continue + } + let j = 0 + const cl = current.length + for (; j < cl; j++) { + const curChar = current.charAt(j) + nextMatches = nextMatches && curChar === next.charAt(j) + prevMatches = prevMatches && curChar === prev.charAt(j) + if (!nextMatches && !prevMatches) { + j++ + break + } + } + prev = current + if (j === cl) { + abbrevs[current] = current + continue + } + for (let a = current.slice(0, j); j <= cl; j++) { + abbrevs[a] = current + a += current.charAt(j) + } + } + return abbrevs +} + +function lexSort (a, b) { + return a === b ? 0 : a > b ? 1 : -1 +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/abbrev/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/abbrev/package.json new file mode 100644 index 0000000000000000000000000000000000000000..077d4bccd0e69e455fcb843b2f3928ad6de172e9 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/abbrev/package.json @@ -0,0 +1,45 @@ +{ + "name": "abbrev", + "version": "3.0.1", + "description": "Like ruby's abbrev module, but in js", + "author": "GitHub Inc.", + "main": "lib/index.js", + "scripts": { + "test": "tap", + "lint": "npm run eslint", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run eslint -- --fix", + "snap": "tap", + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/abbrev-js.git" + }, + "license": "ISC", + "devDependencies": { + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.24.3", + "tap": "^16.3.0" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^18.17.0 || >=20.5.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.24.3", + "publish": true + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/agent-base/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/agent-base/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..008728cb51847dcff9c68cf1d9c30d76b9323457 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/agent-base/LICENSE @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2013 Nathan Rajlich + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/agent-base/dist/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/agent-base/dist/index.js new file mode 100644 index 0000000000000000000000000000000000000000..57ac85205e8aba57e7bacc649a8b2f7e871b71f2 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/agent-base/dist/index.js @@ -0,0 +1,178 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Agent = void 0; +const net = __importStar(require("net")); +const http = __importStar(require("http")); +const https_1 = require("https"); +__exportStar(require("./helpers"), exports); +const INTERNAL = Symbol('AgentBaseInternalState'); +class Agent extends http.Agent { + constructor(opts) { + super(opts); + this[INTERNAL] = {}; + } + /** + * Determine whether this is an `http` or `https` request. + */ + isSecureEndpoint(options) { + if (options) { + // First check the `secureEndpoint` property explicitly, since this + // means that a parent `Agent` is "passing through" to this instance. + // eslint-disable-next-line @typescript-eslint/no-explicit-any + if (typeof options.secureEndpoint === 'boolean') { + return options.secureEndpoint; + } + // If no explicit `secure` endpoint, check if `protocol` property is + // set. This will usually be the case since using a full string URL + // or `URL` instance should be the most common usage. + if (typeof options.protocol === 'string') { + return options.protocol === 'https:'; + } + } + // Finally, if no `protocol` property was set, then fall back to + // checking the stack trace of the current call stack, and try to + // detect the "https" module. 
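(Aside: all of this plumbing exists so that a subclass only has to implement `connect()`; `createSocket()` just below resolves `secureEndpoint`, parks a placeholder socket in the pool, and feeds whatever `connect()` returns back to core `http`. A hedged sketch of the subclassing contract, with illustrative host names:)

```js
const net = require('net');
const tls = require('tls');
const http = require('http');
const { Agent } = require('agent-base');

class FixedUpstreamAgent extends Agent {
  // connect() may be sync or async and returns a duplex socket,
  // or another http.Agent to delegate the request to.
  connect(req, opts) {
    // opts.secureEndpoint is computed by createSocket() below
    return opts.secureEndpoint
      ? tls.connect({ host: 'upstream.example', port: 443 })
      : net.connect({ host: 'upstream.example', port: 80 });
  }
}

// Route a plain HTTP request through the fixed upstream:
http.get('http://example.test/', { agent: new FixedUpstreamAgent() }, (res) => {
  console.log(res.statusCode);
});
```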
+ const { stack } = new Error(); + if (typeof stack !== 'string') + return false; + return stack + .split('\n') + .some((l) => l.indexOf('(https.js:') !== -1 || + l.indexOf('node:https:') !== -1); + } + // In order to support async signatures in `connect()` and Node's native + // connection pooling in `http.Agent`, the array of sockets for each origin + // has to be updated synchronously. This is so the length of the array is + // accurate when `addRequest()` is next called. We achieve this by creating a + // fake socket and adding it to `sockets[origin]` and incrementing + // `totalSocketCount`. + incrementSockets(name) { + // If `maxSockets` and `maxTotalSockets` are both Infinity then there is no + // need to create a fake socket because Node.js native connection pooling + // will never be invoked. + if (this.maxSockets === Infinity && this.maxTotalSockets === Infinity) { + return null; + } + // All instances of `sockets` are expected TypeScript errors. The + // alternative is to add it as a private property of this class but that + // will break TypeScript subclassing. + if (!this.sockets[name]) { + // @ts-expect-error `sockets` is readonly in `@types/node` + this.sockets[name] = []; + } + const fakeSocket = new net.Socket({ writable: false }); + this.sockets[name].push(fakeSocket); + // @ts-expect-error `totalSocketCount` isn't defined in `@types/node` + this.totalSocketCount++; + return fakeSocket; + } + decrementSockets(name, socket) { + if (!this.sockets[name] || socket === null) { + return; + } + const sockets = this.sockets[name]; + const index = sockets.indexOf(socket); + if (index !== -1) { + sockets.splice(index, 1); + // @ts-expect-error `totalSocketCount` isn't defined in `@types/node` + this.totalSocketCount--; + if (sockets.length === 0) { + // @ts-expect-error `sockets` is readonly in `@types/node` + delete this.sockets[name]; + } + } + } + // In order to properly update the socket pool, we need to call `getName()` on + // the core `https.Agent` if it is a secureEndpoint. + getName(options) { + const secureEndpoint = this.isSecureEndpoint(options); + if (secureEndpoint) { + // @ts-expect-error `getName()` isn't defined in `@types/node` + return https_1.Agent.prototype.getName.call(this, options); + } + // @ts-expect-error `getName()` isn't defined in `@types/node` + return super.getName(options); + } + createSocket(req, options, cb) { + const connectOpts = { + ...options, + secureEndpoint: this.isSecureEndpoint(options), + }; + const name = this.getName(connectOpts); + const fakeSocket = this.incrementSockets(name); + Promise.resolve() + .then(() => this.connect(req, connectOpts)) + .then((socket) => { + this.decrementSockets(name, fakeSocket); + if (socket instanceof http.Agent) { + try { + // @ts-expect-error `addRequest()` isn't defined in `@types/node` + return socket.addRequest(req, connectOpts); + } + catch (err) { + return cb(err); + } + } + this[INTERNAL].currentSocket = socket; + // @ts-expect-error `createSocket()` isn't defined in `@types/node` + super.createSocket(req, options, cb); + }, (err) => { + this.decrementSockets(name, fakeSocket); + cb(err); + }); + } + createConnection() { + const socket = this[INTERNAL].currentSocket; + this[INTERNAL].currentSocket = undefined; + if (!socket) { + throw new Error('No socket was returned in the `connect()` function'); + } + return socket; + } + get defaultPort() { + return (this[INTERNAL].defaultPort ?? + (this.protocol === 'https:' ? 
443 : 80)); + } + set defaultPort(v) { + if (this[INTERNAL]) { + this[INTERNAL].defaultPort = v; + } + } + get protocol() { + return (this[INTERNAL].protocol ?? + (this.isSecureEndpoint() ? 'https:' : 'http:')); + } + set protocol(v) { + if (this[INTERNAL]) { + this[INTERNAL].protocol = v; + } + } +} +exports.Agent = Agent; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/agent-base/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/agent-base/package.json new file mode 100644 index 0000000000000000000000000000000000000000..1b4964a83f66fddca33294c61829dcd7fb4c7d49 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/agent-base/package.json @@ -0,0 +1,46 @@ +{ + "name": "agent-base", + "version": "7.1.4", + "description": "Turn a function into an `http.Agent` instance", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "files": [ + "dist" + ], + "repository": { + "type": "git", + "url": "https://github.com/TooTallNate/proxy-agents.git", + "directory": "packages/agent-base" + }, + "keywords": [ + "http", + "agent", + "base", + "barebones", + "https" + ], + "author": "Nathan Rajlich (http://n8.io/)", + "license": "MIT", + "devDependencies": { + "@types/debug": "^4.1.7", + "@types/jest": "^29.5.1", + "@types/node": "^14.18.45", + "@types/semver": "^7.3.13", + "@types/ws": "^6.0.4", + "async-listen": "^3.0.0", + "jest": "^29.5.0", + "ts-jest": "^29.1.0", + "typescript": "^5.0.4", + "ws": "^5.2.4", + "tsconfig": "0.0.0" + }, + "engines": { + "node": ">= 14" + }, + "scripts": { + "build": "tsc", + "test": "jest --env node --verbose --bail", + "lint": "eslint . --ext .ts", + "pack": "node ../../scripts/pack.mjs" + } +} \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/archy/examples/beep.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/archy/examples/beep.js new file mode 100644 index 0000000000000000000000000000000000000000..9c0704797c80c1266d0af2e71cf560c7f43a7e37 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/archy/examples/beep.js @@ -0,0 +1,24 @@ +var archy = require('../'); +var s = archy({ + label : 'beep', + nodes : [ + 'ity', + { + label : 'boop', + nodes : [ + { + label : 'o_O', + nodes : [ + { + label : 'oh', + nodes : [ 'hello', 'puny' ] + }, + 'human' + ] + }, + 'party\ntime!' + ] + } + ] +}); +console.log(s); diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/archy/examples/multi_line.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/archy/examples/multi_line.js new file mode 100644 index 0000000000000000000000000000000000000000..8afdfada9102fe5eef2f76834b94bd3b791aca78 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/archy/examples/multi_line.js @@ -0,0 +1,25 @@ +var archy = require('../'); + +var s = archy({ + label : 'beep\none\ntwo', + nodes : [ + 'ity', + { + label : 'boop', + nodes : [ + { + label : 'o_O\nwheee', + nodes : [ + { + label : 'oh', + nodes : [ 'hello', 'puny\nmeat' ] + }, + 'creature' + ] + }, + 'party\ntime!' 
+ ] + } + ] +}); +console.log(s); diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/bin-target.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/bin-target.js new file mode 100644 index 0000000000000000000000000000000000000000..0629285d5144c40d7696ed4ec5fe99b54b4ae001 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/bin-target.js @@ -0,0 +1,9 @@ +const isWindows = require('./is-windows.js') +const getPrefix = require('./get-prefix.js') +const getNodeModules = require('./get-node-modules.js') +const { dirname } = require('path') + +module.exports = ({ top, path }) => + !top ? getNodeModules(path) + '/.bin' + : isWindows ? getPrefix(path) + : dirname(getPrefix(path)) + '/bin' diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/check-bin.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/check-bin.js new file mode 100644 index 0000000000000000000000000000000000000000..c5b997bb96355c9249a5c9dc9a633296dd0a3a69 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/check-bin.js @@ -0,0 +1,74 @@ +// check to see if a bin is allowed to be overwritten +// either rejects or resolves to nothing. return value not relevant. +const isWindows = require('./is-windows.js') +const binTarget = require('./bin-target.js') +const { resolve, dirname } = require('path') +const readCmdShim = require('read-cmd-shim') +const { readlink } = require('fs/promises') + +const checkBin = async ({ bin, path, top, global, force }) => { + // always ok to clobber when forced + // always ok to clobber local bins, or when forced + if (force || !global || !top) { + return + } + + // ok, need to make sure, then + const target = resolve(binTarget({ path, top }), bin) + path = resolve(path) + return isWindows ? checkShim({ target, path }) : checkLink({ target, path }) +} + +// only enoent is allowed. anything else is a problem. +const handleReadLinkError = async ({ er, target }) => + er.code === 'ENOENT' ? null + : failEEXIST({ target }) + +const checkLink = async ({ target, path }) => { + const current = await readlink(target) + .catch(er => handleReadLinkError({ er, target })) + + if (!current) { + return + } + + const resolved = resolve(dirname(target), current) + + if (resolved.toLowerCase().indexOf(path.toLowerCase()) !== 0) { + return failEEXIST({ target }) + } +} + +const handleReadCmdShimError = ({ er, target }) => + er.code === 'ENOENT' ? 
null + : failEEXIST({ target }) + +const failEEXIST = ({ target }) => + Promise.reject(Object.assign(new Error('EEXIST: file already exists'), { + path: target, + code: 'EEXIST', + })) + +const checkShim = async ({ target, path }) => { + const shims = [ + target, + target + '.cmd', + target + '.ps1', + ] + await Promise.all(shims.map(async shim => { + const current = await readCmdShim(shim) + .catch(er => handleReadCmdShimError({ er, target: shim })) + + if (!current) { + return + } + + const resolved = resolve(dirname(shim), current.replace(/\\/g, '/')) + + if (resolved.toLowerCase().indexOf(path.toLowerCase()) !== 0) { + return failEEXIST({ target: shim }) + } + })) +} + +module.exports = checkBin diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/check-bins.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/check-bins.js new file mode 100644 index 0000000000000000000000000000000000000000..76a683c91d7c2259400f6ba6e55163f13d8a5ada --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/check-bins.js @@ -0,0 +1,18 @@ +const checkBin = require('./check-bin.js') +const normalize = require('npm-normalize-package-bin') +const checkBins = async ({ pkg, path, top, global, force }) => { + // always ok to clobber when forced + // always ok to clobber local bins, or when forced + if (force || !global || !top) { + return + } + + pkg = normalize(pkg) + if (!pkg.bin) { + return + } + + await Promise.all(Object.keys(pkg.bin) + .map(bin => checkBin({ bin, path, top, global, force }))) +} +module.exports = checkBins diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/fix-bin.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/fix-bin.js new file mode 100644 index 0000000000000000000000000000000000000000..453bd4f3e95b162d24afd9a85871f4bc19095325 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/fix-bin.js @@ -0,0 +1,42 @@ +// make sure that bins are executable, and that they don't have +// windows line-endings on the hashbang line. +const { + chmod, + open, + readFile, +} = require('fs/promises') + +const execMode = 0o777 & (~process.umask()) + +const writeFileAtomic = require('write-file-atomic') + +const isWindowsHashBang = buf => + buf[0] === '#'.charCodeAt(0) && + buf[1] === '!'.charCodeAt(0) && + /^#![^\n]+\r\n/.test(buf.toString()) + +const isWindowsHashbangFile = file => { + const FALSE = () => false + return open(file, 'r').then(fh => { + const buf = Buffer.alloc(2048) + return fh.read(buf, 0, 2048, 0) + .then( + () => { + const isWHB = isWindowsHashBang(buf) + return fh.close().then(() => isWHB, () => isWHB) + }, + // don't leak FD if read() fails + () => fh.close().then(FALSE, FALSE) + ) + }, FALSE) +} + +const dos2Unix = file => + readFile(file, 'utf8').then(content => + writeFileAtomic(file, content.replace(/^(#![^\n]+)\r\n/, '$1\n'))) + +const fixBin = (file, mode = execMode) => chmod(file, mode) + .then(() => isWindowsHashbangFile(file)) + .then(isWHB => isWHB ? 
dos2Unix(file) : null) + +module.exports = fixBin diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/get-node-modules.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/get-node-modules.js new file mode 100644 index 0000000000000000000000000000000000000000..5c16b3b8afbfb5d0151c0d4b40ed416745897f66 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/get-node-modules.js @@ -0,0 +1,19 @@ +// we know it's global and/or not top, so the path has to be +// {prefix}/node_modules/{name}. Can't rely on pkg.name, because +// it might be installed as an alias. + +const { dirname, basename } = require('path') +// this gets called a lot and can't change, so memoize it +const memo = new Map() +module.exports = path => { + if (memo.has(path)) { + return memo.get(path) + } + + const scopeOrNm = dirname(path) + const nm = basename(scopeOrNm) === 'node_modules' ? scopeOrNm + : dirname(scopeOrNm) + + memo.set(path, nm) + return nm +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/get-paths.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/get-paths.js new file mode 100644 index 0000000000000000000000000000000000000000..b93e6982dce2215819dbfedcda1d409f0e348f33 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/get-paths.js @@ -0,0 +1,42 @@ +// get all the paths that are (or might be) installed for a given pkg +// There's no guarantee that all of these will be installed, but if they +// are present, then we can assume that they're associated. +const binTarget = require('./bin-target.js') +const manTarget = require('./man-target.js') +const { resolve, basename, extname } = require('path') +const isWindows = require('./is-windows.js') +module.exports = ({ path, pkg, global, top }) => { + if (top && !global) { + return [] + } + + const binSet = [] + const binTarg = binTarget({ path, top }) + if (pkg.bin) { + for (const bin of Object.keys(pkg.bin)) { + const b = resolve(binTarg, bin) + binSet.push(b) + if (isWindows) { + binSet.push(b + '.cmd') + binSet.push(b + '.ps1') + } + } + } + + const manTarg = manTarget({ path, top }) + const manSet = [] + if (manTarg && pkg.man && Array.isArray(pkg.man) && pkg.man.length) { + for (const man of pkg.man) { + if (!/.\.[0-9]+(\.gz)?$/.test(man)) { + return binSet + } + + const section = extname(basename(man, '.gz')).slice(1) + const base = basename(man) + + manSet.push(resolve(manTarg, 'man' + section, base)) + } + } + + return manSet.length ? 
[...binSet, ...manSet] : binSet +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/get-prefix.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/get-prefix.js new file mode 100644 index 0000000000000000000000000000000000000000..d5cf9c9d01c2028f5d272eca2232082062f86444 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/get-prefix.js @@ -0,0 +1,3 @@ +const { dirname } = require('path') +const getNodeModules = require('./get-node-modules.js') +module.exports = path => dirname(getNodeModules(path)) diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/index.js new file mode 100644 index 0000000000000000000000000000000000000000..ab3bd13c0be6b32c06bc576a8248a24a23229b38 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/index.js @@ -0,0 +1,44 @@ +const linkBins = require('./link-bins.js') +const linkMans = require('./link-mans.js') + +const binLinks = opts => { + const { path, pkg, force, global, top } = opts + // global top pkgs on windows get bins installed in {prefix}, and no mans + // + // unix global top pkgs get their bins installed in {prefix}/bin, + // and mans in {prefix}/share/man + // + // non-top pkgs get their bins installed in {prefix}/node_modules/.bin, + // and do not install mans + // + // non-global top pkgs don't have any bins or mans linked. From here on + // out, if it's top, we know that it's global, so no need to pass that + // option further down the stack. + if (top && !global) { + return Promise.resolve() + } + + return Promise.all([ + // allow clobbering within the local node_modules/.bin folder. + // only global bins are protected in this way, or else it is + // yet another vector for excessive dependency conflicts. 
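+ // For illustration, with a hypothetical global package installed at
+ // /usr/local/lib/node_modules/foo, the cases above resolve through
+ // bin-target.js and man-target.js as:
+ //   top && global, unix:   bins -> /usr/local/bin,
+ //                          mans -> /usr/local/share/man
+ //   top && global, win32:  bins -> the {prefix} folder itself, no mans
+ //   !top (any platform):   bins -> the nearest node_modules/.bin, no mans
+ //   top && !global:        nothing linked (the early return above)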
+ linkBins({ path, pkg, top, force: force || !top }), + linkMans({ path, pkg, top, force }), + ]) +} + +const shimBin = require('./shim-bin.js') +const linkGently = require('./link-gently.js') +const resetSeen = () => { + shimBin.resetSeen() + linkGently.resetSeen() +} + +const checkBins = require('./check-bins.js') +const getPaths = require('./get-paths.js') + +module.exports = Object.assign(binLinks, { + checkBins, + resetSeen, + getPaths, +}) diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/is-windows.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/is-windows.js new file mode 100644 index 0000000000000000000000000000000000000000..da3b2fb220b0a680c489b9a50e6c7f88d2918c6b --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/is-windows.js @@ -0,0 +1,2 @@ +const platform = process.env.__TESTING_BIN_LINKS_PLATFORM__ || process.platform +module.exports = platform === 'win32' diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/link-bin.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/link-bin.js new file mode 100644 index 0000000000000000000000000000000000000000..fb579350994d033f290b63562aae59db9c5c619b --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/link-bin.js @@ -0,0 +1,9 @@ +const linkGently = require('./link-gently.js') +const fixBin = require('./fix-bin.js') + +// linking bins is simple. just symlink, and if we linked it, fix the bin up +const linkBin = ({ path, to, from, absFrom, force }) => + linkGently({ path, to, from, absFrom, force }) + .then(linked => linked && fixBin(absFrom)) + +module.exports = linkBin diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/link-bins.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/link-bins.js new file mode 100644 index 0000000000000000000000000000000000000000..9bf7d72117fbbe642c500548d8636e7beb469548 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/link-bins.js @@ -0,0 +1,23 @@ +const isWindows = require('./is-windows.js') +const binTarget = require('./bin-target.js') +const { dirname, resolve, relative } = require('path') +const linkBin = isWindows ? 
require('./shim-bin.js') : require('./link-bin.js') +const normalize = require('npm-normalize-package-bin') + +const linkBins = ({ path, pkg, top, force }) => { + pkg = normalize(pkg) + if (!pkg.bin) { + return Promise.resolve([]) + } + const promises = [] + const target = binTarget({ path, top }) + for (const [key, val] of Object.entries(pkg.bin)) { + const to = resolve(target, key) + const absFrom = resolve(path, val) + const from = relative(dirname(to), absFrom) + promises.push(linkBin({ path, from, to, absFrom, force })) + } + return Promise.all(promises) +} + +module.exports = linkBins diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/link-gently.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/link-gently.js new file mode 100644 index 0000000000000000000000000000000000000000..a39d3bced57b13424552fcb641da6accc00460b0 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/link-gently.js @@ -0,0 +1,96 @@ +// if the thing isn't there, skip it +// if there's a non-symlink there already, eexist +// if there's a symlink already, pointing somewhere else, eexist +// if there's a symlink already, pointing into our pkg, remove it first +// then create the symlink + +const { resolve, dirname } = require('path') +const { lstat, mkdir, readlink, rm, symlink } = require('fs/promises') +const { log } = require('proc-log') +const throwSignificant = er => { + if (er.code === 'ENOENT') { + return + } + if (er.code === 'EACCES') { + log.warn('error adding file', er.message) + return + } + throw er +} + +const rmOpts = { + recursive: true, + force: true, +} + +// even in --force mode, we never create a link over a link we've +// already created. you can have multiple packages in a tree trying +// to contend for the same bin, or the same manpage listed multiple times, +// which creates a race condition and nondeterminism. +const seen = new Set() + +const SKIP = Symbol('skip - missing or already installed') +const CLOBBER = Symbol('clobber - ours or in forceful mode') + +const linkGently = async ({ path, to, from, absFrom, force }) => { + if (seen.has(to)) { + return false + } + seen.add(to) + + // if the script or manpage isn't there, just ignore it. + // this arguably *should* be an install error of some sort, + // or at least a warning, but npm has always behaved this + // way in the past, so it'd be a breaking change + return Promise.all([ + lstat(absFrom).catch(throwSignificant), + lstat(to).catch(throwSignificant), + ]).then(([stFrom, stTo]) => { + // not present in package, skip it + if (!stFrom) { + return SKIP + } + + // exists! maybe clobber if we can + if (stTo) { + if (!stTo.isSymbolicLink()) { + return force && rm(to, rmOpts).then(() => CLOBBER) + } + + return readlink(to).then(target => { + if (target === from) { + return SKIP + } // skip it, already set up like we want it. 
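+ // For illustration, the three outcomes of this readlink branch, with
+ // hypothetical paths:
+ //   readlink(to) === from               -> SKIP: the link is already ours
+ //   link resolves under path, or force  -> rm(to), then CLOBBER
+ //   link points into a foreign package  -> false: the symlink() attempt
+ //     below fails with EEXIST, surfacing the conflict rather than
+ //     silently stealing another package's bin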
+ + target = resolve(dirname(to), target) + if (target.indexOf(path) === 0 || force) { + return rm(to, rmOpts).then(() => CLOBBER) + } + // neither skip nor clobber + return false + }) + } else { + // doesn't exist, dir might not either + return mkdir(dirname(to), { recursive: true }) + } + }) + .then(skipOrClobber => { + if (skipOrClobber === SKIP) { + return false + } + return symlink(from, to, 'file').catch(er => { + if (skipOrClobber === CLOBBER || force) { + return rm(to, rmOpts).then(() => symlink(from, to, 'file')) + } + throw er + }).then(() => true) + }) +} + +const resetSeen = () => { + for (const p of seen) { + seen.delete(p) + } +} + +module.exports = Object.assign(linkGently, { resetSeen }) diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/link-mans.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/link-mans.js new file mode 100644 index 0000000000000000000000000000000000000000..b6dd214cebdfe73666bb29165541c43e2562f5e9 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/link-mans.js @@ -0,0 +1,53 @@ +const { dirname, relative, join, resolve, basename } = require('path') +const linkGently = require('./link-gently.js') +const manTarget = require('./man-target.js') + +const linkMans = async ({ path, pkg, top, force }) => { + const target = manTarget({ path, top }) + if (!target || !Array.isArray(pkg?.man) || !pkg.man.length) { + return [] + } + + const links = [] + // `new Set` to filter out duplicates + for (let man of new Set(pkg.man)) { + if (!man || typeof man !== 'string') { + continue + } + // break any links to c:\\blah or /foo/blah or ../blah + man = join('/', man).replace(/\\|:/g, '/').slice(1) + const parseMan = man.match(/\.([0-9]+)(\.gz)?$/) + if (!parseMan) { + throw Object.assign(new Error('invalid man entry name\n' + + 'Man files must end with a number, ' + + 'and optionally a .gz suffix if they are compressed.' + ), { + code: 'EBADMAN', + path, + pkgid: pkg._id, + man, + }) + } + + const section = parseMan[1] + const base = basename(man) + const absFrom = resolve(path, man) + /* istanbul ignore if - that unpossible */ + if (absFrom.indexOf(path) !== 0) { + throw Object.assign(new Error('invalid man entry'), { + code: 'EBADMAN', + path, + pkgid: pkg._id, + man, + }) + } + + const to = resolve(target, 'man' + section, base) + const from = relative(dirname(to), absFrom) + + links.push(linkGently({ from, to, path, absFrom, force })) + } + return Promise.all(links) +} + +module.exports = linkMans diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/man-target.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/man-target.js new file mode 100644 index 0000000000000000000000000000000000000000..efe66f38a5543f285cd9cfe9c841ac84068c8849 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/man-target.js @@ -0,0 +1,6 @@ +const isWindows = require('./is-windows.js') +const getPrefix = require('./get-prefix.js') +const { dirname } = require('path') + +module.exports = ({ top, path }) => !top || isWindows ? 
null + : dirname(getPrefix(path)) + '/share/man' diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/shim-bin.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/shim-bin.js new file mode 100644 index 0000000000000000000000000000000000000000..67e2702702f0a8948a4419c9a767e5e8db22f11c --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/lib/shim-bin.js @@ -0,0 +1,86 @@ +const { resolve, dirname } = require('path') +const { lstat } = require('fs/promises') +const throwNonEnoent = er => { + if (er.code !== 'ENOENT') { + throw er + } +} + +const cmdShim = require('cmd-shim') +const readCmdShim = require('read-cmd-shim') + +const fixBin = require('./fix-bin.js') + +// even in --force mode, we never create a shim over a shim we've +// already created. you can have multiple packages in a tree trying +// to contend for the same bin, which creates a race condition and +// nondeterminism. +const seen = new Set() + +const failEEXIST = ({ to, from }) => + Promise.reject(Object.assign(new Error('EEXIST: file already exists'), { + path: to, + dest: from, + code: 'EEXIST', + })) + +const handleReadCmdShimError = ({ er, from, to }) => + er.code === 'ENOENT' ? null + : er.code === 'ENOTASHIM' ? failEEXIST({ from, to }) + : Promise.reject(er) + +const SKIP = Symbol('skip - missing or already installed') +const shimBin = ({ path, to, from, absFrom, force }) => { + const shims = [ + to, + to + '.cmd', + to + '.ps1', + ] + + for (const shim of shims) { + if (seen.has(shim)) { + return true + } + seen.add(shim) + } + + return Promise.all([ + ...shims, + absFrom, + ].map(f => lstat(f).catch(throwNonEnoent))).then((stats) => { + const [, , , stFrom] = stats + if (!stFrom) { + return SKIP + } + + if (force) { + return false + } + + return Promise.all(shims.map((s, i) => [s, stats[i]]).map(([s, st]) => { + if (!st) { + return false + } + return readCmdShim(s) + .then(target => { + target = resolve(dirname(to), target) + if (target.indexOf(resolve(path)) !== 0) { + return failEEXIST({ from, to, path }) + } + return false + }, er => handleReadCmdShimError({ er, from, to })) + })) + }) + .then(skip => skip !== SKIP && doShim(absFrom, to)) +} + +const doShim = (absFrom, to) => + cmdShim(absFrom, to).then(() => fixBin(absFrom)) + +const resetSeen = () => { + for (const p of seen) { + seen.delete(p) + } +} + +module.exports = Object.assign(shimBin, { resetSeen }) diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/brace-expansion/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/brace-expansion/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..de3226673c3874b1c6506db022393c753495655c --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/brace-expansion/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2013 Julian Gruber + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial 
portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/brace-expansion/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/brace-expansion/index.js new file mode 100644 index 0000000000000000000000000000000000000000..254ca75dd9aba247cd912f1f19487308d5712e87 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/brace-expansion/index.js @@ -0,0 +1,203 @@ +var balanced = require('balanced-match'); + +module.exports = expandTop; + +var escSlash = '\0SLASH'+Math.random()+'\0'; +var escOpen = '\0OPEN'+Math.random()+'\0'; +var escClose = '\0CLOSE'+Math.random()+'\0'; +var escComma = '\0COMMA'+Math.random()+'\0'; +var escPeriod = '\0PERIOD'+Math.random()+'\0'; + +function numeric(str) { + return parseInt(str, 10) == str + ? parseInt(str, 10) + : str.charCodeAt(0); +} + +function escapeBraces(str) { + return str.split('\\\\').join(escSlash) + .split('\\{').join(escOpen) + .split('\\}').join(escClose) + .split('\\,').join(escComma) + .split('\\.').join(escPeriod); +} + +function unescapeBraces(str) { + return str.split(escSlash).join('\\') + .split(escOpen).join('{') + .split(escClose).join('}') + .split(escComma).join(',') + .split(escPeriod).join('.'); +} + + +// Basically just str.split(","), but handling cases +// where we have nested braced sections, which should be +// treated as individual members, like {a,{b,c},d} +function parseCommaParts(str) { + if (!str) + return ['']; + + var parts = []; + var m = balanced('{', '}', str); + + if (!m) + return str.split(','); + + var pre = m.pre; + var body = m.body; + var post = m.post; + var p = pre.split(','); + + p[p.length-1] += '{' + body + '}'; + var postParts = parseCommaParts(post); + if (post.length) { + p[p.length-1] += postParts.shift(); + p.push.apply(p, postParts); + } + + parts.push.apply(parts, p); + + return parts; +} + +function expandTop(str) { + if (!str) + return []; + + // I don't know why Bash 4.3 does this, but it does. + // Anything starting with {} will have the first two bytes preserved + // but *only* at the top level, so {},a}b will not expand to anything, + // but a{},b}c will be expanded to [a}c,abc]. + // One could argue that this is a bug in Bash, but since the goal of + // this module is to match Bash's rules, we escape a leading {} + if (str.substr(0, 2) === '{}') { + str = '\\{\\}' + str.substr(2); + } + + return expand(escapeBraces(str), true).map(unescapeBraces); +} + +function embrace(str) { + return '{' + str + '}'; +} +function isPadded(el) { + return /^-?0\d/.test(el); +} + +function lte(i, y) { + return i <= y; +} +function gte(i, y) { + return i >= y; +} + +function expand(str, isTop) { + var expansions = []; + + var m = balanced('{', '}', str); + if (!m) return [str]; + + // no need to expand pre, since it is guaranteed to be free of brace-sets + var pre = m.pre; + var post = m.post.length + ? 
expand(m.post, false) + : ['']; + + if (/\$$/.test(m.pre)) { + for (var k = 0; k < post.length; k++) { + var expansion = pre+ '{' + m.body + '}' + post[k]; + expansions.push(expansion); + } + } else { + var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + var isSequence = isNumericSequence || isAlphaSequence; + var isOptions = m.body.indexOf(',') >= 0; + if (!isSequence && !isOptions) { + // {a},b} + if (m.post.match(/,(?!,).*\}/)) { + str = m.pre + '{' + m.body + escClose + m.post; + return expand(str); + } + return [str]; + } + + var n; + if (isSequence) { + n = m.body.split(/\.\./); + } else { + n = parseCommaParts(m.body); + if (n.length === 1) { + // x{{a,b}}y ==> x{a}y x{b}y + n = expand(n[0], false).map(embrace); + if (n.length === 1) { + return post.map(function(p) { + return m.pre + n[0] + p; + }); + } + } + } + + // at this point, n is the parts, and we know it's not a comma set + // with a single entry. + var N; + + if (isSequence) { + var x = numeric(n[0]); + var y = numeric(n[1]); + var width = Math.max(n[0].length, n[1].length) + var incr = n.length == 3 + ? Math.abs(numeric(n[2])) + : 1; + var test = lte; + var reverse = y < x; + if (reverse) { + incr *= -1; + test = gte; + } + var pad = n.some(isPadded); + + N = []; + + for (var i = x; test(i, y); i += incr) { + var c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === '\\') + c = ''; + } else { + c = String(i); + if (pad) { + var need = width - c.length; + if (need > 0) { + var z = new Array(need + 1).join('0'); + if (i < 0) + c = '-' + z + c.slice(1); + else + c = z + c; + } + } + } + N.push(c); + } + } else { + N = []; + + for (var j = 0; j < n.length; j++) { + N.push.apply(N, expand(n[j], false)); + } + } + + for (var j = 0; j < N.length; j++) { + for (var k = 0; k < post.length; k++) { + var expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) + expansions.push(expansion); + } + } + } + + return expansions; +} + diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/brace-expansion/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/brace-expansion/package.json new file mode 100644 index 0000000000000000000000000000000000000000..c7eee34511002aea1aab5be76c1bd79fe7ded291 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/brace-expansion/package.json @@ -0,0 +1,49 @@ +{ + "name": "brace-expansion", + "description": "Brace expansion as known from sh/bash", + "version": "2.0.2", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/brace-expansion.git" + }, + "homepage": "https://github.com/juliangruber/brace-expansion", + "main": "index.js", + "scripts": { + "test": "tape test/*.js", + "gentest": "bash test/generate.sh", + "bench": "matcha test/perf/bench.js" + }, + "dependencies": { + "balanced-match": "^1.0.0" + }, + "devDependencies": { + "@c4312/matcha": "^1.3.1", + "tape": "^4.6.0" + }, + "keywords": [], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT", + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/8..latest", + "firefox/20..latest", + "firefox/nightly", + "chrome/25..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + }, + "publishConfig": { 
+ "tag": "2.x" + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/chalk/source/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/chalk/source/index.js new file mode 100644 index 0000000000000000000000000000000000000000..8bc993da5d6229aff2851fa01996e6164e90e10c --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/chalk/source/index.js @@ -0,0 +1,225 @@ +import ansiStyles from '#ansi-styles'; +import supportsColor from '#supports-color'; +import { // eslint-disable-line import/order + stringReplaceAll, + stringEncaseCRLFWithFirstIndex, +} from './utilities.js'; + +const {stdout: stdoutColor, stderr: stderrColor} = supportsColor; + +const GENERATOR = Symbol('GENERATOR'); +const STYLER = Symbol('STYLER'); +const IS_EMPTY = Symbol('IS_EMPTY'); + +// `supportsColor.level` → `ansiStyles.color[name]` mapping +const levelMapping = [ + 'ansi', + 'ansi', + 'ansi256', + 'ansi16m', +]; + +const styles = Object.create(null); + +const applyOptions = (object, options = {}) => { + if (options.level && !(Number.isInteger(options.level) && options.level >= 0 && options.level <= 3)) { + throw new Error('The `level` option should be an integer from 0 to 3'); + } + + // Detect level if not set manually + const colorLevel = stdoutColor ? stdoutColor.level : 0; + object.level = options.level === undefined ? colorLevel : options.level; +}; + +export class Chalk { + constructor(options) { + // eslint-disable-next-line no-constructor-return + return chalkFactory(options); + } +} + +const chalkFactory = options => { + const chalk = (...strings) => strings.join(' '); + applyOptions(chalk, options); + + Object.setPrototypeOf(chalk, createChalk.prototype); + + return chalk; +}; + +function createChalk(options) { + return chalkFactory(options); +} + +Object.setPrototypeOf(createChalk.prototype, Function.prototype); + +for (const [styleName, style] of Object.entries(ansiStyles)) { + styles[styleName] = { + get() { + const builder = createBuilder(this, createStyler(style.open, style.close, this[STYLER]), this[IS_EMPTY]); + Object.defineProperty(this, styleName, {value: builder}); + return builder; + }, + }; +} + +styles.visible = { + get() { + const builder = createBuilder(this, this[STYLER], true); + Object.defineProperty(this, 'visible', {value: builder}); + return builder; + }, +}; + +const getModelAnsi = (model, level, type, ...arguments_) => { + if (model === 'rgb') { + if (level === 'ansi16m') { + return ansiStyles[type].ansi16m(...arguments_); + } + + if (level === 'ansi256') { + return ansiStyles[type].ansi256(ansiStyles.rgbToAnsi256(...arguments_)); + } + + return ansiStyles[type].ansi(ansiStyles.rgbToAnsi(...arguments_)); + } + + if (model === 'hex') { + return getModelAnsi('rgb', level, type, ...ansiStyles.hexToRgb(...arguments_)); + } + + return ansiStyles[type][model](...arguments_); +}; + +const usedModels = ['rgb', 'hex', 'ansi256']; + +for (const model of usedModels) { + styles[model] = { + get() { + const {level} = this; + return function (...arguments_) { + const styler = createStyler(getModelAnsi(model, levelMapping[level], 'color', ...arguments_), ansiStyles.color.close, this[STYLER]); + return createBuilder(this, styler, this[IS_EMPTY]); + }; + }, + }; + + const bgModel = 'bg' + model[0].toUpperCase() + model.slice(1); + styles[bgModel] = { + get() { + const {level} = this; + return function (...arguments_) { + const styler = createStyler(getModelAnsi(model, levelMapping[level], 'bgColor', 
...arguments_), ansiStyles.bgColor.close, this[STYLER]); + return createBuilder(this, styler, this[IS_EMPTY]); + }; + }, + }; +} + +const proto = Object.defineProperties(() => {}, { + ...styles, + level: { + enumerable: true, + get() { + return this[GENERATOR].level; + }, + set(level) { + this[GENERATOR].level = level; + }, + }, +}); + +const createStyler = (open, close, parent) => { + let openAll; + let closeAll; + if (parent === undefined) { + openAll = open; + closeAll = close; + } else { + openAll = parent.openAll + open; + closeAll = close + parent.closeAll; + } + + return { + open, + close, + openAll, + closeAll, + parent, + }; +}; + +const createBuilder = (self, _styler, _isEmpty) => { + // Single argument is hot path, implicit coercion is faster than anything + // eslint-disable-next-line no-implicit-coercion + const builder = (...arguments_) => applyStyle(builder, (arguments_.length === 1) ? ('' + arguments_[0]) : arguments_.join(' ')); + + // We alter the prototype because we must return a function, but there is + // no way to create a function with a different prototype + Object.setPrototypeOf(builder, proto); + + builder[GENERATOR] = self; + builder[STYLER] = _styler; + builder[IS_EMPTY] = _isEmpty; + + return builder; +}; + +const applyStyle = (self, string) => { + if (self.level <= 0 || !string) { + return self[IS_EMPTY] ? '' : string; + } + + let styler = self[STYLER]; + + if (styler === undefined) { + return string; + } + + const {openAll, closeAll} = styler; + if (string.includes('\u001B')) { + while (styler !== undefined) { + // Replace any instances already present with a re-opening code + // otherwise only the part of the string until said closing code + // will be colored, and the rest will simply be 'plain'. + string = stringReplaceAll(string, styler.close, styler.open); + + styler = styler.parent; + } + } + + // We can move both next actions out of loop, because remaining actions in loop won't have + // any/visible effect on parts we add here. Close the styling before a linebreak and reopen + // after next line to fix a bleed issue on macOS: https://github.com/chalk/chalk/pull/92 + const lfIndex = string.indexOf('\n'); + if (lfIndex !== -1) { + string = stringEncaseCRLFWithFirstIndex(string, closeAll, openAll, lfIndex); + } + + return openAll + string + closeAll; +}; + +Object.defineProperties(createChalk.prototype, styles); + +const chalk = createChalk(); +export const chalkStderr = createChalk({level: stderrColor ? stderrColor.level : 0}); + +export { + modifierNames, + foregroundColorNames, + backgroundColorNames, + colorNames, + + // TODO: Remove these aliases in the next major version + modifierNames as modifiers, + foregroundColorNames as foregroundColors, + backgroundColorNames as backgroundColors, + colorNames as colors, +} from './vendor/ansi-styles/index.js'; + +export { + stdoutColor as supportsColor, + stderrColor as supportsColorStderr, +}; + +export default chalk; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/chalk/source/utilities.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/chalk/source/utilities.js new file mode 100644 index 0000000000000000000000000000000000000000..4366dee0d84d72d21b30f3f3fd74c279cb28a102 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/chalk/source/utilities.js @@ -0,0 +1,33 @@ +// TODO: When targeting Node.js 16, use `String.prototype.replaceAll`. 
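+// Note that, unlike String.prototype.replaceAll, this helper keeps each
+// matched substring and appends the replacer after it, e.g.:
+//   stringReplaceAll('a_b_c', '_', '*')  // => 'a_*b_*c'
+// index.js relies on exactly that to re-open a style immediately after
+// each close code already present in the string.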
+export function stringReplaceAll(string, substring, replacer) { + let index = string.indexOf(substring); + if (index === -1) { + return string; + } + + const substringLength = substring.length; + let endIndex = 0; + let returnValue = ''; + do { + returnValue += string.slice(endIndex, index) + substring + replacer; + endIndex = index + substringLength; + index = string.indexOf(substring, endIndex); + } while (index !== -1); + + returnValue += string.slice(endIndex); + return returnValue; +} + +export function stringEncaseCRLFWithFirstIndex(string, prefix, postfix, index) { + let endIndex = 0; + let returnValue = ''; + do { + const gotCR = string[index - 1] === '\r'; + returnValue += string.slice(endIndex, (gotCR ? index - 1 : index)) + prefix + (gotCR ? '\r\n' : '\n') + postfix; + endIndex = index + 1; + index = string.indexOf('\n', endIndex); + } while (index !== -1); + + returnValue += string.slice(endIndex); + return returnValue; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/chalk/source/vendor/ansi-styles/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/chalk/source/vendor/ansi-styles/index.js new file mode 100644 index 0000000000000000000000000000000000000000..eaa7bed6cb1ed94a4eab0bf2ef6b61d69057998a --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/chalk/source/vendor/ansi-styles/index.js @@ -0,0 +1,223 @@ +const ANSI_BACKGROUND_OFFSET = 10; + +const wrapAnsi16 = (offset = 0) => code => `\u001B[${code + offset}m`; + +const wrapAnsi256 = (offset = 0) => code => `\u001B[${38 + offset};5;${code}m`; + +const wrapAnsi16m = (offset = 0) => (red, green, blue) => `\u001B[${38 + offset};2;${red};${green};${blue}m`; + +const styles = { + modifier: { + reset: [0, 0], + // 21 isn't widely supported and 22 does the same thing + bold: [1, 22], + dim: [2, 22], + italic: [3, 23], + underline: [4, 24], + overline: [53, 55], + inverse: [7, 27], + hidden: [8, 28], + strikethrough: [9, 29], + }, + color: { + black: [30, 39], + red: [31, 39], + green: [32, 39], + yellow: [33, 39], + blue: [34, 39], + magenta: [35, 39], + cyan: [36, 39], + white: [37, 39], + + // Bright color + blackBright: [90, 39], + gray: [90, 39], // Alias of `blackBright` + grey: [90, 39], // Alias of `blackBright` + redBright: [91, 39], + greenBright: [92, 39], + yellowBright: [93, 39], + blueBright: [94, 39], + magentaBright: [95, 39], + cyanBright: [96, 39], + whiteBright: [97, 39], + }, + bgColor: { + bgBlack: [40, 49], + bgRed: [41, 49], + bgGreen: [42, 49], + bgYellow: [43, 49], + bgBlue: [44, 49], + bgMagenta: [45, 49], + bgCyan: [46, 49], + bgWhite: [47, 49], + + // Bright color + bgBlackBright: [100, 49], + bgGray: [100, 49], // Alias of `bgBlackBright` + bgGrey: [100, 49], // Alias of `bgBlackBright` + bgRedBright: [101, 49], + bgGreenBright: [102, 49], + bgYellowBright: [103, 49], + bgBlueBright: [104, 49], + bgMagentaBright: [105, 49], + bgCyanBright: [106, 49], + bgWhiteBright: [107, 49], + }, +}; + +export const modifierNames = Object.keys(styles.modifier); +export const foregroundColorNames = Object.keys(styles.color); +export const backgroundColorNames = Object.keys(styles.bgColor); +export const colorNames = [...foregroundColorNames, ...backgroundColorNames]; + +function assembleStyles() { + const codes = new Map(); + + for (const [groupName, group] of Object.entries(styles)) { + for (const [styleName, style] of Object.entries(group)) { + styles[styleName] = { + open: `\u001B[${style[0]}m`, + close: 
`\u001B[${style[1]}m`, + }; + + group[styleName] = styles[styleName]; + + codes.set(style[0], style[1]); + } + + Object.defineProperty(styles, groupName, { + value: group, + enumerable: false, + }); + } + + Object.defineProperty(styles, 'codes', { + value: codes, + enumerable: false, + }); + + styles.color.close = '\u001B[39m'; + styles.bgColor.close = '\u001B[49m'; + + styles.color.ansi = wrapAnsi16(); + styles.color.ansi256 = wrapAnsi256(); + styles.color.ansi16m = wrapAnsi16m(); + styles.bgColor.ansi = wrapAnsi16(ANSI_BACKGROUND_OFFSET); + styles.bgColor.ansi256 = wrapAnsi256(ANSI_BACKGROUND_OFFSET); + styles.bgColor.ansi16m = wrapAnsi16m(ANSI_BACKGROUND_OFFSET); + + // From https://github.com/Qix-/color-convert/blob/3f0e0d4e92e235796ccb17f6e85c72094a651f49/conversions.js + Object.defineProperties(styles, { + rgbToAnsi256: { + value(red, green, blue) { + // We use the extended greyscale palette here, with the exception of + // black and white. normal palette only has 4 greyscale shades. + if (red === green && green === blue) { + if (red < 8) { + return 16; + } + + if (red > 248) { + return 231; + } + + return Math.round(((red - 8) / 247) * 24) + 232; + } + + return 16 + + (36 * Math.round(red / 255 * 5)) + + (6 * Math.round(green / 255 * 5)) + + Math.round(blue / 255 * 5); + }, + enumerable: false, + }, + hexToRgb: { + value(hex) { + const matches = /[a-f\d]{6}|[a-f\d]{3}/i.exec(hex.toString(16)); + if (!matches) { + return [0, 0, 0]; + } + + let [colorString] = matches; + + if (colorString.length === 3) { + colorString = [...colorString].map(character => character + character).join(''); + } + + const integer = Number.parseInt(colorString, 16); + + return [ + /* eslint-disable no-bitwise */ + (integer >> 16) & 0xFF, + (integer >> 8) & 0xFF, + integer & 0xFF, + /* eslint-enable no-bitwise */ + ]; + }, + enumerable: false, + }, + hexToAnsi256: { + value: hex => styles.rgbToAnsi256(...styles.hexToRgb(hex)), + enumerable: false, + }, + ansi256ToAnsi: { + value(code) { + if (code < 8) { + return 30 + code; + } + + if (code < 16) { + return 90 + (code - 8); + } + + let red; + let green; + let blue; + + if (code >= 232) { + red = (((code - 232) * 10) + 8) / 255; + green = red; + blue = red; + } else { + code -= 16; + + const remainder = code % 36; + + red = Math.floor(code / 36) / 5; + green = Math.floor(remainder / 6) / 5; + blue = (remainder % 6) / 5; + } + + const value = Math.max(red, green, blue) * 2; + + if (value === 0) { + return 30; + } + + // eslint-disable-next-line no-bitwise + let result = 30 + ((Math.round(blue) << 2) | (Math.round(green) << 1) | Math.round(red)); + + if (value === 2) { + result += 60; + } + + return result; + }, + enumerable: false, + }, + rgbToAnsi: { + value: (red, green, blue) => styles.ansi256ToAnsi(styles.rgbToAnsi256(red, green, blue)), + enumerable: false, + }, + hexToAnsi: { + value: hex => styles.ansi256ToAnsi(styles.hexToAnsi256(hex)), + enumerable: false, + }, + }); + + return styles; +} + +const ansiStyles = assembleStyles(); + +export default ansiStyles; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/chalk/source/vendor/supports-color/browser.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/chalk/source/vendor/supports-color/browser.js new file mode 100644 index 0000000000000000000000000000000000000000..fbb6ce0fc9ab91d74c8535a59405081d3240fd24 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/chalk/source/vendor/supports-color/browser.js @@ -0,0 
+1,34 @@ +/* eslint-env browser */ + +const level = (() => { + if (!('navigator' in globalThis)) { + return 0; + } + + if (globalThis.navigator.userAgentData) { + const brand = navigator.userAgentData.brands.find(({brand}) => brand === 'Chromium'); + if (brand && brand.version > 93) { + return 3; + } + } + + if (/\b(Chrome|Chromium)\//.test(globalThis.navigator.userAgent)) { + return 1; + } + + return 0; +})(); + +const colorSupport = level !== 0 && { + level, + hasBasic: true, + has256: level >= 2, + has16m: level >= 3, +}; + +const supportsColor = { + stdout: colorSupport, + stderr: colorSupport, +}; + +export default supportsColor; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/chalk/source/vendor/supports-color/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/chalk/source/vendor/supports-color/index.js new file mode 100644 index 0000000000000000000000000000000000000000..265d7f85819536dd53c247881d367ef474c3ab8f --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/chalk/source/vendor/supports-color/index.js @@ -0,0 +1,190 @@ +import process from 'node:process'; +import os from 'node:os'; +import tty from 'node:tty'; + +// From: https://github.com/sindresorhus/has-flag/blob/main/index.js +/// function hasFlag(flag, argv = globalThis.Deno?.args ?? process.argv) { +function hasFlag(flag, argv = globalThis.Deno ? globalThis.Deno.args : process.argv) { + const prefix = flag.startsWith('-') ? '' : (flag.length === 1 ? '-' : '--'); + const position = argv.indexOf(prefix + flag); + const terminatorPosition = argv.indexOf('--'); + return position !== -1 && (terminatorPosition === -1 || position < terminatorPosition); +} + +const {env} = process; + +let flagForceColor; +if ( + hasFlag('no-color') + || hasFlag('no-colors') + || hasFlag('color=false') + || hasFlag('color=never') +) { + flagForceColor = 0; +} else if ( + hasFlag('color') + || hasFlag('colors') + || hasFlag('color=true') + || hasFlag('color=always') +) { + flagForceColor = 1; +} + +function envForceColor() { + if ('FORCE_COLOR' in env) { + if (env.FORCE_COLOR === 'true') { + return 1; + } + + if (env.FORCE_COLOR === 'false') { + return 0; + } + + return env.FORCE_COLOR.length === 0 ? 1 : Math.min(Number.parseInt(env.FORCE_COLOR, 10), 3); + } +} + +function translateLevel(level) { + if (level === 0) { + return false; + } + + return { + level, + hasBasic: true, + has256: level >= 2, + has16m: level >= 3, + }; +} + +function _supportsColor(haveStream, {streamIsTTY, sniffFlags = true} = {}) { + const noFlagForceColor = envForceColor(); + if (noFlagForceColor !== undefined) { + flagForceColor = noFlagForceColor; + } + + const forceColor = sniffFlags ? flagForceColor : noFlagForceColor; + + if (forceColor === 0) { + return 0; + } + + if (sniffFlags) { + if (hasFlag('color=16m') + || hasFlag('color=full') + || hasFlag('color=truecolor')) { + return 3; + } + + if (hasFlag('color=256')) { + return 2; + } + } + + // Check for Azure DevOps pipelines. + // Has to be above the `!streamIsTTY` check. + if ('TF_BUILD' in env && 'AGENT_NAME' in env) { + return 1; + } + + if (haveStream && !streamIsTTY && forceColor === undefined) { + return 0; + } + + const min = forceColor || 0; + + if (env.TERM === 'dumb') { + return min; + } + + if (process.platform === 'win32') { + // Windows 10 build 10586 is the first Windows release that supports 256 colors. + // Windows 10 build 14931 is the first release that supports 16m/TrueColor. 
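+ // For illustration, with hypothetical os.release() values:
+ //   '10.0.14931' and later        -> 3 (16m/TrueColor)
+ //   '10.0.10586' .. '10.0.14930'  -> 2 (256 colors)
+ //   anything earlier              -> 1 (basic colors)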
+ const osRelease = os.release().split('.'); + if ( + Number(osRelease[0]) >= 10 + && Number(osRelease[2]) >= 10_586 + ) { + return Number(osRelease[2]) >= 14_931 ? 3 : 2; + } + + return 1; + } + + if ('CI' in env) { + if (['GITHUB_ACTIONS', 'GITEA_ACTIONS', 'CIRCLECI'].some(key => key in env)) { + return 3; + } + + if (['TRAVIS', 'APPVEYOR', 'GITLAB_CI', 'BUILDKITE', 'DRONE'].some(sign => sign in env) || env.CI_NAME === 'codeship') { + return 1; + } + + return min; + } + + if ('TEAMCITY_VERSION' in env) { + return /^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0; + } + + if (env.COLORTERM === 'truecolor') { + return 3; + } + + if (env.TERM === 'xterm-kitty') { + return 3; + } + + if (env.TERM === 'xterm-ghostty') { + return 3; + } + + if (env.TERM === 'wezterm') { + return 3; + } + + if ('TERM_PROGRAM' in env) { + const version = Number.parseInt((env.TERM_PROGRAM_VERSION || '').split('.')[0], 10); + + switch (env.TERM_PROGRAM) { + case 'iTerm.app': { + return version >= 3 ? 3 : 2; + } + + case 'Apple_Terminal': { + return 2; + } + // No default + } + } + + if (/-256(color)?$/i.test(env.TERM)) { + return 2; + } + + if (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) { + return 1; + } + + if ('COLORTERM' in env) { + return 1; + } + + return min; +} + +export function createSupportsColor(stream, options = {}) { + const level = _supportsColor(stream, { + streamIsTTY: stream && stream.isTTY, + ...options, + }); + + return translateLevel(level); +} + +const supportsColor = { + stdout: createSupportsColor({isTTY: tty.isatty(1)}), + stderr: createSupportsColor({isTTY: tty.isatty(2)}), +}; + +export default supportsColor; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/chownr/dist/commonjs/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/chownr/dist/commonjs/index.js new file mode 100644 index 0000000000000000000000000000000000000000..6a7b68d5eac26eb3b0f52edfccd209d1bf570ee4 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/chownr/dist/commonjs/index.js @@ -0,0 +1,93 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.chownrSync = exports.chownr = void 0; +const node_fs_1 = __importDefault(require("node:fs")); +const node_path_1 = __importDefault(require("node:path")); +const lchownSync = (path, uid, gid) => { + try { + return node_fs_1.default.lchownSync(path, uid, gid); + } + catch (er) { + if (er?.code !== 'ENOENT') + throw er; + } +}; +const chown = (cpath, uid, gid, cb) => { + node_fs_1.default.lchown(cpath, uid, gid, er => { + // Skip ENOENT error + cb(er && er?.code !== 'ENOENT' ? er : null); + }); +}; +const chownrKid = (p, child, uid, gid, cb) => { + if (child.isDirectory()) { + (0, exports.chownr)(node_path_1.default.resolve(p, child.name), uid, gid, (er) => { + if (er) + return cb(er); + const cpath = node_path_1.default.resolve(p, child.name); + chown(cpath, uid, gid, cb); + }); + } + else { + const cpath = node_path_1.default.resolve(p, child.name); + chown(cpath, uid, gid, cb); + } +}; +const chownr = (p, uid, gid, cb) => { + node_fs_1.default.readdir(p, { withFileTypes: true }, (er, children) => { + // any error other than ENOTDIR or ENOTSUP means it's not readable, + // or doesn't exist. give up. 
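+ // For illustration, the policy implemented below: ENOENT reports success
+ // (the entry is gone, so there is nothing to chown); ENOTDIR/ENOTSUP fall
+ // through and lchown `p` itself (it is a file, or a symlink we cannot
+ // recurse into); any other error is passed to cb.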
+ if (er) { + if (er.code === 'ENOENT') + return cb(); + else if (er.code !== 'ENOTDIR' && er.code !== 'ENOTSUP') + return cb(er); + } + if (er || !children.length) + return chown(p, uid, gid, cb); + let len = children.length; + let errState = null; + const then = (er) => { + /* c8 ignore start */ + if (errState) + return; + /* c8 ignore stop */ + if (er) + return cb((errState = er)); + if (--len === 0) + return chown(p, uid, gid, cb); + }; + for (const child of children) { + chownrKid(p, child, uid, gid, then); + } + }); +}; +exports.chownr = chownr; +const chownrKidSync = (p, child, uid, gid) => { + if (child.isDirectory()) + (0, exports.chownrSync)(node_path_1.default.resolve(p, child.name), uid, gid); + lchownSync(node_path_1.default.resolve(p, child.name), uid, gid); +}; +const chownrSync = (p, uid, gid) => { + let children; + try { + children = node_fs_1.default.readdirSync(p, { withFileTypes: true }); + } + catch (er) { + const e = er; + if (e?.code === 'ENOENT') + return; + else if (e?.code === 'ENOTDIR' || e?.code === 'ENOTSUP') + return lchownSync(p, uid, gid); + else + throw e; + } + for (const child of children) { + chownrKidSync(p, child, uid, gid); + } + return lchownSync(p, uid, gid); +}; +exports.chownrSync = chownrSync; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/chownr/dist/commonjs/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/chownr/dist/commonjs/package.json new file mode 100644 index 0000000000000000000000000000000000000000..5bbefffbabee392d1855491b84dc0a716b6a3bf2 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/chownr/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/chownr/dist/esm/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/chownr/dist/esm/index.js new file mode 100644 index 0000000000000000000000000000000000000000..5c2815297a67cb9db466acb63798dd0b4c4bd6d2 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/chownr/dist/esm/index.js @@ -0,0 +1,85 @@ +import fs from 'node:fs'; +import path from 'node:path'; +const lchownSync = (path, uid, gid) => { + try { + return fs.lchownSync(path, uid, gid); + } + catch (er) { + if (er?.code !== 'ENOENT') + throw er; + } +}; +const chown = (cpath, uid, gid, cb) => { + fs.lchown(cpath, uid, gid, er => { + // Skip ENOENT error + cb(er && er?.code !== 'ENOENT' ? er : null); + }); +}; +const chownrKid = (p, child, uid, gid, cb) => { + if (child.isDirectory()) { + chownr(path.resolve(p, child.name), uid, gid, (er) => { + if (er) + return cb(er); + const cpath = path.resolve(p, child.name); + chown(cpath, uid, gid, cb); + }); + } + else { + const cpath = path.resolve(p, child.name); + chown(cpath, uid, gid, cb); + } +}; +export const chownr = (p, uid, gid, cb) => { + fs.readdir(p, { withFileTypes: true }, (er, children) => { + // any error other than ENOTDIR or ENOTSUP means it's not readable, + // or doesn't exist. give up. 
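+ // A minimal usage sketch of this module's exports (the path and ids are
+ // hypothetical):
+ //   import { chownr, chownrSync } from 'chownr'
+ //   chownr('/srv/app', 1000, 1000, er => { if (er) throw er })
+ //   chownrSync('/srv/app', 1000, 1000)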
+ if (er) { + if (er.code === 'ENOENT') + return cb(); + else if (er.code !== 'ENOTDIR' && er.code !== 'ENOTSUP') + return cb(er); + } + if (er || !children.length) + return chown(p, uid, gid, cb); + let len = children.length; + let errState = null; + const then = (er) => { + /* c8 ignore start */ + if (errState) + return; + /* c8 ignore stop */ + if (er) + return cb((errState = er)); + if (--len === 0) + return chown(p, uid, gid, cb); + }; + for (const child of children) { + chownrKid(p, child, uid, gid, then); + } + }); +}; +const chownrKidSync = (p, child, uid, gid) => { + if (child.isDirectory()) + chownrSync(path.resolve(p, child.name), uid, gid); + lchownSync(path.resolve(p, child.name), uid, gid); +}; +export const chownrSync = (p, uid, gid) => { + let children; + try { + children = fs.readdirSync(p, { withFileTypes: true }); + } + catch (er) { + const e = er; + if (e?.code === 'ENOENT') + return; + else if (e?.code === 'ENOTDIR' || e?.code === 'ENOTSUP') + return lchownSync(p, uid, gid); + else + throw e; + } + for (const child of children) { + chownrKidSync(p, child, uid, gid); + } + return lchownSync(p, uid, gid); +}; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/chownr/dist/esm/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/chownr/dist/esm/package.json new file mode 100644 index 0000000000000000000000000000000000000000..3dbc1ca591c0557e35b6004aeba250e6a70b56e3 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/chownr/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cidr-regex/dist/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cidr-regex/dist/index.js new file mode 100644 index 0000000000000000000000000000000000000000..2817f65eeb3cb8673eac2cd3329d86805c1b5e58 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cidr-regex/dist/index.js @@ -0,0 +1,15 @@ +import ipRegex from "ip-regex"; +const defaultOpts = { exact: false }; +const v4str = `${ipRegex.v4().source}\\/(3[0-2]|[12]?[0-9])`; +const v6str = `${ipRegex.v6().source}\\/(12[0-8]|1[01][0-9]|[1-9]?[0-9])`; +const v4exact = new RegExp(`^${v4str}$`); +const v6exact = new RegExp(`^${v6str}$`); +const v46exact = new RegExp(`(?:^${v4str}$)|(?:^${v6str}$)`); +const cidrRegex = ({ exact } = defaultOpts) => exact ? v46exact : new RegExp(`(?:${v4str})|(?:${v6str})`, "g"); +const v4 = cidrRegex.v4 = ({ exact } = defaultOpts) => exact ? v4exact : new RegExp(v4str, "g"); +const v6 = cidrRegex.v6 = ({ exact } = defaultOpts) => exact ? 
v6exact : new RegExp(v6str, "g"); +export { + cidrRegex as default, + v4, + v6 +}; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cli-columns/color.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cli-columns/color.js new file mode 100644 index 0000000000000000000000000000000000000000..11027047f24fe6b5731aba11c87d4326db6479a4 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cli-columns/color.js @@ -0,0 +1,16 @@ +const chalk = require('chalk'); +const columns = require('.'); + +// prettier-ignore +const values = [ + 'blue' + chalk.bgBlue('berry'), + '笔菠萝' + chalk.yellow('苹果笔'), + chalk.red('apple'), 'pomegranate', + 'durian', chalk.green('star fruit'), + 'パイナップル', 'apricot', 'banana', + 'pineapple', chalk.bgRed.yellow('orange') +]; + +console.log(''); +console.log(columns(values)); +console.log(''); diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cli-columns/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cli-columns/index.js new file mode 100644 index 0000000000000000000000000000000000000000..1090aa21c2bdf2c84ba50f84d40267c775ce562a --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cli-columns/index.js @@ -0,0 +1,82 @@ +'use strict'; + +const stringWidth = require('string-width'); +const stripAnsi = require('strip-ansi'); + +const concat = Array.prototype.concat; +const defaults = { + character: ' ', + newline: '\n', + padding: 2, + sort: true, + width: 0, +}; + +function byPlainText(a, b) { + const plainA = stripAnsi(a); + const plainB = stripAnsi(b); + + if (plainA === plainB) { + return 0; + } + + if (plainA > plainB) { + return 1; + } + + return -1; +} + +function makeArray() { + return []; +} + +function makeList(count) { + return Array.apply(null, Array(count)); +} + +function padCell(fullWidth, character, value) { + const valueWidth = stringWidth(value); + const filler = makeList(fullWidth - valueWidth + 1); + + return value + filler.join(character); +} + +function toRows(rows, cell, i) { + rows[i % rows.length].push(cell); + + return rows; +} + +function toString(arr) { + return arr.join(''); +} + +function columns(values, options) { + values = concat.apply([], values); + options = Object.assign({}, defaults, options); + + let cells = values.filter(Boolean).map(String); + + if (options.sort !== false) { + cells = cells.sort(byPlainText); + } + + const termWidth = options.width || process.stdout.columns; + const cellWidth = + Math.max.apply(null, cells.map(stringWidth)) + options.padding; + const columnCount = Math.floor(termWidth / cellWidth) || 1; + const rowCount = Math.ceil(cells.length / columnCount) || 1; + + if (columnCount === 1) { + return cells.join(options.newline); + } + + return cells + .map(padCell.bind(null, cellWidth, options.character)) + .reduce(toRows, makeList(rowCount).map(makeArray)) + .map(toString) + .join(options.newline); +} + +module.exports = columns; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cli-columns/license b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cli-columns/license new file mode 100644 index 0000000000000000000000000000000000000000..67147a987ea19bdbba017b356b52e9a755a63dc2 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cli-columns/license @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) Shannon Moeller 
(shannonmoeller.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cli-columns/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cli-columns/package.json new file mode 100644 index 0000000000000000000000000000000000000000..129f2c1316d2f076c541f8a98421b52367847ce0 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cli-columns/package.json @@ -0,0 +1,54 @@ +{ + "name": "cli-columns", + "version": "4.0.0", + "description": "Columnated lists for the CLI.", + "scripts": { + "lint": "npx eslint --fix '*.js' && npx prettier --write '*.js'", + "test": "node test.js && node color.js" + }, + "keywords": [ + "ansi", + "cli", + "column", + "columnate", + "columns", + "grid", + "list", + "log", + "ls", + "row", + "rows", + "unicode", + "unix" + ], + "author": "Shannon Moeller (http://shannonmoeller.com)", + "homepage": "https://github.com/shannonmoeller/cli-columns#readme", + "repository": "shannonmoeller/cli-columns", + "license": "MIT", + "main": "index.js", + "files": [ + "*.js" + ], + "dependencies": { + "string-width": "^4.2.3", + "strip-ansi": "^6.0.1" + }, + "devDependencies": { + "chalk": "^4.1.2" + }, + "engines": { + "node": ">= 10" + }, + "eslintConfig": { + "extends": "eslint:recommended", + "env": { + "node": true + }, + "parserOptions": { + "ecmaVersion": 8 + } + }, + "prettier": { + "singleQuote": true + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cli-columns/test.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cli-columns/test.js new file mode 100644 index 0000000000000000000000000000000000000000..4d95e7cf073230be3d44a84e7caa987ad85a607b --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cli-columns/test.js @@ -0,0 +1,101 @@ +'use strict'; + +const assert = require('assert'); +const chalk = require('chalk'); +const stripAnsi = require('strip-ansi'); +const columns = require('./index.js'); +const tests = []; + +function test(msg, fn) { + tests.push([msg, fn]); +} + +process.nextTick(async function run() { + for (const [msg, fn] of tests) { + try { + await fn(assert); + console.log(`pass - ${msg}`); + } catch (error) { + console.error(`fail - ${msg}`, error); + process.exit(1); + } + } +}); + +// prettier-ignore +test('should print one column list', t => { + const cols = columns(['foo', ['bar', 'baz'], ['bar', 'qux']], { + width: 1 + }); + + 
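+ // Why width: 1 forces a single column: cellWidth is the widest cell (3)
+ // plus padding (2) = 5, so Math.floor(1 / 5) || 1 gives a columnCount of
+ // 1, and the cells are simply sorted (duplicates kept) and newline-joined.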
const expected = + 'bar\n' + + 'bar\n' + + 'baz\n' + + 'foo\n' + + 'qux'; + + t.equal(cols, expected); +}); + +// prettier-ignore +test('should print three column list', t => { + const cols = columns(['foo', ['bar', 'baz'], ['bat', 'qux']], { + width: 16 + }); + + const expected = + 'bar baz qux \n' + + 'bat foo '; + + t.equal(cols, expected); +}); + +// prettier-ignore +test('should print complex list', t => { + const cols = columns( + [ + 'foo', 'bar', 'baz', + chalk.cyan('嶜憃撊') + ' 噾噿嚁', + 'blue' + chalk.bgBlue('berry'), + chalk.red('apple'), 'pomegranate', + 'durian', chalk.green('star fruit'), + 'apricot', 'banana pineapple' + ], + { + width: 80 + } + ); + + const expected = + 'apple bar durian star fruit \n' + + 'apricot baz foo 嶜憃撊 噾噿嚁 \n' + + 'banana pineapple blueberry pomegranate '; + + t.equal(stripAnsi(cols), expected); +}); + +// prettier-ignore +test('should optionally not sort', t => { + const cols = columns( + [ + 'foo', 'bar', 'baz', + chalk.cyan('嶜憃撊') + ' 噾噿嚁', + 'blue' + chalk.bgBlue('berry'), + chalk.red('apple'), 'pomegranate', + 'durian', chalk.green('star fruit'), + 'apricot', 'banana pineapple' + ], + { + sort: false, + width: 80 + } + ); + + const expected = + 'foo 嶜憃撊 噾噿嚁 pomegranate apricot \n' + + 'bar blueberry durian banana pineapple \n' + + 'baz apple star fruit '; + + t.equal(stripAnsi(cols), expected); +}); diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cmd-shim/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cmd-shim/lib/index.js new file mode 100644 index 0000000000000000000000000000000000000000..c13890aed3263cc5efffc759c89732789e108b78 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cmd-shim/lib/index.js @@ -0,0 +1,247 @@ +// On windows, create a .cmd file. +// Read the #! in the file to see what it uses. The vast majority +// of the time, this will be either: +// "#!/usr/bin/env <prog> <args...>" +// or: +// "#!<prog> <args...>" +// +// Write a binroot/pkg.bin + ".cmd" file that has this line in it: +// @<prog> <args...> %dp0%<target> %* + +const { + chmod, + mkdir, + readFile, + stat, + unlink, + writeFile, +} = require('fs/promises') + +const { dirname, relative } = require('path') +const toBatchSyntax = require('./to-batch-syntax') +// linting disabled because this regex is really long +// eslint-disable-next-line max-len +const shebangExpr = /^#!\s*(?:\/usr\/bin\/env\s+(?:-S\s+)?((?:[^ \t=]+=[^ \t=]+\s+)*))?([^ \t]+)(.*)$/ + +const cmdShimIfExists = (from, to) => + stat(from).then(() => cmdShim(from, to), () => {}) + +// Try to unlink, but ignore errors. +// Any problems will surface later. +const rm = path => unlink(path).catch(() => {}) + +const cmdShim = (from, to) => + stat(from).then(() => cmdShim_(from, to)) + +const cmdShim_ = (from, to) => Promise.all([ + rm(to), + rm(to + '.cmd'), + rm(to + '.ps1'), +]).then(() => writeShim(from, to)) + +const writeShim = (from, to) => + // make a cmd file and a sh script + // First, check if the bin is a #! of some sort. + // If not, then assume it's something that'll be compiled, or some other + // sort of script, and just call it directly.
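+ // Illustration (not part of the upstream source): given a bin file whose first + // line is "#!/usr/bin/env NODE_ENV=test node --harmony", shebangExpr captures + // vars = 'NODE_ENV=test ', prog = 'node' and args = ' --harmony'; a plain + // "#!/usr/bin/env node" yields prog = 'node' with empty vars and args.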
+ mkdir(dirname(to), { recursive: true }) + .then(() => readFile(from, 'utf8')) + .then(data => { + const firstLine = data.trim().split(/\r*\n/)[0] + const shebang = firstLine.match(shebangExpr) + if (!shebang) { + return writeShim_(from, to) + } + const vars = shebang[1] || '' + const prog = shebang[2] + const args = shebang[3] || '' + return writeShim_(from, to, prog, args, vars) + }, () => writeShim_(from, to)) + +const writeShim_ = (from, to, prog, args, variables) => { + let shTarget = relative(dirname(to), from) + let target = shTarget.split('/').join('\\') + let longProg + let shProg = prog && prog.split('\\').join('/') + let shLongProg + let pwshProg = shProg && `"${shProg}$exe"` + let pwshLongProg + shTarget = shTarget.split('\\').join('/') + args = args || '' + variables = variables || '' + if (!prog) { + prog = `"%dp0%\\${target}"` + shProg = `"$basedir/${shTarget}"` + pwshProg = shProg + args = '' + target = '' + shTarget = '' + } else { + longProg = `"%dp0%\\${prog}.exe"` + shLongProg = `"$basedir/${prog}"` + pwshLongProg = `"$basedir/${prog}$exe"` + target = `"%dp0%\\${target}"` + shTarget = `"$basedir/${shTarget}"` + } + + // Subroutine trick to fix https://github.com/npm/cmd-shim/issues/10 + // and https://github.com/npm/cli/issues/969 + const head = '@ECHO off\r\n' + + 'GOTO start\r\n' + + ':find_dp0\r\n' + + 'SET dp0=%~dp0\r\n' + + 'EXIT /b\r\n' + + ':start\r\n' + + 'SETLOCAL\r\n' + + 'CALL :find_dp0\r\n' + + let cmd + if (longProg) { + shLongProg = shLongProg.trim() + args = args.trim() + const variablesBatch = toBatchSyntax.convertToSetCommands(variables) + cmd = head + + variablesBatch + + '\r\n' + + `IF EXIST ${longProg} (\r\n` + + ` SET "_prog=${longProg.replace(/(^")|("$)/g, '')}"\r\n` + + ') ELSE (\r\n' + + ` SET "_prog=${prog.replace(/(^")|("$)/g, '')}"\r\n` + + ' SET PATHEXT=%PATHEXT:;.JS;=;%\r\n' + + ')\r\n' + + '\r\n' + // prevent "Terminate Batch Job? 
(Y/n)" message + // https://github.com/npm/cli/issues/969#issuecomment-737496588 + + 'endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & ' + + `"%_prog%" ${args} ${target} %*\r\n` + } else { + cmd = `${head}${prog} ${args} ${target} %*\r\n` + } + + // #!/bin/sh + // basedir=`dirname "$0"` + // + // case `uname` in + // *CYGWIN*|*MINGW*|*MSYS*) + // if command -v cygpath > /dev/null 2>&1; then + // basedir=`cygpath -w "$basedir"` + // fi + // ;; + // esac + // + // if [ -x "$basedir/node.exe" ]; then + // exec "$basedir/node.exe" "$basedir/node_modules/npm/bin/npm-cli.js" "$@" + // else + // exec node "$basedir/node_modules/npm/bin/npm-cli.js" "$@" + // fi + + let sh = '#!/bin/sh\n' + + sh = sh + + `basedir=$(dirname "$(echo "$0" | sed -e 's,\\\\,/,g')")\n` + + '\n' + + 'case `uname` in\n' + + ' *CYGWIN*|*MINGW*|*MSYS*)\n' + + ' if command -v cygpath > /dev/null 2>&1; then\n' + + ' basedir=`cygpath -w "$basedir"`\n' + + ' fi\n' + + ' ;;\n' + + 'esac\n' + + '\n' + + if (shLongProg) { + sh = sh + + `if [ -x ${shLongProg} ]; then\n` + + ` exec ${variables}${shLongProg} ${args} ${shTarget} "$@"\n` + + 'else \n' + + ` exec ${variables}${shProg} ${args} ${shTarget} "$@"\n` + + 'fi\n' + } else { + sh = sh + + `exec ${shProg} ${args} ${shTarget} "$@"\n` + } + + // #!/usr/bin/env pwsh + // $basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent + // + // $ret=0 + // $exe = "" + // if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) { + // # Fix case when both the Windows and Linux builds of Node + // # are installed in the same directory + // $exe = ".exe" + // } + // if (Test-Path "$basedir/node") { + // # Support pipeline input + // if ($MyInvocation.ExpectingInput) { + // $input | & "$basedir/node$exe" "$basedir/node_modules/npm/bin/npm-cli.js" $args + // } else { + // & "$basedir/node$exe" "$basedir/node_modules/npm/bin/npm-cli.js" $args + // } + // $ret=$LASTEXITCODE + // } else { + // # Support pipeline input + // if ($MyInvocation.ExpectingInput) { + // $input | & "node$exe" "$basedir/node_modules/npm/bin/npm-cli.js" $args + // } else { + // & "node$exe" "$basedir/node_modules/npm/bin/npm-cli.js" $args + // } + // $ret=$LASTEXITCODE + // } + // exit $ret + let pwsh = '#!/usr/bin/env pwsh\n' + + '$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent\n' + + '\n' + + '$exe=""\n' + + 'if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {\n' + + ' # Fix case when both the Windows and Linux builds of Node\n' + + ' # are installed in the same directory\n' + + ' $exe=".exe"\n' + + '}\n' + if (shLongProg) { + pwsh = pwsh + + '$ret=0\n' + + `if (Test-Path ${pwshLongProg}) {\n` + + ' # Support pipeline input\n' + + ' if ($MyInvocation.ExpectingInput) {\n' + + ` $input | & ${pwshLongProg} ${args} ${shTarget} $args\n` + + ' } else {\n' + + ` & ${pwshLongProg} ${args} ${shTarget} $args\n` + + ' }\n' + + ' $ret=$LASTEXITCODE\n' + + '} else {\n' + + ' # Support pipeline input\n' + + ' if ($MyInvocation.ExpectingInput) {\n' + + ` $input | & ${pwshProg} ${args} ${shTarget} $args\n` + + ' } else {\n' + + ` & ${pwshProg} ${args} ${shTarget} $args\n` + + ' }\n' + + ' $ret=$LASTEXITCODE\n' + + '}\n' + + 'exit $ret\n' + } else { + pwsh = pwsh + + '# Support pipeline input\n' + + 'if ($MyInvocation.ExpectingInput) {\n' + + ` $input | & ${pwshProg} ${args} ${shTarget} $args\n` + + '} else {\n' + + ` & ${pwshProg} ${args} ${shTarget} $args\n` + + '}\n' + + 'exit $LASTEXITCODE\n' + } + + return Promise.all([ + writeFile(to + '.ps1', pwsh, 'utf8'), + writeFile(to + '.cmd', cmd, 'utf8'), +
writeFile(to, sh, 'utf8'), + ]).then(() => chmodShim(to)) +} + +const chmodShim = to => Promise.all([ + chmod(to, 0o755), + chmod(to + '.cmd', 0o755), + chmod(to + '.ps1', 0o755), +]) + +module.exports = cmdShim +cmdShim.ifExists = cmdShimIfExists diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cmd-shim/lib/to-batch-syntax.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cmd-shim/lib/to-batch-syntax.js new file mode 100644 index 0000000000000000000000000000000000000000..86a3f01405de5ea4abbcf45f8c294584f1f85b4b --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cmd-shim/lib/to-batch-syntax.js @@ -0,0 +1,49 @@ +exports.replaceDollarWithPercentPair = replaceDollarWithPercentPair +exports.convertToSetCommand = convertToSetCommand +exports.convertToSetCommands = convertToSetCommands + +function convertToSetCommand (key, value) { + var line = '' + key = key || '' + key = key.trim() + value = value || '' + value = value.trim() + if (key && value && value.length > 0) { + line = '@SET ' + key + '=' + replaceDollarWithPercentPair(value) + '\r\n' + } + return line +} + +function extractVariableValuePairs (declarations) { + var pairs = {} + declarations.map(function (declaration) { + var split = declaration.split('=') + pairs[split[0]] = split[1] + }) + return pairs +} + +function convertToSetCommands (variableString) { + var variableValuePairs = extractVariableValuePairs(variableString.split(' ')) + var variableDeclarationsAsBatch = '' + Object.keys(variableValuePairs).forEach(function (key) { + variableDeclarationsAsBatch += convertToSetCommand(key, variableValuePairs[key]) + }) + return variableDeclarationsAsBatch +} + +function replaceDollarWithPercentPair (value) { + var dollarExpressions = /\$\{?([^$@#?\- \t{}:]+)\}?/g + var result = '' + var startIndex = 0 + do { + var match = dollarExpressions.exec(value) + if (match) { + var betweenMatches = value.substring(startIndex, match.index) || '' + result += betweenMatches + '%' + match[1] + '%' + startIndex = dollarExpressions.lastIndex + } + } while (dollarExpressions.lastIndex > 0) + result += value.slice(startIndex) + return result +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/lib/enoent.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/lib/enoent.js new file mode 100644 index 0000000000000000000000000000000000000000..da33471369c23f4016fb8f9deed742f71df033d2 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/lib/enoent.js @@ -0,0 +1,59 @@ +'use strict'; + +const isWin = process.platform === 'win32'; + +function notFoundError(original, syscall) { + return Object.assign(new Error(`${syscall} ${original.command} ENOENT`), { + code: 'ENOENT', + errno: 'ENOENT', + syscall: `${syscall} ${original.command}`, + path: original.command, + spawnargs: original.args, + }); +} + +function hookChildProcess(cp, parsed) { + if (!isWin) { + return; + } + + const originalEmit = cp.emit; + + cp.emit = function (name, arg1) { + // If emitting "exit" event and exit code is 1, we need to check if + // the command exists and emit an "error" instead + // See https://github.com/IndigoUnited/node-cross-spawn/issues/16 + if (name === 'exit') { + const err = verifyENOENT(arg1, parsed); + + if (err) { + return originalEmit.call(cp, 'error', err); + } + } + + return originalEmit.apply(cp, arguments); // eslint-disable-line 
prefer-rest-params + }; +} + +function verifyENOENT(status, parsed) { + if (isWin && status === 1 && !parsed.file) { + return notFoundError(parsed.original, 'spawn'); + } + + return null; +} + +function verifyENOENTSync(status, parsed) { + if (isWin && status === 1 && !parsed.file) { + return notFoundError(parsed.original, 'spawnSync'); + } + + return null; +} + +module.exports = { + hookChildProcess, + verifyENOENT, + verifyENOENTSync, + notFoundError, +}; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/lib/parse.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/lib/parse.js new file mode 100644 index 0000000000000000000000000000000000000000..0129d74774a8a08d8c5e92c2d602114e590d8db8 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/lib/parse.js @@ -0,0 +1,91 @@ +'use strict'; + +const path = require('path'); +const resolveCommand = require('./util/resolveCommand'); +const escape = require('./util/escape'); +const readShebang = require('./util/readShebang'); + +const isWin = process.platform === 'win32'; +const isExecutableRegExp = /\.(?:com|exe)$/i; +const isCmdShimRegExp = /node_modules[\\/].bin[\\/][^\\/]+\.cmd$/i; + +function detectShebang(parsed) { + parsed.file = resolveCommand(parsed); + + const shebang = parsed.file && readShebang(parsed.file); + + if (shebang) { + parsed.args.unshift(parsed.file); + parsed.command = shebang; + + return resolveCommand(parsed); + } + + return parsed.file; +} + +function parseNonShell(parsed) { + if (!isWin) { + return parsed; + } + + // Detect & add support for shebangs + const commandFile = detectShebang(parsed); + + // We don't need a shell if the command filename is an executable + const needsShell = !isExecutableRegExp.test(commandFile); + + // If a shell is required, use cmd.exe and take care of escaping everything correctly + // Note that `forceShell` is a hidden option used only in tests + if (parsed.options.forceShell || needsShell) { + // Need to double escape meta chars if the command is a cmd-shim located in `node_modules/.bin/` + // The cmd-shim simply executes the package bin file with Node.js, proxying any arguments + // Because the escape of metachars with ^ gets interpreted when the cmd.exe is first called, + // we need to double escape them + const needsDoubleEscapeMetaChars = isCmdShimRegExp.test(commandFile); + + // Normalize posix paths into OS compatible paths (e.g.: foo/bar -> foo\bar) + // This is necessary otherwise it will always fail with ENOENT in those cases + parsed.command = path.normalize(parsed.command); + + // Escape command & arguments + parsed.command = escape.command(parsed.command); + parsed.args = parsed.args.map((arg) => escape.argument(arg, needsDoubleEscapeMetaChars)); + + const shellCommand = [parsed.command].concat(parsed.args).join(' '); + + parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`]; + parsed.command = process.env.comspec || 'cmd.exe'; + parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped + } + + return parsed; +} + +function parse(command, args, options) { + // Normalize arguments, similar to nodejs + if (args && !Array.isArray(args)) { + options = args; + args = null; + } + + args = args ?
args.slice(0) : []; // Clone array to avoid changing the original + options = Object.assign({}, options); // Clone object to avoid changing the original + + // Build our parsed object + const parsed = { + command, + args, + options, + file: undefined, + original: { + command, + args, + }, + }; + + // Delegate further parsing to shell or non-shell + return options.shell ? parsed : parseNonShell(parsed); +} + +module.exports = parse; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/lib/util/escape.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/lib/util/escape.js new file mode 100644 index 0000000000000000000000000000000000000000..7bf2905cd035ad517486b0b0549f660551a2e199 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/lib/util/escape.js @@ -0,0 +1,47 @@ +'use strict'; + +// See http://www.robvanderwoude.com/escapechars.php +const metaCharsRegExp = /([()\][%!^"`<>&|;, *?])/g; + +function escapeCommand(arg) { + // Escape meta chars + arg = arg.replace(metaCharsRegExp, '^$1'); + + return arg; +} + +function escapeArgument(arg, doubleEscapeMetaChars) { + // Convert to string + arg = `${arg}`; + + // Algorithm below is based on https://qntm.org/cmd + // It's slightly altered to disable JS backtracking to avoid hanging on specially crafted input + // Please see https://github.com/moxystudio/node-cross-spawn/pull/160 for more information + + // Sequence of backslashes followed by a double quote: + // double up all the backslashes and escape the double quote + arg = arg.replace(/(?=(\\+?)?)\1"/g, '$1$1\\"'); + + // Sequence of backslashes followed by the end of the string + // (which will become a double quote later): + // double up all the backslashes + arg = arg.replace(/(?=(\\+?)?)\1$/, '$1$1'); + + // All other backslashes occur literally + + // Quote the whole thing: + arg = `"${arg}"`; + + // Escape meta chars + arg = arg.replace(metaCharsRegExp, '^$1'); + + // Double escape meta chars if necessary + if (doubleEscapeMetaChars) { + arg = arg.replace(metaCharsRegExp, '^$1'); + } + + return arg; +} + +module.exports.command = escapeCommand; +module.exports.argument = escapeArgument; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/lib/util/readShebang.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/lib/util/readShebang.js new file mode 100644 index 0000000000000000000000000000000000000000..5e83733fef260f23f865d9cafa42b1192b91c9f8 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/lib/util/readShebang.js @@ -0,0 +1,23 @@ +'use strict'; + +const fs = require('fs'); +const shebangCommand = require('shebang-command'); + +function readShebang(command) { + // Read the first 150 bytes from the file + const size = 150; + const buffer = Buffer.alloc(size); + + let fd; + + try { + fd = fs.openSync(command, 'r'); + fs.readSync(fd, buffer, 0, size, 0); + fs.closeSync(fd); + } catch (e) { /* Empty */ } + + // Attempt to extract shebang (null is returned if not a shebang) + return shebangCommand(buffer.toString()); +} + +module.exports = readShebang; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/lib/util/resolveCommand.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/lib/util/resolveCommand.js new file mode 100644 index 
0000000000000000000000000000000000000000..7972455008e917b651c1ca9a9a232ad61b0dc833 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/lib/util/resolveCommand.js @@ -0,0 +1,52 @@ +'use strict'; + +const path = require('path'); +const which = require('which'); +const getPathKey = require('path-key'); + +function resolveCommandAttempt(parsed, withoutPathExt) { + const env = parsed.options.env || process.env; + const cwd = process.cwd(); + const hasCustomCwd = parsed.options.cwd != null; + // Worker threads do not have process.chdir() + const shouldSwitchCwd = hasCustomCwd && process.chdir !== undefined && !process.chdir.disabled; + + // If a custom `cwd` was specified, we need to change the process cwd + // because `which` will do stat calls but does not support a custom cwd + if (shouldSwitchCwd) { + try { + process.chdir(parsed.options.cwd); + } catch (err) { + /* Empty */ + } + } + + let resolved; + + try { + resolved = which.sync(parsed.command, { + path: env[getPathKey({ env })], + pathExt: withoutPathExt ? path.delimiter : undefined, + }); + } catch (e) { + /* Empty */ + } finally { + if (shouldSwitchCwd) { + process.chdir(cwd); + } + } + + // If we successfully resolved, ensure that an absolute path is returned + // Note that when a custom `cwd` was used, we need to resolve to an absolute path based on it + if (resolved) { + resolved = path.resolve(hasCustomCwd ? parsed.options.cwd : '', resolved); + } + + return resolved; +} + +function resolveCommand(parsed) { + return resolveCommandAttempt(parsed) || resolveCommandAttempt(parsed, true); +} + +module.exports = resolveCommand; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/node_modules/isexe/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/node_modules/isexe/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..19129e315fe593965a2fdd50ec0d1253bcbd2ece --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/node_modules/isexe/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
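Taken together, the cross-spawn internals above (parse.js, escape.js, readShebang.js, resolveCommand.js) resolve the command on the PATH, honor its shebang, and escape everything for cmd.exe before handing off to child_process. A minimal sketch of the package's public entry point, which mirrors the child_process.spawn signature (the command and arguments here are only examples):

```javascript
const spawn = require('cross-spawn');

// On Windows, the parse step above rewrites this into an escaped
// `cmd.exe /d /s /c "..."` invocation; on POSIX it passes straight through.
const child = spawn('npm', ['ls', '--depth', '0'], { stdio: 'inherit' });

child.on('close', (code) => {
  console.log(`npm exited with code ${code}`);
});
```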
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/node_modules/isexe/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/node_modules/isexe/index.js new file mode 100644 index 0000000000000000000000000000000000000000..553fb32b119bdf2efaeffeefe1bef4bfd10f3e43 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/node_modules/isexe/index.js @@ -0,0 +1,57 @@ +var fs = require('fs') +var core +if (process.platform === 'win32' || global.TESTING_WINDOWS) { + core = require('./windows.js') +} else { + core = require('./mode.js') +} + +module.exports = isexe +isexe.sync = sync + +function isexe (path, options, cb) { + if (typeof options === 'function') { + cb = options + options = {} + } + + if (!cb) { + if (typeof Promise !== 'function') { + throw new TypeError('callback not provided') + } + + return new Promise(function (resolve, reject) { + isexe(path, options || {}, function (er, is) { + if (er) { + reject(er) + } else { + resolve(is) + } + }) + }) + } + + core(path, options || {}, function (er, is) { + // ignore EACCES because that just means we aren't allowed to run it + if (er) { + if (er.code === 'EACCES' || options && options.ignoreErrors) { + er = null + is = false + } + } + cb(er, is) + }) +} + +function sync (path, options) { + // my kingdom for a filtered catch + try { + return core.sync(path, options || {}) + } catch (er) { + if (options && options.ignoreErrors || er.code === 'EACCES') { + return false + } else { + throw er + } + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/node_modules/isexe/mode.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/node_modules/isexe/mode.js new file mode 100644 index 0000000000000000000000000000000000000000..1995ea4a06aec4c1506509b11f8e9ae941b86439 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/node_modules/isexe/mode.js @@ -0,0 +1,41 @@ +module.exports = isexe +isexe.sync = sync + +var fs = require('fs') + +function isexe (path, options, cb) { + fs.stat(path, function (er, stat) { + cb(er, er ? false : checkStat(stat, options)) + }) +} + +function sync (path, options) { + return checkStat(fs.statSync(path), options) +} + +function checkStat (stat, options) { + return stat.isFile() && checkMode(stat, options) +} + +function checkMode (stat, options) { + var mod = stat.mode + var uid = stat.uid + var gid = stat.gid + + var myUid = options.uid !== undefined ? + options.uid : process.getuid && process.getuid() + var myGid = options.gid !== undefined ? 
+ options.gid : process.getgid && process.getgid() + + var u = parseInt('100', 8) + var g = parseInt('010', 8) + var o = parseInt('001', 8) + var ug = u | g + + var ret = (mod & o) || + (mod & g) && gid === myGid || + (mod & u) && uid === myUid || + (mod & ug) && myUid === 0 + + return ret +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/node_modules/isexe/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/node_modules/isexe/package.json new file mode 100644 index 0000000000000000000000000000000000000000..e452689442f2018bfa2b9ef3885cf4ce6a6e7912 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/node_modules/isexe/package.json @@ -0,0 +1,31 @@ +{ + "name": "isexe", + "version": "2.0.0", + "description": "Minimal module to check if a file is executable.", + "main": "index.js", + "directories": { + "test": "test" + }, + "devDependencies": { + "mkdirp": "^0.5.1", + "rimraf": "^2.5.0", + "tap": "^10.3.0" + }, + "scripts": { + "test": "tap test/*.js --100", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags" + }, + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC", + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/isexe.git" + }, + "keywords": [], + "bugs": { + "url": "https://github.com/isaacs/isexe/issues" + }, + "homepage": "https://github.com/isaacs/isexe#readme" +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/node_modules/isexe/windows.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/node_modules/isexe/windows.js new file mode 100644 index 0000000000000000000000000000000000000000..34996734d8ef3921c9590def676d79c58c06b61c --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/node_modules/isexe/windows.js @@ -0,0 +1,42 @@ +module.exports = isexe +isexe.sync = sync + +var fs = require('fs') + +function checkPathExt (path, options) { + var pathext = options.pathExt !== undefined ? + options.pathExt : process.env.PATHEXT + + if (!pathext) { + return true + } + + pathext = pathext.split(';') + if (pathext.indexOf('') !== -1) { + return true + } + for (var i = 0; i < pathext.length; i++) { + var p = pathext[i].toLowerCase() + if (p && path.substr(-p.length).toLowerCase() === p) { + return true + } + } + return false +} + +function checkStat (stat, path, options) { + if (!stat.isSymbolicLink() && !stat.isFile()) { + return false + } + return checkPathExt(path, options) +} + +function isexe (path, options, cb) { + fs.stat(path, function (er, stat) { + cb(er, er ? false : checkStat(stat, path, options)) + }) +} + +function sync (path, options) { + return checkStat(fs.statSync(path), path, options) +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/node_modules/which/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/node_modules/which/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..19129e315fe593965a2fdd50ec0d1253bcbd2ece --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/node_modules/which/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. 
Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/node_modules/which/README.md b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/node_modules/which/README.md new file mode 100644 index 0000000000000000000000000000000000000000..cd833509f3bcc971ef2e4ddcdfdd9a26d5accfb8 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/node_modules/which/README.md @@ -0,0 +1,54 @@ +# which + +Like the unix `which` utility. + +Finds the first instance of a specified executable in the PATH +environment variable. Does not cache the results, so `hash -r` is not +needed when the PATH changes. + +## USAGE + +```javascript +var which = require('which') + +// async usage +which('node', function (er, resolvedPath) { + // er is returned if no "node" is found on the PATH + // if it is found, then the absolute path to the exec is returned +}) + +// or promise +which('node').then(resolvedPath => { ... }).catch(er => { ... not found ... }) + +// sync usage +// throws if not found +var resolved = which.sync('node') + +// if nothrow option is used, returns null if not found +resolved = which.sync('node', {nothrow: true}) + +// Pass options to override the PATH and PATHEXT environment vars. +which('node', { path: someOtherPath }, function (er, resolved) { + if (er) + throw er + console.log('found at %j', resolved) +}) +``` + +## CLI USAGE + +Same as the BSD `which(1)` binary. + +``` +usage: which [-as] program ... +``` + +## OPTIONS + +You may pass an options object as the second argument. + +- `path`: Use instead of the `PATH` environment variable. +- `pathExt`: Use instead of the `PATHEXT` environment variable. +- `all`: Return all matches, instead of just the first one. Note that + this means the function returns an array of strings instead of a + single string. 
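A small sketch of the options described above (the resolved paths in the comments are illustrative):

```javascript
const which = require('which');

async function main() {
  // `all: true` returns every match on the PATH as an array of strings.
  const nodes = await which('node', { all: true });
  console.log(nodes); // e.g. ['/usr/local/bin/node']

  // `nothrow: true` yields null instead of throwing when nothing matches.
  const missing = which.sync('some-missing-tool', { nothrow: true });
  console.log(missing); // null
}

main();
```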
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/node_modules/which/bin/node-which b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/node_modules/which/bin/node-which new file mode 100644 index 0000000000000000000000000000000000000000..7cee3729eebdd09e39bd891aa4d17483ea4a757a --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/node_modules/which/bin/node-which @@ -0,0 +1,52 @@ +#!/usr/bin/env node +var which = require("../") +if (process.argv.length < 3) + usage() + +function usage () { + console.error('usage: which [-as] program ...') + process.exit(1) +} + +var all = false +var silent = false +var dashdash = false +var args = process.argv.slice(2).filter(function (arg) { + if (dashdash || !/^-/.test(arg)) + return true + + if (arg === '--') { + dashdash = true + return false + } + + var flags = arg.substr(1).split('') + for (var f = 0; f < flags.length; f++) { + var flag = flags[f] + switch (flag) { + case 's': + silent = true + break + case 'a': + all = true + break + default: + console.error('which: illegal option -- ' + flag) + usage() + } + } + return false +}) + +process.exit(args.reduce(function (pv, current) { + try { + var f = which.sync(current, { all: all }) + if (all) + f = f.join('\n') + if (!silent) + console.log(f) + return pv; + } catch (e) { + return 1; + } +}, 0)) diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/node_modules/which/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/node_modules/which/package.json new file mode 100644 index 0000000000000000000000000000000000000000..97ad7fbabc52b54b1d9c4c50236c3a81d78218a7 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/node_modules/which/package.json @@ -0,0 +1,43 @@ +{ + "author": "Isaac Z. Schlueter (http://blog.izs.me)", + "name": "which", + "description": "Like which(1) unix command. 
Find the first instance of an executable in the PATH.", + "version": "2.0.2", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/node-which.git" + }, + "main": "which.js", + "bin": { + "node-which": "./bin/node-which" + }, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "devDependencies": { + "mkdirp": "^0.5.0", + "rimraf": "^2.6.2", + "tap": "^14.6.9" + }, + "scripts": { + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublish": "npm run changelog", + "prechangelog": "bash gen-changelog.sh", + "changelog": "git add CHANGELOG.md", + "postchangelog": "git commit -m 'update changelog - '${npm_package_version}", + "postpublish": "git push origin --follow-tags" + }, + "files": [ + "which.js", + "bin/node-which" + ], + "tap": { + "check-coverage": true + }, + "engines": { + "node": ">= 8" + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/node_modules/which/which.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/node_modules/which/which.js new file mode 100644 index 0000000000000000000000000000000000000000..82afffd21437498a02aaaa1c03be7ea9178b2c48 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cross-spawn/node_modules/which/which.js @@ -0,0 +1,125 @@ +const isWindows = process.platform === 'win32' || + process.env.OSTYPE === 'cygwin' || + process.env.OSTYPE === 'msys' + +const path = require('path') +const COLON = isWindows ? ';' : ':' +const isexe = require('isexe') + +const getNotFoundError = (cmd) => + Object.assign(new Error(`not found: ${cmd}`), { code: 'ENOENT' }) + +const getPathInfo = (cmd, opt) => { + const colon = opt.colon || COLON + + // If it has a slash, then we don't bother searching the pathenv. + // just check the file itself, and that's it. + const pathEnv = cmd.match(/\//) || isWindows && cmd.match(/\\/) ? [''] + : ( + [ + // windows always checks the cwd first + ...(isWindows ? [process.cwd()] : []), + ...(opt.path || process.env.PATH || + /* istanbul ignore next: very unusual */ '').split(colon), + ] + ) + const pathExtExe = isWindows + ? opt.pathExt || process.env.PATHEXT || '.EXE;.CMD;.BAT;.COM' + : '' + const pathExt = isWindows ? pathExtExe.split(colon) : [''] + + if (isWindows) { + if (cmd.indexOf('.') !== -1 && pathExt[0] !== '') + pathExt.unshift('') + } + + return { + pathEnv, + pathExt, + pathExtExe, + } +} + +const which = (cmd, opt, cb) => { + if (typeof opt === 'function') { + cb = opt + opt = {} + } + if (!opt) + opt = {} + + const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt) + const found = [] + + const step = i => new Promise((resolve, reject) => { + if (i === pathEnv.length) + return opt.all && found.length ? resolve(found) + : reject(getNotFoundError(cmd)) + + const ppRaw = pathEnv[i] + const pathPart = /^".*"$/.test(ppRaw) ? ppRaw.slice(1, -1) : ppRaw + + const pCmd = path.join(pathPart, cmd) + const p = !pathPart && /^\.[\\\/]/.test(cmd) ? cmd.slice(0, 2) + pCmd + : pCmd + + resolve(subStep(p, i, 0)) + }) + + const subStep = (p, i, ii) => new Promise((resolve, reject) => { + if (ii === pathExt.length) + return resolve(step(i + 1)) + const ext = pathExt[ii] + isexe(p + ext, { pathExt: pathExtExe }, (er, is) => { + if (!er && is) { + if (opt.all) + found.push(p + ext) + else + return resolve(p + ext) + } + return resolve(subStep(p, i, ii + 1)) + }) + }) + + return cb ? 
step(0).then(res => cb(null, res), cb) : step(0) +} + +const whichSync = (cmd, opt) => { + opt = opt || {} + + const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt) + const found = [] + + for (let i = 0; i < pathEnv.length; i ++) { + const ppRaw = pathEnv[i] + const pathPart = /^".*"$/.test(ppRaw) ? ppRaw.slice(1, -1) : ppRaw + + const pCmd = path.join(pathPart, cmd) + const p = !pathPart && /^\.[\\\/]/.test(cmd) ? cmd.slice(0, 2) + pCmd + : pCmd + + for (let j = 0; j < pathExt.length; j ++) { + const cur = p + pathExt[j] + try { + const is = isexe.sync(cur, { pathExt: pathExtExe }) + if (is) { + if (opt.all) + found.push(cur) + else + return cur + } + } catch (ex) {} + } + } + + if (opt.all && found.length) + return found + + if (opt.nothrow) + return null + + throw getNotFoundError(cmd) +} + +module.exports = which +which.sync = whichSync diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cssesc/bin/cssesc b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cssesc/bin/cssesc new file mode 100644 index 0000000000000000000000000000000000000000..188c034ffe948300ab1a75496c2ea9e83c5a50bb --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cssesc/bin/cssesc @@ -0,0 +1,116 @@ +#!/usr/bin/env node +const fs = require('fs'); +const cssesc = require('../cssesc.js'); +const strings = process.argv.splice(2); +const stdin = process.stdin; +const options = {}; +const log = console.log; + +const main = function() { + const option = strings[0]; + + if (/^(?:-h|--help|undefined)$/.test(option)) { + log( + 'cssesc v%s - https://mths.be/cssesc', + cssesc.version + ); + log([ + '\nUsage:\n', + '\tcssesc [string]', + '\tcssesc [-i | --identifier] [string]', + '\tcssesc [-s | --single-quotes] [string]', + '\tcssesc [-d | --double-quotes] [string]', + '\tcssesc [-w | --wrap] [string]', + '\tcssesc [-e | --escape-everything] [string]', + '\tcssesc [-v | --version]', + '\tcssesc [-h | --help]', + '\nExamples:\n', + '\tcssesc \'f\xF6o \u2665 b\xE5r \uD834\uDF06 baz\'', + '\tcssesc --identifier \'f\xF6o \u2665 b\xE5r \uD834\uDF06 baz\'', + '\tcssesc --escape-everything \'f\xF6o \u2665 b\xE5r \uD834\uDF06 baz\'', + '\tcssesc --double-quotes --wrap \'f\xF6o \u2665 b\xE5r \uD834\uDF06 baz\'', + '\techo \'f\xF6o \u2665 b\xE5r \uD834\uDF06 baz\' | cssesc' + ].join('\n')); + return process.exit(1); + } + + if (/^(?:-v|--version)$/.test(option)) { + log('v%s', cssesc.version); + return process.exit(1); + } + + strings.forEach(function(string) { + // Process options + if (/^(?:-i|--identifier)$/.test(string)) { + options.isIdentifier = true; + return; + } + if (/^(?:-s|--single-quotes)$/.test(string)) { + options.quotes = 'single'; + return; + } + if (/^(?:-d|--double-quotes)$/.test(string)) { + options.quotes = 'double'; + return; + } + if (/^(?:-w|--wrap)$/.test(string)) { + options.wrap = true; + return; + } + if (/^(?:-e|--escape-everything)$/.test(string)) { + options.escapeEverything = true; + return; + } + + // Process string(s) + let result; + try { + result = cssesc(string, options); + log(result); + } catch (exception) { + log(exception.message + '\n'); + log('Error: failed to escape.'); + log('If you think this is a bug in cssesc, please report it:'); + log('https://github.com/mathiasbynens/cssesc/issues/new'); + log( + '\nStack trace using cssesc@%s:\n', + cssesc.version + ); + log(exception.stack); + return process.exit(1); + } + }); + // Return with exit status 0 outside of the `forEach` loop, in case + // 
multiple strings were passed in. + return process.exit(0); + +}; + +if (stdin.isTTY) { + // handle shell arguments + main(); +} else { + let timeout; + // Either the script is called from within a non-TTY context, or `stdin` + // content is being piped in. + if (!process.stdout.isTTY) { + // The script was called from a non-TTY context. This is a rather uncommon + // use case we don’t actively support. However, we don’t want the script + // to wait forever in such cases, so… + timeout = setTimeout(function() { + // …if no piped data arrived after a whole minute, handle shell + // arguments instead. + main(); + }, 60000); + } + let data = ''; + stdin.on('data', function(chunk) { + clearTimeout(timeout); + data += chunk; + }); + stdin.on('end', function() { + strings.push(data.trim()); + main(); + }); + stdin.resume(); } diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cssesc/man/cssesc.1 b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cssesc/man/cssesc.1 new file mode 100644 index 0000000000000000000000000000000000000000..eee4996daf5436bca6676d404a8ae9c5969bd355 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cssesc/man/cssesc.1 @@ -0,0 +1,70 @@ +.Dd August 9, 2013 +.Dt cssesc 1 +.Sh NAME +.Nm cssesc +.Nd escape text for use in CSS string literals or identifiers +.Sh SYNOPSIS +.Nm +.Op Fl i | -identifier Ar string +.br +.Op Fl s | -single-quotes Ar string +.br +.Op Fl d | -double-quotes Ar string +.br +.Op Fl w | -wrap Ar string +.br +.Op Fl e | -escape-everything Ar string +.br +.Op Fl v | -version +.br +.Op Fl h | -help +.Sh DESCRIPTION +.Nm +escapes strings for use in CSS string literals or identifiers while generating the shortest possible valid ASCII-only output. +.Sh OPTIONS +.Bl -ohang -offset +.It Sy "-s, --single-quotes" +Escape any occurrences of ' in the input string as \\', so that the output can be used in a CSS string literal wrapped in single quotes. +.It Sy "-d, --double-quotes" +Escape any occurrences of " in the input string as \\", so that the output can be used in a CSS string literal wrapped in double quotes. +.It Sy "-w, --wrap" +Make sure the output is a valid CSS string literal wrapped in quotes. The type of quotes can be specified using the +.Ar -s | --single-quotes +or +.Ar -d | --double-quotes +settings. +.It Sy "-e, --escape-everything" +Escape all the symbols in the output, even printable ASCII symbols. +.It Sy "-v, --version" +Print cssesc's version. +.It Sy "-h, --help" +Show the help screen. +.El +.Sh EXIT STATUS +The +.Nm cssesc +utility exits with one of the following values: +.Pp +.Bl -tag -width flag -compact +.It Li 0 +.Nm +successfully escaped the given text and printed the result. +.It Li 1 +.Nm +wasn't instructed to escape anything (for example, the +.Ar --help +flag was set); or, an error occurred. +.El +.Sh EXAMPLES +.Bl -ohang -offset +.It Sy "cssesc 'foo bar baz'" +Print an escaped version of the given text. +.It Sy echo\ 'foo bar baz'\ |\ cssesc +Print an escaped version of the text that gets piped in. +.El +.Sh BUGS +cssesc's bug tracker is located at <https://github.com/mathiasbynens/cssesc/issues>.
+.Sh AUTHOR +Mathias Bynens <https://mathiasbynens.be/> +.Sh WWW +<https://mths.be/cssesc> diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/debug/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/debug/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..1a9820e262b26b60fe71a4dcd9bc9cfd0a01f26e --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/debug/LICENSE @@ -0,0 +1,20 @@ +(The MIT License) + +Copyright (c) 2014-2017 TJ Holowaychuk +Copyright (c) 2018-2021 Josh Junon + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software +and associated documentation files (the 'Software'), to deal in the Software without restriction, +including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial +portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/debug/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/debug/package.json new file mode 100644 index 0000000000000000000000000000000000000000..ee8abb523dbe0ad51949ad8f864207164f38b051 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/debug/package.json @@ -0,0 +1,64 @@ +{ + "name": "debug", + "version": "4.4.3", + "repository": { + "type": "git", + "url": "git://github.com/debug-js/debug.git" + }, + "description": "Lightweight debugging utility for Node.js and the browser", + "keywords": [ + "debug", + "log", + "debugger" + ], + "files": [ + "src", + "LICENSE", + "README.md" + ], + "author": "Josh Junon (https://github.com/qix-)", + "contributors": [ + "TJ Holowaychuk ", + "Nathan Rajlich (http://n8.io)", + "Andrew Rhyne " + ], + "license": "MIT", + "scripts": { + "lint": "xo", + "test": "npm run test:node && npm run test:browser && npm run lint", + "test:node": "mocha test.js test.node.js", + "test:browser": "karma start --single-run", + "test:coverage": "cat ./coverage/lcov.info | coveralls" + }, + "dependencies": { + "ms": "^2.1.3" + }, + "devDependencies": { + "brfs": "^2.0.1", + "browserify": "^16.2.3", + "coveralls": "^3.0.2", + "karma": "^3.1.4", + "karma-browserify": "^6.0.0", + "karma-chrome-launcher": "^2.2.0", + "karma-mocha": "^1.3.0", + "mocha": "^5.2.0", + "mocha-lcov-reporter": "^1.2.0", + "sinon": "^14.0.0", + "xo": "^0.23.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + }, + "main": "./src/index.js", + "browser": "./src/browser.js", + "engines": { + "node": ">=6.0" + }, + "xo": { + "rules": { + "import/extensions": "off" + } + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/dist/diff.js
b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/dist/diff.js new file mode 100644 index 0000000000000000000000000000000000000000..0d00e82e8ab2a82ea59da6b5fc206b3ec2decc14 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/dist/diff.js @@ -0,0 +1,1674 @@ +(function (global, factory) { + typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) : + typeof define === 'function' && define.amd ? define(['exports'], factory) : + (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.Diff = {})); +})(this, (function (exports) { 'use strict'; + + class Diff { + diff(oldStr, newStr, + // Type below is not accurate/complete - see above for full possibilities - but it compiles + options = {}) { + let callback; + if (typeof options === 'function') { + callback = options; + options = {}; + } + else if ('callback' in options) { + callback = options.callback; + } + // Allow subclasses to massage the input prior to running + const oldString = this.castInput(oldStr, options); + const newString = this.castInput(newStr, options); + const oldTokens = this.removeEmpty(this.tokenize(oldString, options)); + const newTokens = this.removeEmpty(this.tokenize(newString, options)); + return this.diffWithOptionsObj(oldTokens, newTokens, options, callback); + } + diffWithOptionsObj(oldTokens, newTokens, options, callback) { + var _a; + const done = (value) => { + value = this.postProcess(value, options); + if (callback) { + setTimeout(function () { callback(value); }, 0); + return undefined; + } + else { + return value; + } + }; + const newLen = newTokens.length, oldLen = oldTokens.length; + let editLength = 1; + let maxEditLength = newLen + oldLen; + if (options.maxEditLength != null) { + maxEditLength = Math.min(maxEditLength, options.maxEditLength); + } + const maxExecutionTime = (_a = options.timeout) !== null && _a !== void 0 ? _a : Infinity; + const abortAfterTimestamp = Date.now() + maxExecutionTime; + const bestPath = [{ oldPos: -1, lastComponent: undefined }]; + // Seed editLength = 0, i.e. the content starts with the same values + let newPos = this.extractCommon(bestPath[0], newTokens, oldTokens, 0, options); + if (bestPath[0].oldPos + 1 >= oldLen && newPos + 1 >= newLen) { + // Identity per the equality and tokenizer + return done(this.buildValues(bestPath[0].lastComponent, newTokens, oldTokens)); + } + // Once we hit the right edge of the edit graph on some diagonal k, we can + // definitely reach the end of the edit graph in no more than k edits, so + // there's no point in considering any moves to diagonal k+1 any more (from + // which we're guaranteed to need at least k+1 more edits). + // Similarly, once we've reached the bottom of the edit graph, there's no + // point considering moves to lower diagonals. + // We record this fact by setting minDiagonalToConsider and + // maxDiagonalToConsider to some finite value once we've hit the edge of + // the edit graph. + // This optimization is not faithful to the original algorithm presented in + // Myers's paper, which instead pointlessly extends D-paths off the end of + // the edit graph - see page 7 of Myers's paper which notes this point + // explicitly and illustrates it with a diagram. This has major performance + // implications for some common scenarios. 
For instance, to compute a diff + // where the new text simply appends d characters on the end of the + // original text of length n, the true Myers algorithm will take O(n+d^2) + // time while this optimization needs only O(n+d) time. + let minDiagonalToConsider = -Infinity, maxDiagonalToConsider = Infinity; + // Main worker method. checks all permutations of a given edit length for acceptance. + const execEditLength = () => { + for (let diagonalPath = Math.max(minDiagonalToConsider, -editLength); diagonalPath <= Math.min(maxDiagonalToConsider, editLength); diagonalPath += 2) { + let basePath; + const removePath = bestPath[diagonalPath - 1], addPath = bestPath[diagonalPath + 1]; + if (removePath) { + // No one else is going to attempt to use this value, clear it + // @ts-expect-error - perf optimisation. This type-violating value will never be read. + bestPath[diagonalPath - 1] = undefined; + } + let canAdd = false; + if (addPath) { + // what newPos will be after we do an insertion: + const addPathNewPos = addPath.oldPos - diagonalPath; + canAdd = addPath && 0 <= addPathNewPos && addPathNewPos < newLen; + } + const canRemove = removePath && removePath.oldPos + 1 < oldLen; + if (!canAdd && !canRemove) { + // If this path is a terminal then prune + // @ts-expect-error - perf optimisation. This type-violating value will never be read. + bestPath[diagonalPath] = undefined; + continue; + } + // Select the diagonal that we want to branch from. We select the prior + // path whose position in the old string is the farthest from the origin + // and does not pass the bounds of the diff graph + if (!canRemove || (canAdd && removePath.oldPos < addPath.oldPos)) { + basePath = this.addToPath(addPath, true, false, 0, options); + } + else { + basePath = this.addToPath(removePath, false, true, 1, options); + } + newPos = this.extractCommon(basePath, newTokens, oldTokens, diagonalPath, options); + if (basePath.oldPos + 1 >= oldLen && newPos + 1 >= newLen) { + // If we have hit the end of both strings, then we are done + return done(this.buildValues(basePath.lastComponent, newTokens, oldTokens)) || true; + } + else { + bestPath[diagonalPath] = basePath; + if (basePath.oldPos + 1 >= oldLen) { + maxDiagonalToConsider = Math.min(maxDiagonalToConsider, diagonalPath - 1); + } + if (newPos + 1 >= newLen) { + minDiagonalToConsider = Math.max(minDiagonalToConsider, diagonalPath + 1); + } + } + } + editLength++; + }; + // Performs the length of edit iteration. Is a bit fugly as this has to support the + // sync and async mode which is never fun. Loops over execEditLength until a value + // is produced, or until the edit length exceeds options.maxEditLength (if given), + // in which case it will return undefined. 
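+ // Illustrative note (not part of the upstream bundle): with + // {maxEditLength: 1}, diffChars('abc', 'abd') returns undefined, because + // turning 'abc' into 'abd' takes two edits (one removal plus one insertion).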
+ if (callback) { + (function exec() { + setTimeout(function () { + if (editLength > maxEditLength || Date.now() > abortAfterTimestamp) { + return callback(undefined); + } + if (!execEditLength()) { + exec(); + } + }, 0); + }()); + } + else { + while (editLength <= maxEditLength && Date.now() <= abortAfterTimestamp) { + const ret = execEditLength(); + if (ret) { + return ret; + } + } + } + } + addToPath(path, added, removed, oldPosInc, options) { + const last = path.lastComponent; + if (last && !options.oneChangePerToken && last.added === added && last.removed === removed) { + return { + oldPos: path.oldPos + oldPosInc, + lastComponent: { count: last.count + 1, added: added, removed: removed, previousComponent: last.previousComponent } + }; + } + else { + return { + oldPos: path.oldPos + oldPosInc, + lastComponent: { count: 1, added: added, removed: removed, previousComponent: last } + }; + } + } + extractCommon(basePath, newTokens, oldTokens, diagonalPath, options) { + const newLen = newTokens.length, oldLen = oldTokens.length; + let oldPos = basePath.oldPos, newPos = oldPos - diagonalPath, commonCount = 0; + while (newPos + 1 < newLen && oldPos + 1 < oldLen && this.equals(oldTokens[oldPos + 1], newTokens[newPos + 1], options)) { + newPos++; + oldPos++; + commonCount++; + if (options.oneChangePerToken) { + basePath.lastComponent = { count: 1, previousComponent: basePath.lastComponent, added: false, removed: false }; + } + } + if (commonCount && !options.oneChangePerToken) { + basePath.lastComponent = { count: commonCount, previousComponent: basePath.lastComponent, added: false, removed: false }; + } + basePath.oldPos = oldPos; + return newPos; + } + equals(left, right, options) { + if (options.comparator) { + return options.comparator(left, right); + } + else { + return left === right + || (!!options.ignoreCase && left.toLowerCase() === right.toLowerCase()); + } + } + removeEmpty(array) { + const ret = []; + for (let i = 0; i < array.length; i++) { + if (array[i]) { + ret.push(array[i]); + } + } + return ret; + } + // eslint-disable-next-line @typescript-eslint/no-unused-vars + castInput(value, options) { + return value; + } + // eslint-disable-next-line @typescript-eslint/no-unused-vars + tokenize(value, options) { + return Array.from(value); + } + join(chars) { + // Assumes ValueT is string, which is the case for most subclasses. + // When it's false, e.g. in diffArrays, this method needs to be overridden (e.g. with a no-op) + // Yes, the casts are verbose and ugly, because this pattern - of having the base class SORT OF + // assume tokens and values are strings, but not completely - is weird and janky. 
+ return chars.join(''); + } + postProcess(changeObjects, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + options) { + return changeObjects; + } + get useLongestToken() { + return false; + } + buildValues(lastComponent, newTokens, oldTokens) { + // First we convert our linked list of components in reverse order to an + // array in the right order: + const components = []; + let nextComponent; + while (lastComponent) { + components.push(lastComponent); + nextComponent = lastComponent.previousComponent; + delete lastComponent.previousComponent; + lastComponent = nextComponent; + } + components.reverse(); + const componentLen = components.length; + let componentPos = 0, newPos = 0, oldPos = 0; + for (; componentPos < componentLen; componentPos++) { + const component = components[componentPos]; + if (!component.removed) { + if (!component.added && this.useLongestToken) { + let value = newTokens.slice(newPos, newPos + component.count); + value = value.map(function (value, i) { + const oldValue = oldTokens[oldPos + i]; + return oldValue.length > value.length ? oldValue : value; + }); + component.value = this.join(value); + } + else { + component.value = this.join(newTokens.slice(newPos, newPos + component.count)); + } + newPos += component.count; + // Common case + if (!component.added) { + oldPos += component.count; + } + } + else { + component.value = this.join(oldTokens.slice(oldPos, oldPos + component.count)); + oldPos += component.count; + } + } + return components; + } + } + + class CharacterDiff extends Diff { + } + const characterDiff = new CharacterDiff(); + function diffChars(oldStr, newStr, options) { + return characterDiff.diff(oldStr, newStr, options); + } + + function longestCommonPrefix(str1, str2) { + let i; + for (i = 0; i < str1.length && i < str2.length; i++) { + if (str1[i] != str2[i]) { + return str1.slice(0, i); + } + } + return str1.slice(0, i); + } + function longestCommonSuffix(str1, str2) { + let i; + // Unlike longestCommonPrefix, we need a special case to handle all scenarios + // where we return the empty string since str1.slice(-0) will return the + // entire string. 
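+ // Illustration (not part of the upstream bundle): longestCommonSuffix('abc', 'xbc') + // hits the mismatch at i === 2 and returns str1.slice(-2) === 'bc'; the guard + // below exists because two strings with no shared suffix would otherwise reach + // str1.slice(-0), which returns all of str1.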
+ if (!str1 || !str2 || str1[str1.length - 1] != str2[str2.length - 1]) { + return ''; + } + for (i = 0; i < str1.length && i < str2.length; i++) { + if (str1[str1.length - (i + 1)] != str2[str2.length - (i + 1)]) { + return str1.slice(-i); + } + } + return str1.slice(-i); + } + function replacePrefix(string, oldPrefix, newPrefix) { + if (string.slice(0, oldPrefix.length) != oldPrefix) { + throw Error(`string ${JSON.stringify(string)} doesn't start with prefix ${JSON.stringify(oldPrefix)}; this is a bug`); + } + return newPrefix + string.slice(oldPrefix.length); + } + function replaceSuffix(string, oldSuffix, newSuffix) { + if (!oldSuffix) { + return string + newSuffix; + } + if (string.slice(-oldSuffix.length) != oldSuffix) { + throw Error(`string ${JSON.stringify(string)} doesn't end with suffix ${JSON.stringify(oldSuffix)}; this is a bug`); + } + return string.slice(0, -oldSuffix.length) + newSuffix; + } + function removePrefix(string, oldPrefix) { + return replacePrefix(string, oldPrefix, ''); + } + function removeSuffix(string, oldSuffix) { + return replaceSuffix(string, oldSuffix, ''); + } + function maximumOverlap(string1, string2) { + return string2.slice(0, overlapCount(string1, string2)); + } + // Nicked from https://stackoverflow.com/a/60422853/1709587 + function overlapCount(a, b) { + // Deal with cases where the strings differ in length + let startA = 0; + if (a.length > b.length) { + startA = a.length - b.length; + } + let endB = b.length; + if (a.length < b.length) { + endB = a.length; + } + // Create a back-reference for each index + // that should be followed in case of a mismatch. + // We only need B to make these references: + const map = Array(endB); + let k = 0; // Index that lags behind j + map[0] = 0; + for (let j = 1; j < endB; j++) { + if (b[j] == b[k]) { + map[j] = map[k]; // skip over the same character (optional optimisation) + } + else { + map[j] = k; + } + while (k > 0 && b[j] != b[k]) { + k = map[k]; + } + if (b[j] == b[k]) { + k++; + } + } + // Phase 2: use these references while iterating over A + k = 0; + for (let i = startA; i < a.length; i++) { + while (k > 0 && a[i] != b[k]) { + k = map[k]; + } + if (a[i] == b[k]) { + k++; + } + } + return k; + } + /** + * Returns true if the string consistently uses Windows line endings. + */ + function hasOnlyWinLineEndings(string) { + return string.includes('\r\n') && !string.startsWith('\n') && !string.match(/[^\r]\n/); + } + /** + * Returns true if the string consistently uses Unix line endings. + */ + function hasOnlyUnixLineEndings(string) { + return !string.includes('\r\n') && string.includes('\n'); + } + function trailingWs(string) { + // Yes, this looks overcomplicated and dumb - why not replace the whole function with + // return string match(/\s*$/)[0] + // you ask? Because: + // 1. the trap described at https://markamery.com/blog/quadratic-time-regexes/ would mean doing + // this would cause this function to take O(n²) time in the worst case (specifically when + // there is a massive run of NON-TRAILING whitespace in `string`), and + // 2. the fix proposed in the same blog post, of using a negative lookbehind, is incompatible + // with old Safari versions that we'd like to not break if possible (see + // https://github.com/kpdecker/jsdiff/pull/550) + // It feels absurd to do this with an explicit loop instead of a regex, but I really can't see a + // better way that doesn't result in broken behaviour. 
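+        // (Illustrative worst case under the naive regex: running /\s*$/ against
+        // ' '.repeat(50000) + 'x' re-attempts the whitespace run from every start
+        // position, while the loop below walks the string once from the end.)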
+ let i; + for (i = string.length - 1; i >= 0; i--) { + if (!string[i].match(/\s/)) { + break; + } + } + return string.substring(i + 1); + } + function leadingWs(string) { + // Thankfully the annoying considerations described in trailingWs don't apply here: + const match = string.match(/^\s*/); + return match ? match[0] : ''; + } + + // Based on https://en.wikipedia.org/wiki/Latin_script_in_Unicode + // + // Ranges and exceptions: + // Latin-1 Supplement, 0080–00FF + // - U+00D7 × Multiplication sign + // - U+00F7 ÷ Division sign + // Latin Extended-A, 0100–017F + // Latin Extended-B, 0180–024F + // IPA Extensions, 0250–02AF + // Spacing Modifier Letters, 02B0–02FF + // - U+02C7 ˇ ˇ Caron + // - U+02D8 ˘ ˘ Breve + // - U+02D9 ˙ ˙ Dot Above + // - U+02DA ˚ ˚ Ring Above + // - U+02DB ˛ ˛ Ogonek + // - U+02DC ˜ ˜ Small Tilde + // - U+02DD ˝ ˝ Double Acute Accent + // Latin Extended Additional, 1E00–1EFF + const extendedWordChars = 'a-zA-Z0-9_\\u{C0}-\\u{FF}\\u{D8}-\\u{F6}\\u{F8}-\\u{2C6}\\u{2C8}-\\u{2D7}\\u{2DE}-\\u{2FF}\\u{1E00}-\\u{1EFF}'; + // Each token is one of the following: + // - A punctuation mark plus the surrounding whitespace + // - A word plus the surrounding whitespace + // - Pure whitespace (but only in the special case where this the entire text + // is just whitespace) + // + // We have to include surrounding whitespace in the tokens because the two + // alternative approaches produce horribly broken results: + // * If we just discard the whitespace, we can't fully reproduce the original + // text from the sequence of tokens and any attempt to render the diff will + // get the whitespace wrong. + // * If we have separate tokens for whitespace, then in a typical text every + // second token will be a single space character. But this often results in + // the optimal diff between two texts being a perverse one that preserves + // the spaces between words but deletes and reinserts actual common words. + // See https://github.com/kpdecker/jsdiff/issues/160#issuecomment-1866099640 + // for an example. + // + // Keeping the surrounding whitespace of course has implications for .equals + // and .join, not just .tokenize. + // This regex does NOT fully implement the tokenization rules described above. + // Instead, it gives runs of whitespace their own "token". The tokenize method + // then handles stitching whitespace tokens onto adjacent word or punctuation + // tokens. 
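+    // (For example, tokenize('foo bar baz') yields ['foo ', ' bar ', ' baz']:
+    // each word keeps the whitespace on both sides, and the join() method below
+    // strips the duplicated leading whitespace when reassembling the text.)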
+ const tokenizeIncludingWhitespace = new RegExp(`[${extendedWordChars}]+|\\s+|[^${extendedWordChars}]`, 'ug'); + class WordDiff extends Diff { + equals(left, right, options) { + if (options.ignoreCase) { + left = left.toLowerCase(); + right = right.toLowerCase(); + } + return left.trim() === right.trim(); + } + tokenize(value, options = {}) { + let parts; + if (options.intlSegmenter) { + const segmenter = options.intlSegmenter; + if (segmenter.resolvedOptions().granularity != 'word') { + throw new Error('The segmenter passed must have a granularity of "word"'); + } + parts = Array.from(segmenter.segment(value), segment => segment.segment); + } + else { + parts = value.match(tokenizeIncludingWhitespace) || []; + } + const tokens = []; + let prevPart = null; + parts.forEach(part => { + if ((/\s/).test(part)) { + if (prevPart == null) { + tokens.push(part); + } + else { + tokens.push(tokens.pop() + part); + } + } + else if (prevPart != null && (/\s/).test(prevPart)) { + if (tokens[tokens.length - 1] == prevPart) { + tokens.push(tokens.pop() + part); + } + else { + tokens.push(prevPart + part); + } + } + else { + tokens.push(part); + } + prevPart = part; + }); + return tokens; + } + join(tokens) { + // Tokens being joined here will always have appeared consecutively in the + // same text, so we can simply strip off the leading whitespace from all the + // tokens except the first (and except any whitespace-only tokens - but such + // a token will always be the first and only token anyway) and then join them + // and the whitespace around words and punctuation will end up correct. + return tokens.map((token, i) => { + if (i == 0) { + return token; + } + else { + return token.replace((/^\s+/), ''); + } + }).join(''); + } + postProcess(changes, options) { + if (!changes || options.oneChangePerToken) { + return changes; + } + let lastKeep = null; + // Change objects representing any insertion or deletion since the last + // "keep" change object. There can be at most one of each. + let insertion = null; + let deletion = null; + changes.forEach(change => { + if (change.added) { + insertion = change; + } + else if (change.removed) { + deletion = change; + } + else { + if (insertion || deletion) { // May be false at start of text + dedupeWhitespaceInChangeObjects(lastKeep, deletion, insertion, change); + } + lastKeep = change; + insertion = null; + deletion = null; + } + }); + if (insertion || deletion) { + dedupeWhitespaceInChangeObjects(lastKeep, deletion, insertion, null); + } + return changes; + } + } + const wordDiff = new WordDiff(); + function diffWords(oldStr, newStr, options) { + // This option has never been documented and never will be (it's clearer to + // just call `diffWordsWithSpace` directly if you need that behavior), but + // has existed in jsdiff for a long time, so we retain support for it here + // for the sake of backwards compatibility. + if ((options === null || options === void 0 ? void 0 : options.ignoreWhitespace) != null && !options.ignoreWhitespace) { + return diffWordsWithSpace(oldStr, newStr, options); + } + return wordDiff.diff(oldStr, newStr, options); + } + function dedupeWhitespaceInChangeObjects(startKeep, deletion, insertion, endKeep) { + // Before returning, we tidy up the leading and trailing whitespace of the + // change objects to eliminate cases where trailing whitespace in one object + // is repeated as leading whitespace in the next. + // Below are examples of the outcomes we want here to explain the code. + // I=insert, K=keep, D=delete + // 1. 
diffing 'foo bar baz' vs 'foo baz' + // Prior to cleanup, we have K:'foo ' D:' bar ' K:' baz' + // After cleanup, we want: K:'foo ' D:'bar ' K:'baz' + // + // 2. Diffing 'foo bar baz' vs 'foo qux baz' + // Prior to cleanup, we have K:'foo ' D:' bar ' I:' qux ' K:' baz' + // After cleanup, we want K:'foo ' D:'bar' I:'qux' K:' baz' + // + // 3. Diffing 'foo\nbar baz' vs 'foo baz' + // Prior to cleanup, we have K:'foo ' D:'\nbar ' K:' baz' + // After cleanup, we want K'foo' D:'\nbar' K:' baz' + // + // 4. Diffing 'foo baz' vs 'foo\nbar baz' + // Prior to cleanup, we have K:'foo\n' I:'\nbar ' K:' baz' + // After cleanup, we ideally want K'foo' I:'\nbar' K:' baz' + // but don't actually manage this currently (the pre-cleanup change + // objects don't contain enough information to make it possible). + // + // 5. Diffing 'foo bar baz' vs 'foo baz' + // Prior to cleanup, we have K:'foo ' D:' bar ' K:' baz' + // After cleanup, we want K:'foo ' D:' bar ' K:'baz' + // + // Our handling is unavoidably imperfect in the case where there's a single + // indel between keeps and the whitespace has changed. For instance, consider + // diffing 'foo\tbar\nbaz' vs 'foo baz'. Unless we create an extra change + // object to represent the insertion of the space character (which isn't even + // a token), we have no way to avoid losing information about the texts' + // original whitespace in the result we return. Still, we do our best to + // output something that will look sensible if we e.g. print it with + // insertions in green and deletions in red. + // Between two "keep" change objects (or before the first or after the last + // change object), we can have either: + // * A "delete" followed by an "insert" + // * Just an "insert" + // * Just a "delete" + // We handle the three cases separately. + if (deletion && insertion) { + const oldWsPrefix = leadingWs(deletion.value); + const oldWsSuffix = trailingWs(deletion.value); + const newWsPrefix = leadingWs(insertion.value); + const newWsSuffix = trailingWs(insertion.value); + if (startKeep) { + const commonWsPrefix = longestCommonPrefix(oldWsPrefix, newWsPrefix); + startKeep.value = replaceSuffix(startKeep.value, newWsPrefix, commonWsPrefix); + deletion.value = removePrefix(deletion.value, commonWsPrefix); + insertion.value = removePrefix(insertion.value, commonWsPrefix); + } + if (endKeep) { + const commonWsSuffix = longestCommonSuffix(oldWsSuffix, newWsSuffix); + endKeep.value = replacePrefix(endKeep.value, newWsSuffix, commonWsSuffix); + deletion.value = removeSuffix(deletion.value, commonWsSuffix); + insertion.value = removeSuffix(insertion.value, commonWsSuffix); + } + } + else if (insertion) { + // The whitespaces all reflect what was in the new text rather than + // the old, so we essentially have no information about whitespace + // insertion or deletion. We just want to dedupe the whitespace. + // We do that by having each change object keep its trailing + // whitespace and deleting duplicate leading whitespace where + // present. 
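+            // (E.g. K:'foo ' I:' bar ' K:' baz' becomes K:'foo ' I:'bar ' K:'baz',
+            // so concatenating the kept and inserted values reproduces 'foo bar baz'
+            // without doubled spaces.)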
+ if (startKeep) { + const ws = leadingWs(insertion.value); + insertion.value = insertion.value.substring(ws.length); + } + if (endKeep) { + const ws = leadingWs(endKeep.value); + endKeep.value = endKeep.value.substring(ws.length); + } + // otherwise we've got a deletion and no insertion + } + else if (startKeep && endKeep) { + const newWsFull = leadingWs(endKeep.value), delWsStart = leadingWs(deletion.value), delWsEnd = trailingWs(deletion.value); + // Any whitespace that comes straight after startKeep in both the old and + // new texts, assign to startKeep and remove from the deletion. + const newWsStart = longestCommonPrefix(newWsFull, delWsStart); + deletion.value = removePrefix(deletion.value, newWsStart); + // Any whitespace that comes straight before endKeep in both the old and + // new texts, and hasn't already been assigned to startKeep, assign to + // endKeep and remove from the deletion. + const newWsEnd = longestCommonSuffix(removePrefix(newWsFull, newWsStart), delWsEnd); + deletion.value = removeSuffix(deletion.value, newWsEnd); + endKeep.value = replacePrefix(endKeep.value, newWsFull, newWsEnd); + // If there's any whitespace from the new text that HASN'T already been + // assigned, assign it to the start: + startKeep.value = replaceSuffix(startKeep.value, newWsFull, newWsFull.slice(0, newWsFull.length - newWsEnd.length)); + } + else if (endKeep) { + // We are at the start of the text. Preserve all the whitespace on + // endKeep, and just remove whitespace from the end of deletion to the + // extent that it overlaps with the start of endKeep. + const endKeepWsPrefix = leadingWs(endKeep.value); + const deletionWsSuffix = trailingWs(deletion.value); + const overlap = maximumOverlap(deletionWsSuffix, endKeepWsPrefix); + deletion.value = removeSuffix(deletion.value, overlap); + } + else if (startKeep) { + // We are at the END of the text. Preserve all the whitespace on + // startKeep, and just remove whitespace from the start of deletion to + // the extent that it overlaps with the end of startKeep. + const startKeepWsSuffix = trailingWs(startKeep.value); + const deletionWsPrefix = leadingWs(deletion.value); + const overlap = maximumOverlap(startKeepWsSuffix, deletionWsPrefix); + deletion.value = removePrefix(deletion.value, overlap); + } + } + class WordsWithSpaceDiff extends Diff { + tokenize(value) { + // Slightly different to the tokenizeIncludingWhitespace regex used above in + // that this one treats each individual newline as a distinct tokens, rather + // than merging them into other surrounding whitespace. 
This was requested + // in https://github.com/kpdecker/jsdiff/issues/180 & + // https://github.com/kpdecker/jsdiff/issues/211 + const regex = new RegExp(`(\\r?\\n)|[${extendedWordChars}]+|[^\\S\\n\\r]+|[^${extendedWordChars}]`, 'ug'); + return value.match(regex) || []; + } + } + const wordsWithSpaceDiff = new WordsWithSpaceDiff(); + function diffWordsWithSpace(oldStr, newStr, options) { + return wordsWithSpaceDiff.diff(oldStr, newStr, options); + } + + function generateOptions(options, defaults) { + if (typeof options === 'function') { + defaults.callback = options; + } + else if (options) { + for (const name in options) { + /* istanbul ignore else */ + if (Object.prototype.hasOwnProperty.call(options, name)) { + defaults[name] = options[name]; + } + } + } + return defaults; + } + + class LineDiff extends Diff { + constructor() { + super(...arguments); + this.tokenize = tokenize; + } + equals(left, right, options) { + // If we're ignoring whitespace, we need to normalise lines by stripping + // whitespace before checking equality. (This has an annoying interaction + // with newlineIsToken that requires special handling: if newlines get their + // own token, then we DON'T want to trim the *newline* tokens down to empty + // strings, since this would cause us to treat whitespace-only line content + // as equal to a separator between lines, which would be weird and + // inconsistent with the documented behavior of the options.) + if (options.ignoreWhitespace) { + if (!options.newlineIsToken || !left.includes('\n')) { + left = left.trim(); + } + if (!options.newlineIsToken || !right.includes('\n')) { + right = right.trim(); + } + } + else if (options.ignoreNewlineAtEof && !options.newlineIsToken) { + if (left.endsWith('\n')) { + left = left.slice(0, -1); + } + if (right.endsWith('\n')) { + right = right.slice(0, -1); + } + } + return super.equals(left, right, options); + } + } + const lineDiff = new LineDiff(); + function diffLines(oldStr, newStr, options) { + return lineDiff.diff(oldStr, newStr, options); + } + function diffTrimmedLines(oldStr, newStr, options) { + options = generateOptions(options, { ignoreWhitespace: true }); + return lineDiff.diff(oldStr, newStr, options); + } + // Exported standalone so it can be used from jsonDiff too. + function tokenize(value, options) { + if (options.stripTrailingCr) { + // remove one \r before \n to match GNU diff's --strip-trailing-cr behavior + value = value.replace(/\r\n/g, '\n'); + } + const retLines = [], linesAndNewlines = value.split(/(\n|\r\n)/); + // Ignore the final empty token that occurs if the string ends with a new line + if (!linesAndNewlines[linesAndNewlines.length - 1]) { + linesAndNewlines.pop(); + } + // Merge the content and line separators into single tokens + for (let i = 0; i < linesAndNewlines.length; i++) { + const line = linesAndNewlines[i]; + if (i % 2 && !options.newlineIsToken) { + retLines[retLines.length - 1] += line; + } + else { + retLines.push(line); + } + } + return retLines; + } + + function isSentenceEndPunct(char) { + return char == '.' || char == '!' || char == '?'; + } + class SentenceDiff extends Diff { + tokenize(value) { + var _a; + // If in future we drop support for environments that don't support lookbehinds, we can replace + // this entire function with: + // return value.split(/(?<=[.!?])(\s+|$)/); + // but until then, for similar reasons to the trailingWs function in string.ts, we are forced + // to do this verbosely "by hand" instead of using a regex. 
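+            // (E.g. tokenize('Hi. Bye!') produces ['Hi.', ' ', 'Bye!']: sentences
+            // and the whitespace runs that separate them alternate as distinct tokens.)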
+ const result = []; + let tokenStartI = 0; + for (let i = 0; i < value.length; i++) { + if (i == value.length - 1) { + result.push(value.slice(tokenStartI)); + break; + } + if (isSentenceEndPunct(value[i]) && value[i + 1].match(/\s/)) { + // We've hit a sentence break - i.e. a punctuation mark followed by whitespace. + // We now want to push TWO tokens to the result: + // 1. the sentence + result.push(value.slice(tokenStartI, i + 1)); + // 2. the whitespace + i = tokenStartI = i + 1; + while ((_a = value[i + 1]) === null || _a === void 0 ? void 0 : _a.match(/\s/)) { + i++; + } + result.push(value.slice(tokenStartI, i + 1)); + // Then the next token (a sentence) starts on the character after the whitespace. + // (It's okay if this is off the end of the string - then the outer loop will terminate + // here anyway.) + tokenStartI = i + 1; + } + } + return result; + } + } + const sentenceDiff = new SentenceDiff(); + function diffSentences(oldStr, newStr, options) { + return sentenceDiff.diff(oldStr, newStr, options); + } + + class CssDiff extends Diff { + tokenize(value) { + return value.split(/([{}:;,]|\s+)/); + } + } + const cssDiff = new CssDiff(); + function diffCss(oldStr, newStr, options) { + return cssDiff.diff(oldStr, newStr, options); + } + + class JsonDiff extends Diff { + constructor() { + super(...arguments); + this.tokenize = tokenize; + } + get useLongestToken() { + // Discriminate between two lines of pretty-printed, serialized JSON where one of them has a + // dangling comma and the other doesn't. Turns out including the dangling comma yields the nicest output: + return true; + } + castInput(value, options) { + const { undefinedReplacement, stringifyReplacer = (k, v) => typeof v === 'undefined' ? undefinedReplacement : v } = options; + return typeof value === 'string' ? value : JSON.stringify(canonicalize(value, null, null, stringifyReplacer), null, ' '); + } + equals(left, right, options) { + return super.equals(left.replace(/,([\r\n])/g, '$1'), right.replace(/,([\r\n])/g, '$1'), options); + } + } + const jsonDiff = new JsonDiff(); + function diffJson(oldStr, newStr, options) { + return jsonDiff.diff(oldStr, newStr, options); + } + // This function handles the presence of circular references by bailing out when encountering an + // object that is already on the "stack" of items being processed. Accepts an optional replacer + function canonicalize(obj, stack, replacementStack, replacer, key) { + stack = stack || []; + replacementStack = replacementStack || []; + if (replacer) { + obj = replacer(key === undefined ? 
'' : key, obj); + } + let i; + for (i = 0; i < stack.length; i += 1) { + if (stack[i] === obj) { + return replacementStack[i]; + } + } + let canonicalizedObj; + if ('[object Array]' === Object.prototype.toString.call(obj)) { + stack.push(obj); + canonicalizedObj = new Array(obj.length); + replacementStack.push(canonicalizedObj); + for (i = 0; i < obj.length; i += 1) { + canonicalizedObj[i] = canonicalize(obj[i], stack, replacementStack, replacer, String(i)); + } + stack.pop(); + replacementStack.pop(); + return canonicalizedObj; + } + if (obj && obj.toJSON) { + obj = obj.toJSON(); + } + if (typeof obj === 'object' && obj !== null) { + stack.push(obj); + canonicalizedObj = {}; + replacementStack.push(canonicalizedObj); + const sortedKeys = []; + let key; + for (key in obj) { + /* istanbul ignore else */ + if (Object.prototype.hasOwnProperty.call(obj, key)) { + sortedKeys.push(key); + } + } + sortedKeys.sort(); + for (i = 0; i < sortedKeys.length; i += 1) { + key = sortedKeys[i]; + canonicalizedObj[key] = canonicalize(obj[key], stack, replacementStack, replacer, key); + } + stack.pop(); + replacementStack.pop(); + } + else { + canonicalizedObj = obj; + } + return canonicalizedObj; + } + + class ArrayDiff extends Diff { + tokenize(value) { + return value.slice(); + } + join(value) { + return value; + } + removeEmpty(value) { + return value; + } + } + const arrayDiff = new ArrayDiff(); + function diffArrays(oldArr, newArr, options) { + return arrayDiff.diff(oldArr, newArr, options); + } + + function unixToWin(patch) { + if (Array.isArray(patch)) { + // It would be cleaner if instead of the line below we could just write + // return patch.map(unixToWin) + // but mysteriously TypeScript (v5.7.3 at the time of writing) does not like this and it will + // refuse to compile, thinking that unixToWin could then return StructuredPatch[][] and the + // result would be incompatible with the overload signatures. + // See bug report at https://github.com/microsoft/TypeScript/issues/61398. + return patch.map(p => unixToWin(p)); + } + return Object.assign(Object.assign({}, patch), { hunks: patch.hunks.map(hunk => (Object.assign(Object.assign({}, hunk), { lines: hunk.lines.map((line, i) => { + var _a; + return (line.startsWith('\\') || line.endsWith('\r') || ((_a = hunk.lines[i + 1]) === null || _a === void 0 ? void 0 : _a.startsWith('\\'))) + ? line + : line + '\r'; + }) }))) }); + } + function winToUnix(patch) { + if (Array.isArray(patch)) { + // (See comment above equivalent line in unixToWin) + return patch.map(p => winToUnix(p)); + } + return Object.assign(Object.assign({}, patch), { hunks: patch.hunks.map(hunk => (Object.assign(Object.assign({}, hunk), { lines: hunk.lines.map(line => line.endsWith('\r') ? line.substring(0, line.length - 1) : line) }))) }); + } + /** + * Returns true if the patch consistently uses Unix line endings (or only involves one line and has + * no line endings). + */ + function isUnix(patch) { + if (!Array.isArray(patch)) { + patch = [patch]; + } + return !patch.some(index => index.hunks.some(hunk => hunk.lines.some(line => !line.startsWith('\\') && line.endsWith('\r')))); + } + /** + * Returns true if the patch uses Windows line endings and only Windows line endings. 
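+     * (A '\' marker line, and the line immediately before it that lacks a
+     * trailing newline, are exempt from the check - see the startsWith('\\')
+     * tests below.)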
+ */ + function isWin(patch) { + if (!Array.isArray(patch)) { + patch = [patch]; + } + return patch.some(index => index.hunks.some(hunk => hunk.lines.some(line => line.endsWith('\r')))) + && patch.every(index => index.hunks.every(hunk => hunk.lines.every((line, i) => { var _a; return line.startsWith('\\') || line.endsWith('\r') || ((_a = hunk.lines[i + 1]) === null || _a === void 0 ? void 0 : _a.startsWith('\\')); }))); + } + + /** + * Parses a patch into structured data, in the same structure returned by `structuredPatch`. + * + * @return a JSON object representation of the a patch, suitable for use with the `applyPatch` method. + */ + function parsePatch(uniDiff) { + const diffstr = uniDiff.split(/\n/), list = []; + let i = 0; + function parseIndex() { + const index = {}; + list.push(index); + // Parse diff metadata + while (i < diffstr.length) { + const line = diffstr[i]; + // File header found, end parsing diff metadata + if ((/^(---|\+\+\+|@@)\s/).test(line)) { + break; + } + // Diff index + const header = (/^(?:Index:|diff(?: -r \w+)+)\s+(.+?)\s*$/).exec(line); + if (header) { + index.index = header[1]; + } + i++; + } + // Parse file headers if they are defined. Unified diff requires them, but + // there's no technical issues to have an isolated hunk without file header + parseFileHeader(index); + parseFileHeader(index); + // Parse hunks + index.hunks = []; + while (i < diffstr.length) { + const line = diffstr[i]; + if ((/^(Index:\s|diff\s|---\s|\+\+\+\s|===================================================================)/).test(line)) { + break; + } + else if ((/^@@/).test(line)) { + index.hunks.push(parseHunk()); + } + else if (line) { + throw new Error('Unknown line ' + (i + 1) + ' ' + JSON.stringify(line)); + } + else { + i++; + } + } + } + // Parses the --- and +++ headers, if none are found, no lines + // are consumed. + function parseFileHeader(index) { + const fileHeader = (/^(---|\+\+\+)\s+(.*)\r?$/).exec(diffstr[i]); + if (fileHeader) { + const data = fileHeader[2].split('\t', 2), header = (data[1] || '').trim(); + let fileName = data[0].replace(/\\\\/g, '\\'); + if ((/^".*"$/).test(fileName)) { + fileName = fileName.substr(1, fileName.length - 2); + } + if (fileHeader[1] === '---') { + index.oldFileName = fileName; + index.oldHeader = header; + } + else { + index.newFileName = fileName; + index.newHeader = header; + } + i++; + } + } + // Parses a hunk + // This assumes that we are at the start of a hunk. + function parseHunk() { + var _a; + const chunkHeaderIndex = i, chunkHeaderLine = diffstr[i++], chunkHeader = chunkHeaderLine.split(/@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@/); + const hunk = { + oldStart: +chunkHeader[1], + oldLines: typeof chunkHeader[2] === 'undefined' ? 1 : +chunkHeader[2], + newStart: +chunkHeader[3], + newLines: typeof chunkHeader[4] === 'undefined' ? 1 : +chunkHeader[4], + lines: [] + }; + // Unified Diff Format quirk: If the chunk size is 0, + // the first number is one lower than one would expect. + // https://www.artima.com/weblogs/viewpost.jsp?thread=164293 + if (hunk.oldLines === 0) { + hunk.oldStart += 1; + } + if (hunk.newLines === 0) { + hunk.newStart += 1; + } + let addCount = 0, removeCount = 0; + for (; i < diffstr.length && (removeCount < hunk.oldLines || addCount < hunk.newLines || ((_a = diffstr[i]) === null || _a === void 0 ? void 0 : _a.startsWith('\\'))); i++) { + const operation = (diffstr[i].length == 0 && i != (diffstr.length - 1)) ? 
' ' : diffstr[i][0]; + if (operation === '+' || operation === '-' || operation === ' ' || operation === '\\') { + hunk.lines.push(diffstr[i]); + if (operation === '+') { + addCount++; + } + else if (operation === '-') { + removeCount++; + } + else if (operation === ' ') { + addCount++; + removeCount++; + } + } + else { + throw new Error(`Hunk at line ${chunkHeaderIndex + 1} contained invalid line ${diffstr[i]}`); + } + } + // Handle the empty block count case + if (!addCount && hunk.newLines === 1) { + hunk.newLines = 0; + } + if (!removeCount && hunk.oldLines === 1) { + hunk.oldLines = 0; + } + // Perform sanity checking + if (addCount !== hunk.newLines) { + throw new Error('Added line count did not match for hunk at line ' + (chunkHeaderIndex + 1)); + } + if (removeCount !== hunk.oldLines) { + throw new Error('Removed line count did not match for hunk at line ' + (chunkHeaderIndex + 1)); + } + return hunk; + } + while (i < diffstr.length) { + parseIndex(); + } + return list; + } + + // Iterator that traverses in the range of [min, max], stepping + // by distance from a given start position. I.e. for [0, 4], with + // start of 2, this will iterate 2, 3, 1, 4, 0. + function distanceIterator (start, minLine, maxLine) { + let wantForward = true, backwardExhausted = false, forwardExhausted = false, localOffset = 1; + return function iterator() { + if (wantForward && !forwardExhausted) { + if (backwardExhausted) { + localOffset++; + } + else { + wantForward = false; + } + // Check if trying to fit beyond text length, and if not, check it fits + // after offset location (or desired location on first iteration) + if (start + localOffset <= maxLine) { + return start + localOffset; + } + forwardExhausted = true; + } + if (!backwardExhausted) { + if (!forwardExhausted) { + wantForward = true; + } + // Check if trying to fit before text beginning, and if not, check it fits + // before offset location + if (minLine <= start - localOffset) { + return start - localOffset++; + } + backwardExhausted = true; + return iterator(); + } + // We tried to fit hunk before text beginning and beyond text length, then + // hunk can't fit on the text. Return undefined + return undefined; + }; + } + + /** + * attempts to apply a unified diff patch. + * + * Hunks are applied first to last. + * `applyPatch` first tries to apply the first hunk at the line number specified in the hunk header, and with all context lines matching exactly. + * If that fails, it tries scanning backwards and forwards, one line at a time, to find a place to apply the hunk where the context lines match exactly. + * If that still fails, and `fuzzFactor` is greater than zero, it increments the maximum number of mismatches (missing, extra, or changed context lines) that there can be between the hunk context and a region where we are trying to apply the patch such that the hunk will still be considered to match. + * Regardless of `fuzzFactor`, lines to be deleted in the hunk *must* be present for a hunk to match, and the context lines *immediately* before and after an insertion must match exactly. + * + * Once a hunk is successfully fitted, the process begins again with the next hunk. + * Regardless of `fuzzFactor`, later hunks must be applied later in the file than earlier hunks. + * + * If a hunk cannot be successfully fitted *anywhere* with fewer than `fuzzFactor` mismatches, `applyPatch` fails and returns `false`. 
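+     *
+     * Illustrative use (assuming `patchStr` holds a unified diff for `source`):
+     *
+     *     const result = applyPatch(source, patchStr, { fuzzFactor: 2 });
+     *     if (result === false) {
+     *       // some hunk could not be fitted even with up to 2 mismatches
+     *     }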
+ * + * If a hunk is successfully fitted but not at the line number specified by the hunk header, all subsequent hunks have their target line number adjusted accordingly. + * (e.g. if the first hunk is applied 10 lines below where the hunk header said it should fit, `applyPatch` will *start* looking for somewhere to apply the second hunk 10 lines below where its hunk header says it goes.) + * + * If the patch was applied successfully, returns a string containing the patched text. + * If the patch could not be applied (because some hunks in the patch couldn't be fitted to the text in `source`), `applyPatch` returns false. + * + * @param patch a string diff or the output from the `parsePatch` or `structuredPatch` methods. + */ + function applyPatch(source, patch, options = {}) { + let patches; + if (typeof patch === 'string') { + patches = parsePatch(patch); + } + else if (Array.isArray(patch)) { + patches = patch; + } + else { + patches = [patch]; + } + if (patches.length > 1) { + throw new Error('applyPatch only works with a single input.'); + } + return applyStructuredPatch(source, patches[0], options); + } + function applyStructuredPatch(source, patch, options = {}) { + if (options.autoConvertLineEndings || options.autoConvertLineEndings == null) { + if (hasOnlyWinLineEndings(source) && isUnix(patch)) { + patch = unixToWin(patch); + } + else if (hasOnlyUnixLineEndings(source) && isWin(patch)) { + patch = winToUnix(patch); + } + } + // Apply the diff to the input + const lines = source.split('\n'), hunks = patch.hunks, compareLine = options.compareLine || ((lineNumber, line, operation, patchContent) => line === patchContent), fuzzFactor = options.fuzzFactor || 0; + let minLine = 0; + if (fuzzFactor < 0 || !Number.isInteger(fuzzFactor)) { + throw new Error('fuzzFactor must be a non-negative integer'); + } + // Special case for empty patch. + if (!hunks.length) { + return source; + } + // Before anything else, handle EOFNL insertion/removal. If the patch tells us to make a change + // to the EOFNL that is redundant/impossible - i.e. to remove a newline that's not there, or add a + // newline that already exists - then we either return false and fail to apply the patch (if + // fuzzFactor is 0) or simply ignore the problem and do nothing (if fuzzFactor is >0). + // If we do need to remove/add a newline at EOF, this will always be in the final hunk: + let prevLine = '', removeEOFNL = false, addEOFNL = false; + for (let i = 0; i < hunks[hunks.length - 1].lines.length; i++) { + const line = hunks[hunks.length - 1].lines[i]; + if (line[0] == '\\') { + if (prevLine[0] == '+') { + removeEOFNL = true; + } + else if (prevLine[0] == '-') { + addEOFNL = true; + } + } + prevLine = line; + } + if (removeEOFNL) { + if (addEOFNL) { + // This means the final line gets changed but doesn't have a trailing newline in either the + // original or patched version. In that case, we do nothing if fuzzFactor > 0, and if + // fuzzFactor is 0, we simply validate that the source file has no trailing newline. 
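+            // (Such a patch's final hunk ends like:
+            //     -old last line
+            //     \ No newline at end of file
+            //     +new last line
+            //     \ No newline at end of file
+            // which is what sets both removeEOFNL and addEOFNL in the scan above.)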
+ if (!fuzzFactor && lines[lines.length - 1] == '') { + return false; + } + } + else if (lines[lines.length - 1] == '') { + lines.pop(); + } + else if (!fuzzFactor) { + return false; + } + } + else if (addEOFNL) { + if (lines[lines.length - 1] != '') { + lines.push(''); + } + else if (!fuzzFactor) { + return false; + } + } + /** + * Checks if the hunk can be made to fit at the provided location with at most `maxErrors` + * insertions, substitutions, or deletions, while ensuring also that: + * - lines deleted in the hunk match exactly, and + * - wherever an insertion operation or block of insertion operations appears in the hunk, the + * immediately preceding and following lines of context match exactly + * + * `toPos` should be set such that lines[toPos] is meant to match hunkLines[0]. + * + * If the hunk can be applied, returns an object with properties `oldLineLastI` and + * `replacementLines`. Otherwise, returns null. + */ + function applyHunk(hunkLines, toPos, maxErrors, hunkLinesI = 0, lastContextLineMatched = true, patchedLines = [], patchedLinesLength = 0) { + let nConsecutiveOldContextLines = 0; + let nextContextLineMustMatch = false; + for (; hunkLinesI < hunkLines.length; hunkLinesI++) { + const hunkLine = hunkLines[hunkLinesI], operation = (hunkLine.length > 0 ? hunkLine[0] : ' '), content = (hunkLine.length > 0 ? hunkLine.substr(1) : hunkLine); + if (operation === '-') { + if (compareLine(toPos + 1, lines[toPos], operation, content)) { + toPos++; + nConsecutiveOldContextLines = 0; + } + else { + if (!maxErrors || lines[toPos] == null) { + return null; + } + patchedLines[patchedLinesLength] = lines[toPos]; + return applyHunk(hunkLines, toPos + 1, maxErrors - 1, hunkLinesI, false, patchedLines, patchedLinesLength + 1); + } + } + if (operation === '+') { + if (!lastContextLineMatched) { + return null; + } + patchedLines[patchedLinesLength] = content; + patchedLinesLength++; + nConsecutiveOldContextLines = 0; + nextContextLineMustMatch = true; + } + if (operation === ' ') { + nConsecutiveOldContextLines++; + patchedLines[patchedLinesLength] = lines[toPos]; + if (compareLine(toPos + 1, lines[toPos], operation, content)) { + patchedLinesLength++; + lastContextLineMatched = true; + nextContextLineMustMatch = false; + toPos++; + } + else { + if (nextContextLineMustMatch || !maxErrors) { + return null; + } + // Consider 3 possibilities in sequence: + // 1. lines contains a *substitution* not included in the patch context, or + // 2. lines contains an *insertion* not included in the patch context, or + // 3. lines contains a *deletion* not included in the patch context + // The first two options are of course only possible if the line from lines is non-null - + // i.e. only option 3 is possible if we've overrun the end of the old file. + return (lines[toPos] && (applyHunk(hunkLines, toPos + 1, maxErrors - 1, hunkLinesI + 1, false, patchedLines, patchedLinesLength + 1) || applyHunk(hunkLines, toPos + 1, maxErrors - 1, hunkLinesI, false, patchedLines, patchedLinesLength + 1)) || applyHunk(hunkLines, toPos, maxErrors - 1, hunkLinesI + 1, false, patchedLines, patchedLinesLength)); + } + } + } + // Before returning, trim any unmodified context lines off the end of patchedLines and reduce + // toPos (and thus oldLineLastI) accordingly. This allows later hunks to be applied to a region + // that starts in this hunk's trailing context. 
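+        // (E.g. if a hunk ends with three unmodified context lines, oldLineLastI
+        // moves back by three, so the next hunk is allowed to match starting
+        // inside that trailing context rather than strictly after it.)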
+ patchedLinesLength -= nConsecutiveOldContextLines; + toPos -= nConsecutiveOldContextLines; + patchedLines.length = patchedLinesLength; + return { + patchedLines, + oldLineLastI: toPos - 1 + }; + } + const resultLines = []; + // Search best fit offsets for each hunk based on the previous ones + let prevHunkOffset = 0; + for (let i = 0; i < hunks.length; i++) { + const hunk = hunks[i]; + let hunkResult; + const maxLine = lines.length - hunk.oldLines + fuzzFactor; + let toPos; + for (let maxErrors = 0; maxErrors <= fuzzFactor; maxErrors++) { + toPos = hunk.oldStart + prevHunkOffset - 1; + const iterator = distanceIterator(toPos, minLine, maxLine); + for (; toPos !== undefined; toPos = iterator()) { + hunkResult = applyHunk(hunk.lines, toPos, maxErrors); + if (hunkResult) { + break; + } + } + if (hunkResult) { + break; + } + } + if (!hunkResult) { + return false; + } + // Copy everything from the end of where we applied the last hunk to the start of this hunk + for (let i = minLine; i < toPos; i++) { + resultLines.push(lines[i]); + } + // Add the lines produced by applying the hunk: + for (let i = 0; i < hunkResult.patchedLines.length; i++) { + const line = hunkResult.patchedLines[i]; + resultLines.push(line); + } + // Set lower text limit to end of the current hunk, so next ones don't try + // to fit over already patched text + minLine = hunkResult.oldLineLastI + 1; + // Note the offset between where the patch said the hunk should've applied and where we + // applied it, so we can adjust future hunks accordingly: + prevHunkOffset = toPos + 1 - hunk.oldStart; + } + // Copy over the rest of the lines from the old text + for (let i = minLine; i < lines.length; i++) { + resultLines.push(lines[i]); + } + return resultLines.join('\n'); + } + /** + * applies one or more patches. + * + * `patch` may be either an array of structured patch objects, or a string representing a patch in unified diff format (which may patch one or more files). + * + * This method will iterate over the contents of the patch and apply to data provided through callbacks. The general flow for each patch index is: + * + * - `options.loadFile(index, callback)` is called. The caller should then load the contents of the file and then pass that to the `callback(err, data)` callback. Passing an `err` will terminate further patch execution. + * - `options.patched(index, content, callback)` is called once the patch has been applied. `content` will be the return value from `applyPatch`. When it's ready, the caller should call `callback(err)` callback. Passing an `err` will terminate further patch execution. + * + * Once all patches have been applied or an error occurs, the `options.complete(err)` callback is made. + */ + function applyPatches(uniDiff, options) { + const spDiff = typeof uniDiff === 'string' ? 
parsePatch(uniDiff) : uniDiff; + let currentIndex = 0; + function processIndex() { + const index = spDiff[currentIndex++]; + if (!index) { + return options.complete(); + } + options.loadFile(index, function (err, data) { + if (err) { + return options.complete(err); + } + const updatedContent = applyPatch(data, index, options); + options.patched(index, updatedContent, function (err) { + if (err) { + return options.complete(err); + } + processIndex(); + }); + }); + } + processIndex(); + } + + function reversePatch(structuredPatch) { + if (Array.isArray(structuredPatch)) { + // (See comment in unixToWin for why we need the pointless-looking anonymous function here) + return structuredPatch.map(patch => reversePatch(patch)).reverse(); + } + return Object.assign(Object.assign({}, structuredPatch), { oldFileName: structuredPatch.newFileName, oldHeader: structuredPatch.newHeader, newFileName: structuredPatch.oldFileName, newHeader: structuredPatch.oldHeader, hunks: structuredPatch.hunks.map(hunk => { + return { + oldLines: hunk.newLines, + oldStart: hunk.newStart, + newLines: hunk.oldLines, + newStart: hunk.oldStart, + lines: hunk.lines.map(l => { + if (l.startsWith('-')) { + return `+${l.slice(1)}`; + } + if (l.startsWith('+')) { + return `-${l.slice(1)}`; + } + return l; + }) + }; + }) }); + } + + function structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, options) { + let optionsObj; + if (!options) { + optionsObj = {}; + } + else if (typeof options === 'function') { + optionsObj = { callback: options }; + } + else { + optionsObj = options; + } + if (typeof optionsObj.context === 'undefined') { + optionsObj.context = 4; + } + // We copy this into its own variable to placate TypeScript, which thinks + // optionsObj.context might be undefined in the callbacks below. + const context = optionsObj.context; + // @ts-expect-error (runtime check for something that is correctly a static type error) + if (optionsObj.newlineIsToken) { + throw new Error('newlineIsToken may not be used with patch-generation functions, only with diffing functions'); + } + if (!optionsObj.callback) { + return diffLinesResultToPatch(diffLines(oldStr, newStr, optionsObj)); + } + else { + const { callback } = optionsObj; + diffLines(oldStr, newStr, Object.assign(Object.assign({}, optionsObj), { callback: (diff) => { + const patch = diffLinesResultToPatch(diff); + // TypeScript is unhappy without the cast because it does not understand that `patch` may + // be undefined here only if `callback` is StructuredPatchCallbackAbortable: + callback(patch); + } })); + } + function diffLinesResultToPatch(diff) { + // STEP 1: Build up the patch with no "\ No newline at end of file" lines and with the arrays + // of lines containing trailing newline characters. We'll tidy up later... + if (!diff) { + return; + } + diff.push({ value: '', lines: [] }); // Append an empty value to make cleanup easier + function contextLines(lines) { + return lines.map(function (entry) { return ' ' + entry; }); + } + const hunks = []; + let oldRangeStart = 0, newRangeStart = 0, curRange = [], oldLine = 1, newLine = 1; + for (let i = 0; i < diff.length; i++) { + const current = diff[i], lines = current.lines || splitLines(current.value); + current.lines = lines; + if (current.added || current.removed) { + // If we have previous context, start with that + if (!oldRangeStart) { + const prev = diff[i - 1]; + oldRangeStart = oldLine; + newRangeStart = newLine; + if (prev) { + curRange = context > 0 ? 
contextLines(prev.lines.slice(-context)) : []; + oldRangeStart -= curRange.length; + newRangeStart -= curRange.length; + } + } + // Output our changes + for (const line of lines) { + curRange.push((current.added ? '+' : '-') + line); + } + // Track the updated file position + if (current.added) { + newLine += lines.length; + } + else { + oldLine += lines.length; + } + } + else { + // Identical context lines. Track line changes + if (oldRangeStart) { + // Close out any changes that have been output (or join overlapping) + if (lines.length <= context * 2 && i < diff.length - 2) { + // Overlapping + for (const line of contextLines(lines)) { + curRange.push(line); + } + } + else { + // end the range and output + const contextSize = Math.min(lines.length, context); + for (const line of contextLines(lines.slice(0, contextSize))) { + curRange.push(line); + } + const hunk = { + oldStart: oldRangeStart, + oldLines: (oldLine - oldRangeStart + contextSize), + newStart: newRangeStart, + newLines: (newLine - newRangeStart + contextSize), + lines: curRange + }; + hunks.push(hunk); + oldRangeStart = 0; + newRangeStart = 0; + curRange = []; + } + } + oldLine += lines.length; + newLine += lines.length; + } + } + // Step 2: eliminate the trailing `\n` from each line of each hunk, and, where needed, add + // "\ No newline at end of file". + for (const hunk of hunks) { + for (let i = 0; i < hunk.lines.length; i++) { + if (hunk.lines[i].endsWith('\n')) { + hunk.lines[i] = hunk.lines[i].slice(0, -1); + } + else { + hunk.lines.splice(i + 1, 0, '\\ No newline at end of file'); + i++; // Skip the line we just added, then continue iterating + } + } + } + return { + oldFileName: oldFileName, newFileName: newFileName, + oldHeader: oldHeader, newHeader: newHeader, + hunks: hunks + }; + } + } + /** + * creates a unified diff patch. + * @param patch either a single structured patch object (as returned by `structuredPatch`) or an array of them (as returned by `parsePatch`) + */ + function formatPatch(patch) { + if (Array.isArray(patch)) { + return patch.map(formatPatch).join('\n'); + } + const ret = []; + if (patch.oldFileName == patch.newFileName) { + ret.push('Index: ' + patch.oldFileName); + } + ret.push('==================================================================='); + ret.push('--- ' + patch.oldFileName + (typeof patch.oldHeader === 'undefined' ? '' : '\t' + patch.oldHeader)); + ret.push('+++ ' + patch.newFileName + (typeof patch.newHeader === 'undefined' ? '' : '\t' + patch.newHeader)); + for (let i = 0; i < patch.hunks.length; i++) { + const hunk = patch.hunks[i]; + // Unified Diff Format quirk: If the chunk size is 0, + // the first number is one lower than one would expect. + // https://www.artima.com/weblogs/viewpost.jsp?thread=164293 + if (hunk.oldLines === 0) { + hunk.oldStart -= 1; + } + if (hunk.newLines === 0) { + hunk.newStart -= 1; + } + ret.push('@@ -' + hunk.oldStart + ',' + hunk.oldLines + + ' +' + hunk.newStart + ',' + hunk.newLines + + ' @@'); + for (const line of hunk.lines) { + ret.push(line); + } + } + return ret.join('\n') + '\n'; + } + function createTwoFilesPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, options) { + if (typeof options === 'function') { + options = { callback: options }; + } + if (!(options === null || options === void 0 ? 
void 0 : options.callback)) {
+            const patchObj = structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, options);
+            if (!patchObj) {
+                return;
+            }
+            return formatPatch(patchObj);
+        }
+        else {
+            const { callback } = options;
+            structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, Object.assign(Object.assign({}, options), { callback: patchObj => {
+                    if (!patchObj) {
+                        callback(undefined);
+                    }
+                    else {
+                        callback(formatPatch(patchObj));
+                    }
+                } }));
+        }
+    }
+    function createPatch(fileName, oldStr, newStr, oldHeader, newHeader, options) {
+        return createTwoFilesPatch(fileName, fileName, oldStr, newStr, oldHeader, newHeader, options);
+    }
+    /**
+     * Split `text` into an array of lines, including the trailing newline character (where present)
+     */
+    function splitLines(text) {
+        const hasTrailingNl = text.endsWith('\n');
+        const result = text.split('\n').map(line => line + '\n');
+        if (hasTrailingNl) {
+            result.pop();
+        }
+        else {
+            result.push(result.pop().slice(0, -1));
+        }
+        return result;
+    }
+
+    /**
+     * converts a list of change objects to the format returned by Google's [diff-match-patch](https://github.com/google/diff-match-patch) library
+     */
+    function convertChangesToDMP(changes) {
+        const ret = [];
+        let change, operation;
+        for (let i = 0; i < changes.length; i++) {
+            change = changes[i];
+            if (change.added) {
+                operation = 1;
+            }
+            else if (change.removed) {
+                operation = -1;
+            }
+            else {
+                operation = 0;
+            }
+            ret.push([operation, change.value]);
+        }
+        return ret;
+    }
+
+    /**
+     * converts a list of change objects to a serialized XML format
+     */
+    function convertChangesToXML(changes) {
+        const ret = [];
+        for (let i = 0; i < changes.length; i++) {
+            const change = changes[i];
+            if (change.added) {
+                ret.push('<ins>');
+            }
+            else if (change.removed) {
+                ret.push('<del>');
+            }
+            ret.push(escapeHTML(change.value));
+            if (change.added) {
+                ret.push('</ins>');
+            }
+            else if (change.removed) {
+                ret.push('</del>');
+            }
+        }
+        return ret.join('');
+    }
+    function escapeHTML(s) {
+        let n = s;
+        n = n.replace(/&/g, '&amp;');
+        n = n.replace(/</g, '&lt;');
+        n = n.replace(/>/g, '&gt;');
+        n = n.replace(/"/g, '&quot;');
+        return n;
+    }
+
+    exports.Diff = Diff;
+    exports.applyPatch = applyPatch;
+    exports.applyPatches = applyPatches;
+    exports.arrayDiff = arrayDiff;
+    exports.canonicalize = canonicalize;
+    exports.characterDiff = characterDiff;
+    exports.convertChangesToDMP = convertChangesToDMP;
+    exports.convertChangesToXML = convertChangesToXML;
+    exports.createPatch = createPatch;
+    exports.createTwoFilesPatch = createTwoFilesPatch;
+    exports.cssDiff = cssDiff;
+    exports.diffArrays = diffArrays;
+    exports.diffChars = diffChars;
+    exports.diffCss = diffCss;
+    exports.diffJson = diffJson;
+    exports.diffLines = diffLines;
+    exports.diffSentences = diffSentences;
+    exports.diffTrimmedLines = diffTrimmedLines;
+    exports.diffWords = diffWords;
+    exports.diffWordsWithSpace = diffWordsWithSpace;
+    exports.formatPatch = formatPatch;
+    exports.jsonDiff = jsonDiff;
+    exports.lineDiff = lineDiff;
+    exports.parsePatch = parsePatch;
+    exports.reversePatch = reversePatch;
+    exports.sentenceDiff = sentenceDiff;
+    exports.structuredPatch = structuredPatch;
+    exports.wordDiff = wordDiff;
+    exports.wordsWithSpaceDiff = wordsWithSpaceDiff;
+
+}));
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/dist/diff.min.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/dist/diff.min.js
new file mode 100644
index 0000000000000000000000000000000000000000..6fd5d020d282c4d1ffae135abf000b737f2c250a
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/dist/diff.min.js
@@ -0,0 +1 @@
+/* dist/diff.min.js: the library above as one minified line (same UMD wrapper and exports as dist/diff.js); the minified body is not reproduced here */
0:options.ignoreWhitespace)||options.ignoreWhitespace?wordDiff.diff(oldStr,newStr,options):diffWordsWithSpace(oldStr,newStr,options)},exports.diffWordsWithSpace=diffWordsWithSpace,exports.formatPatch=formatPatch,exports.jsonDiff=jsonDiff,exports.lineDiff=lineDiff,exports.parsePatch=parsePatch,exports.reversePatch=function reversePatch(structuredPatch){return Array.isArray(structuredPatch)?structuredPatch.map(patch=>reversePatch(patch)).reverse():Object.assign(Object.assign({},structuredPatch),{oldFileName:structuredPatch.newFileName,oldHeader:structuredPatch.newHeader,newFileName:structuredPatch.oldFileName,newHeader:structuredPatch.oldHeader,hunks:structuredPatch.hunks.map(hunk=>({oldLines:hunk.newLines,oldStart:hunk.newStart,newLines:hunk.oldLines,newStart:hunk.oldStart,lines:hunk.lines.map(l=>l.startsWith("-")?"+"+l.slice(1):l.startsWith("+")?"-"+l.slice(1):l)}))})},exports.sentenceDiff=sentenceDiff,exports.structuredPatch=structuredPatch,exports.wordDiff=wordDiff,exports.wordsWithSpaceDiff=wordsWithSpaceDiff}); \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/convert/dmp.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/convert/dmp.js new file mode 100644 index 0000000000000000000000000000000000000000..10680ff38801fb3840dccc37bde765ecf63c1c80 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/convert/dmp.js @@ -0,0 +1,24 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.convertChangesToDMP = convertChangesToDMP; +/** + * converts a list of change objects to the format returned by Google's [diff-match-patch](https://github.com/google/diff-match-patch) library + */ +function convertChangesToDMP(changes) { + var ret = []; + var change, operation; + for (var i = 0; i < changes.length; i++) { + change = changes[i]; + if (change.added) { + operation = 1; + } + else if (change.removed) { + operation = -1; + } + else { + operation = 0; + } + ret.push([operation, change.value]); + } + return ret; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/convert/xml.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/convert/xml.js new file mode 100644 index 0000000000000000000000000000000000000000..5ecd8aa255b861f9b97cf81aa0097e76daa533ba --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/convert/xml.js @@ -0,0 +1,34 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.convertChangesToXML = convertChangesToXML; +/** + * converts a list of change objects to a serialized XML format + */ +function convertChangesToXML(changes) { + var ret = []; + for (var i = 0; i < changes.length; i++) { + var change = changes[i]; + if (change.added) { + ret.push('<ins>'); + } + else if (change.removed) { + ret.push('<del>'); + } + ret.push(escapeHTML(change.value)); + if (change.added) { + ret.push('</ins>'); + } + else if (change.removed) { + ret.push('</del>'); + } + } + return ret.join(''); +} +function escapeHTML(s) { + var n = s; + n = n.replace(/&/g, '&amp;'); + n = n.replace(/</g, '&lt;'); + n = n.replace(/>/g, '&gt;'); + n = n.replace(/"/g, '&quot;'); + return n; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/diff/array.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/diff/array.js new file mode 100644 index 
0000000000000000000000000000000000000000..2050261be823fe55cf2f7a15fd6b23a1431230b4 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/diff/array.js @@ -0,0 +1,40 @@ +"use strict"; +var __extends = (this && this.__extends) || (function () { + var extendStatics = function (d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); + }; + return function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; +})(); +Object.defineProperty(exports, "__esModule", { value: true }); +exports.arrayDiff = void 0; +exports.diffArrays = diffArrays; +var base_js_1 = require("./base.js"); +var ArrayDiff = /** @class */ (function (_super) { + __extends(ArrayDiff, _super); + function ArrayDiff() { + return _super !== null && _super.apply(this, arguments) || this; + } + ArrayDiff.prototype.tokenize = function (value) { + return value.slice(); + }; + ArrayDiff.prototype.join = function (value) { + return value; + }; + ArrayDiff.prototype.removeEmpty = function (value) { + return value; + }; + return ArrayDiff; +}(base_js_1.default)); +exports.arrayDiff = new ArrayDiff(); +function diffArrays(oldArr, newArr, options) { + return exports.arrayDiff.diff(oldArr, newArr, options); +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/diff/base.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/diff/base.js new file mode 100644 index 0000000000000000000000000000000000000000..b8473a435bb8474ee2ec1d62703b8b848292ed17 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/diff/base.js @@ -0,0 +1,265 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +var Diff = /** @class */ (function () { + function Diff() { + } + Diff.prototype.diff = function (oldStr, newStr, + // Type below is not accurate/complete - see above for full possibilities - but it compiles + options) { + if (options === void 0) { options = {}; } + var callback; + if (typeof options === 'function') { + callback = options; + options = {}; + } + else if ('callback' in options) { + callback = options.callback; + } + // Allow subclasses to massage the input prior to running + var oldString = this.castInput(oldStr, options); + var newString = this.castInput(newStr, options); + var oldTokens = this.removeEmpty(this.tokenize(oldString, options)); + var newTokens = this.removeEmpty(this.tokenize(newString, options)); + return this.diffWithOptionsObj(oldTokens, newTokens, options, callback); + }; + Diff.prototype.diffWithOptionsObj = function (oldTokens, newTokens, options, callback) { + var _this = this; + var _a; + var done = function (value) { + value = _this.postProcess(value, options); + if (callback) { + setTimeout(function () { callback(value); }, 0); + return undefined; + } + else { + return value; + } + }; + var newLen = newTokens.length, oldLen = oldTokens.length; + var editLength = 1; + var maxEditLength = newLen + oldLen; + if (options.maxEditLength != null) { + maxEditLength = 
Math.min(maxEditLength, options.maxEditLength); + } + var maxExecutionTime = (_a = options.timeout) !== null && _a !== void 0 ? _a : Infinity; + var abortAfterTimestamp = Date.now() + maxExecutionTime; + var bestPath = [{ oldPos: -1, lastComponent: undefined }]; + // Seed editLength = 0, i.e. the content starts with the same values + var newPos = this.extractCommon(bestPath[0], newTokens, oldTokens, 0, options); + if (bestPath[0].oldPos + 1 >= oldLen && newPos + 1 >= newLen) { + // Identity per the equality and tokenizer + return done(this.buildValues(bestPath[0].lastComponent, newTokens, oldTokens)); + } + // Once we hit the right edge of the edit graph on some diagonal k, we can + // definitely reach the end of the edit graph in no more than k edits, so + // there's no point in considering any moves to diagonal k+1 any more (from + // which we're guaranteed to need at least k+1 more edits). + // Similarly, once we've reached the bottom of the edit graph, there's no + // point considering moves to lower diagonals. + // We record this fact by setting minDiagonalToConsider and + // maxDiagonalToConsider to some finite value once we've hit the edge of + // the edit graph. + // This optimization is not faithful to the original algorithm presented in + // Myers's paper, which instead pointlessly extends D-paths off the end of + // the edit graph - see page 7 of Myers's paper which notes this point + // explicitly and illustrates it with a diagram. This has major performance + // implications for some common scenarios. For instance, to compute a diff + // where the new text simply appends d characters on the end of the + // original text of length n, the true Myers algorithm will take O(n+d^2) + // time while this optimization needs only O(n+d) time. + var minDiagonalToConsider = -Infinity, maxDiagonalToConsider = Infinity; + // Main worker method. checks all permutations of a given edit length for acceptance. + var execEditLength = function () { + for (var diagonalPath = Math.max(minDiagonalToConsider, -editLength); diagonalPath <= Math.min(maxDiagonalToConsider, editLength); diagonalPath += 2) { + var basePath = void 0; + var removePath = bestPath[diagonalPath - 1], addPath = bestPath[diagonalPath + 1]; + if (removePath) { + // No one else is going to attempt to use this value, clear it + // @ts-expect-error - perf optimisation. This type-violating value will never be read. + bestPath[diagonalPath - 1] = undefined; + } + var canAdd = false; + if (addPath) { + // what newPos will be after we do an insertion: + var addPathNewPos = addPath.oldPos - diagonalPath; + canAdd = addPath && 0 <= addPathNewPos && addPathNewPos < newLen; + } + var canRemove = removePath && removePath.oldPos + 1 < oldLen; + if (!canAdd && !canRemove) { + // If this path is a terminal then prune + // @ts-expect-error - perf optimisation. This type-violating value will never be read. + bestPath[diagonalPath] = undefined; + continue; + } + // Select the diagonal that we want to branch from. 
We select the prior + // path whose position in the old string is the farthest from the origin + // and does not pass the bounds of the diff graph + if (!canRemove || (canAdd && removePath.oldPos < addPath.oldPos)) { + basePath = _this.addToPath(addPath, true, false, 0, options); + } + else { + basePath = _this.addToPath(removePath, false, true, 1, options); + } + newPos = _this.extractCommon(basePath, newTokens, oldTokens, diagonalPath, options); + if (basePath.oldPos + 1 >= oldLen && newPos + 1 >= newLen) { + // If we have hit the end of both strings, then we are done + return done(_this.buildValues(basePath.lastComponent, newTokens, oldTokens)) || true; + } + else { + bestPath[diagonalPath] = basePath; + if (basePath.oldPos + 1 >= oldLen) { + maxDiagonalToConsider = Math.min(maxDiagonalToConsider, diagonalPath - 1); + } + if (newPos + 1 >= newLen) { + minDiagonalToConsider = Math.max(minDiagonalToConsider, diagonalPath + 1); + } + } + } + editLength++; + }; + // Performs the length of edit iteration. Is a bit fugly as this has to support the + // sync and async mode which is never fun. Loops over execEditLength until a value + // is produced, or until the edit length exceeds options.maxEditLength (if given), + // in which case it will return undefined. + if (callback) { + (function exec() { + setTimeout(function () { + if (editLength > maxEditLength || Date.now() > abortAfterTimestamp) { + return callback(undefined); + } + if (!execEditLength()) { + exec(); + } + }, 0); + }()); + } + else { + while (editLength <= maxEditLength && Date.now() <= abortAfterTimestamp) { + var ret = execEditLength(); + if (ret) { + return ret; + } + } + } + }; + Diff.prototype.addToPath = function (path, added, removed, oldPosInc, options) { + var last = path.lastComponent; + if (last && !options.oneChangePerToken && last.added === added && last.removed === removed) { + return { + oldPos: path.oldPos + oldPosInc, + lastComponent: { count: last.count + 1, added: added, removed: removed, previousComponent: last.previousComponent } + }; + } + else { + return { + oldPos: path.oldPos + oldPosInc, + lastComponent: { count: 1, added: added, removed: removed, previousComponent: last } + }; + } + }; + Diff.prototype.extractCommon = function (basePath, newTokens, oldTokens, diagonalPath, options) { + var newLen = newTokens.length, oldLen = oldTokens.length; + var oldPos = basePath.oldPos, newPos = oldPos - diagonalPath, commonCount = 0; + while (newPos + 1 < newLen && oldPos + 1 < oldLen && this.equals(oldTokens[oldPos + 1], newTokens[newPos + 1], options)) { + newPos++; + oldPos++; + commonCount++; + if (options.oneChangePerToken) { + basePath.lastComponent = { count: 1, previousComponent: basePath.lastComponent, added: false, removed: false }; + } + } + if (commonCount && !options.oneChangePerToken) { + basePath.lastComponent = { count: commonCount, previousComponent: basePath.lastComponent, added: false, removed: false }; + } + basePath.oldPos = oldPos; + return newPos; + }; + Diff.prototype.equals = function (left, right, options) { + if (options.comparator) { + return options.comparator(left, right); + } + else { + return left === right + || (!!options.ignoreCase && left.toLowerCase() === right.toLowerCase()); + } + }; + Diff.prototype.removeEmpty = function (array) { + var ret = []; + for (var i = 0; i < array.length; i++) { + if (array[i]) { + ret.push(array[i]); + } + } + return ret; + }; + // eslint-disable-next-line @typescript-eslint/no-unused-vars + Diff.prototype.castInput = function (value, options) { + 
return value; + }; + // eslint-disable-next-line @typescript-eslint/no-unused-vars + Diff.prototype.tokenize = function (value, options) { + return Array.from(value); + }; + Diff.prototype.join = function (chars) { + // Assumes ValueT is string, which is the case for most subclasses. + // When it's false, e.g. in diffArrays, this method needs to be overridden (e.g. with a no-op) + // Yes, the casts are verbose and ugly, because this pattern - of having the base class SORT OF + // assume tokens and values are strings, but not completely - is weird and janky. + return chars.join(''); + }; + Diff.prototype.postProcess = function (changeObjects, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + options) { + return changeObjects; + }; + Object.defineProperty(Diff.prototype, "useLongestToken", { + get: function () { + return false; + }, + enumerable: false, + configurable: true + }); + Diff.prototype.buildValues = function (lastComponent, newTokens, oldTokens) { + // First we convert our linked list of components in reverse order to an + // array in the right order: + var components = []; + var nextComponent; + while (lastComponent) { + components.push(lastComponent); + nextComponent = lastComponent.previousComponent; + delete lastComponent.previousComponent; + lastComponent = nextComponent; + } + components.reverse(); + var componentLen = components.length; + var componentPos = 0, newPos = 0, oldPos = 0; + for (; componentPos < componentLen; componentPos++) { + var component = components[componentPos]; + if (!component.removed) { + if (!component.added && this.useLongestToken) { + var value = newTokens.slice(newPos, newPos + component.count); + value = value.map(function (value, i) { + var oldValue = oldTokens[oldPos + i]; + return oldValue.length > value.length ? oldValue : value; + }); + component.value = this.join(value); + } + else { + component.value = this.join(newTokens.slice(newPos, newPos + component.count)); + } + newPos += component.count; + // Common case + if (!component.added) { + oldPos += component.count; + } + } + else { + component.value = this.join(oldTokens.slice(oldPos, oldPos + component.count)); + oldPos += component.count; + } + } + return components; + }; + return Diff; +}()); +exports.default = Diff; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/diff/character.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/diff/character.js new file mode 100644 index 0000000000000000000000000000000000000000..8e974ef9ad551a4048d41453666e8be7621df1cb --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/diff/character.js @@ -0,0 +1,31 @@ +"use strict"; +var __extends = (this && this.__extends) || (function () { + var extendStatics = function (d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); + }; + return function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __()); + }; +})(); +Object.defineProperty(exports, "__esModule", { value: true }); +exports.characterDiff = void 0; +exports.diffChars = diffChars; +var base_js_1 = require("./base.js"); +var CharacterDiff = /** @class */ (function (_super) { + __extends(CharacterDiff, _super); + function CharacterDiff() { + return _super !== null && _super.apply(this, arguments) || this; + } + return CharacterDiff; +}(base_js_1.default)); +exports.characterDiff = new CharacterDiff(); +function diffChars(oldStr, newStr, options) { + return exports.characterDiff.diff(oldStr, newStr, options); +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/diff/css.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/diff/css.js new file mode 100644 index 0000000000000000000000000000000000000000..45c5559c00cc133969c73032a7db8fc2798d4ce7 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/diff/css.js @@ -0,0 +1,34 @@ +"use strict"; +var __extends = (this && this.__extends) || (function () { + var extendStatics = function (d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); + }; + return function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; +})(); +Object.defineProperty(exports, "__esModule", { value: true }); +exports.cssDiff = void 0; +exports.diffCss = diffCss; +var base_js_1 = require("./base.js"); +var CssDiff = /** @class */ (function (_super) { + __extends(CssDiff, _super); + function CssDiff() { + return _super !== null && _super.apply(this, arguments) || this; + } + CssDiff.prototype.tokenize = function (value) { + return value.split(/([{}:;,]|\s+)/); + }; + return CssDiff; +}(base_js_1.default)); +exports.cssDiff = new CssDiff(); +function diffCss(oldStr, newStr, options) { + return exports.cssDiff.diff(oldStr, newStr, options); +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/diff/json.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/diff/json.js new file mode 100644 index 0000000000000000000000000000000000000000..15f942b4b91681337c05a2b7a05f6580ff2c9ff2 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/diff/json.js @@ -0,0 +1,105 @@ +"use strict"; +var __extends = (this && this.__extends) || (function () { + var extendStatics = function (d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); + }; + return function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __()); + }; +})(); +Object.defineProperty(exports, "__esModule", { value: true }); +exports.jsonDiff = void 0; +exports.diffJson = diffJson; +exports.canonicalize = canonicalize; +var base_js_1 = require("./base.js"); +var line_js_1 = require("./line.js"); +var JsonDiff = /** @class */ (function (_super) { + __extends(JsonDiff, _super); + function JsonDiff() { + var _this = _super !== null && _super.apply(this, arguments) || this; + _this.tokenize = line_js_1.tokenize; + return _this; + } + Object.defineProperty(JsonDiff.prototype, "useLongestToken", { + get: function () { + // Discriminate between two lines of pretty-printed, serialized JSON where one of them has a + // dangling comma and the other doesn't. Turns out including the dangling comma yields the nicest output: + return true; + }, + enumerable: false, + configurable: true + }); + JsonDiff.prototype.castInput = function (value, options) { + var undefinedReplacement = options.undefinedReplacement, _a = options.stringifyReplacer, stringifyReplacer = _a === void 0 ? function (k, v) { return typeof v === 'undefined' ? undefinedReplacement : v; } : _a; + return typeof value === 'string' ? value : JSON.stringify(canonicalize(value, null, null, stringifyReplacer), null, ' '); + }; + JsonDiff.prototype.equals = function (left, right, options) { + return _super.prototype.equals.call(this, left.replace(/,([\r\n])/g, '$1'), right.replace(/,([\r\n])/g, '$1'), options); + }; + return JsonDiff; +}(base_js_1.default)); +exports.jsonDiff = new JsonDiff(); +function diffJson(oldStr, newStr, options) { + return exports.jsonDiff.diff(oldStr, newStr, options); +} +// This function handles the presence of circular references by bailing out when encountering an +// object that is already on the "stack" of items being processed. Accepts an optional replacer +function canonicalize(obj, stack, replacementStack, replacer, key) { + stack = stack || []; + replacementStack = replacementStack || []; + if (replacer) { + obj = replacer(key === undefined ? 
'' : key, obj); + } + var i; + for (i = 0; i < stack.length; i += 1) { + if (stack[i] === obj) { + return replacementStack[i]; + } + } + var canonicalizedObj; + if ('[object Array]' === Object.prototype.toString.call(obj)) { + stack.push(obj); + canonicalizedObj = new Array(obj.length); + replacementStack.push(canonicalizedObj); + for (i = 0; i < obj.length; i += 1) { + canonicalizedObj[i] = canonicalize(obj[i], stack, replacementStack, replacer, String(i)); + } + stack.pop(); + replacementStack.pop(); + return canonicalizedObj; + } + if (obj && obj.toJSON) { + obj = obj.toJSON(); + } + if (typeof obj === 'object' && obj !== null) { + stack.push(obj); + canonicalizedObj = {}; + replacementStack.push(canonicalizedObj); + var sortedKeys = []; + var key_1; + for (key_1 in obj) { + /* istanbul ignore else */ + if (Object.prototype.hasOwnProperty.call(obj, key_1)) { + sortedKeys.push(key_1); + } + } + sortedKeys.sort(); + for (i = 0; i < sortedKeys.length; i += 1) { + key_1 = sortedKeys[i]; + canonicalizedObj[key_1] = canonicalize(obj[key_1], stack, replacementStack, replacer, key_1); + } + stack.pop(); + replacementStack.pop(); + } + else { + canonicalizedObj = obj; + } + return canonicalizedObj; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/diff/line.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/diff/line.js new file mode 100644 index 0000000000000000000000000000000000000000..8f4a1f412c1718f6a3e6e16c6dbc59fcfeb2d26a --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/diff/line.js @@ -0,0 +1,89 @@ +"use strict"; +var __extends = (this && this.__extends) || (function () { + var extendStatics = function (d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); + }; + return function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; +})(); +Object.defineProperty(exports, "__esModule", { value: true }); +exports.lineDiff = void 0; +exports.diffLines = diffLines; +exports.diffTrimmedLines = diffTrimmedLines; +exports.tokenize = tokenize; +var base_js_1 = require("./base.js"); +var params_js_1 = require("../util/params.js"); +var LineDiff = /** @class */ (function (_super) { + __extends(LineDiff, _super); + function LineDiff() { + var _this = _super !== null && _super.apply(this, arguments) || this; + _this.tokenize = tokenize; + return _this; + } + LineDiff.prototype.equals = function (left, right, options) { + // If we're ignoring whitespace, we need to normalise lines by stripping + // whitespace before checking equality. (This has an annoying interaction + // with newlineIsToken that requires special handling: if newlines get their + // own token, then we DON'T want to trim the *newline* tokens down to empty + // strings, since this would cause us to treat whitespace-only line content + // as equal to a separator between lines, which would be weird and + // inconsistent with the documented behavior of the options.) 
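+ // (Editor's sketch of that interaction, not part of the original source: with
+ // { ignoreWhitespace: true }, equals(' foo ', 'foo') is true because both sides
+ // trim to 'foo'; if newlineIsToken is also set, a separator token '\n' contains a
+ // newline and is therefore left untrimmed, so it still compares unequal to a
+ // whitespace-only content token such as '  '.)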
+ if (options.ignoreWhitespace) { + if (!options.newlineIsToken || !left.includes('\n')) { + left = left.trim(); + } + if (!options.newlineIsToken || !right.includes('\n')) { + right = right.trim(); + } + } + else if (options.ignoreNewlineAtEof && !options.newlineIsToken) { + if (left.endsWith('\n')) { + left = left.slice(0, -1); + } + if (right.endsWith('\n')) { + right = right.slice(0, -1); + } + } + return _super.prototype.equals.call(this, left, right, options); + }; + return LineDiff; +}(base_js_1.default)); +exports.lineDiff = new LineDiff(); +function diffLines(oldStr, newStr, options) { + return exports.lineDiff.diff(oldStr, newStr, options); +} +function diffTrimmedLines(oldStr, newStr, options) { + options = (0, params_js_1.generateOptions)(options, { ignoreWhitespace: true }); + return exports.lineDiff.diff(oldStr, newStr, options); +} +// Exported standalone so it can be used from jsonDiff too. +function tokenize(value, options) { + if (options.stripTrailingCr) { + // remove one \r before \n to match GNU diff's --strip-trailing-cr behavior + value = value.replace(/\r\n/g, '\n'); + } + var retLines = [], linesAndNewlines = value.split(/(\n|\r\n)/); + // Ignore the final empty token that occurs if the string ends with a new line + if (!linesAndNewlines[linesAndNewlines.length - 1]) { + linesAndNewlines.pop(); + } + // Merge the content and line separators into single tokens + for (var i = 0; i < linesAndNewlines.length; i++) { + var line = linesAndNewlines[i]; + if (i % 2 && !options.newlineIsToken) { + retLines[retLines.length - 1] += line; + } + else { + retLines.push(line); + } + } + return retLines; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/diff/sentence.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/diff/sentence.js new file mode 100644 index 0000000000000000000000000000000000000000..dac837fbdc90a32d3394c23864c15b0b5923dbce --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/diff/sentence.js @@ -0,0 +1,67 @@ +"use strict"; +var __extends = (this && this.__extends) || (function () { + var extendStatics = function (d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); + }; + return function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; +})(); +Object.defineProperty(exports, "__esModule", { value: true }); +exports.sentenceDiff = void 0; +exports.diffSentences = diffSentences; +var base_js_1 = require("./base.js"); +function isSentenceEndPunct(char) { + return char == '.' || char == '!' 
|| char == '?'; +} +var SentenceDiff = /** @class */ (function (_super) { + __extends(SentenceDiff, _super); + function SentenceDiff() { + return _super !== null && _super.apply(this, arguments) || this; + } + SentenceDiff.prototype.tokenize = function (value) { + var _a; + // If in future we drop support for environments that don't support lookbehinds, we can replace + // this entire function with: + // return value.split(/(?<=[.!?])(\s+|$)/); + // but until then, for similar reasons to the trailingWs function in string.ts, we are forced + // to do this verbosely "by hand" instead of using a regex. + var result = []; + var tokenStartI = 0; + for (var i = 0; i < value.length; i++) { + if (i == value.length - 1) { + result.push(value.slice(tokenStartI)); + break; + } + if (isSentenceEndPunct(value[i]) && value[i + 1].match(/\s/)) { + // We've hit a sentence break - i.e. a punctuation mark followed by whitespace. + // We now want to push TWO tokens to the result: + // 1. the sentence + result.push(value.slice(tokenStartI, i + 1)); + // 2. the whitespace + i = tokenStartI = i + 1; + while ((_a = value[i + 1]) === null || _a === void 0 ? void 0 : _a.match(/\s/)) { + i++; + } + result.push(value.slice(tokenStartI, i + 1)); + // Then the next token (a sentence) starts on the character after the whitespace. + // (It's okay if this is off the end of the string - then the outer loop will terminate + // here anyway.) + tokenStartI = i + 1; + } + } + return result; + }; + return SentenceDiff; +}(base_js_1.default)); +exports.sentenceDiff = new SentenceDiff(); +function diffSentences(oldStr, newStr, options) { + return exports.sentenceDiff.diff(oldStr, newStr, options); +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/diff/word.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/diff/word.js new file mode 100644 index 0000000000000000000000000000000000000000..8c76eb2691a644c405dfde6929f8cc42c4145249 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/diff/word.js @@ -0,0 +1,307 @@ +"use strict"; +var __extends = (this && this.__extends) || (function () { + var extendStatics = function (d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); + }; + return function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __()); + }; +})(); +Object.defineProperty(exports, "__esModule", { value: true }); +exports.wordsWithSpaceDiff = exports.wordDiff = void 0; +exports.diffWords = diffWords; +exports.diffWordsWithSpace = diffWordsWithSpace; +var base_js_1 = require("./base.js"); +var string_js_1 = require("../util/string.js"); +// Based on https://en.wikipedia.org/wiki/Latin_script_in_Unicode +// +// Ranges and exceptions: +// Latin-1 Supplement, 0080–00FF +// - U+00D7 × Multiplication sign +// - U+00F7 ÷ Division sign +// Latin Extended-A, 0100–017F +// Latin Extended-B, 0180–024F +// IPA Extensions, 0250–02AF +// Spacing Modifier Letters, 02B0–02FF +// - U+02C7 ˇ ˇ Caron +// - U+02D8 ˘ ˘ Breve +// - U+02D9 ˙ ˙ Dot Above +// - U+02DA ˚ ˚ Ring Above +// - U+02DB ˛ ˛ Ogonek +// - U+02DC ˜ ˜ Small Tilde +// - U+02DD ˝ ˝ Double Acute Accent +// Latin Extended Additional, 1E00–1EFF +var extendedWordChars = 'a-zA-Z0-9_\\u{C0}-\\u{FF}\\u{D8}-\\u{F6}\\u{F8}-\\u{2C6}\\u{2C8}-\\u{2D7}\\u{2DE}-\\u{2FF}\\u{1E00}-\\u{1EFF}'; +// Each token is one of the following: +// - A punctuation mark plus the surrounding whitespace +// - A word plus the surrounding whitespace +// - Pure whitespace (but only in the special case where this the entire text +// is just whitespace) +// +// We have to include surrounding whitespace in the tokens because the two +// alternative approaches produce horribly broken results: +// * If we just discard the whitespace, we can't fully reproduce the original +// text from the sequence of tokens and any attempt to render the diff will +// get the whitespace wrong. +// * If we have separate tokens for whitespace, then in a typical text every +// second token will be a single space character. But this often results in +// the optimal diff between two texts being a perverse one that preserves +// the spaces between words but deletes and reinserts actual common words. +// See https://github.com/kpdecker/jsdiff/issues/160#issuecomment-1866099640 +// for an example. +// +// Keeping the surrounding whitespace of course has implications for .equals +// and .join, not just .tokenize. +// This regex does NOT fully implement the tokenization rules described above. +// Instead, it gives runs of whitespace their own "token". The tokenize method +// then handles stitching whitespace tokens onto adjacent word or punctuation +// tokens. 
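+ // (Editor's sketch of the stitching described above, traced from the tokenize
+ // method below rather than taken from the original comments: tokenize('foo bar baz')
+ // yields ['foo ', ' bar ', ' baz'] - each interior space ends one token and begins
+ // the next - and join() strips the duplicated leading whitespace from every token
+ // after the first, so the original text round-trips exactly.)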
+var tokenizeIncludingWhitespace = new RegExp("[".concat(extendedWordChars, "]+|\\s+|[^").concat(extendedWordChars, "]"), 'ug'); +var WordDiff = /** @class */ (function (_super) { + __extends(WordDiff, _super); + function WordDiff() { + return _super !== null && _super.apply(this, arguments) || this; + } + WordDiff.prototype.equals = function (left, right, options) { + if (options.ignoreCase) { + left = left.toLowerCase(); + right = right.toLowerCase(); + } + return left.trim() === right.trim(); + }; + WordDiff.prototype.tokenize = function (value, options) { + if (options === void 0) { options = {}; } + var parts; + if (options.intlSegmenter) { + var segmenter = options.intlSegmenter; + if (segmenter.resolvedOptions().granularity != 'word') { + throw new Error('The segmenter passed must have a granularity of "word"'); + } + parts = Array.from(segmenter.segment(value), function (segment) { return segment.segment; }); + } + else { + parts = value.match(tokenizeIncludingWhitespace) || []; + } + var tokens = []; + var prevPart = null; + parts.forEach(function (part) { + if ((/\s/).test(part)) { + if (prevPart == null) { + tokens.push(part); + } + else { + tokens.push(tokens.pop() + part); + } + } + else if (prevPart != null && (/\s/).test(prevPart)) { + if (tokens[tokens.length - 1] == prevPart) { + tokens.push(tokens.pop() + part); + } + else { + tokens.push(prevPart + part); + } + } + else { + tokens.push(part); + } + prevPart = part; + }); + return tokens; + }; + WordDiff.prototype.join = function (tokens) { + // Tokens being joined here will always have appeared consecutively in the + // same text, so we can simply strip off the leading whitespace from all the + // tokens except the first (and except any whitespace-only tokens - but such + // a token will always be the first and only token anyway) and then join them + // and the whitespace around words and punctuation will end up correct. + return tokens.map(function (token, i) { + if (i == 0) { + return token; + } + else { + return token.replace((/^\s+/), ''); + } + }).join(''); + }; + WordDiff.prototype.postProcess = function (changes, options) { + if (!changes || options.oneChangePerToken) { + return changes; + } + var lastKeep = null; + // Change objects representing any insertion or deletion since the last + // "keep" change object. There can be at most one of each. + var insertion = null; + var deletion = null; + changes.forEach(function (change) { + if (change.added) { + insertion = change; + } + else if (change.removed) { + deletion = change; + } + else { + if (insertion || deletion) { // May be false at start of text + dedupeWhitespaceInChangeObjects(lastKeep, deletion, insertion, change); + } + lastKeep = change; + insertion = null; + deletion = null; + } + }); + if (insertion || deletion) { + dedupeWhitespaceInChangeObjects(lastKeep, deletion, insertion, null); + } + return changes; + }; + return WordDiff; +}(base_js_1.default)); +exports.wordDiff = new WordDiff(); +function diffWords(oldStr, newStr, options) { + // This option has never been documented and never will be (it's clearer to + // just call `diffWordsWithSpace` directly if you need that behavior), but + // has existed in jsdiff for a long time, so we retain support for it here + // for the sake of backwards compatibility. + if ((options === null || options === void 0 ? 
void 0 : options.ignoreWhitespace) != null && !options.ignoreWhitespace) { + return diffWordsWithSpace(oldStr, newStr, options); + } + return exports.wordDiff.diff(oldStr, newStr, options); +} +function dedupeWhitespaceInChangeObjects(startKeep, deletion, insertion, endKeep) { + // Before returning, we tidy up the leading and trailing whitespace of the + // change objects to eliminate cases where trailing whitespace in one object + // is repeated as leading whitespace in the next. + // Below are examples of the outcomes we want here to explain the code. + // I=insert, K=keep, D=delete + // 1. diffing 'foo bar baz' vs 'foo baz' + // Prior to cleanup, we have K:'foo ' D:' bar ' K:' baz' + // After cleanup, we want: K:'foo ' D:'bar ' K:'baz' + // + // 2. Diffing 'foo bar baz' vs 'foo qux baz' + // Prior to cleanup, we have K:'foo ' D:' bar ' I:' qux ' K:' baz' + // After cleanup, we want K:'foo ' D:'bar' I:'qux' K:' baz' + // + // 3. Diffing 'foo\nbar baz' vs 'foo baz' + // Prior to cleanup, we have K:'foo ' D:'\nbar ' K:' baz' + // After cleanup, we want K'foo' D:'\nbar' K:' baz' + // + // 4. Diffing 'foo baz' vs 'foo\nbar baz' + // Prior to cleanup, we have K:'foo\n' I:'\nbar ' K:' baz' + // After cleanup, we ideally want K'foo' I:'\nbar' K:' baz' + // but don't actually manage this currently (the pre-cleanup change + // objects don't contain enough information to make it possible). + // + // 5. Diffing 'foo bar baz' vs 'foo baz' + // Prior to cleanup, we have K:'foo ' D:' bar ' K:' baz' + // After cleanup, we want K:'foo ' D:' bar ' K:'baz' + // + // Our handling is unavoidably imperfect in the case where there's a single + // indel between keeps and the whitespace has changed. For instance, consider + // diffing 'foo\tbar\nbaz' vs 'foo baz'. Unless we create an extra change + // object to represent the insertion of the space character (which isn't even + // a token), we have no way to avoid losing information about the texts' + // original whitespace in the result we return. Still, we do our best to + // output something that will look sensible if we e.g. print it with + // insertions in green and deletions in red. + // Between two "keep" change objects (or before the first or after the last + // change object), we can have either: + // * A "delete" followed by an "insert" + // * Just an "insert" + // * Just a "delete" + // We handle the three cases separately. 
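+ // (Editor's sketch of case 1 above as it flows through this function: for
+ // diffWords('foo bar baz', 'foo baz') there is a deletion but no insertion, so
+ // the startKeep && endKeep branch below trims the duplicated spaces, leaving
+ // K:'foo ' D:'bar ' K:'baz'.)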
+ if (deletion && insertion) { + var oldWsPrefix = (0, string_js_1.leadingWs)(deletion.value); + var oldWsSuffix = (0, string_js_1.trailingWs)(deletion.value); + var newWsPrefix = (0, string_js_1.leadingWs)(insertion.value); + var newWsSuffix = (0, string_js_1.trailingWs)(insertion.value); + if (startKeep) { + var commonWsPrefix = (0, string_js_1.longestCommonPrefix)(oldWsPrefix, newWsPrefix); + startKeep.value = (0, string_js_1.replaceSuffix)(startKeep.value, newWsPrefix, commonWsPrefix); + deletion.value = (0, string_js_1.removePrefix)(deletion.value, commonWsPrefix); + insertion.value = (0, string_js_1.removePrefix)(insertion.value, commonWsPrefix); + } + if (endKeep) { + var commonWsSuffix = (0, string_js_1.longestCommonSuffix)(oldWsSuffix, newWsSuffix); + endKeep.value = (0, string_js_1.replacePrefix)(endKeep.value, newWsSuffix, commonWsSuffix); + deletion.value = (0, string_js_1.removeSuffix)(deletion.value, commonWsSuffix); + insertion.value = (0, string_js_1.removeSuffix)(insertion.value, commonWsSuffix); + } + } + else if (insertion) { + // The whitespaces all reflect what was in the new text rather than + // the old, so we essentially have no information about whitespace + // insertion or deletion. We just want to dedupe the whitespace. + // We do that by having each change object keep its trailing + // whitespace and deleting duplicate leading whitespace where + // present. + if (startKeep) { + var ws = (0, string_js_1.leadingWs)(insertion.value); + insertion.value = insertion.value.substring(ws.length); + } + if (endKeep) { + var ws = (0, string_js_1.leadingWs)(endKeep.value); + endKeep.value = endKeep.value.substring(ws.length); + } + // otherwise we've got a deletion and no insertion + } + else if (startKeep && endKeep) { + var newWsFull = (0, string_js_1.leadingWs)(endKeep.value), delWsStart = (0, string_js_1.leadingWs)(deletion.value), delWsEnd = (0, string_js_1.trailingWs)(deletion.value); + // Any whitespace that comes straight after startKeep in both the old and + // new texts, assign to startKeep and remove from the deletion. + var newWsStart = (0, string_js_1.longestCommonPrefix)(newWsFull, delWsStart); + deletion.value = (0, string_js_1.removePrefix)(deletion.value, newWsStart); + // Any whitespace that comes straight before endKeep in both the old and + // new texts, and hasn't already been assigned to startKeep, assign to + // endKeep and remove from the deletion. + var newWsEnd = (0, string_js_1.longestCommonSuffix)((0, string_js_1.removePrefix)(newWsFull, newWsStart), delWsEnd); + deletion.value = (0, string_js_1.removeSuffix)(deletion.value, newWsEnd); + endKeep.value = (0, string_js_1.replacePrefix)(endKeep.value, newWsFull, newWsEnd); + // If there's any whitespace from the new text that HASN'T already been + // assigned, assign it to the start: + startKeep.value = (0, string_js_1.replaceSuffix)(startKeep.value, newWsFull, newWsFull.slice(0, newWsFull.length - newWsEnd.length)); + } + else if (endKeep) { + // We are at the start of the text. Preserve all the whitespace on + // endKeep, and just remove whitespace from the end of deletion to the + // extent that it overlaps with the start of endKeep. + var endKeepWsPrefix = (0, string_js_1.leadingWs)(endKeep.value); + var deletionWsSuffix = (0, string_js_1.trailingWs)(deletion.value); + var overlap = (0, string_js_1.maximumOverlap)(deletionWsSuffix, endKeepWsPrefix); + deletion.value = (0, string_js_1.removeSuffix)(deletion.value, overlap); + } + else if (startKeep) { + // We are at the END of the text. 
Preserve all the whitespace on + // startKeep, and just remove whitespace from the start of deletion to + // the extent that it overlaps with the end of startKeep. + var startKeepWsSuffix = (0, string_js_1.trailingWs)(startKeep.value); + var deletionWsPrefix = (0, string_js_1.leadingWs)(deletion.value); + var overlap = (0, string_js_1.maximumOverlap)(startKeepWsSuffix, deletionWsPrefix); + deletion.value = (0, string_js_1.removePrefix)(deletion.value, overlap); + } +} +var WordsWithSpaceDiff = /** @class */ (function (_super) { + __extends(WordsWithSpaceDiff, _super); + function WordsWithSpaceDiff() { + return _super !== null && _super.apply(this, arguments) || this; + } + WordsWithSpaceDiff.prototype.tokenize = function (value) { + // Slightly different to the tokenizeIncludingWhitespace regex used above in + // that this one treats each individual newline as a distinct tokens, rather + // than merging them into other surrounding whitespace. This was requested + // in https://github.com/kpdecker/jsdiff/issues/180 & + // https://github.com/kpdecker/jsdiff/issues/211 + var regex = new RegExp("(\\r?\\n)|[".concat(extendedWordChars, "]+|[^\\S\\n\\r]+|[^").concat(extendedWordChars, "]"), 'ug'); + return value.match(regex) || []; + }; + return WordsWithSpaceDiff; +}(base_js_1.default)); +exports.wordsWithSpaceDiff = new WordsWithSpaceDiff(); +function diffWordsWithSpace(oldStr, newStr, options) { + return exports.wordsWithSpaceDiff.diff(oldStr, newStr, options); +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/index.js new file mode 100644 index 0000000000000000000000000000000000000000..e07c46b0dd40469c36ef59a30dc490b132630fa4 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/index.js @@ -0,0 +1,61 @@ +"use strict"; +/* See LICENSE file for terms of use */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.canonicalize = exports.convertChangesToXML = exports.convertChangesToDMP = exports.reversePatch = exports.parsePatch = exports.applyPatches = exports.applyPatch = exports.formatPatch = exports.createPatch = exports.createTwoFilesPatch = exports.structuredPatch = exports.arrayDiff = exports.diffArrays = exports.jsonDiff = exports.diffJson = exports.cssDiff = exports.diffCss = exports.sentenceDiff = exports.diffSentences = exports.diffTrimmedLines = exports.lineDiff = exports.diffLines = exports.wordsWithSpaceDiff = exports.diffWordsWithSpace = exports.wordDiff = exports.diffWords = exports.characterDiff = exports.diffChars = exports.Diff = void 0; +/* + * Text diff implementation. + * + * This library supports the following APIs: + * Diff.diffChars: Character by character diff + * Diff.diffWords: Word (as defined by \b regex) diff which ignores whitespace + * Diff.diffLines: Line based diff + * + * Diff.diffCss: Diff targeted at CSS content + * + * These methods are based on the implementation proposed in + * "An O(ND) Difference Algorithm and its Variations" (Myers, 1986). 
+ * http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.4.6927 + */ +var base_js_1 = require("./diff/base.js"); +exports.Diff = base_js_1.default; +var character_js_1 = require("./diff/character.js"); +Object.defineProperty(exports, "diffChars", { enumerable: true, get: function () { return character_js_1.diffChars; } }); +Object.defineProperty(exports, "characterDiff", { enumerable: true, get: function () { return character_js_1.characterDiff; } }); +var word_js_1 = require("./diff/word.js"); +Object.defineProperty(exports, "diffWords", { enumerable: true, get: function () { return word_js_1.diffWords; } }); +Object.defineProperty(exports, "diffWordsWithSpace", { enumerable: true, get: function () { return word_js_1.diffWordsWithSpace; } }); +Object.defineProperty(exports, "wordDiff", { enumerable: true, get: function () { return word_js_1.wordDiff; } }); +Object.defineProperty(exports, "wordsWithSpaceDiff", { enumerable: true, get: function () { return word_js_1.wordsWithSpaceDiff; } }); +var line_js_1 = require("./diff/line.js"); +Object.defineProperty(exports, "diffLines", { enumerable: true, get: function () { return line_js_1.diffLines; } }); +Object.defineProperty(exports, "diffTrimmedLines", { enumerable: true, get: function () { return line_js_1.diffTrimmedLines; } }); +Object.defineProperty(exports, "lineDiff", { enumerable: true, get: function () { return line_js_1.lineDiff; } }); +var sentence_js_1 = require("./diff/sentence.js"); +Object.defineProperty(exports, "diffSentences", { enumerable: true, get: function () { return sentence_js_1.diffSentences; } }); +Object.defineProperty(exports, "sentenceDiff", { enumerable: true, get: function () { return sentence_js_1.sentenceDiff; } }); +var css_js_1 = require("./diff/css.js"); +Object.defineProperty(exports, "diffCss", { enumerable: true, get: function () { return css_js_1.diffCss; } }); +Object.defineProperty(exports, "cssDiff", { enumerable: true, get: function () { return css_js_1.cssDiff; } }); +var json_js_1 = require("./diff/json.js"); +Object.defineProperty(exports, "diffJson", { enumerable: true, get: function () { return json_js_1.diffJson; } }); +Object.defineProperty(exports, "canonicalize", { enumerable: true, get: function () { return json_js_1.canonicalize; } }); +Object.defineProperty(exports, "jsonDiff", { enumerable: true, get: function () { return json_js_1.jsonDiff; } }); +var array_js_1 = require("./diff/array.js"); +Object.defineProperty(exports, "diffArrays", { enumerable: true, get: function () { return array_js_1.diffArrays; } }); +Object.defineProperty(exports, "arrayDiff", { enumerable: true, get: function () { return array_js_1.arrayDiff; } }); +var apply_js_1 = require("./patch/apply.js"); +Object.defineProperty(exports, "applyPatch", { enumerable: true, get: function () { return apply_js_1.applyPatch; } }); +Object.defineProperty(exports, "applyPatches", { enumerable: true, get: function () { return apply_js_1.applyPatches; } }); +var parse_js_1 = require("./patch/parse.js"); +Object.defineProperty(exports, "parsePatch", { enumerable: true, get: function () { return parse_js_1.parsePatch; } }); +var reverse_js_1 = require("./patch/reverse.js"); +Object.defineProperty(exports, "reversePatch", { enumerable: true, get: function () { return reverse_js_1.reversePatch; } }); +var create_js_1 = require("./patch/create.js"); +Object.defineProperty(exports, "structuredPatch", { enumerable: true, get: function () { return create_js_1.structuredPatch; } }); +Object.defineProperty(exports, 
"createTwoFilesPatch", { enumerable: true, get: function () { return create_js_1.createTwoFilesPatch; } }); +Object.defineProperty(exports, "createPatch", { enumerable: true, get: function () { return create_js_1.createPatch; } }); +Object.defineProperty(exports, "formatPatch", { enumerable: true, get: function () { return create_js_1.formatPatch; } }); +var dmp_js_1 = require("./convert/dmp.js"); +Object.defineProperty(exports, "convertChangesToDMP", { enumerable: true, get: function () { return dmp_js_1.convertChangesToDMP; } }); +var xml_js_1 = require("./convert/xml.js"); +Object.defineProperty(exports, "convertChangesToXML", { enumerable: true, get: function () { return xml_js_1.convertChangesToXML; } }); diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/package.json new file mode 100644 index 0000000000000000000000000000000000000000..731cf3f1d319dbfdd3c62ec8fe955d732f3d27a0 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/package.json @@ -0,0 +1 @@ +{"type":"commonjs","sideEffects":false} \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/patch/apply.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/patch/apply.js new file mode 100644 index 0000000000000000000000000000000000000000..4f49c7c6d08b48b1b89eb0ec787c59432288a39b --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/patch/apply.js @@ -0,0 +1,267 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.applyPatch = applyPatch; +exports.applyPatches = applyPatches; +var string_js_1 = require("../util/string.js"); +var line_endings_js_1 = require("./line-endings.js"); +var parse_js_1 = require("./parse.js"); +var distance_iterator_js_1 = require("../util/distance-iterator.js"); +/** + * attempts to apply a unified diff patch. + * + * Hunks are applied first to last. + * `applyPatch` first tries to apply the first hunk at the line number specified in the hunk header, and with all context lines matching exactly. + * If that fails, it tries scanning backwards and forwards, one line at a time, to find a place to apply the hunk where the context lines match exactly. + * If that still fails, and `fuzzFactor` is greater than zero, it increments the maximum number of mismatches (missing, extra, or changed context lines) that there can be between the hunk context and a region where we are trying to apply the patch such that the hunk will still be considered to match. + * Regardless of `fuzzFactor`, lines to be deleted in the hunk *must* be present for a hunk to match, and the context lines *immediately* before and after an insertion must match exactly. + * + * Once a hunk is successfully fitted, the process begins again with the next hunk. + * Regardless of `fuzzFactor`, later hunks must be applied later in the file than earlier hunks. + * + * If a hunk cannot be successfully fitted *anywhere* with fewer than `fuzzFactor` mismatches, `applyPatch` fails and returns `false`. + * + * If a hunk is successfully fitted but not at the line number specified by the hunk header, all subsequent hunks have their target line number adjusted accordingly. + * (e.g. 
if the first hunk is applied 10 lines below where the hunk header said it should fit, `applyPatch` will *start* looking for somewhere to apply the second hunk 10 lines below where its hunk header says it goes.) + * + * If the patch was applied successfully, returns a string containing the patched text. + * If the patch could not be applied (because some hunks in the patch couldn't be fitted to the text in `source`), `applyPatch` returns false. + * + * @param patch a string diff or the output from the `parsePatch` or `structuredPatch` methods. + */ +function applyPatch(source, patch, options) { + if (options === void 0) { options = {}; } + var patches; + if (typeof patch === 'string') { + patches = (0, parse_js_1.parsePatch)(patch); + } + else if (Array.isArray(patch)) { + patches = patch; + } + else { + patches = [patch]; + } + if (patches.length > 1) { + throw new Error('applyPatch only works with a single input.'); + } + return applyStructuredPatch(source, patches[0], options); +} +function applyStructuredPatch(source, patch, options) { + if (options === void 0) { options = {}; } + if (options.autoConvertLineEndings || options.autoConvertLineEndings == null) { + if ((0, string_js_1.hasOnlyWinLineEndings)(source) && (0, line_endings_js_1.isUnix)(patch)) { + patch = (0, line_endings_js_1.unixToWin)(patch); + } + else if ((0, string_js_1.hasOnlyUnixLineEndings)(source) && (0, line_endings_js_1.isWin)(patch)) { + patch = (0, line_endings_js_1.winToUnix)(patch); + } + } + // Apply the diff to the input + var lines = source.split('\n'), hunks = patch.hunks, compareLine = options.compareLine || (function (lineNumber, line, operation, patchContent) { return line === patchContent; }), fuzzFactor = options.fuzzFactor || 0; + var minLine = 0; + if (fuzzFactor < 0 || !Number.isInteger(fuzzFactor)) { + throw new Error('fuzzFactor must be a non-negative integer'); + } + // Special case for empty patch. + if (!hunks.length) { + return source; + } + // Before anything else, handle EOFNL insertion/removal. If the patch tells us to make a change + // to the EOFNL that is redundant/impossible - i.e. to remove a newline that's not there, or add a + // newline that already exists - then we either return false and fail to apply the patch (if + // fuzzFactor is 0) or simply ignore the problem and do nothing (if fuzzFactor is >0). + // If we do need to remove/add a newline at EOF, this will always be in the final hunk: + var prevLine = '', removeEOFNL = false, addEOFNL = false; + for (var i = 0; i < hunks[hunks.length - 1].lines.length; i++) { + var line = hunks[hunks.length - 1].lines[i]; + if (line[0] == '\\') { + if (prevLine[0] == '+') { + removeEOFNL = true; + } + else if (prevLine[0] == '-') { + addEOFNL = true; + } + } + prevLine = line; + } + if (removeEOFNL) { + if (addEOFNL) { + // This means the final line gets changed but doesn't have a trailing newline in either the + // original or patched version. In that case, we do nothing if fuzzFactor > 0, and if + // fuzzFactor is 0, we simply validate that the source file has no trailing newline. 
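+            // (Editor's note, illustrative: this branch is reached when the final hunk ends with
+            //     -old last line
+            //     \ No newline at end of file
+            //     +new last line
+            //     \ No newline at end of file
+            // i.e. neither the old nor the new version ends with a newline, so both flags are set.)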
+            if (!fuzzFactor && lines[lines.length - 1] == '') {
+                return false;
+            }
+        }
+        else if (lines[lines.length - 1] == '') {
+            lines.pop();
+        }
+        else if (!fuzzFactor) {
+            return false;
+        }
+    }
+    else if (addEOFNL) {
+        if (lines[lines.length - 1] != '') {
+            lines.push('');
+        }
+        else if (!fuzzFactor) {
+            return false;
+        }
+    }
+    /**
+     * Checks if the hunk can be made to fit at the provided location with at most `maxErrors`
+     * insertions, substitutions, or deletions, while ensuring also that:
+     * - lines deleted in the hunk match exactly, and
+     * - wherever an insertion operation or block of insertion operations appears in the hunk, the
+     *   immediately preceding and following lines of context match exactly
+     *
+     * `toPos` should be set such that lines[toPos] is meant to match hunkLines[0].
+     *
+     * If the hunk can be applied, returns an object with properties `oldLineLastI` and
+     * `patchedLines`. Otherwise, returns null.
+     */
+    function applyHunk(hunkLines, toPos, maxErrors, hunkLinesI, lastContextLineMatched, patchedLines, patchedLinesLength) {
+        if (hunkLinesI === void 0) { hunkLinesI = 0; }
+        if (lastContextLineMatched === void 0) { lastContextLineMatched = true; }
+        if (patchedLines === void 0) { patchedLines = []; }
+        if (patchedLinesLength === void 0) { patchedLinesLength = 0; }
+        var nConsecutiveOldContextLines = 0;
+        var nextContextLineMustMatch = false;
+        for (; hunkLinesI < hunkLines.length; hunkLinesI++) {
+            var hunkLine = hunkLines[hunkLinesI], operation = (hunkLine.length > 0 ? hunkLine[0] : ' '), content = (hunkLine.length > 0 ? hunkLine.substr(1) : hunkLine);
+            if (operation === '-') {
+                if (compareLine(toPos + 1, lines[toPos], operation, content)) {
+                    toPos++;
+                    nConsecutiveOldContextLines = 0;
+                }
+                else {
+                    if (!maxErrors || lines[toPos] == null) {
+                        return null;
+                    }
+                    patchedLines[patchedLinesLength] = lines[toPos];
+                    return applyHunk(hunkLines, toPos + 1, maxErrors - 1, hunkLinesI, false, patchedLines, patchedLinesLength + 1);
+                }
+            }
+            if (operation === '+') {
+                if (!lastContextLineMatched) {
+                    return null;
+                }
+                patchedLines[patchedLinesLength] = content;
+                patchedLinesLength++;
+                nConsecutiveOldContextLines = 0;
+                nextContextLineMustMatch = true;
+            }
+            if (operation === ' ') {
+                nConsecutiveOldContextLines++;
+                patchedLines[patchedLinesLength] = lines[toPos];
+                if (compareLine(toPos + 1, lines[toPos], operation, content)) {
+                    patchedLinesLength++;
+                    lastContextLineMatched = true;
+                    nextContextLineMustMatch = false;
+                    toPos++;
+                }
+                else {
+                    if (nextContextLineMustMatch || !maxErrors) {
+                        return null;
+                    }
+                    // Consider 3 possibilities in sequence:
+                    // 1. lines contains a *substitution* not included in the patch context, or
+                    // 2. lines contains an *insertion* not included in the patch context, or
+                    // 3. lines contains a *deletion* not included in the patch context
+                    // The first two options are of course only possible if the line from lines is non-null -
+                    // i.e. only option 3 is possible if we've overrun the end of the old file.
+                    return (lines[toPos] && (applyHunk(hunkLines, toPos + 1, maxErrors - 1, hunkLinesI + 1, false, patchedLines, patchedLinesLength + 1) || applyHunk(hunkLines, toPos + 1, maxErrors - 1, hunkLinesI, false, patchedLines, patchedLinesLength + 1)) || applyHunk(hunkLines, toPos, maxErrors - 1, hunkLinesI + 1, false, patchedLines, patchedLinesLength));
+                }
+            }
+        }
+        // Before returning, trim any unmodified context lines off the end of patchedLines and reduce
+        // toPos (and thus oldLineLastI) accordingly. This allows later hunks to be applied to a region
+        // that starts in this hunk's trailing context.
+        patchedLinesLength -= nConsecutiveOldContextLines;
+        toPos -= nConsecutiveOldContextLines;
+        patchedLines.length = patchedLinesLength;
+        return {
+            patchedLines: patchedLines,
+            oldLineLastI: toPos - 1
+        };
+    }
+    var resultLines = [];
+    // Search best fit offsets for each hunk based on the previous ones
+    var prevHunkOffset = 0;
+    for (var i = 0; i < hunks.length; i++) {
+        var hunk = hunks[i];
+        var hunkResult = void 0;
+        var maxLine = lines.length - hunk.oldLines + fuzzFactor;
+        var toPos = void 0;
+        for (var maxErrors = 0; maxErrors <= fuzzFactor; maxErrors++) {
+            toPos = hunk.oldStart + prevHunkOffset - 1;
+            var iterator = (0, distance_iterator_js_1.default)(toPos, minLine, maxLine);
+            for (; toPos !== undefined; toPos = iterator()) {
+                hunkResult = applyHunk(hunk.lines, toPos, maxErrors);
+                if (hunkResult) {
+                    break;
+                }
+            }
+            if (hunkResult) {
+                break;
+            }
+        }
+        if (!hunkResult) {
+            return false;
+        }
+        // Copy everything from the end of where we applied the last hunk to the start of this hunk
+        for (var i_1 = minLine; i_1 < toPos; i_1++) {
+            resultLines.push(lines[i_1]);
+        }
+        // Add the lines produced by applying the hunk:
+        for (var i_2 = 0; i_2 < hunkResult.patchedLines.length; i_2++) {
+            var line = hunkResult.patchedLines[i_2];
+            resultLines.push(line);
+        }
+        // Set lower text limit to end of the current hunk, so next ones don't try
+        // to fit over already patched text
+        minLine = hunkResult.oldLineLastI + 1;
+        // Note the offset between where the patch said the hunk should've applied and where we
+        // applied it, so we can adjust future hunks accordingly:
+        prevHunkOffset = toPos + 1 - hunk.oldStart;
+    }
+    // Copy over the rest of the lines from the old text
+    for (var i = minLine; i < lines.length; i++) {
+        resultLines.push(lines[i]);
+    }
+    return resultLines.join('\n');
+}
+/**
+ * applies one or more patches.
+ *
+ * `patch` may be either an array of structured patch objects, or a string representing a patch in unified diff format (which may patch one or more files).
+ *
+ * This method will iterate over the contents of the patch and apply them to data provided through callbacks. The general flow for each patch index is:
+ *
+ * - `options.loadFile(index, callback)` is called. The caller should then load the contents of the file and pass that to the `callback(err, data)` callback. Passing an `err` will terminate further patch execution.
+ * - `options.patched(index, content, callback)` is called once the patch has been applied. `content` will be the return value from `applyPatch`. When it's ready, the caller should then call the `callback(err)` callback. Passing an `err` will terminate further patch execution.
+ *
+ * Once all patches have been applied or an error occurs, the `options.complete(err)` callback is called.
+ */
+function applyPatches(uniDiff, options) {
+    var spDiff = typeof uniDiff === 'string' ?
(0, parse_js_1.parsePatch)(uniDiff) : uniDiff; + var currentIndex = 0; + function processIndex() { + var index = spDiff[currentIndex++]; + if (!index) { + return options.complete(); + } + options.loadFile(index, function (err, data) { + if (err) { + return options.complete(err); + } + var updatedContent = applyPatch(data, index, options); + options.patched(index, updatedContent, function (err) { + if (err) { + return options.complete(err); + } + processIndex(); + }); + }); + } + processIndex(); +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/patch/create.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/patch/create.js new file mode 100644 index 0000000000000000000000000000000000000000..0f0a9ee72392832a9a48c5674a419236f207c484 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/patch/create.js @@ -0,0 +1,223 @@ +"use strict"; +var __assign = (this && this.__assign) || function () { + __assign = Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; + }; + return __assign.apply(this, arguments); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.structuredPatch = structuredPatch; +exports.formatPatch = formatPatch; +exports.createTwoFilesPatch = createTwoFilesPatch; +exports.createPatch = createPatch; +var line_js_1 = require("../diff/line.js"); +function structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, options) { + var optionsObj; + if (!options) { + optionsObj = {}; + } + else if (typeof options === 'function') { + optionsObj = { callback: options }; + } + else { + optionsObj = options; + } + if (typeof optionsObj.context === 'undefined') { + optionsObj.context = 4; + } + // We copy this into its own variable to placate TypeScript, which thinks + // optionsObj.context might be undefined in the callbacks below. + var context = optionsObj.context; + // @ts-expect-error (runtime check for something that is correctly a static type error) + if (optionsObj.newlineIsToken) { + throw new Error('newlineIsToken may not be used with patch-generation functions, only with diffing functions'); + } + if (!optionsObj.callback) { + return diffLinesResultToPatch((0, line_js_1.diffLines)(oldStr, newStr, optionsObj)); + } + else { + var callback_1 = optionsObj.callback; + (0, line_js_1.diffLines)(oldStr, newStr, __assign(__assign({}, optionsObj), { callback: function (diff) { + var patch = diffLinesResultToPatch(diff); + // TypeScript is unhappy without the cast because it does not understand that `patch` may + // be undefined here only if `callback` is StructuredPatchCallbackAbortable: + callback_1(patch); + } })); + } + function diffLinesResultToPatch(diff) { + // STEP 1: Build up the patch with no "\ No newline at end of file" lines and with the arrays + // of lines containing trailing newline characters. We'll tidy up later... 
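+        // (Editor's note: each hunk assembled below has the shape
+        //     { oldStart, oldLines, newStart, newLines, lines: [' context', '-removed', '+added', ...] }
+        // matching the object literal pushed onto `hunks` further down.)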
+ if (!diff) { + return; + } + diff.push({ value: '', lines: [] }); // Append an empty value to make cleanup easier + function contextLines(lines) { + return lines.map(function (entry) { return ' ' + entry; }); + } + var hunks = []; + var oldRangeStart = 0, newRangeStart = 0, curRange = [], oldLine = 1, newLine = 1; + for (var i = 0; i < diff.length; i++) { + var current = diff[i], lines = current.lines || splitLines(current.value); + current.lines = lines; + if (current.added || current.removed) { + // If we have previous context, start with that + if (!oldRangeStart) { + var prev = diff[i - 1]; + oldRangeStart = oldLine; + newRangeStart = newLine; + if (prev) { + curRange = context > 0 ? contextLines(prev.lines.slice(-context)) : []; + oldRangeStart -= curRange.length; + newRangeStart -= curRange.length; + } + } + // Output our changes + for (var _i = 0, lines_1 = lines; _i < lines_1.length; _i++) { + var line = lines_1[_i]; + curRange.push((current.added ? '+' : '-') + line); + } + // Track the updated file position + if (current.added) { + newLine += lines.length; + } + else { + oldLine += lines.length; + } + } + else { + // Identical context lines. Track line changes + if (oldRangeStart) { + // Close out any changes that have been output (or join overlapping) + if (lines.length <= context * 2 && i < diff.length - 2) { + // Overlapping + for (var _a = 0, _b = contextLines(lines); _a < _b.length; _a++) { + var line = _b[_a]; + curRange.push(line); + } + } + else { + // end the range and output + var contextSize = Math.min(lines.length, context); + for (var _c = 0, _d = contextLines(lines.slice(0, contextSize)); _c < _d.length; _c++) { + var line = _d[_c]; + curRange.push(line); + } + var hunk = { + oldStart: oldRangeStart, + oldLines: (oldLine - oldRangeStart + contextSize), + newStart: newRangeStart, + newLines: (newLine - newRangeStart + contextSize), + lines: curRange + }; + hunks.push(hunk); + oldRangeStart = 0; + newRangeStart = 0; + curRange = []; + } + } + oldLine += lines.length; + newLine += lines.length; + } + } + // Step 2: eliminate the trailing `\n` from each line of each hunk, and, where needed, add + // "\ No newline at end of file". + for (var _e = 0, hunks_1 = hunks; _e < hunks_1.length; _e++) { + var hunk = hunks_1[_e]; + for (var i = 0; i < hunk.lines.length; i++) { + if (hunk.lines[i].endsWith('\n')) { + hunk.lines[i] = hunk.lines[i].slice(0, -1); + } + else { + hunk.lines.splice(i + 1, 0, '\\ No newline at end of file'); + i++; // Skip the line we just added, then continue iterating + } + } + } + return { + oldFileName: oldFileName, newFileName: newFileName, + oldHeader: oldHeader, newHeader: newHeader, + hunks: hunks + }; + } +} +/** + * creates a unified diff patch. + * @param patch either a single structured patch object (as returned by `structuredPatch`) or an array of them (as returned by `parsePatch`) + */ +function formatPatch(patch) { + if (Array.isArray(patch)) { + return patch.map(formatPatch).join('\n'); + } + var ret = []; + if (patch.oldFileName == patch.newFileName) { + ret.push('Index: ' + patch.oldFileName); + } + ret.push('==================================================================='); + ret.push('--- ' + patch.oldFileName + (typeof patch.oldHeader === 'undefined' ? '' : '\t' + patch.oldHeader)); + ret.push('+++ ' + patch.newFileName + (typeof patch.newHeader === 'undefined' ? 
'' : '\t' + patch.newHeader)); + for (var i = 0; i < patch.hunks.length; i++) { + var hunk = patch.hunks[i]; + // Unified Diff Format quirk: If the chunk size is 0, + // the first number is one lower than one would expect. + // https://www.artima.com/weblogs/viewpost.jsp?thread=164293 + if (hunk.oldLines === 0) { + hunk.oldStart -= 1; + } + if (hunk.newLines === 0) { + hunk.newStart -= 1; + } + ret.push('@@ -' + hunk.oldStart + ',' + hunk.oldLines + + ' +' + hunk.newStart + ',' + hunk.newLines + + ' @@'); + for (var _i = 0, _a = hunk.lines; _i < _a.length; _i++) { + var line = _a[_i]; + ret.push(line); + } + } + return ret.join('\n') + '\n'; +} +function createTwoFilesPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, options) { + if (typeof options === 'function') { + options = { callback: options }; + } + if (!(options === null || options === void 0 ? void 0 : options.callback)) { + var patchObj = structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, options); + if (!patchObj) { + return; + } + return formatPatch(patchObj); + } + else { + var callback_2 = options.callback; + structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, __assign(__assign({}, options), { callback: function (patchObj) { + if (!patchObj) { + callback_2(undefined); + } + else { + callback_2(formatPatch(patchObj)); + } + } })); + } +} +function createPatch(fileName, oldStr, newStr, oldHeader, newHeader, options) { + return createTwoFilesPatch(fileName, fileName, oldStr, newStr, oldHeader, newHeader, options); +} +/** + * Split `text` into an array of lines, including the trailing newline character (where present) + */ +function splitLines(text) { + var hasTrailingNl = text.endsWith('\n'); + var result = text.split('\n').map(function (line) { return line + '\n'; }); + if (hasTrailingNl) { + result.pop(); + } + else { + result.push(result.pop().slice(0, -1)); + } + return result; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/patch/line-endings.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/patch/line-endings.js new file mode 100644 index 0000000000000000000000000000000000000000..be45f0c8a326f711b601ea1d64e4686fb904677e --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/patch/line-endings.js @@ -0,0 +1,61 @@ +"use strict"; +var __assign = (this && this.__assign) || function () { + __assign = Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; + }; + return __assign.apply(this, arguments); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.unixToWin = unixToWin; +exports.winToUnix = winToUnix; +exports.isUnix = isUnix; +exports.isWin = isWin; +function unixToWin(patch) { + if (Array.isArray(patch)) { + // It would be cleaner if instead of the line below we could just write + // return patch.map(unixToWin) + // but mysteriously TypeScript (v5.7.3 at the time of writing) does not like this and it will + // refuse to compile, thinking that unixToWin could then return StructuredPatch[][] and the + // result would be incompatible with the overload signatures. + // See bug report at https://github.com/microsoft/TypeScript/issues/61398. 
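+        // (Editor's note: whether applied to an array or a single patch, unixToWin appends '\r'
+        // to each hunk line except '\'-marker lines, lines already ending in '\r', and the line
+        // immediately before a '\ No newline at end of file' marker, as the mapping below shows.)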
+        return patch.map(function (p) { return unixToWin(p); });
+    }
+    return __assign(__assign({}, patch), { hunks: patch.hunks.map(function (hunk) { return (__assign(__assign({}, hunk), { lines: hunk.lines.map(function (line, i) {
+                    var _a;
+                    return (line.startsWith('\\') || line.endsWith('\r') || ((_a = hunk.lines[i + 1]) === null || _a === void 0 ? void 0 : _a.startsWith('\\')))
+                        ? line
+                        : line + '\r';
+                }) })); }) });
+}
+function winToUnix(patch) {
+    if (Array.isArray(patch)) {
+        // (See comment above equivalent line in unixToWin)
+        return patch.map(function (p) { return winToUnix(p); });
+    }
+    return __assign(__assign({}, patch), { hunks: patch.hunks.map(function (hunk) { return (__assign(__assign({}, hunk), { lines: hunk.lines.map(function (line) { return line.endsWith('\r') ? line.substring(0, line.length - 1) : line; }) })); }) });
+}
+/**
+ * Returns true if the patch consistently uses Unix line endings (or only involves one line and has
+ * no line endings).
+ */
+function isUnix(patch) {
+    if (!Array.isArray(patch)) {
+        patch = [patch];
+    }
+    return !patch.some(function (index) { return index.hunks.some(function (hunk) { return hunk.lines.some(function (line) { return !line.startsWith('\\') && line.endsWith('\r'); }); }); });
+}
+/**
+ * Returns true if the patch uses Windows line endings and only Windows line endings.
+ */
+function isWin(patch) {
+    if (!Array.isArray(patch)) {
+        patch = [patch];
+    }
+    return patch.some(function (index) { return index.hunks.some(function (hunk) { return hunk.lines.some(function (line) { return line.endsWith('\r'); }); }); })
+        && patch.every(function (index) { return index.hunks.every(function (hunk) { return hunk.lines.every(function (line, i) { var _a; return line.startsWith('\\') || line.endsWith('\r') || ((_a = hunk.lines[i + 1]) === null || _a === void 0 ? void 0 : _a.startsWith('\\')); }); }); });
+}
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/patch/parse.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/patch/parse.js
new file mode 100644
index 0000000000000000000000000000000000000000..247262032e34a079ae87dc8008b8d45840ea7ef4
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/patch/parse.js
@@ -0,0 +1,133 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.parsePatch = parsePatch;
+/**
+ * Parses a patch into structured data, in the same structure returned by `structuredPatch`.
+ *
+ * @return a JSON object representation of the patch, suitable for use with the `applyPatch` method.
+ */
+function parsePatch(uniDiff) {
+    var diffstr = uniDiff.split(/\n/), list = [];
+    var i = 0;
+    function parseIndex() {
+        var index = {};
+        list.push(index);
+        // Parse diff metadata
+        while (i < diffstr.length) {
+            var line = diffstr[i];
+            // File header found, end parsing diff metadata
+            if ((/^(---|\+\+\+|@@)\s/).test(line)) {
+                break;
+            }
+            // Diff index
+            var header = (/^(?:Index:|diff(?: -r \w+)+)\s+(.+?)\s*$/).exec(line);
+            if (header) {
+                index.index = header[1];
+            }
+            i++;
+        }
+        // Parse file headers if they are defined. Unified diff requires them, but
+        // there are no technical issues with having an isolated hunk without a file header
+        parseFileHeader(index);
+        parseFileHeader(index);
+        // Parse hunks
+        index.hunks = [];
+        while (i < diffstr.length) {
+            var line = diffstr[i];
+            if ((/^(Index:\s|diff\s|---\s|\+\+\+\s|===================================================================)/).test(line)) {
+                break;
+            }
+            else if ((/^@@/).test(line)) {
+                index.hunks.push(parseHunk());
+            }
+            else if (line) {
+                throw new Error('Unknown line ' + (i + 1) + ' ' + JSON.stringify(line));
+            }
+            else {
+                i++;
+            }
+        }
+    }
+    // Parses the --- and +++ headers; if none are found, no lines
+    // are consumed.
+    function parseFileHeader(index) {
+        var fileHeader = (/^(---|\+\+\+)\s+(.*)\r?$/).exec(diffstr[i]);
+        if (fileHeader) {
+            var data = fileHeader[2].split('\t', 2), header = (data[1] || '').trim();
+            var fileName = data[0].replace(/\\\\/g, '\\');
+            if ((/^".*"$/).test(fileName)) {
+                fileName = fileName.substr(1, fileName.length - 2);
+            }
+            if (fileHeader[1] === '---') {
+                index.oldFileName = fileName;
+                index.oldHeader = header;
+            }
+            else {
+                index.newFileName = fileName;
+                index.newHeader = header;
+            }
+            i++;
+        }
+    }
+    // Parses a hunk.
+    // This assumes that we are at the start of a hunk.
+    function parseHunk() {
+        var _a;
+        var chunkHeaderIndex = i, chunkHeaderLine = diffstr[i++], chunkHeader = chunkHeaderLine.split(/@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@/);
+        var hunk = {
+            oldStart: +chunkHeader[1],
+            oldLines: typeof chunkHeader[2] === 'undefined' ? 1 : +chunkHeader[2],
+            newStart: +chunkHeader[3],
+            newLines: typeof chunkHeader[4] === 'undefined' ? 1 : +chunkHeader[4],
+            lines: []
+        };
+        // Unified Diff Format quirk: If the chunk size is 0,
+        // the first number is one lower than one would expect.
+        // https://www.artima.com/weblogs/viewpost.jsp?thread=164293
+        if (hunk.oldLines === 0) {
+            hunk.oldStart += 1;
+        }
+        if (hunk.newLines === 0) {
+            hunk.newStart += 1;
+        }
+        var addCount = 0, removeCount = 0;
+        for (; i < diffstr.length && (removeCount < hunk.oldLines || addCount < hunk.newLines || ((_a = diffstr[i]) === null || _a === void 0 ? void 0 : _a.startsWith('\\'))); i++) {
+            var operation = (diffstr[i].length == 0 && i != (diffstr.length - 1)) ?
' ' : diffstr[i][0]; + if (operation === '+' || operation === '-' || operation === ' ' || operation === '\\') { + hunk.lines.push(diffstr[i]); + if (operation === '+') { + addCount++; + } + else if (operation === '-') { + removeCount++; + } + else if (operation === ' ') { + addCount++; + removeCount++; + } + } + else { + throw new Error("Hunk at line ".concat(chunkHeaderIndex + 1, " contained invalid line ").concat(diffstr[i])); + } + } + // Handle the empty block count case + if (!addCount && hunk.newLines === 1) { + hunk.newLines = 0; + } + if (!removeCount && hunk.oldLines === 1) { + hunk.oldLines = 0; + } + // Perform sanity checking + if (addCount !== hunk.newLines) { + throw new Error('Added line count did not match for hunk at line ' + (chunkHeaderIndex + 1)); + } + if (removeCount !== hunk.oldLines) { + throw new Error('Removed line count did not match for hunk at line ' + (chunkHeaderIndex + 1)); + } + return hunk; + } + while (i < diffstr.length) { + parseIndex(); + } + return list; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/patch/reverse.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/patch/reverse.js new file mode 100644 index 0000000000000000000000000000000000000000..078fcdaea0bbc0417b5a3b59c19800bdbdbdee98 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/patch/reverse.js @@ -0,0 +1,37 @@ +"use strict"; +var __assign = (this && this.__assign) || function () { + __assign = Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; + }; + return __assign.apply(this, arguments); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.reversePatch = reversePatch; +function reversePatch(structuredPatch) { + if (Array.isArray(structuredPatch)) { + // (See comment in unixToWin for why we need the pointless-looking anonymous function here) + return structuredPatch.map(function (patch) { return reversePatch(patch); }).reverse(); + } + return __assign(__assign({}, structuredPatch), { oldFileName: structuredPatch.newFileName, oldHeader: structuredPatch.newHeader, newFileName: structuredPatch.oldFileName, newHeader: structuredPatch.oldHeader, hunks: structuredPatch.hunks.map(function (hunk) { + return { + oldLines: hunk.newLines, + oldStart: hunk.newStart, + newLines: hunk.oldLines, + newStart: hunk.oldStart, + lines: hunk.lines.map(function (l) { + if (l.startsWith('-')) { + return "+".concat(l.slice(1)); + } + if (l.startsWith('+')) { + return "-".concat(l.slice(1)); + } + return l; + }) + }; + }) }); +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/types.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/types.js new file mode 100644 index 0000000000000000000000000000000000000000..c8ad2e549bdc6801e0d1c80b0308d4b9bd4985ce --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/types.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/util/array.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/util/array.js new file mode 100644 index 
0000000000000000000000000000000000000000..c21937ee0fe518e767ce5b71730abfb739416c69 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/util/array.js @@ -0,0 +1,21 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.arrayEqual = arrayEqual; +exports.arrayStartsWith = arrayStartsWith; +function arrayEqual(a, b) { + if (a.length !== b.length) { + return false; + } + return arrayStartsWith(a, b); +} +function arrayStartsWith(array, start) { + if (start.length > array.length) { + return false; + } + for (var i = 0; i < start.length; i++) { + if (start[i] !== array[i]) { + return false; + } + } + return true; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/util/distance-iterator.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/util/distance-iterator.js new file mode 100644 index 0000000000000000000000000000000000000000..2421553c444eac57f744e427d71584dd9f0f532d --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/util/distance-iterator.js @@ -0,0 +1,40 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.default = default_1; +// Iterator that traverses in the range of [min, max], stepping +// by distance from a given start position. I.e. for [0, 4], with +// start of 2, this will iterate 2, 3, 1, 4, 0. +function default_1(start, minLine, maxLine) { + var wantForward = true, backwardExhausted = false, forwardExhausted = false, localOffset = 1; + return function iterator() { + if (wantForward && !forwardExhausted) { + if (backwardExhausted) { + localOffset++; + } + else { + wantForward = false; + } + // Check if trying to fit beyond text length, and if not, check it fits + // after offset location (or desired location on first iteration) + if (start + localOffset <= maxLine) { + return start + localOffset; + } + forwardExhausted = true; + } + if (!backwardExhausted) { + if (!forwardExhausted) { + wantForward = true; + } + // Check if trying to fit before text beginning, and if not, check it fits + // before offset location + if (minLine <= start - localOffset) { + return start - localOffset++; + } + backwardExhausted = true; + return iterator(); + } + // We tried to fit hunk before text beginning and beyond text length, then + // hunk can't fit on the text. 
Return undefined + return undefined; + }; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/util/params.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/util/params.js new file mode 100644 index 0000000000000000000000000000000000000000..6eefddba7922c713e58d139229e76918543609ae --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/util/params.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.generateOptions = generateOptions; +function generateOptions(options, defaults) { + if (typeof options === 'function') { + defaults.callback = options; + } + else if (options) { + for (var name in options) { + /* istanbul ignore else */ + if (Object.prototype.hasOwnProperty.call(options, name)) { + defaults[name] = options[name]; + } + } + } + return defaults; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/util/string.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/util/string.js new file mode 100644 index 0000000000000000000000000000000000000000..847ec88a88f5dac9305e70be1bc794a5555c5f26 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libcjs/util/string.js @@ -0,0 +1,141 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.longestCommonPrefix = longestCommonPrefix; +exports.longestCommonSuffix = longestCommonSuffix; +exports.replacePrefix = replacePrefix; +exports.replaceSuffix = replaceSuffix; +exports.removePrefix = removePrefix; +exports.removeSuffix = removeSuffix; +exports.maximumOverlap = maximumOverlap; +exports.hasOnlyWinLineEndings = hasOnlyWinLineEndings; +exports.hasOnlyUnixLineEndings = hasOnlyUnixLineEndings; +exports.trailingWs = trailingWs; +exports.leadingWs = leadingWs; +function longestCommonPrefix(str1, str2) { + var i; + for (i = 0; i < str1.length && i < str2.length; i++) { + if (str1[i] != str2[i]) { + return str1.slice(0, i); + } + } + return str1.slice(0, i); +} +function longestCommonSuffix(str1, str2) { + var i; + // Unlike longestCommonPrefix, we need a special case to handle all scenarios + // where we return the empty string since str1.slice(-0) will return the + // entire string. 
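+    // (Editor's note: e.g. longestCommonSuffix('foobar', 'xbar') === 'bar', while
+    // longestCommonSuffix('abc', 'xyz') must return '' via the explicit check below,
+    // since 'abc'.slice(-0) would return the whole string rather than ''.)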
+    if (!str1 || !str2 || str1[str1.length - 1] != str2[str2.length - 1]) {
+        return '';
+    }
+    for (i = 0; i < str1.length && i < str2.length; i++) {
+        if (str1[str1.length - (i + 1)] != str2[str2.length - (i + 1)]) {
+            return str1.slice(-i);
+        }
+    }
+    return str1.slice(-i);
+}
+function replacePrefix(string, oldPrefix, newPrefix) {
+    if (string.slice(0, oldPrefix.length) != oldPrefix) {
+        throw Error("string ".concat(JSON.stringify(string), " doesn't start with prefix ").concat(JSON.stringify(oldPrefix), "; this is a bug"));
+    }
+    return newPrefix + string.slice(oldPrefix.length);
+}
+function replaceSuffix(string, oldSuffix, newSuffix) {
+    if (!oldSuffix) {
+        return string + newSuffix;
+    }
+    if (string.slice(-oldSuffix.length) != oldSuffix) {
+        throw Error("string ".concat(JSON.stringify(string), " doesn't end with suffix ").concat(JSON.stringify(oldSuffix), "; this is a bug"));
+    }
+    return string.slice(0, -oldSuffix.length) + newSuffix;
+}
+function removePrefix(string, oldPrefix) {
+    return replacePrefix(string, oldPrefix, '');
+}
+function removeSuffix(string, oldSuffix) {
+    return replaceSuffix(string, oldSuffix, '');
+}
+function maximumOverlap(string1, string2) {
+    return string2.slice(0, overlapCount(string1, string2));
+}
+// Nicked from https://stackoverflow.com/a/60422853/1709587
+function overlapCount(a, b) {
+    // Deal with cases where the strings differ in length
+    var startA = 0;
+    if (a.length > b.length) {
+        startA = a.length - b.length;
+    }
+    var endB = b.length;
+    if (a.length < b.length) {
+        endB = a.length;
+    }
+    // Create a back-reference for each index
+    // that should be followed in case of a mismatch.
+    // We only need B to make these references:
+    var map = Array(endB);
+    var k = 0; // Index that lags behind j
+    map[0] = 0;
+    for (var j = 1; j < endB; j++) {
+        if (b[j] == b[k]) {
+            map[j] = map[k]; // skip over the same character (optional optimisation)
+        }
+        else {
+            map[j] = k;
+        }
+        while (k > 0 && b[j] != b[k]) {
+            k = map[k];
+        }
+        if (b[j] == b[k]) {
+            k++;
+        }
+    }
+    // Phase 2: use these references while iterating over A
+    k = 0;
+    for (var i = startA; i < a.length; i++) {
+        while (k > 0 && a[i] != b[k]) {
+            k = map[k];
+        }
+        if (a[i] == b[k]) {
+            k++;
+        }
+    }
+    return k;
+}
+/**
+ * Returns true if the string consistently uses Windows line endings.
+ */
+function hasOnlyWinLineEndings(string) {
+    return string.includes('\r\n') && !string.startsWith('\n') && !string.match(/[^\r]\n/);
+}
+/**
+ * Returns true if the string consistently uses Unix line endings.
+ */
+function hasOnlyUnixLineEndings(string) {
+    return !string.includes('\r\n') && string.includes('\n');
+}
+function trailingWs(string) {
+    // Yes, this looks overcomplicated and dumb - why not replace the whole function with
+    //     return string.match(/\s*$/)[0]
+    // you ask? Because:
+    // 1. the trap described at https://markamery.com/blog/quadratic-time-regexes/ would mean doing
+    //    this would cause this function to take O(n²) time in the worst case (specifically when
+    //    there is a massive run of NON-TRAILING whitespace in `string`), and
+    // 2. the fix proposed in the same blog post, of using a negative lookbehind, is incompatible
+    //    with old Safari versions that we'd like to not break if possible (see
+    //    https://github.com/kpdecker/jsdiff/pull/550)
+    // It feels absurd to do this with an explicit loop instead of a regex, but I really can't see a
+    // better way that doesn't result in broken behaviour.
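+    // (Editor's note: e.g. trailingWs('foo \t') === ' \t' and trailingWs('   ') === '   ';
+    // the loop scans backwards from the end until it hits a non-whitespace character.)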
+    var i;
+    for (i = string.length - 1; i >= 0; i--) {
+        if (!string[i].match(/\s/)) {
+            break;
+        }
+    }
+    return string.substring(i + 1);
+}
+function leadingWs(string) {
+    // Thankfully the annoying considerations described in trailingWs don't apply here:
+    var match = string.match(/^\s*/);
+    return match ? match[0] : '';
+}
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/convert/dmp.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/convert/dmp.js
new file mode 100644
index 0000000000000000000000000000000000000000..44d28414658871b0e5489960476d7bee27b1fe94
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/convert/dmp.js
@@ -0,0 +1,21 @@
+/**
+ * converts a list of change objects to the format returned by Google's [diff-match-patch](https://github.com/google/diff-match-patch) library
+ */
+export function convertChangesToDMP(changes) {
+    const ret = [];
+    let change, operation;
+    for (let i = 0; i < changes.length; i++) {
+        change = changes[i];
+        if (change.added) {
+            operation = 1;
+        }
+        else if (change.removed) {
+            operation = -1;
+        }
+        else {
+            operation = 0;
+        }
+        ret.push([operation, change.value]);
+    }
+    return ret;
+}
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/convert/xml.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/convert/xml.js
new file mode 100644
index 0000000000000000000000000000000000000000..90ea8a2b8c667a4a3841b4cfcb4068fc146fcac7
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/convert/xml.js
@@ -0,0 +1,31 @@
+/**
+ * converts a list of change objects to a serialized XML format
+ */
+export function convertChangesToXML(changes) {
+    const ret = [];
+    for (let i = 0; i < changes.length; i++) {
+        const change = changes[i];
+        if (change.added) {
+            ret.push('<ins>');
+        }
+        else if (change.removed) {
+            ret.push('<del>');
+        }
+        ret.push(escapeHTML(change.value));
+        if (change.added) {
+            ret.push('</ins>');
+        }
+        else if (change.removed) {
+            ret.push('</del>');
+        }
+    }
+    return ret.join('');
+}
+function escapeHTML(s) {
+    let n = s;
+    n = n.replace(/&/g, '&amp;');
+    n = n.replace(/</g, '&lt;');
+    n = n.replace(/>/g, '&gt;');
+    n = n.replace(/"/g, '&quot;');
+    return n;
+}
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/diff/array.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/diff/array.js
new file mode 100644
index 0000000000000000000000000000000000000000..d92aeb485682d95b2172f88b28771a53730734aa
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/diff/array.js
@@ -0,0 +1,16 @@
+import Diff from './base.js';
+class ArrayDiff extends Diff {
+    tokenize(value) {
+        return value.slice();
+    }
+    join(value) {
+        return value;
+    }
+    removeEmpty(value) {
+        return value;
+    }
+}
+export const arrayDiff = new ArrayDiff();
+export function diffArrays(oldArr, newArr, options) {
+    return arrayDiff.diff(oldArr, newArr, options);
+}
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/diff/base.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/diff/base.js
new file mode 100644
index 0000000000000000000000000000000000000000..6e492e1198b315c31dc2114002bb71a542a84a73
--- /dev/null
+++
b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/diff/base.js @@ -0,0 +1,253 @@ +export default class Diff { + diff(oldStr, newStr, + // Type below is not accurate/complete - see above for full possibilities - but it compiles + options = {}) { + let callback; + if (typeof options === 'function') { + callback = options; + options = {}; + } + else if ('callback' in options) { + callback = options.callback; + } + // Allow subclasses to massage the input prior to running + const oldString = this.castInput(oldStr, options); + const newString = this.castInput(newStr, options); + const oldTokens = this.removeEmpty(this.tokenize(oldString, options)); + const newTokens = this.removeEmpty(this.tokenize(newString, options)); + return this.diffWithOptionsObj(oldTokens, newTokens, options, callback); + } + diffWithOptionsObj(oldTokens, newTokens, options, callback) { + var _a; + const done = (value) => { + value = this.postProcess(value, options); + if (callback) { + setTimeout(function () { callback(value); }, 0); + return undefined; + } + else { + return value; + } + }; + const newLen = newTokens.length, oldLen = oldTokens.length; + let editLength = 1; + let maxEditLength = newLen + oldLen; + if (options.maxEditLength != null) { + maxEditLength = Math.min(maxEditLength, options.maxEditLength); + } + const maxExecutionTime = (_a = options.timeout) !== null && _a !== void 0 ? _a : Infinity; + const abortAfterTimestamp = Date.now() + maxExecutionTime; + const bestPath = [{ oldPos: -1, lastComponent: undefined }]; + // Seed editLength = 0, i.e. the content starts with the same values + let newPos = this.extractCommon(bestPath[0], newTokens, oldTokens, 0, options); + if (bestPath[0].oldPos + 1 >= oldLen && newPos + 1 >= newLen) { + // Identity per the equality and tokenizer + return done(this.buildValues(bestPath[0].lastComponent, newTokens, oldTokens)); + } + // Once we hit the right edge of the edit graph on some diagonal k, we can + // definitely reach the end of the edit graph in no more than k edits, so + // there's no point in considering any moves to diagonal k+1 any more (from + // which we're guaranteed to need at least k+1 more edits). + // Similarly, once we've reached the bottom of the edit graph, there's no + // point considering moves to lower diagonals. + // We record this fact by setting minDiagonalToConsider and + // maxDiagonalToConsider to some finite value once we've hit the edge of + // the edit graph. + // This optimization is not faithful to the original algorithm presented in + // Myers's paper, which instead pointlessly extends D-paths off the end of + // the edit graph - see page 7 of Myers's paper which notes this point + // explicitly and illustrates it with a diagram. This has major performance + // implications for some common scenarios. For instance, to compute a diff + // where the new text simply appends d characters on the end of the + // original text of length n, the true Myers algorithm will take O(n+d^2) + // time while this optimization needs only O(n+d) time. + let minDiagonalToConsider = -Infinity, maxDiagonalToConsider = Infinity; + // Main worker method. checks all permutations of a given edit length for acceptance. 
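+        // (Editor's note: bestPath is indexed by diagonal k, where a path on diagonal k
+        // satisfies oldPos - newPos === k; each call to execEditLength below extends every
+        // still-viable diagonal by one more edit.)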
+ const execEditLength = () => { + for (let diagonalPath = Math.max(minDiagonalToConsider, -editLength); diagonalPath <= Math.min(maxDiagonalToConsider, editLength); diagonalPath += 2) { + let basePath; + const removePath = bestPath[diagonalPath - 1], addPath = bestPath[diagonalPath + 1]; + if (removePath) { + // No one else is going to attempt to use this value, clear it + // @ts-expect-error - perf optimisation. This type-violating value will never be read. + bestPath[diagonalPath - 1] = undefined; + } + let canAdd = false; + if (addPath) { + // what newPos will be after we do an insertion: + const addPathNewPos = addPath.oldPos - diagonalPath; + canAdd = addPath && 0 <= addPathNewPos && addPathNewPos < newLen; + } + const canRemove = removePath && removePath.oldPos + 1 < oldLen; + if (!canAdd && !canRemove) { + // If this path is a terminal then prune + // @ts-expect-error - perf optimisation. This type-violating value will never be read. + bestPath[diagonalPath] = undefined; + continue; + } + // Select the diagonal that we want to branch from. We select the prior + // path whose position in the old string is the farthest from the origin + // and does not pass the bounds of the diff graph + if (!canRemove || (canAdd && removePath.oldPos < addPath.oldPos)) { + basePath = this.addToPath(addPath, true, false, 0, options); + } + else { + basePath = this.addToPath(removePath, false, true, 1, options); + } + newPos = this.extractCommon(basePath, newTokens, oldTokens, diagonalPath, options); + if (basePath.oldPos + 1 >= oldLen && newPos + 1 >= newLen) { + // If we have hit the end of both strings, then we are done + return done(this.buildValues(basePath.lastComponent, newTokens, oldTokens)) || true; + } + else { + bestPath[diagonalPath] = basePath; + if (basePath.oldPos + 1 >= oldLen) { + maxDiagonalToConsider = Math.min(maxDiagonalToConsider, diagonalPath - 1); + } + if (newPos + 1 >= newLen) { + minDiagonalToConsider = Math.max(minDiagonalToConsider, diagonalPath + 1); + } + } + } + editLength++; + }; + // Performs the length of edit iteration. Is a bit fugly as this has to support the + // sync and async mode which is never fun. Loops over execEditLength until a value + // is produced, or until the edit length exceeds options.maxEditLength (if given), + // in which case it will return undefined. 
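+        // (Editor's note, illustrative usage: diffChars('abc', 'abd') takes the synchronous
+        // else-branch below and returns the change objects directly, while
+        // diffChars('abc', 'abd', { callback: cb }) takes the async branch, returns undefined,
+        // and delivers the result to cb via setTimeout.)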
+ if (callback) { + (function exec() { + setTimeout(function () { + if (editLength > maxEditLength || Date.now() > abortAfterTimestamp) { + return callback(undefined); + } + if (!execEditLength()) { + exec(); + } + }, 0); + }()); + } + else { + while (editLength <= maxEditLength && Date.now() <= abortAfterTimestamp) { + const ret = execEditLength(); + if (ret) { + return ret; + } + } + } + } + addToPath(path, added, removed, oldPosInc, options) { + const last = path.lastComponent; + if (last && !options.oneChangePerToken && last.added === added && last.removed === removed) { + return { + oldPos: path.oldPos + oldPosInc, + lastComponent: { count: last.count + 1, added: added, removed: removed, previousComponent: last.previousComponent } + }; + } + else { + return { + oldPos: path.oldPos + oldPosInc, + lastComponent: { count: 1, added: added, removed: removed, previousComponent: last } + }; + } + } + extractCommon(basePath, newTokens, oldTokens, diagonalPath, options) { + const newLen = newTokens.length, oldLen = oldTokens.length; + let oldPos = basePath.oldPos, newPos = oldPos - diagonalPath, commonCount = 0; + while (newPos + 1 < newLen && oldPos + 1 < oldLen && this.equals(oldTokens[oldPos + 1], newTokens[newPos + 1], options)) { + newPos++; + oldPos++; + commonCount++; + if (options.oneChangePerToken) { + basePath.lastComponent = { count: 1, previousComponent: basePath.lastComponent, added: false, removed: false }; + } + } + if (commonCount && !options.oneChangePerToken) { + basePath.lastComponent = { count: commonCount, previousComponent: basePath.lastComponent, added: false, removed: false }; + } + basePath.oldPos = oldPos; + return newPos; + } + equals(left, right, options) { + if (options.comparator) { + return options.comparator(left, right); + } + else { + return left === right + || (!!options.ignoreCase && left.toLowerCase() === right.toLowerCase()); + } + } + removeEmpty(array) { + const ret = []; + for (let i = 0; i < array.length; i++) { + if (array[i]) { + ret.push(array[i]); + } + } + return ret; + } + // eslint-disable-next-line @typescript-eslint/no-unused-vars + castInput(value, options) { + return value; + } + // eslint-disable-next-line @typescript-eslint/no-unused-vars + tokenize(value, options) { + return Array.from(value); + } + join(chars) { + // Assumes ValueT is string, which is the case for most subclasses. + // When it's false, e.g. in diffArrays, this method needs to be overridden (e.g. with a no-op) + // Yes, the casts are verbose and ugly, because this pattern - of having the base class SORT OF + // assume tokens and values are strings, but not completely - is weird and janky. 
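+        // (Editor's note: e.g. join(['fo', 'o']) === 'foo' for the string-token subclasses;
+        // ArrayDiff in array.js overrides join to return the token array unchanged.)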
+ return chars.join(''); + } + postProcess(changeObjects, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + options) { + return changeObjects; + } + get useLongestToken() { + return false; + } + buildValues(lastComponent, newTokens, oldTokens) { + // First we convert our linked list of components in reverse order to an + // array in the right order: + const components = []; + let nextComponent; + while (lastComponent) { + components.push(lastComponent); + nextComponent = lastComponent.previousComponent; + delete lastComponent.previousComponent; + lastComponent = nextComponent; + } + components.reverse(); + const componentLen = components.length; + let componentPos = 0, newPos = 0, oldPos = 0; + for (; componentPos < componentLen; componentPos++) { + const component = components[componentPos]; + if (!component.removed) { + if (!component.added && this.useLongestToken) { + let value = newTokens.slice(newPos, newPos + component.count); + value = value.map(function (value, i) { + const oldValue = oldTokens[oldPos + i]; + return oldValue.length > value.length ? oldValue : value; + }); + component.value = this.join(value); + } + else { + component.value = this.join(newTokens.slice(newPos, newPos + component.count)); + } + newPos += component.count; + // Common case + if (!component.added) { + oldPos += component.count; + } + } + else { + component.value = this.join(oldTokens.slice(oldPos, oldPos + component.count)); + oldPos += component.count; + } + } + return components; + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/diff/character.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/diff/character.js new file mode 100644 index 0000000000000000000000000000000000000000..ca70d065d37cb494e845687163be19f8c9f928d4 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/diff/character.js @@ -0,0 +1,7 @@ +import Diff from './base.js'; +class CharacterDiff extends Diff { +} +export const characterDiff = new CharacterDiff(); +export function diffChars(oldStr, newStr, options) { + return characterDiff.diff(oldStr, newStr, options); +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/diff/css.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/diff/css.js new file mode 100644 index 0000000000000000000000000000000000000000..2e7adcc3c2c3d31c10198bc2cbfbb30de99a386b --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/diff/css.js @@ -0,0 +1,10 @@ +import Diff from './base.js'; +class CssDiff extends Diff { + tokenize(value) { + return value.split(/([{}:;,]|\s+)/); + } +} +export const cssDiff = new CssDiff(); +export function diffCss(oldStr, newStr, options) { + return cssDiff.diff(oldStr, newStr, options); +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/diff/json.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/diff/json.js new file mode 100644 index 0000000000000000000000000000000000000000..be9f7617df9971aabeb439cd0269be76f74d382c --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/diff/json.js @@ -0,0 +1,78 @@ +import Diff from './base.js'; +import { tokenize } from './line.js'; +class JsonDiff extends Diff { + constructor() { + super(...arguments); + this.tokenize = tokenize; + } + 
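+    // (Editor's note: tokenize is reused from line.js above, so JSON values are diffed
+    // line-by-line over the pretty-printed output of castInput below.)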
get useLongestToken() { + // Discriminate between two lines of pretty-printed, serialized JSON where one of them has a + // dangling comma and the other doesn't. Turns out including the dangling comma yields the nicest output: + return true; + } + castInput(value, options) { + const { undefinedReplacement, stringifyReplacer = (k, v) => typeof v === 'undefined' ? undefinedReplacement : v } = options; + return typeof value === 'string' ? value : JSON.stringify(canonicalize(value, null, null, stringifyReplacer), null, ' '); + } + equals(left, right, options) { + return super.equals(left.replace(/,([\r\n])/g, '$1'), right.replace(/,([\r\n])/g, '$1'), options); + } +} +export const jsonDiff = new JsonDiff(); +export function diffJson(oldStr, newStr, options) { + return jsonDiff.diff(oldStr, newStr, options); +} +// This function handles the presence of circular references by bailing out when encountering an +// object that is already on the "stack" of items being processed. Accepts an optional replacer +export function canonicalize(obj, stack, replacementStack, replacer, key) { + stack = stack || []; + replacementStack = replacementStack || []; + if (replacer) { + obj = replacer(key === undefined ? '' : key, obj); + } + let i; + for (i = 0; i < stack.length; i += 1) { + if (stack[i] === obj) { + return replacementStack[i]; + } + } + let canonicalizedObj; + if ('[object Array]' === Object.prototype.toString.call(obj)) { + stack.push(obj); + canonicalizedObj = new Array(obj.length); + replacementStack.push(canonicalizedObj); + for (i = 0; i < obj.length; i += 1) { + canonicalizedObj[i] = canonicalize(obj[i], stack, replacementStack, replacer, String(i)); + } + stack.pop(); + replacementStack.pop(); + return canonicalizedObj; + } + if (obj && obj.toJSON) { + obj = obj.toJSON(); + } + if (typeof obj === 'object' && obj !== null) { + stack.push(obj); + canonicalizedObj = {}; + replacementStack.push(canonicalizedObj); + const sortedKeys = []; + let key; + for (key in obj) { + /* istanbul ignore else */ + if (Object.prototype.hasOwnProperty.call(obj, key)) { + sortedKeys.push(key); + } + } + sortedKeys.sort(); + for (i = 0; i < sortedKeys.length; i += 1) { + key = sortedKeys[i]; + canonicalizedObj[key] = canonicalize(obj[key], stack, replacementStack, replacer, key); + } + stack.pop(); + replacementStack.pop(); + } + else { + canonicalizedObj = obj; + } + return canonicalizedObj; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/diff/line.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/diff/line.js new file mode 100644 index 0000000000000000000000000000000000000000..0675d4fb003f93cef5bd4d17398370017915a6bf --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/diff/line.js @@ -0,0 +1,65 @@ +import Diff from './base.js'; +import { generateOptions } from '../util/params.js'; +class LineDiff extends Diff { + constructor() { + super(...arguments); + this.tokenize = tokenize; + } + equals(left, right, options) { + // If we're ignoring whitespace, we need to normalise lines by stripping + // whitespace before checking equality. 
(This has an annoying interaction + // with newlineIsToken that requires special handling: if newlines get their + // own token, then we DON'T want to trim the *newline* tokens down to empty + // strings, since this would cause us to treat whitespace-only line content + // as equal to a separator between lines, which would be weird and + // inconsistent with the documented behavior of the options.) + if (options.ignoreWhitespace) { + if (!options.newlineIsToken || !left.includes('\n')) { + left = left.trim(); + } + if (!options.newlineIsToken || !right.includes('\n')) { + right = right.trim(); + } + } + else if (options.ignoreNewlineAtEof && !options.newlineIsToken) { + if (left.endsWith('\n')) { + left = left.slice(0, -1); + } + if (right.endsWith('\n')) { + right = right.slice(0, -1); + } + } + return super.equals(left, right, options); + } +} +export const lineDiff = new LineDiff(); +export function diffLines(oldStr, newStr, options) { + return lineDiff.diff(oldStr, newStr, options); +} +export function diffTrimmedLines(oldStr, newStr, options) { + options = generateOptions(options, { ignoreWhitespace: true }); + return lineDiff.diff(oldStr, newStr, options); +} +// Exported standalone so it can be used from jsonDiff too. +export function tokenize(value, options) { + if (options.stripTrailingCr) { + // remove one \r before \n to match GNU diff's --strip-trailing-cr behavior + value = value.replace(/\r\n/g, '\n'); + } + const retLines = [], linesAndNewlines = value.split(/(\n|\r\n)/); + // Ignore the final empty token that occurs if the string ends with a new line + if (!linesAndNewlines[linesAndNewlines.length - 1]) { + linesAndNewlines.pop(); + } + // Merge the content and line separators into single tokens + for (let i = 0; i < linesAndNewlines.length; i++) { + const line = linesAndNewlines[i]; + if (i % 2 && !options.newlineIsToken) { + retLines[retLines.length - 1] += line; + } + else { + retLines.push(line); + } + } + return retLines; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/diff/sentence.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/diff/sentence.js new file mode 100644 index 0000000000000000000000000000000000000000..db37010ef647276126576b145991e43046e73710 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/diff/sentence.js @@ -0,0 +1,43 @@ +import Diff from './base.js'; +function isSentenceEndPunct(char) { + return char == '.' || char == '!' || char == '?'; +} +class SentenceDiff extends Diff { + tokenize(value) { + var _a; + // If in future we drop support for environments that don't support lookbehinds, we can replace + // this entire function with: + // return value.split(/(?<=[.!?])(\s+|$)/); + // but until then, for similar reasons to the trailingWs function in string.ts, we are forced + // to do this verbosely "by hand" instead of using a regex. + const result = []; + let tokenStartI = 0; + for (let i = 0; i < value.length; i++) { + if (i == value.length - 1) { + result.push(value.slice(tokenStartI)); + break; + } + if (isSentenceEndPunct(value[i]) && value[i + 1].match(/\s/)) { + // We've hit a sentence break - i.e. a punctuation mark followed by whitespace. + // We now want to push TWO tokens to the result: + // 1. the sentence + result.push(value.slice(tokenStartI, i + 1)); + // 2. the whitespace + i = tokenStartI = i + 1; + while ((_a = value[i + 1]) === null || _a === void 0 ? 
void 0 : _a.match(/\s/)) { + i++; + } + result.push(value.slice(tokenStartI, i + 1)); + // Then the next token (a sentence) starts on the character after the whitespace. + // (It's okay if this is off the end of the string - then the outer loop will terminate + // here anyway.) + tokenStartI = i + 1; + } + } + return result; + } +} +export const sentenceDiff = new SentenceDiff(); +export function diffSentences(oldStr, newStr, options) { + return sentenceDiff.diff(oldStr, newStr, options); +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/diff/word.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/diff/word.js new file mode 100644 index 0000000000000000000000000000000000000000..5f8e03a09283ee68116602151c698a25eb094154 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/diff/word.js @@ -0,0 +1,276 @@ +import Diff from './base.js'; +import { longestCommonPrefix, longestCommonSuffix, replacePrefix, replaceSuffix, removePrefix, removeSuffix, maximumOverlap, leadingWs, trailingWs } from '../util/string.js'; +// Based on https://en.wikipedia.org/wiki/Latin_script_in_Unicode +// +// Ranges and exceptions: +// Latin-1 Supplement, 0080–00FF +// - U+00D7 × Multiplication sign +// - U+00F7 ÷ Division sign +// Latin Extended-A, 0100–017F +// Latin Extended-B, 0180–024F +// IPA Extensions, 0250–02AF +// Spacing Modifier Letters, 02B0–02FF +// - U+02C7 ˇ Caron +// - U+02D8 ˘ Breve +// - U+02D9 ˙ Dot Above +// - U+02DA ˚ Ring Above +// - U+02DB ˛ Ogonek +// - U+02DC ˜ Small Tilde +// - U+02DD ˝ Double Acute Accent +// Latin Extended Additional, 1E00–1EFF +const extendedWordChars = 'a-zA-Z0-9_\\u{C0}-\\u{FF}\\u{D8}-\\u{F6}\\u{F8}-\\u{2C6}\\u{2C8}-\\u{2D7}\\u{2DE}-\\u{2FF}\\u{1E00}-\\u{1EFF}'; +// Each token is one of the following: +// - A punctuation mark plus the surrounding whitespace +// - A word plus the surrounding whitespace +// - Pure whitespace (but only in the special case where the entire text +// is just whitespace) +// +// We have to include surrounding whitespace in the tokens because the two +// alternative approaches produce horribly broken results: +// * If we just discard the whitespace, we can't fully reproduce the original +// text from the sequence of tokens and any attempt to render the diff will +// get the whitespace wrong. +// * If we have separate tokens for whitespace, then in a typical text every +// second token will be a single space character. But this often results in +// the optimal diff between two texts being a perverse one that preserves +// the spaces between words but deletes and reinserts actual common words. +// See https://github.com/kpdecker/jsdiff/issues/160#issuecomment-1866099640 +// for an example. +// +// Keeping the surrounding whitespace of course has implications for .equals +// and .join, not just .tokenize. +// This regex does NOT fully implement the tokenization rules described above. +// Instead, it gives runs of whitespace their own "token". The tokenize method +// then handles stitching whitespace tokens onto adjacent word or punctuation +// tokens.
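// For example (an illustrative sketch of the resulting behaviour, matching
// the worked examples in dedupeWhitespaceInChangeObjects below):
//   import { diffWords } from 'diff';
//   diffWords('foo bar baz', 'foo qux baz');
//   // -> K:'foo ' D:'bar' I:'qux' K:' baz', i.e. whitespace stays attached
//   //    to the word tokens and the kept plus added values reassemble the
//   //    new text exactly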
+const tokenizeIncludingWhitespace = new RegExp(`[${extendedWordChars}]+|\\s+|[^${extendedWordChars}]`, 'ug'); +class WordDiff extends Diff { + equals(left, right, options) { + if (options.ignoreCase) { + left = left.toLowerCase(); + right = right.toLowerCase(); + } + return left.trim() === right.trim(); + } + tokenize(value, options = {}) { + let parts; + if (options.intlSegmenter) { + const segmenter = options.intlSegmenter; + if (segmenter.resolvedOptions().granularity != 'word') { + throw new Error('The segmenter passed must have a granularity of "word"'); + } + parts = Array.from(segmenter.segment(value), segment => segment.segment); + } + else { + parts = value.match(tokenizeIncludingWhitespace) || []; + } + const tokens = []; + let prevPart = null; + parts.forEach(part => { + if ((/\s/).test(part)) { + if (prevPart == null) { + tokens.push(part); + } + else { + tokens.push(tokens.pop() + part); + } + } + else if (prevPart != null && (/\s/).test(prevPart)) { + if (tokens[tokens.length - 1] == prevPart) { + tokens.push(tokens.pop() + part); + } + else { + tokens.push(prevPart + part); + } + } + else { + tokens.push(part); + } + prevPart = part; + }); + return tokens; + } + join(tokens) { + // Tokens being joined here will always have appeared consecutively in the + // same text, so we can simply strip off the leading whitespace from all the + // tokens except the first (and except any whitespace-only tokens - but such + // a token will always be the first and only token anyway) and then join them + // and the whitespace around words and punctuation will end up correct. + return tokens.map((token, i) => { + if (i == 0) { + return token; + } + else { + return token.replace((/^\s+/), ''); + } + }).join(''); + } + postProcess(changes, options) { + if (!changes || options.oneChangePerToken) { + return changes; + } + let lastKeep = null; + // Change objects representing any insertion or deletion since the last + // "keep" change object. There can be at most one of each. + let insertion = null; + let deletion = null; + changes.forEach(change => { + if (change.added) { + insertion = change; + } + else if (change.removed) { + deletion = change; + } + else { + if (insertion || deletion) { // May be false at start of text + dedupeWhitespaceInChangeObjects(lastKeep, deletion, insertion, change); + } + lastKeep = change; + insertion = null; + deletion = null; + } + }); + if (insertion || deletion) { + dedupeWhitespaceInChangeObjects(lastKeep, deletion, insertion, null); + } + return changes; + } +} +export const wordDiff = new WordDiff(); +export function diffWords(oldStr, newStr, options) { + // This option has never been documented and never will be (it's clearer to + // just call `diffWordsWithSpace` directly if you need that behavior), but + // has existed in jsdiff for a long time, so we retain support for it here + // for the sake of backwards compatibility. + if ((options === null || options === void 0 ? void 0 : options.ignoreWhitespace) != null && !options.ignoreWhitespace) { + return diffWordsWithSpace(oldStr, newStr, options); + } + return wordDiff.diff(oldStr, newStr, options); +} +function dedupeWhitespaceInChangeObjects(startKeep, deletion, insertion, endKeep) { + // Before returning, we tidy up the leading and trailing whitespace of the + // change objects to eliminate cases where trailing whitespace in one object + // is repeated as leading whitespace in the next. + // Below are examples of the outcomes we want here to explain the code. + // I=insert, K=keep, D=delete + // 1. 
diffing 'foo bar baz' vs 'foo baz' + // Prior to cleanup, we have K:'foo ' D:' bar ' K:' baz' + // After cleanup, we want: K:'foo ' D:'bar ' K:'baz' + // + // 2. Diffing 'foo bar baz' vs 'foo qux baz' + // Prior to cleanup, we have K:'foo ' D:' bar ' I:' qux ' K:' baz' + // After cleanup, we want K:'foo ' D:'bar' I:'qux' K:' baz' + // + // 3. Diffing 'foo\nbar baz' vs 'foo baz' + // Prior to cleanup, we have K:'foo ' D:'\nbar ' K:' baz' + // After cleanup, we want K'foo' D:'\nbar' K:' baz' + // + // 4. Diffing 'foo baz' vs 'foo\nbar baz' + // Prior to cleanup, we have K:'foo\n' I:'\nbar ' K:' baz' + // After cleanup, we ideally want K'foo' I:'\nbar' K:' baz' + // but don't actually manage this currently (the pre-cleanup change + // objects don't contain enough information to make it possible). + // + // 5. Diffing 'foo bar baz' vs 'foo baz' + // Prior to cleanup, we have K:'foo ' D:' bar ' K:' baz' + // After cleanup, we want K:'foo ' D:' bar ' K:'baz' + // + // Our handling is unavoidably imperfect in the case where there's a single + // indel between keeps and the whitespace has changed. For instance, consider + // diffing 'foo\tbar\nbaz' vs 'foo baz'. Unless we create an extra change + // object to represent the insertion of the space character (which isn't even + // a token), we have no way to avoid losing information about the texts' + // original whitespace in the result we return. Still, we do our best to + // output something that will look sensible if we e.g. print it with + // insertions in green and deletions in red. + // Between two "keep" change objects (or before the first or after the last + // change object), we can have either: + // * A "delete" followed by an "insert" + // * Just an "insert" + // * Just a "delete" + // We handle the three cases separately. + if (deletion && insertion) { + const oldWsPrefix = leadingWs(deletion.value); + const oldWsSuffix = trailingWs(deletion.value); + const newWsPrefix = leadingWs(insertion.value); + const newWsSuffix = trailingWs(insertion.value); + if (startKeep) { + const commonWsPrefix = longestCommonPrefix(oldWsPrefix, newWsPrefix); + startKeep.value = replaceSuffix(startKeep.value, newWsPrefix, commonWsPrefix); + deletion.value = removePrefix(deletion.value, commonWsPrefix); + insertion.value = removePrefix(insertion.value, commonWsPrefix); + } + if (endKeep) { + const commonWsSuffix = longestCommonSuffix(oldWsSuffix, newWsSuffix); + endKeep.value = replacePrefix(endKeep.value, newWsSuffix, commonWsSuffix); + deletion.value = removeSuffix(deletion.value, commonWsSuffix); + insertion.value = removeSuffix(insertion.value, commonWsSuffix); + } + } + else if (insertion) { + // The whitespaces all reflect what was in the new text rather than + // the old, so we essentially have no information about whitespace + // insertion or deletion. We just want to dedupe the whitespace. + // We do that by having each change object keep its trailing + // whitespace and deleting duplicate leading whitespace where + // present. 
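// For instance (an illustrative trace): diffing 'foo baz' vs 'foo bar baz'
// gives K:'foo ' I:' bar ' K:' baz' before cleanup; stripping the duplicated
// leading whitespace from the insertion and from endKeep leaves
// K:'foo ' I:'bar ' K:'baz', which joins back to exactly 'foo bar baz'.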
+ if (startKeep) { + const ws = leadingWs(insertion.value); + insertion.value = insertion.value.substring(ws.length); + } + if (endKeep) { + const ws = leadingWs(endKeep.value); + endKeep.value = endKeep.value.substring(ws.length); + } + // otherwise we've got a deletion and no insertion + } + else if (startKeep && endKeep) { + const newWsFull = leadingWs(endKeep.value), delWsStart = leadingWs(deletion.value), delWsEnd = trailingWs(deletion.value); + // Any whitespace that comes straight after startKeep in both the old and + // new texts, assign to startKeep and remove from the deletion. + const newWsStart = longestCommonPrefix(newWsFull, delWsStart); + deletion.value = removePrefix(deletion.value, newWsStart); + // Any whitespace that comes straight before endKeep in both the old and + // new texts, and hasn't already been assigned to startKeep, assign to + // endKeep and remove from the deletion. + const newWsEnd = longestCommonSuffix(removePrefix(newWsFull, newWsStart), delWsEnd); + deletion.value = removeSuffix(deletion.value, newWsEnd); + endKeep.value = replacePrefix(endKeep.value, newWsFull, newWsEnd); + // If there's any whitespace from the new text that HASN'T already been + // assigned, assign it to the start: + startKeep.value = replaceSuffix(startKeep.value, newWsFull, newWsFull.slice(0, newWsFull.length - newWsEnd.length)); + } + else if (endKeep) { + // We are at the start of the text. Preserve all the whitespace on + // endKeep, and just remove whitespace from the end of deletion to the + // extent that it overlaps with the start of endKeep. + const endKeepWsPrefix = leadingWs(endKeep.value); + const deletionWsSuffix = trailingWs(deletion.value); + const overlap = maximumOverlap(deletionWsSuffix, endKeepWsPrefix); + deletion.value = removeSuffix(deletion.value, overlap); + } + else if (startKeep) { + // We are at the END of the text. Preserve all the whitespace on + // startKeep, and just remove whitespace from the start of deletion to + // the extent that it overlaps with the end of startKeep. + const startKeepWsSuffix = trailingWs(startKeep.value); + const deletionWsPrefix = leadingWs(deletion.value); + const overlap = maximumOverlap(startKeepWsSuffix, deletionWsPrefix); + deletion.value = removePrefix(deletion.value, overlap); + } +} +class WordsWithSpaceDiff extends Diff { + tokenize(value) { + // Slightly different to the tokenizeIncludingWhitespace regex used above in + // that this one treats each individual newline as a distinct token, rather + // than merging them into other surrounding whitespace. This was requested + // in https://github.com/kpdecker/jsdiff/issues/180 & + // https://github.com/kpdecker/jsdiff/issues/211 + const regex = new RegExp(`(\\r?\\n)|[${extendedWordChars}]+|[^\\S\\n\\r]+|[^${extendedWordChars}]`, 'ug'); + return value.match(regex) || []; + } +} +export const wordsWithSpaceDiff = new WordsWithSpaceDiff(); +export function diffWordsWithSpace(oldStr, newStr, options) { + return wordsWithSpaceDiff.diff(oldStr, newStr, options); +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/index.js new file mode 100644 index 0000000000000000000000000000000000000000..48c8a7af6a41208d933736432dadc283c4c7191d --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/index.js @@ -0,0 +1,30 @@ +/* See LICENSE file for terms of use */ +/* + * Text diff implementation.
+ * + * This library supports the following APIs: + * Diff.diffChars: Character by character diff + * Diff.diffWords: Word (as defined by \b regex) diff which ignores whitespace + * Diff.diffLines: Line based diff + * + * Diff.diffCss: Diff targeted at CSS content + * + * These methods are based on the implementation proposed in + * "An O(ND) Difference Algorithm and its Variations" (Myers, 1986). + * http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.4.6927 + */ +import Diff from './diff/base.js'; +import { diffChars, characterDiff } from './diff/character.js'; +import { diffWords, diffWordsWithSpace, wordDiff, wordsWithSpaceDiff } from './diff/word.js'; +import { diffLines, diffTrimmedLines, lineDiff } from './diff/line.js'; +import { diffSentences, sentenceDiff } from './diff/sentence.js'; +import { diffCss, cssDiff } from './diff/css.js'; +import { diffJson, canonicalize, jsonDiff } from './diff/json.js'; +import { diffArrays, arrayDiff } from './diff/array.js'; +import { applyPatch, applyPatches } from './patch/apply.js'; +import { parsePatch } from './patch/parse.js'; +import { reversePatch } from './patch/reverse.js'; +import { structuredPatch, createTwoFilesPatch, createPatch, formatPatch } from './patch/create.js'; +import { convertChangesToDMP } from './convert/dmp.js'; +import { convertChangesToXML } from './convert/xml.js'; +export { Diff, diffChars, characterDiff, diffWords, wordDiff, diffWordsWithSpace, wordsWithSpaceDiff, diffLines, lineDiff, diffTrimmedLines, diffSentences, sentenceDiff, diffCss, cssDiff, diffJson, jsonDiff, diffArrays, arrayDiff, structuredPatch, createTwoFilesPatch, createPatch, formatPatch, applyPatch, applyPatches, parsePatch, reversePatch, convertChangesToDMP, convertChangesToXML, canonicalize }; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/package.json new file mode 100644 index 0000000000000000000000000000000000000000..2bd6e5099f38c65ce6910714a12c91eaed9514a5 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/package.json @@ -0,0 +1 @@ +{"type":"module","sideEffects":false} \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/patch/apply.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/patch/apply.js new file mode 100644 index 0000000000000000000000000000000000000000..fe2e8db5c465d27796c0a76d71e6bb847168cb6f --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/patch/apply.js @@ -0,0 +1,257 @@ +import { hasOnlyWinLineEndings, hasOnlyUnixLineEndings } from '../util/string.js'; +import { isWin, isUnix, unixToWin, winToUnix } from './line-endings.js'; +import { parsePatch } from './parse.js'; +import distanceIterator from '../util/distance-iterator.js'; +/** + * attempts to apply a unified diff patch. + * + * Hunks are applied first to last. + * `applyPatch` first tries to apply the first hunk at the line number specified in the hunk header, and with all context lines matching exactly. + * If that fails, it tries scanning backwards and forwards, one line at a time, to find a place to apply the hunk where the context lines match exactly. 
+ * If that still fails, and `fuzzFactor` is greater than zero, it increments the maximum number of mismatches (missing, extra, or changed context lines) that there can be between the hunk context and a region where we are trying to apply the patch such that the hunk will still be considered to match. + * Regardless of `fuzzFactor`, lines to be deleted in the hunk *must* be present for a hunk to match, and the context lines *immediately* before and after an insertion must match exactly. + * + * Once a hunk is successfully fitted, the process begins again with the next hunk. + * Regardless of `fuzzFactor`, later hunks must be applied later in the file than earlier hunks. + * + * If a hunk cannot be successfully fitted *anywhere* with fewer than `fuzzFactor` mismatches, `applyPatch` fails and returns `false`. + * + * If a hunk is successfully fitted but not at the line number specified by the hunk header, all subsequent hunks have their target line number adjusted accordingly. + * (e.g. if the first hunk is applied 10 lines below where the hunk header said it should fit, `applyPatch` will *start* looking for somewhere to apply the second hunk 10 lines below where its hunk header says it goes.) + * + * If the patch was applied successfully, returns a string containing the patched text. + * If the patch could not be applied (because some hunks in the patch couldn't be fitted to the text in `source`), `applyPatch` returns false. + * + * @param patch a string diff or the output from the `parsePatch` or `structuredPatch` methods. + */ +export function applyPatch(source, patch, options = {}) { + let patches; + if (typeof patch === 'string') { + patches = parsePatch(patch); + } + else if (Array.isArray(patch)) { + patches = patch; + } + else { + patches = [patch]; + } + if (patches.length > 1) { + throw new Error('applyPatch only works with a single input.'); + } + return applyStructuredPatch(source, patches[0], options); +} +function applyStructuredPatch(source, patch, options = {}) { + if (options.autoConvertLineEndings || options.autoConvertLineEndings == null) { + if (hasOnlyWinLineEndings(source) && isUnix(patch)) { + patch = unixToWin(patch); + } + else if (hasOnlyUnixLineEndings(source) && isWin(patch)) { + patch = winToUnix(patch); + } + } + // Apply the diff to the input + const lines = source.split('\n'), hunks = patch.hunks, compareLine = options.compareLine || ((lineNumber, line, operation, patchContent) => line === patchContent), fuzzFactor = options.fuzzFactor || 0; + let minLine = 0; + if (fuzzFactor < 0 || !Number.isInteger(fuzzFactor)) { + throw new Error('fuzzFactor must be a non-negative integer'); + } + // Special case for empty patch. + if (!hunks.length) { + return source; + } + // Before anything else, handle EOFNL insertion/removal. If the patch tells us to make a change + // to the EOFNL that is redundant/impossible - i.e. to remove a newline that's not there, or add a + // newline that already exists - then we either return false and fail to apply the patch (if + // fuzzFactor is 0) or simply ignore the problem and do nothing (if fuzzFactor is >0). 
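// (Concretely, an illustrative case: if the final hunk ends with a '+' line
// followed by '\ No newline at end of file', the patch wants the trailing
// newline removed; when the source already lacks one, applyPatch returns
// false at fuzzFactor 0 and silently skips the change at any higher
// fuzzFactor.)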
+ // If we do need to remove/add a newline at EOF, this will always be in the final hunk: + let prevLine = '', removeEOFNL = false, addEOFNL = false; + for (let i = 0; i < hunks[hunks.length - 1].lines.length; i++) { + const line = hunks[hunks.length - 1].lines[i]; + if (line[0] == '\\') { + if (prevLine[0] == '+') { + removeEOFNL = true; + } + else if (prevLine[0] == '-') { + addEOFNL = true; + } + } + prevLine = line; + } + if (removeEOFNL) { + if (addEOFNL) { + // This means the final line gets changed but doesn't have a trailing newline in either the + // original or patched version. In that case, we do nothing if fuzzFactor > 0, and if + // fuzzFactor is 0, we simply validate that the source file has no trailing newline. + if (!fuzzFactor && lines[lines.length - 1] == '') { + return false; + } + } + else if (lines[lines.length - 1] == '') { + lines.pop(); + } + else if (!fuzzFactor) { + return false; + } + } + else if (addEOFNL) { + if (lines[lines.length - 1] != '') { + lines.push(''); + } + else if (!fuzzFactor) { + return false; + } + } + /** + * Checks if the hunk can be made to fit at the provided location with at most `maxErrors` + * insertions, substitutions, or deletions, while ensuring also that: + * - lines deleted in the hunk match exactly, and + * - wherever an insertion operation or block of insertion operations appears in the hunk, the + * immediately preceding and following lines of context match exactly + * + * `toPos` should be set such that lines[toPos] is meant to match hunkLines[0]. + * + * If the hunk can be applied, returns an object with properties `oldLineLastI` and + * `replacementLines`. Otherwise, returns null. + */ + function applyHunk(hunkLines, toPos, maxErrors, hunkLinesI = 0, lastContextLineMatched = true, patchedLines = [], patchedLinesLength = 0) { + let nConsecutiveOldContextLines = 0; + let nextContextLineMustMatch = false; + for (; hunkLinesI < hunkLines.length; hunkLinesI++) { + const hunkLine = hunkLines[hunkLinesI], operation = (hunkLine.length > 0 ? hunkLine[0] : ' '), content = (hunkLine.length > 0 ? hunkLine.substr(1) : hunkLine); + if (operation === '-') { + if (compareLine(toPos + 1, lines[toPos], operation, content)) { + toPos++; + nConsecutiveOldContextLines = 0; + } + else { + if (!maxErrors || lines[toPos] == null) { + return null; + } + patchedLines[patchedLinesLength] = lines[toPos]; + return applyHunk(hunkLines, toPos + 1, maxErrors - 1, hunkLinesI, false, patchedLines, patchedLinesLength + 1); + } + } + if (operation === '+') { + if (!lastContextLineMatched) { + return null; + } + patchedLines[patchedLinesLength] = content; + patchedLinesLength++; + nConsecutiveOldContextLines = 0; + nextContextLineMustMatch = true; + } + if (operation === ' ') { + nConsecutiveOldContextLines++; + patchedLines[patchedLinesLength] = lines[toPos]; + if (compareLine(toPos + 1, lines[toPos], operation, content)) { + patchedLinesLength++; + lastContextLineMatched = true; + nextContextLineMustMatch = false; + toPos++; + } + else { + if (nextContextLineMustMatch || !maxErrors) { + return null; + } + // Consider 3 possibilities in sequence: + // 1. lines contains a *substitution* not included in the patch context, or + // 2. lines contains an *insertion* not included in the patch context, or + // 3. lines contains a *deletion* not included in the patch context + // The first two options are of course only possible if the line from lines is non-null - + // i.e. only option 3 is possible if we've overrun the end of the old file. 
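// (Mapping those three possibilities onto the expression below: the first
// recursive call advances both the file position and the hunk line - a
// substitution; the second advances the file position only - an insertion in
// the file; the third advances the hunk line only - a deletion from the
// file.)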
+ return (lines[toPos] && (applyHunk(hunkLines, toPos + 1, maxErrors - 1, hunkLinesI + 1, false, patchedLines, patchedLinesLength + 1) || applyHunk(hunkLines, toPos + 1, maxErrors - 1, hunkLinesI, false, patchedLines, patchedLinesLength + 1)) || applyHunk(hunkLines, toPos, maxErrors - 1, hunkLinesI + 1, false, patchedLines, patchedLinesLength)); + } + } + } + // Before returning, trim any unmodified context lines off the end of patchedLines and reduce + // toPos (and thus oldLineLastI) accordingly. This allows later hunks to be applied to a region + // that starts in this hunk's trailing context. + patchedLinesLength -= nConsecutiveOldContextLines; + toPos -= nConsecutiveOldContextLines; + patchedLines.length = patchedLinesLength; + return { + patchedLines, + oldLineLastI: toPos - 1 + }; + } + const resultLines = []; + // Search best fit offsets for each hunk based on the previous ones + let prevHunkOffset = 0; + for (let i = 0; i < hunks.length; i++) { + const hunk = hunks[i]; + let hunkResult; + const maxLine = lines.length - hunk.oldLines + fuzzFactor; + let toPos; + for (let maxErrors = 0; maxErrors <= fuzzFactor; maxErrors++) { + toPos = hunk.oldStart + prevHunkOffset - 1; + const iterator = distanceIterator(toPos, minLine, maxLine); + for (; toPos !== undefined; toPos = iterator()) { + hunkResult = applyHunk(hunk.lines, toPos, maxErrors); + if (hunkResult) { + break; + } + } + if (hunkResult) { + break; + } + } + if (!hunkResult) { + return false; + } + // Copy everything from the end of where we applied the last hunk to the start of this hunk + for (let i = minLine; i < toPos; i++) { + resultLines.push(lines[i]); + } + // Add the lines produced by applying the hunk: + for (let i = 0; i < hunkResult.patchedLines.length; i++) { + const line = hunkResult.patchedLines[i]; + resultLines.push(line); + } + // Set lower text limit to end of the current hunk, so next ones don't try + // to fit over already patched text + minLine = hunkResult.oldLineLastI + 1; + // Note the offset between where the patch said the hunk should've applied and where we + // applied it, so we can adjust future hunks accordingly: + prevHunkOffset = toPos + 1 - hunk.oldStart; + } + // Copy over the rest of the lines from the old text + for (let i = minLine; i < lines.length; i++) { + resultLines.push(lines[i]); + } + return resultLines.join('\n'); +} +/** + * applies one or more patches. + * + * `patch` may be either an array of structured patch objects, or a string representing a patch in unified diff format (which may patch one or more files). + * + * This method will iterate over the contents of the patch and apply to data provided through callbacks. The general flow for each patch index is: + * + * - `options.loadFile(index, callback)` is called. The caller should then load the contents of the file and then pass that to the `callback(err, data)` callback. Passing an `err` will terminate further patch execution. + * - `options.patched(index, content, callback)` is called once the patch has been applied. `content` will be the return value from `applyPatch`. When it's ready, the caller should call `callback(err)` callback. Passing an `err` will terminate further patch execution. + * + * Once all patches have been applied or an error occurs, the `options.complete(err)` callback is made. + */ +export function applyPatches(uniDiff, options) { + const spDiff = typeof uniDiff === 'string' ? 
parsePatch(uniDiff) : uniDiff; + let currentIndex = 0; + function processIndex() { + const index = spDiff[currentIndex++]; + if (!index) { + return options.complete(); + } + options.loadFile(index, function (err, data) { + if (err) { + return options.complete(err); + } + const updatedContent = applyPatch(data, index, options); + options.patched(index, updatedContent, function (err) { + if (err) { + return options.complete(err); + } + processIndex(); + }); + }); + } + processIndex(); +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/patch/create.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/patch/create.js new file mode 100644 index 0000000000000000000000000000000000000000..7019c3c5ec46e7c1fc0012bb567fea0f62374558 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/patch/create.js @@ -0,0 +1,201 @@ +import { diffLines } from '../diff/line.js'; +export function structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, options) { + let optionsObj; + if (!options) { + optionsObj = {}; + } + else if (typeof options === 'function') { + optionsObj = { callback: options }; + } + else { + optionsObj = options; + } + if (typeof optionsObj.context === 'undefined') { + optionsObj.context = 4; + } + // We copy this into its own variable to placate TypeScript, which thinks + // optionsObj.context might be undefined in the callbacks below. + const context = optionsObj.context; + // @ts-expect-error (runtime check for something that is correctly a static type error) + if (optionsObj.newlineIsToken) { + throw new Error('newlineIsToken may not be used with patch-generation functions, only with diffing functions'); + } + if (!optionsObj.callback) { + return diffLinesResultToPatch(diffLines(oldStr, newStr, optionsObj)); + } + else { + const { callback } = optionsObj; + diffLines(oldStr, newStr, Object.assign(Object.assign({}, optionsObj), { callback: (diff) => { + const patch = diffLinesResultToPatch(diff); + // TypeScript is unhappy without the cast because it does not understand that `patch` may + // be undefined here only if `callback` is StructuredPatchCallbackAbortable: + callback(patch); + } })); + } + function diffLinesResultToPatch(diff) { + // STEP 1: Build up the patch with no "\ No newline at end of file" lines and with the arrays + // of lines containing trailing newline characters. We'll tidy up later... + if (!diff) { + return; + } + diff.push({ value: '', lines: [] }); // Append an empty value to make cleanup easier + function contextLines(lines) { + return lines.map(function (entry) { return ' ' + entry; }); + } + const hunks = []; + let oldRangeStart = 0, newRangeStart = 0, curRange = [], oldLine = 1, newLine = 1; + for (let i = 0; i < diff.length; i++) { + const current = diff[i], lines = current.lines || splitLines(current.value); + current.lines = lines; + if (current.added || current.removed) { + // If we have previous context, start with that + if (!oldRangeStart) { + const prev = diff[i - 1]; + oldRangeStart = oldLine; + newRangeStart = newLine; + if (prev) { + curRange = context > 0 ? contextLines(prev.lines.slice(-context)) : []; + oldRangeStart -= curRange.length; + newRangeStart -= curRange.length; + } + } + // Output our changes + for (const line of lines) { + curRange.push((current.added ? 
'+' : '-') + line); + } + // Track the updated file position + if (current.added) { + newLine += lines.length; + } + else { + oldLine += lines.length; + } + } + else { + // Identical context lines. Track line changes + if (oldRangeStart) { + // Close out any changes that have been output (or join overlapping) + if (lines.length <= context * 2 && i < diff.length - 2) { + // Overlapping + for (const line of contextLines(lines)) { + curRange.push(line); + } + } + else { + // end the range and output + const contextSize = Math.min(lines.length, context); + for (const line of contextLines(lines.slice(0, contextSize))) { + curRange.push(line); + } + const hunk = { + oldStart: oldRangeStart, + oldLines: (oldLine - oldRangeStart + contextSize), + newStart: newRangeStart, + newLines: (newLine - newRangeStart + contextSize), + lines: curRange + }; + hunks.push(hunk); + oldRangeStart = 0; + newRangeStart = 0; + curRange = []; + } + } + oldLine += lines.length; + newLine += lines.length; + } + } + // Step 2: eliminate the trailing `\n` from each line of each hunk, and, where needed, add + // "\ No newline at end of file". + for (const hunk of hunks) { + for (let i = 0; i < hunk.lines.length; i++) { + if (hunk.lines[i].endsWith('\n')) { + hunk.lines[i] = hunk.lines[i].slice(0, -1); + } + else { + hunk.lines.splice(i + 1, 0, '\\ No newline at end of file'); + i++; // Skip the line we just added, then continue iterating + } + } + } + return { + oldFileName: oldFileName, newFileName: newFileName, + oldHeader: oldHeader, newHeader: newHeader, + hunks: hunks + }; + } +} +/** + * creates a unified diff patch. + * @param patch either a single structured patch object (as returned by `structuredPatch`) or an array of them (as returned by `parsePatch`) + */ +export function formatPatch(patch) { + if (Array.isArray(patch)) { + return patch.map(formatPatch).join('\n'); + } + const ret = []; + if (patch.oldFileName == patch.newFileName) { + ret.push('Index: ' + patch.oldFileName); + } + ret.push('==================================================================='); + ret.push('--- ' + patch.oldFileName + (typeof patch.oldHeader === 'undefined' ? '' : '\t' + patch.oldHeader)); + ret.push('+++ ' + patch.newFileName + (typeof patch.newHeader === 'undefined' ? '' : '\t' + patch.newHeader)); + for (let i = 0; i < patch.hunks.length; i++) { + const hunk = patch.hunks[i]; + // Unified Diff Format quirk: If the chunk size is 0, + // the first number is one lower than one would expect. + // https://www.artima.com/weblogs/viewpost.jsp?thread=164293 + if (hunk.oldLines === 0) { + hunk.oldStart -= 1; + } + if (hunk.newLines === 0) { + hunk.newStart -= 1; + } + ret.push('@@ -' + hunk.oldStart + ',' + hunk.oldLines + + ' +' + hunk.newStart + ',' + hunk.newLines + + ' @@'); + for (const line of hunk.lines) { + ret.push(line); + } + } + return ret.join('\n') + '\n'; +} +export function createTwoFilesPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, options) { + if (typeof options === 'function') { + options = { callback: options }; + } + if (!(options === null || options === void 0 ? 
void 0 : options.callback)) { + const patchObj = structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, options); + if (!patchObj) { + return; + } + return formatPatch(patchObj); + } + else { + const { callback } = options; + structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, Object.assign(Object.assign({}, options), { callback: patchObj => { + if (!patchObj) { + callback(undefined); + } + else { + callback(formatPatch(patchObj)); + } + } })); + } +} +export function createPatch(fileName, oldStr, newStr, oldHeader, newHeader, options) { + return createTwoFilesPatch(fileName, fileName, oldStr, newStr, oldHeader, newHeader, options); +} +/** + * Split `text` into an array of lines, including the trailing newline character (where present) + */ +function splitLines(text) { + const hasTrailingNl = text.endsWith('\n'); + const result = text.split('\n').map(line => line + '\n'); + if (hasTrailingNl) { + result.pop(); + } + else { + result.push(result.pop().slice(0, -1)); + } + return result; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/patch/line-endings.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/patch/line-endings.js new file mode 100644 index 0000000000000000000000000000000000000000..ab54b715f0047dd16236b87b82b9612cb035f932 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/patch/line-endings.js @@ -0,0 +1,44 @@ +export function unixToWin(patch) { + if (Array.isArray(patch)) { + // It would be cleaner if instead of the line below we could just write + // return patch.map(unixToWin) + // but mysteriously TypeScript (v5.7.3 at the time of writing) does not like this and it will + // refuse to compile, thinking that unixToWin could then return StructuredPatch[][] and the + // result would be incompatible with the overload signatures. + // See bug report at https://github.com/microsoft/TypeScript/issues/61398. + return patch.map(p => unixToWin(p)); + } + return Object.assign(Object.assign({}, patch), { hunks: patch.hunks.map(hunk => (Object.assign(Object.assign({}, hunk), { lines: hunk.lines.map((line, i) => { + var _a; + return (line.startsWith('\\') || line.endsWith('\r') || ((_a = hunk.lines[i + 1]) === null || _a === void 0 ? void 0 : _a.startsWith('\\'))) + ? line + : line + '\r'; + }) }))) }); +} +export function winToUnix(patch) { + if (Array.isArray(patch)) { + // (See comment above equivalent line in unixToWin) + return patch.map(p => winToUnix(p)); + } + return Object.assign(Object.assign({}, patch), { hunks: patch.hunks.map(hunk => (Object.assign(Object.assign({}, hunk), { lines: hunk.lines.map(line => line.endsWith('\r') ? line.substring(0, line.length - 1) : line) }))) }); +} +/** + * Returns true if the patch consistently uses Unix line endings (or only involves one line and has + * no line endings). + */ +export function isUnix(patch) { + if (!Array.isArray(patch)) { + patch = [patch]; + } + return !patch.some(index => index.hunks.some(hunk => hunk.lines.some(line => !line.startsWith('\\') && line.endsWith('\r')))); +} +/** + * Returns true if the patch uses Windows line endings and only Windows line endings. 
+ */ +export function isWin(patch) { + if (!Array.isArray(patch)) { + patch = [patch]; + } + return patch.some(index => index.hunks.some(hunk => hunk.lines.some(line => line.endsWith('\r')))) + && patch.every(index => index.hunks.every(hunk => hunk.lines.every((line, i) => { var _a; return line.startsWith('\\') || line.endsWith('\r') || ((_a = hunk.lines[i + 1]) === null || _a === void 0 ? void 0 : _a.startsWith('\\')); }))); +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/patch/parse.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/patch/parse.js new file mode 100644 index 0000000000000000000000000000000000000000..3f9a0d7904f60a4eaf5f88c513343ec117cc5ac0 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/patch/parse.js @@ -0,0 +1,130 @@ +/** + * Parses a patch into structured data, in the same structure returned by `structuredPatch`. + * + * @return a JSON object representation of the a patch, suitable for use with the `applyPatch` method. + */ +export function parsePatch(uniDiff) { + const diffstr = uniDiff.split(/\n/), list = []; + let i = 0; + function parseIndex() { + const index = {}; + list.push(index); + // Parse diff metadata + while (i < diffstr.length) { + const line = diffstr[i]; + // File header found, end parsing diff metadata + if ((/^(---|\+\+\+|@@)\s/).test(line)) { + break; + } + // Diff index + const header = (/^(?:Index:|diff(?: -r \w+)+)\s+(.+?)\s*$/).exec(line); + if (header) { + index.index = header[1]; + } + i++; + } + // Parse file headers if they are defined. Unified diff requires them, but + // there's no technical issues to have an isolated hunk without file header + parseFileHeader(index); + parseFileHeader(index); + // Parse hunks + index.hunks = []; + while (i < diffstr.length) { + const line = diffstr[i]; + if ((/^(Index:\s|diff\s|---\s|\+\+\+\s|===================================================================)/).test(line)) { + break; + } + else if ((/^@@/).test(line)) { + index.hunks.push(parseHunk()); + } + else if (line) { + throw new Error('Unknown line ' + (i + 1) + ' ' + JSON.stringify(line)); + } + else { + i++; + } + } + } + // Parses the --- and +++ headers, if none are found, no lines + // are consumed. + function parseFileHeader(index) { + const fileHeader = (/^(---|\+\+\+)\s+(.*)\r?$/).exec(diffstr[i]); + if (fileHeader) { + const data = fileHeader[2].split('\t', 2), header = (data[1] || '').trim(); + let fileName = data[0].replace(/\\\\/g, '\\'); + if ((/^".*"$/).test(fileName)) { + fileName = fileName.substr(1, fileName.length - 2); + } + if (fileHeader[1] === '---') { + index.oldFileName = fileName; + index.oldHeader = header; + } + else { + index.newFileName = fileName; + index.newHeader = header; + } + i++; + } + } + // Parses a hunk + // This assumes that we are at the start of a hunk. + function parseHunk() { + var _a; + const chunkHeaderIndex = i, chunkHeaderLine = diffstr[i++], chunkHeader = chunkHeaderLine.split(/@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@/); + const hunk = { + oldStart: +chunkHeader[1], + oldLines: typeof chunkHeader[2] === 'undefined' ? 1 : +chunkHeader[2], + newStart: +chunkHeader[3], + newLines: typeof chunkHeader[4] === 'undefined' ? 1 : +chunkHeader[4], + lines: [] + }; + // Unified Diff Format quirk: If the chunk size is 0, + // the first number is one lower than one would expect. 
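// (For instance: a hunk that only inserts two lines at the very top of a
// file is written '@@ -0,0 +1,2 @@', so the oldStart of 0 parsed here must
// be bumped back to 1; formatPatch in create.js applies the inverse
// adjustment.)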
+ // https://www.artima.com/weblogs/viewpost.jsp?thread=164293 + if (hunk.oldLines === 0) { + hunk.oldStart += 1; + } + if (hunk.newLines === 0) { + hunk.newStart += 1; + } + let addCount = 0, removeCount = 0; + for (; i < diffstr.length && (removeCount < hunk.oldLines || addCount < hunk.newLines || ((_a = diffstr[i]) === null || _a === void 0 ? void 0 : _a.startsWith('\\'))); i++) { + const operation = (diffstr[i].length == 0 && i != (diffstr.length - 1)) ? ' ' : diffstr[i][0]; + if (operation === '+' || operation === '-' || operation === ' ' || operation === '\\') { + hunk.lines.push(diffstr[i]); + if (operation === '+') { + addCount++; + } + else if (operation === '-') { + removeCount++; + } + else if (operation === ' ') { + addCount++; + removeCount++; + } + } + else { + throw new Error(`Hunk at line ${chunkHeaderIndex + 1} contained invalid line ${diffstr[i]}`); + } + } + // Handle the empty block count case + if (!addCount && hunk.newLines === 1) { + hunk.newLines = 0; + } + if (!removeCount && hunk.oldLines === 1) { + hunk.oldLines = 0; + } + // Perform sanity checking + if (addCount !== hunk.newLines) { + throw new Error('Added line count did not match for hunk at line ' + (chunkHeaderIndex + 1)); + } + if (removeCount !== hunk.oldLines) { + throw new Error('Removed line count did not match for hunk at line ' + (chunkHeaderIndex + 1)); + } + return hunk; + } + while (i < diffstr.length) { + parseIndex(); + } + return list; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/patch/reverse.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/patch/reverse.js new file mode 100644 index 0000000000000000000000000000000000000000..9207b51c63c55eaa02ffbf0b95bb7697530602b5 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/patch/reverse.js @@ -0,0 +1,23 @@ +export function reversePatch(structuredPatch) { + if (Array.isArray(structuredPatch)) { + // (See comment in unixToWin for why we need the pointless-looking anonymous function here) + return structuredPatch.map(patch => reversePatch(patch)).reverse(); + } + return Object.assign(Object.assign({}, structuredPatch), { oldFileName: structuredPatch.newFileName, oldHeader: structuredPatch.newHeader, newFileName: structuredPatch.oldFileName, newHeader: structuredPatch.oldHeader, hunks: structuredPatch.hunks.map(hunk => { + return { + oldLines: hunk.newLines, + oldStart: hunk.newStart, + newLines: hunk.oldLines, + newStart: hunk.oldStart, + lines: hunk.lines.map(l => { + if (l.startsWith('-')) { + return `+${l.slice(1)}`; + } + if (l.startsWith('+')) { + return `-${l.slice(1)}`; + } + return l; + }) + }; + }) }); +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/types.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/types.js new file mode 100644 index 0000000000000000000000000000000000000000..cb0ff5c3b541f646105198ee23ac0fc3d805023e --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/types.js @@ -0,0 +1 @@ +export {}; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/util/array.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/util/array.js new file mode 100644 index 0000000000000000000000000000000000000000..c3e00f850039086088f96dba21128b65b5c0ac47 --- /dev/null +++ 
b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/util/array.js @@ -0,0 +1,17 @@ +export function arrayEqual(a, b) { + if (a.length !== b.length) { + return false; + } + return arrayStartsWith(a, b); +} +export function arrayStartsWith(array, start) { + if (start.length > array.length) { + return false; + } + for (let i = 0; i < start.length; i++) { + if (start[i] !== array[i]) { + return false; + } + } + return true; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/util/distance-iterator.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/util/distance-iterator.js new file mode 100644 index 0000000000000000000000000000000000000000..afa638143ece1a1eee50c41759e58905f79f08ba --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/util/distance-iterator.js @@ -0,0 +1,37 @@ +// Iterator that traverses in the range of [min, max], stepping +// by distance from a given start position. I.e. for [0, 4], with +// start of 2, this will iterate 2, 3, 1, 4, 0. +export default function (start, minLine, maxLine) { + let wantForward = true, backwardExhausted = false, forwardExhausted = false, localOffset = 1; + return function iterator() { + if (wantForward && !forwardExhausted) { + if (backwardExhausted) { + localOffset++; + } + else { + wantForward = false; + } + // Check if trying to fit beyond text length, and if not, check it fits + // after offset location (or desired location on first iteration) + if (start + localOffset <= maxLine) { + return start + localOffset; + } + forwardExhausted = true; + } + if (!backwardExhausted) { + if (!forwardExhausted) { + wantForward = true; + } + // Check if trying to fit before text beginning, and if not, check it fits + // before offset location + if (minLine <= start - localOffset) { + return start - localOffset++; + } + backwardExhausted = true; + return iterator(); + } + // We tried to fit hunk before text beginning and beyond text length, then + // hunk can't fit on the text. 
Return undefined + return undefined; + }; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/util/params.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/util/params.js new file mode 100644 index 0000000000000000000000000000000000000000..c9921a2106257de02e9d239e3c63cba6c009b98c --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/util/params.js @@ -0,0 +1,14 @@ +export function generateOptions(options, defaults) { + if (typeof options === 'function') { + defaults.callback = options; + } + else if (options) { + for (const name in options) { + /* istanbul ignore else */ + if (Object.prototype.hasOwnProperty.call(options, name)) { + defaults[name] = options[name]; + } + } + } + return defaults; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/util/string.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/util/string.js new file mode 100644 index 0000000000000000000000000000000000000000..36cfb3aa85ddfebbf9c82e8b00e07e5110b33dea --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/diff/libesm/util/string.js @@ -0,0 +1,128 @@ +export function longestCommonPrefix(str1, str2) { + let i; + for (i = 0; i < str1.length && i < str2.length; i++) { + if (str1[i] != str2[i]) { + return str1.slice(0, i); + } + } + return str1.slice(0, i); +} +export function longestCommonSuffix(str1, str2) { + let i; + // Unlike longestCommonPrefix, we need a special case to handle all scenarios + // where we return the empty string since str1.slice(-0) will return the + // entire string. + if (!str1 || !str2 || str1[str1.length - 1] != str2[str2.length - 1]) { + return ''; + } + for (i = 0; i < str1.length && i < str2.length; i++) { + if (str1[str1.length - (i + 1)] != str2[str2.length - (i + 1)]) { + return str1.slice(-i); + } + } + return str1.slice(-i); +} +export function replacePrefix(string, oldPrefix, newPrefix) { + if (string.slice(0, oldPrefix.length) != oldPrefix) { + throw Error(`string ${JSON.stringify(string)} doesn't start with prefix ${JSON.stringify(oldPrefix)}; this is a bug`); + } + return newPrefix + string.slice(oldPrefix.length); +} +export function replaceSuffix(string, oldSuffix, newSuffix) { + if (!oldSuffix) { + return string + newSuffix; + } + if (string.slice(-oldSuffix.length) != oldSuffix) { + throw Error(`string ${JSON.stringify(string)} doesn't end with suffix ${JSON.stringify(oldSuffix)}; this is a bug`); + } + return string.slice(0, -oldSuffix.length) + newSuffix; +} +export function removePrefix(string, oldPrefix) { + return replacePrefix(string, oldPrefix, ''); +} +export function removeSuffix(string, oldSuffix) { + return replaceSuffix(string, oldSuffix, ''); +} +export function maximumOverlap(string1, string2) { + return string2.slice(0, overlapCount(string1, string2)); +} +// Nicked from https://stackoverflow.com/a/60422853/1709587 +function overlapCount(a, b) { + // Deal with cases where the strings differ in length + let startA = 0; + if (a.length > b.length) { + startA = a.length - b.length; + } + let endB = b.length; + if (a.length < b.length) { + endB = a.length; + } + // Create a back-reference for each index + // that should be followed in case of a mismatch. 
+ // We only need B to make these references: + const map = Array(endB); + let k = 0; // Index that lags behind j + map[0] = 0; + for (let j = 1; j < endB; j++) { + if (b[j] == b[k]) { + map[j] = map[k]; // skip over the same character (optional optimisation) + } + else { + map[j] = k; + } + while (k > 0 && b[j] != b[k]) { + k = map[k]; + } + if (b[j] == b[k]) { + k++; + } + } + // Phase 2: use these references while iterating over A + k = 0; + for (let i = startA; i < a.length; i++) { + while (k > 0 && a[i] != b[k]) { + k = map[k]; + } + if (a[i] == b[k]) { + k++; + } + } + return k; +} +/** + * Returns true if the string consistently uses Windows line endings. + */ +export function hasOnlyWinLineEndings(string) { + return string.includes('\r\n') && !string.startsWith('\n') && !string.match(/[^\r]\n/); +} +/** + * Returns true if the string consistently uses Unix line endings. + */ +export function hasOnlyUnixLineEndings(string) { + return !string.includes('\r\n') && string.includes('\n'); +} +export function trailingWs(string) { + // Yes, this looks overcomplicated and dumb - why not replace the whole function with + // return string.match(/\s*$/)[0] + // you ask? Because: + // 1. the trap described at https://markamery.com/blog/quadratic-time-regexes/ would mean doing + // this would cause this function to take O(n²) time in the worst case (specifically when + // there is a massive run of NON-TRAILING whitespace in `string`), and + // 2. the fix proposed in the same blog post, of using a negative lookbehind, is incompatible + // with old Safari versions that we'd like to not break if possible (see + // https://github.com/kpdecker/jsdiff/pull/550) + // It feels absurd to do this with an explicit loop instead of a regex, but I really can't see a + // better way that doesn't result in broken behaviour. + let i; + for (i = string.length - 1; i >= 0; i--) { + if (!string[i].match(/\s/)) { + break; + } + } + return string.substring(i + 1); +} +export function leadingWs(string) { + // Thankfully the annoying considerations described in trailingWs don't apply here: + const match = string.match(/^\s*/); + return match ?
match[0] : ''; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/emoji-regex/es2015/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/emoji-regex/es2015/index.js new file mode 100644 index 0000000000000000000000000000000000000000..b4cf3dcd389935061d82cd4f2d9da78c77a5b088 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/emoji-regex/es2015/index.js @@ -0,0 +1,6 @@ +"use strict"; + +module.exports = () => { + // https://mths.be/emoji + return /\u{1F3F4}\u{E0067}\u{E0062}(?:\u{E0065}\u{E006E}\u{E0067}|\u{E0073}\u{E0063}\u{E0074}|\u{E0077}\u{E006C}\u{E0073})\u{E007F}|\u{1F468}(?:\u{1F3FC}\u200D(?:\u{1F91D}\u200D\u{1F468}\u{1F3FB}|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FE}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FE}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FD}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FD}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FC}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F468}|[\u{1F468}\u{1F469}]\u200D(?:\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}])|\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}]|[\u{1F468}\u{1F469}]\u200D[\u{1F466}\u{1F467}]|[\u2695\u2696\u2708]\uFE0F|[\u{1F466}\u{1F467}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|(?:\u{1F3FB}\u200D[\u2695\u2696\u2708]|\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708])\uFE0F|\u{1F3FB}\u200D[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|[\u{1F3FB}-\u{1F3FF}])|(?:\u{1F9D1}\u{1F3FB}\u200D\u{1F91D}\u200D\u{1F9D1}|\u{1F469}\u{1F3FC}\u200D\u{1F91D}\u200D\u{1F469})\u{1F3FB}|\u{1F9D1}(?:\u{1F3FF}\u200D\u{1F91D}\u200D\u{1F9D1}[\u{1F3FB}-\u{1F3FF}]|\u200D\u{1F91D}\u200D\u{1F9D1})|(?:\u{1F9D1}\u{1F3FE}\u200D\u{1F91D}\u200D\u{1F9D1}|\u{1F469}\u{1F3FF}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}-\u{1F3FE}]|(?:\u{1F9D1}\u{1F3FC}\u200D\u{1F91D}\u200D\u{1F9D1}|\u{1F469}\u{1F3FD}\u200D\u{1F91D}\u200D\u{1F469})[\u{1F3FB}\u{1F3FC}]|\u{1F469}(?:\u{1F3FE}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FD}\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FC}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FD}-\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FB}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FC}-\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{
1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FD}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FC}\u{1F3FE}\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D[\u{1F468}\u{1F469}]|[\u{1F468}\u{1F469}])|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F469}\u200D\u{1F469}\u200D(?:\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}])|(?:\u{1F9D1}\u{1F3FD}\u200D\u{1F91D}\u200D\u{1F9D1}|\u{1F469}\u{1F3FE}\u200D\u{1F91D}\u200D\u{1F469})[\u{1F3FB}-\u{1F3FD}]|\u{1F469}\u200D\u{1F466}\u200D\u{1F466}|\u{1F469}\u200D\u{1F469}\u200D[\u{1F466}\u{1F467}]|(?:\u{1F441}\uFE0F\u200D\u{1F5E8}|\u{1F469}(?:\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708]|\u{1F3FB}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|(?:[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}]\uFE0F|[\u{1F46F}\u{1F93C}\u{1F9DE}\u{1F9DF}])\u200D[\u2640\u2642]|[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}][\u{1F3FB}-\u{1F3FF}]\u200D[\u2640\u2642]|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D6}-\u{1F9DD}](?:[\u{1F3FB}-\u{1F3FF}]\u200D[\u2640\u2642]|\u200D[\u2640\u2642])|\u{1F3F4}\u200D\u2620)\uFE0F|\u{1F469}\u200D\u{1F467}\u200D[\u{1F466}\u{1F467}]|\u{1F3F3}\uFE0F\u200D\u{1F308}|\u{1F415}\u200D\u{1F9BA}|\u{1F469}\u200D\u{1F466}|\u{1F469}\u200D\u{1F467}|\u{1F1FD}\u{1F1F0}|\u{1F1F4}\u{1F1F2}|\u{1F1F6}\u{1F1E6}|[#\*0-9]\uFE0F\u20E3|\u{1F1E7}[\u{1F1E6}\u{1F1E7}\u{1F1E9}-\u{1F1EF}\u{1F1F1}-\u{1F1F4}\u{1F1F6}-\u{1F1F9}\u{1F1FB}\u{1F1FC}\u{1F1FE}\u{1F1FF}]|\u{1F1F9}[\u{1F1E6}\u{1F1E8}\u{1F1E9}\u{1F1EB}-\u{1F1ED}\u{1F1EF}-\u{1F1F4}\u{1F1F7}\u{1F1F9}\u{1F1FB}\u{1F1FC}\u{1F1FF}]|\u{1F1EA}[\u{1F1E6}\u{1F1E8}\u{1F1EA}\u{1F1EC}\u{1F1ED}\u{1F1F7}-\u{1F1FA}]|\u{1F9D1}[\u{1F3FB}-\u{1F3FF}]|\u{1F1F7}[\u{1F1EA}\u{1F1F4}\u{1F1F8}\u{1F1FA}\u{1F1FC}]|\u{1F469}[\u{1F3FB}-\u{1F3FF}]|\u{1F1F2}[\u{1F1E6}\u{1F1E8}-\u{1F1ED}\u{1F1F0}-\u{1F1FF}]|\u{1F1E6}[\u{1F1E8}-\u{1F1EC}\u{1F1EE}\u{1F1F1}\u{1F1F2}\u{1F1F4}\u{1F1F6}-\u{1F1FA}\u{1F1FC}\u{1F1FD}\u{1F1FF}]|\u{1F1F0}[\u{1F1EA}\u{1F1EC}-\u{1F1EE}\u{1F1F2}\u{1F1F3}\u{1F1F5}\u{1F1F7}\u{1F1FC}\u{1F1FE}\u{1F1FF}]|\u{1F1ED}[\u{1F1F0}\u{1F1F2}\u{1F1F3}\u{1F1F7}\u{1F1F9}\u{1F1FA}]|\u{1F1E9}[\u{1F1EA}\u{1F1EC}\u{1F1EF}\u{1F1F0}\u{1F1F2}\u{1F1F4}\u{1F1FF}]|\u{1F1FE}[\u{1F1EA}\u{1F1F9}]|\u{1F1EC}[\u{1F1E6}\u{1F1E7}\u{1F1E9}-\u{1F1EE}\u{1F1F1}-\u{1F1F3}\u{1F1F5}-\u{1F1FA}\u{1F1FC}\u{1F1FE}]|\u{1F1F8}[\u{1F1E6}-\u{1F1EA}\u{1F1EC}-\u{1F1F4}\u{1F1F7}-\u{1F1F9}\u{1F1FB}\u{1F1FD}-\u{1F1FF}]|\u{1F1EB}[\u{1F1EE}-\u{1F1F0}\u{1F1F2}\u{1F1F4}\u{1F1F7}]|\u{1F1F5}[\u{1F1E6}\u{1F1EA}-\u{1F1ED}\u{1F1F0}-\u{1F1F3}\u{1F1F7}-\u{1F1F9}\u{1F1FC}\u{1F1FE}]|\u{1F1FB}[\u{1F1E6}\u{1F1E8}\u{1F1EA}\u{1F1EC}\u{1F1EE}\u{1F1F3}\u{1F1FA}]|\u{1F1F3}[\u{1F1E6}\u{1F1E8}\u{1F1EA}-\u{1F1EC}\u{1F1EE}\u{1F1F1}\u{1F1F4}\u{1F1F5}\u{1F1F7}\u{1F1FA}\u{1F1FF}]|\u{1F1E8}[\u{1F1E6}\u{1F1E8}\u{1F1E9}\u{1F1EB}-\u{1F1EE}\u{1F1F0}-\u{1F1F5}\u{1F1F7}\u{1
F1FA}-\u{1F1FF}]|\u{1F1F1}[\u{1F1E6}-\u{1F1E8}\u{1F1EE}\u{1F1F0}\u{1F1F7}-\u{1F1FB}\u{1F1FE}]|\u{1F1FF}[\u{1F1E6}\u{1F1F2}\u{1F1FC}]|\u{1F1FC}[\u{1F1EB}\u{1F1F8}]|\u{1F1FA}[\u{1F1E6}\u{1F1EC}\u{1F1F2}\u{1F1F3}\u{1F1F8}\u{1F1FE}\u{1F1FF}]|\u{1F1EE}[\u{1F1E8}-\u{1F1EA}\u{1F1F1}-\u{1F1F4}\u{1F1F6}-\u{1F1F9}]|\u{1F1EF}[\u{1F1EA}\u{1F1F2}\u{1F1F4}\u{1F1F5}]|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D6}-\u{1F9DD}][\u{1F3FB}-\u{1F3FF}]|[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}][\u{1F3FB}-\u{1F3FF}]|[\u261D\u270A-\u270D\u{1F385}\u{1F3C2}\u{1F3C7}\u{1F442}\u{1F443}\u{1F446}-\u{1F450}\u{1F466}\u{1F467}\u{1F46B}-\u{1F46D}\u{1F470}\u{1F472}\u{1F474}-\u{1F476}\u{1F478}\u{1F47C}\u{1F483}\u{1F485}\u{1F4AA}\u{1F574}\u{1F57A}\u{1F590}\u{1F595}\u{1F596}\u{1F64C}\u{1F64F}\u{1F6C0}\u{1F6CC}\u{1F90F}\u{1F918}-\u{1F91C}\u{1F91E}\u{1F91F}\u{1F930}-\u{1F936}\u{1F9B5}\u{1F9B6}\u{1F9BB}\u{1F9D2}-\u{1F9D5}][\u{1F3FB}-\u{1F3FF}]|[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u270A\u270B\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B\u2B1C\u2B50\u2B55\u{1F004}\u{1F0CF}\u{1F18E}\u{1F191}-\u{1F19A}\u{1F1E6}-\u{1F1FF}\u{1F201}\u{1F21A}\u{1F22F}\u{1F232}-\u{1F236}\u{1F238}-\u{1F23A}\u{1F250}\u{1F251}\u{1F300}-\u{1F320}\u{1F32D}-\u{1F335}\u{1F337}-\u{1F37C}\u{1F37E}-\u{1F393}\u{1F3A0}-\u{1F3CA}\u{1F3CF}-\u{1F3D3}\u{1F3E0}-\u{1F3F0}\u{1F3F4}\u{1F3F8}-\u{1F43E}\u{1F440}\u{1F442}-\u{1F4FC}\u{1F4FF}-\u{1F53D}\u{1F54B}-\u{1F54E}\u{1F550}-\u{1F567}\u{1F57A}\u{1F595}\u{1F596}\u{1F5A4}\u{1F5FB}-\u{1F64F}\u{1F680}-\u{1F6C5}\u{1F6CC}\u{1F6D0}-\u{1F6D2}\u{1F6D5}\u{1F6EB}\u{1F6EC}\u{1F6F4}-\u{1F6FA}\u{1F7E0}-\u{1F7EB}\u{1F90D}-\u{1F93A}\u{1F93C}-\u{1F945}\u{1F947}-\u{1F971}\u{1F973}-\u{1F976}\u{1F97A}-\u{1F9A2}\u{1F9A5}-\u{1F9AA}\u{1F9AE}-\u{1F9CA}\u{1F9CD}-\u{1F9FF}\u{1FA70}-\u{1FA73}\u{1FA78}-\u{1FA7A}\u{1FA80}-\u{1FA82}\u{1FA90}-\u{1FA95}]|[#\*0-9\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u231A\u231B\u2328\u23CF\u23E9-\u23F3\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB-\u25FE\u2600-\u2604\u260E\u2611\u2614\u2615\u2618\u261D\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u2648-\u2653\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u267F\u2692-\u2697\u2699\u269B\u269C\u26A0\u26A1\u26AA\u26AB\u26B0\u26B1\u26BD\u26BE\u26C4\u26C5\u26C8\u26CE\u26CF\u26D1\u26D3\u26D4\u26E9\u26EA\u26F0-\u26F5\u26F7-\u26FA\u26FD\u2702\u2705\u2708-\u270D\u270F\u2712\u2714\u2716\u271D\u2721\u2728\u2733\u2734\u2744\u2747\u274C\u274E\u2753-\u2755\u2757\u2763\u2764\u2795-\u2797\u27A1\u27B0\u27BF\u2934\u2935\u2B05-\u2B07\u2B1B\u2B1C\u2B50\u2B55\u3030\u303D\u3297\u3299\u{1F004}\u{1F0CF}\u{1F170}\u{1F171}\u{1F17E}\u{1F17F}\u{1F18E}\u{1F191}-\u{1F19A}\u{1F1E6}-\u{1F1FF}\u{1F201}\u{1F202}\u{1F21A}\u{1F22F}\u{1F232}-\u{1F23A}\u{1F250}\u{1F251}\u{1F300}-\u{1F321}\u{1F324}-\u{1F393}\u{1F396}\u{1F397}\u{1F399}-\u{1F39B}\u{1F39E}-\u{1F3F0}\u{1F3F3}-\u{1F3F5}\u{1F3F7}-\u{1F4FD}\u{1F4FF}-\u{1F53D}\u{1F549}-\u{1F54E}\u{1F550}-\u{1F567}\u{1F56F}\u{1F570}\u{1F573}-\u{1F57A}\u{1F587}\u{1F58A}-\u{1F58D}\u{1F590}\u{1F595}\u{1F596}\u{1F5A4}\u{1F5A5}\u{1F5A8}\u{1F5B1}\u{1F5B2}\u{1F5BC}\u{1F5C2}-\u{1F5C4}\u{1F5D1}-\u{1F5D3}\u{1F5DC}-\u{1F5DE}\u{1F5E1}\u{1F5E3}\u{1F5E8}\u{1F5EF}\u{
1F5F3}\u{1F5FA}-\u{1F64F}\u{1F680}-\u{1F6C5}\u{1F6CB}-\u{1F6D2}\u{1F6D5}\u{1F6E0}-\u{1F6E5}\u{1F6E9}\u{1F6EB}\u{1F6EC}\u{1F6F0}\u{1F6F3}-\u{1F6FA}\u{1F7E0}-\u{1F7EB}\u{1F90D}-\u{1F93A}\u{1F93C}-\u{1F945}\u{1F947}-\u{1F971}\u{1F973}-\u{1F976}\u{1F97A}-\u{1F9A2}\u{1F9A5}-\u{1F9AA}\u{1F9AE}-\u{1F9CA}\u{1F9CD}-\u{1F9FF}\u{1FA70}-\u{1FA73}\u{1FA78}-\u{1FA7A}\u{1FA80}-\u{1FA82}\u{1FA90}-\u{1FA95}]\uFE0F|[\u261D\u26F9\u270A-\u270D\u{1F385}\u{1F3C2}-\u{1F3C4}\u{1F3C7}\u{1F3CA}-\u{1F3CC}\u{1F442}\u{1F443}\u{1F446}-\u{1F450}\u{1F466}-\u{1F478}\u{1F47C}\u{1F481}-\u{1F483}\u{1F485}-\u{1F487}\u{1F48F}\u{1F491}\u{1F4AA}\u{1F574}\u{1F575}\u{1F57A}\u{1F590}\u{1F595}\u{1F596}\u{1F645}-\u{1F647}\u{1F64B}-\u{1F64F}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F6C0}\u{1F6CC}\u{1F90F}\u{1F918}-\u{1F91F}\u{1F926}\u{1F930}-\u{1F939}\u{1F93C}-\u{1F93E}\u{1F9B5}\u{1F9B6}\u{1F9B8}\u{1F9B9}\u{1F9BB}\u{1F9CD}-\u{1F9CF}\u{1F9D1}-\u{1F9DD}]/gu; +}; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/emoji-regex/es2015/text.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/emoji-regex/es2015/text.js new file mode 100644 index 0000000000000000000000000000000000000000..780309df58f1a20d32e6fc14e564369951516d3b --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/emoji-regex/es2015/text.js @@ -0,0 +1,6 @@ +"use strict"; + +module.exports = () => { + // https://mths.be/emoji + return /\u{1F3F4}\u{E0067}\u{E0062}(?:\u{E0065}\u{E006E}\u{E0067}|\u{E0073}\u{E0063}\u{E0074}|\u{E0077}\u{E006C}\u{E0073})\u{E007F}|\u{1F468}(?:\u{1F3FC}\u200D(?:\u{1F91D}\u200D\u{1F468}\u{1F3FB}|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FE}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FE}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FD}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FD}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FC}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F468}|[\u{1F468}\u{1F469}]\u200D(?:\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}])|\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}]|[\u{1F468}\u{1F469}]\u200D[\u{1F466}\u{1F467}]|[\u2695\u2696\u2708]\uFE0F|[\u{1F466}\u{1F467}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|(?:\u{1F3FB}\u200D[\u2695\u2696\u2708]|\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708])\uFE0F|\u{1F3FB}\u200D[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|[\u{1F3FB}-\u{1F3FF}])|(?:\u{1F9D1}\u{1F3FB}\u200D\u{1F91D}\u200D\u{1F9D1}|\u{1F469}\u{1F3FC}\u200D\u{1F91D}\u200D\u{1F469})\u{1F3FB}|\u{1F9D1}(?:\u{1F3FF}\u200D\u{1F91D}\u200D\u{1F9D1}[\u{1F3FB}-\u{1F3FF}]|\u200D\u{1F91D}\u200D\u{1F9D1})|(?:\u{1F9D1}\u{1F3FE}\u200D
\u{1F91D}\u200D\u{1F9D1}|\u{1F469}\u{1F3FF}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}-\u{1F3FE}]|(?:\u{1F9D1}\u{1F3FC}\u200D\u{1F91D}\u200D\u{1F9D1}|\u{1F469}\u{1F3FD}\u200D\u{1F91D}\u200D\u{1F469})[\u{1F3FB}\u{1F3FC}]|\u{1F469}(?:\u{1F3FE}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FD}\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FC}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FD}-\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FB}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FC}-\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FD}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FC}\u{1F3FE}\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D[\u{1F468}\u{1F469}]|[\u{1F468}\u{1F469}])|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F469}\u200D\u{1F469}\u200D(?:\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}])|(?:\u{1F9D1}\u{1F3FD}\u200D\u{1F91D}\u200D\u{1F9D1}|\u{1F469}\u{1F3FE}\u200D\u{1F91D}\u200D\u{1F469})[\u{1F3FB}-\u{1F3FD}]|\u{1F469}\u200D\u{1F466}\u200D\u{1F466}|\u{1F469}\u200D\u{1F469}\u200D[\u{1F466}\u{1F467}]|(?:\u{1F441}\uFE0F\u200D\u{1F5E8}|\u{1F469}(?:\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708]|\u{1F3FB}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|(?:[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}]\uFE0F|[\u{1F46F}\u{1F93C}\u{1F9DE}\u{1F9DF}])\u200D[\u2640\u2642]|[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}][\u{1F3FB}-\u{1F3FF}]\u200D[\u2640\u2642]|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D6}-\u{1F9DD}](?:[\u{1F3FB}-\u{1F3FF}]\u200D[\u2640\u2642]|\u200D[\u2640\u2642])|\u{1F3F4}\u200D\u2620)\uFE0F|\u{1F469}\u200D\u{1F467}\u200D[\u{1F466}\u{1F467}]|\u{1F3F3}\uFE0F\u200D\u{1F308}|\u{1F415}\u200D\u{1F9BA}|\u{1F469}\u200D\u{1F466}|\u{1F469}\u200D\u{1F467}|\u{1F1FD}\u{1F1F0}|\u{1F1F4}\u{1F1F2}|\u{1F1F6}\u{1F1E6}|[#\*0-9]\uFE0F\u20E3|\u{1F1E7}[\u{1F1E6}\u{1F1E7}\u{1F1E9}-\u{1F1EF}\u{1F1F1}-\u{1F1F4}\u{1F1F6}-\u{1F1F9}\u{1F1FB}\u{1F1FC}\u{1F1FE}\u{1F1FF}]|\u{1F1F9}[\u{1F1E6}\u{1F1E8}\u{1F1E9}\u{1F1EB}-\u{1F1ED}\u{1F1EF}-\u{1F1F4}\u{1F1F7}\u{1F1F9}\u{1F1FB}\u{1F1FC}\u{1F1FF}]|\u{1F1EA}[\u{1F1E6}\u{1F1E8}\u{1F1EA}\u{1F1EC}\u{1F1ED}\u{1F1F7}-\u{1F1FA}]|\u{1F9D1}[\u{1F3FB}-\u{1F3FF}]|\u{1F1F7}[\u{1F1EA}\u{1F1F4}\u{1F1F8}\u{1F1FA}\u{1F1FC}]|\u{1F469}[\u{1F3FB}-\u{1F3FF}]|\u{1F1F2}[\u{1F1E6}\u{1F1E8}-\u{1F1ED}\u{1F1F0}-\u{1F1FF}]|\u{1F1E6}[\u{1F1E8}-\u{1F1EC}\u{1F1EE}\u{1F1F1}\u{1F1F2}\u{1F1F4}\u{1F1F6}-\u{1F1FA}\u{1F1FC}\u{1F1FD}\u{1F1FF}]|\u{1F1F
0}[\u{1F1EA}\u{1F1EC}-\u{1F1EE}\u{1F1F2}\u{1F1F3}\u{1F1F5}\u{1F1F7}\u{1F1FC}\u{1F1FE}\u{1F1FF}]|\u{1F1ED}[\u{1F1F0}\u{1F1F2}\u{1F1F3}\u{1F1F7}\u{1F1F9}\u{1F1FA}]|\u{1F1E9}[\u{1F1EA}\u{1F1EC}\u{1F1EF}\u{1F1F0}\u{1F1F2}\u{1F1F4}\u{1F1FF}]|\u{1F1FE}[\u{1F1EA}\u{1F1F9}]|\u{1F1EC}[\u{1F1E6}\u{1F1E7}\u{1F1E9}-\u{1F1EE}\u{1F1F1}-\u{1F1F3}\u{1F1F5}-\u{1F1FA}\u{1F1FC}\u{1F1FE}]|\u{1F1F8}[\u{1F1E6}-\u{1F1EA}\u{1F1EC}-\u{1F1F4}\u{1F1F7}-\u{1F1F9}\u{1F1FB}\u{1F1FD}-\u{1F1FF}]|\u{1F1EB}[\u{1F1EE}-\u{1F1F0}\u{1F1F2}\u{1F1F4}\u{1F1F7}]|\u{1F1F5}[\u{1F1E6}\u{1F1EA}-\u{1F1ED}\u{1F1F0}-\u{1F1F3}\u{1F1F7}-\u{1F1F9}\u{1F1FC}\u{1F1FE}]|\u{1F1FB}[\u{1F1E6}\u{1F1E8}\u{1F1EA}\u{1F1EC}\u{1F1EE}\u{1F1F3}\u{1F1FA}]|\u{1F1F3}[\u{1F1E6}\u{1F1E8}\u{1F1EA}-\u{1F1EC}\u{1F1EE}\u{1F1F1}\u{1F1F4}\u{1F1F5}\u{1F1F7}\u{1F1FA}\u{1F1FF}]|\u{1F1E8}[\u{1F1E6}\u{1F1E8}\u{1F1E9}\u{1F1EB}-\u{1F1EE}\u{1F1F0}-\u{1F1F5}\u{1F1F7}\u{1F1FA}-\u{1F1FF}]|\u{1F1F1}[\u{1F1E6}-\u{1F1E8}\u{1F1EE}\u{1F1F0}\u{1F1F7}-\u{1F1FB}\u{1F1FE}]|\u{1F1FF}[\u{1F1E6}\u{1F1F2}\u{1F1FC}]|\u{1F1FC}[\u{1F1EB}\u{1F1F8}]|\u{1F1FA}[\u{1F1E6}\u{1F1EC}\u{1F1F2}\u{1F1F3}\u{1F1F8}\u{1F1FE}\u{1F1FF}]|\u{1F1EE}[\u{1F1E8}-\u{1F1EA}\u{1F1F1}-\u{1F1F4}\u{1F1F6}-\u{1F1F9}]|\u{1F1EF}[\u{1F1EA}\u{1F1F2}\u{1F1F4}\u{1F1F5}]|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D6}-\u{1F9DD}][\u{1F3FB}-\u{1F3FF}]|[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}][\u{1F3FB}-\u{1F3FF}]|[\u261D\u270A-\u270D\u{1F385}\u{1F3C2}\u{1F3C7}\u{1F442}\u{1F443}\u{1F446}-\u{1F450}\u{1F466}\u{1F467}\u{1F46B}-\u{1F46D}\u{1F470}\u{1F472}\u{1F474}-\u{1F476}\u{1F478}\u{1F47C}\u{1F483}\u{1F485}\u{1F4AA}\u{1F574}\u{1F57A}\u{1F590}\u{1F595}\u{1F596}\u{1F64C}\u{1F64F}\u{1F6C0}\u{1F6CC}\u{1F90F}\u{1F918}-\u{1F91C}\u{1F91E}\u{1F91F}\u{1F930}-\u{1F936}\u{1F9B5}\u{1F9B6}\u{1F9BB}\u{1F9D2}-\u{1F9D5}][\u{1F3FB}-\u{1F3FF}]|[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u270A\u270B\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B\u2B1C\u2B50\u2B55\u{1F004}\u{1F0CF}\u{1F18E}\u{1F191}-\u{1F19A}\u{1F1E6}-\u{1F1FF}\u{1F201}\u{1F21A}\u{1F22F}\u{1F232}-\u{1F236}\u{1F238}-\u{1F23A}\u{1F250}\u{1F251}\u{1F300}-\u{1F320}\u{1F32D}-\u{1F335}\u{1F337}-\u{1F37C}\u{1F37E}-\u{1F393}\u{1F3A0}-\u{1F3CA}\u{1F3CF}-\u{1F3D3}\u{1F3E0}-\u{1F3F0}\u{1F3F4}\u{1F3F8}-\u{1F43E}\u{1F440}\u{1F442}-\u{1F4FC}\u{1F4FF}-\u{1F53D}\u{1F54B}-\u{1F54E}\u{1F550}-\u{1F567}\u{1F57A}\u{1F595}\u{1F596}\u{1F5A4}\u{1F5FB}-\u{1F64F}\u{1F680}-\u{1F6C5}\u{1F6CC}\u{1F6D0}-\u{1F6D2}\u{1F6D5}\u{1F6EB}\u{1F6EC}\u{1F6F4}-\u{1F6FA}\u{1F7E0}-\u{1F7EB}\u{1F90D}-\u{1F93A}\u{1F93C}-\u{1F945}\u{1F947}-\u{1F971}\u{1F973}-\u{1F976}\u{1F97A}-\u{1F9A2}\u{1F9A5}-\u{1F9AA}\u{1F9AE}-\u{1F9CA}\u{1F9CD}-\u{1F9FF}\u{1FA70}-\u{1FA73}\u{1FA78}-\u{1FA7A}\u{1FA80}-\u{1FA82}\u{1FA90}-\u{1FA95}]|[#\*0-9\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u231A\u231B\u2328\u23CF\u23E9-\u23F3\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB-\u25FE\u2600-\u2604\u260E\u2611\u2614\u2615\u2618\u261D\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u2648-\u2653\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u267F\u2692-\u2697\u2699\u269B\u269C\u26A0\u26A1\u26AA\u26AB\u26B0\u26B1\u26BD\u26BE\u26C4\u26C5\u26C8\u26CE\u26CF\u26D1\
u26D3\u26D4\u26E9\u26EA\u26F0-\u26F5\u26F7-\u26FA\u26FD\u2702\u2705\u2708-\u270D\u270F\u2712\u2714\u2716\u271D\u2721\u2728\u2733\u2734\u2744\u2747\u274C\u274E\u2753-\u2755\u2757\u2763\u2764\u2795-\u2797\u27A1\u27B0\u27BF\u2934\u2935\u2B05-\u2B07\u2B1B\u2B1C\u2B50\u2B55\u3030\u303D\u3297\u3299\u{1F004}\u{1F0CF}\u{1F170}\u{1F171}\u{1F17E}\u{1F17F}\u{1F18E}\u{1F191}-\u{1F19A}\u{1F1E6}-\u{1F1FF}\u{1F201}\u{1F202}\u{1F21A}\u{1F22F}\u{1F232}-\u{1F23A}\u{1F250}\u{1F251}\u{1F300}-\u{1F321}\u{1F324}-\u{1F393}\u{1F396}\u{1F397}\u{1F399}-\u{1F39B}\u{1F39E}-\u{1F3F0}\u{1F3F3}-\u{1F3F5}\u{1F3F7}-\u{1F4FD}\u{1F4FF}-\u{1F53D}\u{1F549}-\u{1F54E}\u{1F550}-\u{1F567}\u{1F56F}\u{1F570}\u{1F573}-\u{1F57A}\u{1F587}\u{1F58A}-\u{1F58D}\u{1F590}\u{1F595}\u{1F596}\u{1F5A4}\u{1F5A5}\u{1F5A8}\u{1F5B1}\u{1F5B2}\u{1F5BC}\u{1F5C2}-\u{1F5C4}\u{1F5D1}-\u{1F5D3}\u{1F5DC}-\u{1F5DE}\u{1F5E1}\u{1F5E3}\u{1F5E8}\u{1F5EF}\u{1F5F3}\u{1F5FA}-\u{1F64F}\u{1F680}-\u{1F6C5}\u{1F6CB}-\u{1F6D2}\u{1F6D5}\u{1F6E0}-\u{1F6E5}\u{1F6E9}\u{1F6EB}\u{1F6EC}\u{1F6F0}\u{1F6F3}-\u{1F6FA}\u{1F7E0}-\u{1F7EB}\u{1F90D}-\u{1F93A}\u{1F93C}-\u{1F945}\u{1F947}-\u{1F971}\u{1F973}-\u{1F976}\u{1F97A}-\u{1F9A2}\u{1F9A5}-\u{1F9AA}\u{1F9AE}-\u{1F9CA}\u{1F9CD}-\u{1F9FF}\u{1FA70}-\u{1FA73}\u{1FA78}-\u{1FA7A}\u{1FA80}-\u{1FA82}\u{1FA90}-\u{1FA95}]\uFE0F?|[\u261D\u26F9\u270A-\u270D\u{1F385}\u{1F3C2}-\u{1F3C4}\u{1F3C7}\u{1F3CA}-\u{1F3CC}\u{1F442}\u{1F443}\u{1F446}-\u{1F450}\u{1F466}-\u{1F478}\u{1F47C}\u{1F481}-\u{1F483}\u{1F485}-\u{1F487}\u{1F48F}\u{1F491}\u{1F4AA}\u{1F574}\u{1F575}\u{1F57A}\u{1F590}\u{1F595}\u{1F596}\u{1F645}-\u{1F647}\u{1F64B}-\u{1F64F}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F6C0}\u{1F6CC}\u{1F90F}\u{1F918}-\u{1F91F}\u{1F926}\u{1F930}-\u{1F939}\u{1F93C}-\u{1F93E}\u{1F9B5}\u{1F9B6}\u{1F9B8}\u{1F9B9}\u{1F9BB}\u{1F9CD}-\u{1F9CF}\u{1F9D1}-\u{1F9DD}]/gu; +}; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/encoding/lib/encoding.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/encoding/lib/encoding.js new file mode 100644 index 0000000000000000000000000000000000000000..865c24bce2e06d5f0d5bbef4e2126f5b5ac41712 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/encoding/lib/encoding.js @@ -0,0 +1,83 @@ +'use strict'; + +var iconvLite = require('iconv-lite'); + +// Expose to the world +module.exports.convert = convert; + +/** + * Convert encoding of a UTF-8 string or a buffer + * + * @param {String|Buffer} str String to be converted + * @param {String} to Encoding to be converted to + * @param {String} [from='UTF-8'] Encoding to be converted from + * @return {Buffer} Encoded string + */ +function convert(str, to, from) { + from = checkEncoding(from || 'UTF-8'); + to = checkEncoding(to || 'UTF-8'); + str = str || ''; + + var result; + + if (from !== 'UTF-8' && typeof str === 'string') { + str = Buffer.from(str, 'binary'); + } + + if (from === to) { + if (typeof str === 'string') { + result = Buffer.from(str); + } else { + result = str; + } + } else { + try { + result = convertIconvLite(str, to, from); + } catch (E) { + console.error(E); + result = str; + } + } + + if (typeof result === 'string') { + result = Buffer.from(result, 'utf-8'); + } + + return result; +} + +/** + * Convert encoding of a string with iconv-lite + * + * @param {String|Buffer} str String to be converted + * @param {String} to Encoding to be converted to + * @param {String} [from='UTF-8'] Encoding to be converted from + * @return {Buffer} Encoded string + */ +function convertIconvLite(str, to,
from) { + if (to === 'UTF-8') { + return iconvLite.decode(str, from); + } else if (from === 'UTF-8') { + return iconvLite.encode(str, to); + } else { + return iconvLite.encode(iconvLite.decode(str, from), to); + } +} + +/** + * Converts charset name if needed + * + * @param {String} name Character set + * @return {String} Character set name + */ +function checkEncoding(name) { + return (name || '') + .toString() + .trim() + .replace(/^latin[\-_]?(\d+)$/i, 'ISO-8859-$1') + .replace(/^win(?:dows)?[\-_]?(\d+)$/i, 'WINDOWS-$1') + .replace(/^utf[\-_]?(\d+)$/i, 'UTF-$1') + .replace(/^ks_c_5601\-1987$/i, 'CP949') + .replace(/^us[\-_]?ascii$/i, 'ASCII') + .toUpperCase(); +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/exponential-backoff/dist/backoff.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/exponential-backoff/dist/backoff.js new file mode 100644 index 0000000000000000000000000000000000000000..6a1b6bd3835ac9c98bf2fce99cc8333ca265acdf --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/exponential-backoff/dist/backoff.js @@ -0,0 +1,124 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __generator = (this && this.__generator) || function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? 
op[1] : void 0, done: true }; + } +}; +Object.defineProperty(exports, "__esModule", { value: true }); +var options_1 = require("./options"); +var delay_factory_1 = require("./delay/delay.factory"); +/** + * Executes a function with exponential backoff. + * @param request the function to be executed + * @param options options to customize the backoff behavior + * @returns Promise that resolves to the result of the `request` function + */ +function backOff(request, options) { + if (options === void 0) { options = {}; } + return __awaiter(this, void 0, void 0, function () { + var sanitizedOptions, backOff; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + sanitizedOptions = options_1.getSanitizedOptions(options); + backOff = new BackOff(request, sanitizedOptions); + return [4 /*yield*/, backOff.execute()]; + case 1: return [2 /*return*/, _a.sent()]; + } + }); + }); +} +exports.backOff = backOff; +var BackOff = /** @class */ (function () { + function BackOff(request, options) { + this.request = request; + this.options = options; + this.attemptNumber = 0; + } + BackOff.prototype.execute = function () { + return __awaiter(this, void 0, void 0, function () { + var e_1, shouldRetry; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + if (!!this.attemptLimitReached) return [3 /*break*/, 7]; + _a.label = 1; + case 1: + _a.trys.push([1, 4, , 6]); + return [4 /*yield*/, this.applyDelay()]; + case 2: + _a.sent(); + return [4 /*yield*/, this.request()]; + case 3: return [2 /*return*/, _a.sent()]; + case 4: + e_1 = _a.sent(); + this.attemptNumber++; + return [4 /*yield*/, this.options.retry(e_1, this.attemptNumber)]; + case 5: + shouldRetry = _a.sent(); + if (!shouldRetry || this.attemptLimitReached) { + throw e_1; + } + return [3 /*break*/, 6]; + case 6: return [3 /*break*/, 0]; + case 7: throw new Error("Something went wrong."); + } + }); + }); + }; + Object.defineProperty(BackOff.prototype, "attemptLimitReached", { + get: function () { + return this.attemptNumber >= this.options.numOfAttempts; + }, + enumerable: true, + configurable: true + }); + BackOff.prototype.applyDelay = function () { + return __awaiter(this, void 0, void 0, function () { + var delay; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + delay = delay_factory_1.DelayFactory(this.options, this.attemptNumber); + return [4 /*yield*/, delay.apply()]; + case 1: + _a.sent(); + return [2 /*return*/]; + } + }); + }); + }; + return BackOff; +}()); +//# sourceMappingURL=backoff.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/exponential-backoff/dist/delay/always/always.delay.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/exponential-backoff/dist/delay/always/always.delay.js new file mode 100644 index 0000000000000000000000000000000000000000..40e34071e493d4af777dbdeb7484be958ff3eefa --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/exponential-backoff/dist/delay/always/always.delay.js @@ -0,0 +1,25 @@ +"use strict"; +var __extends = (this && this.__extends) || (function () { + var extendStatics = function (d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; + return extendStatics(d, b); + }; + return function (d, b) { + extendStatics(d, b); + function __() { 
this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; +})(); +Object.defineProperty(exports, "__esModule", { value: true }); +var delay_base_1 = require("../delay.base"); +var AlwaysDelay = /** @class */ (function (_super) { + __extends(AlwaysDelay, _super); + function AlwaysDelay() { + return _super !== null && _super.apply(this, arguments) || this; + } + return AlwaysDelay; +}(delay_base_1.Delay)); +exports.AlwaysDelay = AlwaysDelay; +//# sourceMappingURL=always.delay.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/exponential-backoff/dist/delay/delay.base.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/exponential-backoff/dist/delay/delay.base.js new file mode 100644 index 0000000000000000000000000000000000000000..b146c2fa62041232f933aad7071016e6cb2f37e5 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/exponential-backoff/dist/delay/delay.base.js @@ -0,0 +1,45 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +var jitter_factory_1 = require("../jitter/jitter.factory"); +var Delay = /** @class */ (function () { + function Delay(options) { + this.options = options; + this.attempt = 0; + } + Delay.prototype.apply = function () { + var _this = this; + return new Promise(function (resolve) { return setTimeout(resolve, _this.jitteredDelay); }); + }; + Delay.prototype.setAttemptNumber = function (attempt) { + this.attempt = attempt; + }; + Object.defineProperty(Delay.prototype, "jitteredDelay", { + get: function () { + var jitter = jitter_factory_1.JitterFactory(this.options); + return jitter(this.delay); + }, + enumerable: true, + configurable: true + }); + Object.defineProperty(Delay.prototype, "delay", { + get: function () { + var constant = this.options.startingDelay; + var base = this.options.timeMultiple; + var power = this.numOfDelayedAttempts; + var delay = constant * Math.pow(base, power); + return Math.min(delay, this.options.maxDelay); + }, + enumerable: true, + configurable: true + }); + Object.defineProperty(Delay.prototype, "numOfDelayedAttempts", { + get: function () { + return this.attempt; + }, + enumerable: true, + configurable: true + }); + return Delay; +}()); +exports.Delay = Delay; +//# sourceMappingURL=delay.base.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/exponential-backoff/dist/delay/delay.factory.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/exponential-backoff/dist/delay/delay.factory.js new file mode 100644 index 0000000000000000000000000000000000000000..33008dbfc51c4bfedb493836368ec93829b8e2ae --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/exponential-backoff/dist/delay/delay.factory.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +var skip_first_delay_1 = require("./skip-first/skip-first.delay"); +var always_delay_1 = require("./always/always.delay"); +function DelayFactory(options, attempt) { + var delay = initDelayClass(options); + delay.setAttemptNumber(attempt); + return delay; +} +exports.DelayFactory = DelayFactory; +function initDelayClass(options) { + if (!options.delayFirstAttempt) { + return new skip_first_delay_1.SkipFirstDelay(options); + } + return new always_delay_1.AlwaysDelay(options); +} +//# 
sourceMappingURL=delay.factory.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/exponential-backoff/dist/delay/delay.interface.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/exponential-backoff/dist/delay/delay.interface.js new file mode 100644 index 0000000000000000000000000000000000000000..6fe2a5a0e9d23246af52056f4a21f71981fb90a3 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/exponential-backoff/dist/delay/delay.interface.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=delay.interface.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/exponential-backoff/dist/delay/skip-first/skip-first.delay.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/exponential-backoff/dist/delay/skip-first/skip-first.delay.js new file mode 100644 index 0000000000000000000000000000000000000000..73f8841dadd0107b77628820395baa87b89c30af --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/exponential-backoff/dist/delay/skip-first/skip-first.delay.js @@ -0,0 +1,82 @@ +"use strict"; +var __extends = (this && this.__extends) || (function () { + var extendStatics = function (d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; + return extendStatics(d, b); + }; + return function (d, b) { + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; +})(); +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __generator = (this && this.__generator) || function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +}; +Object.defineProperty(exports, "__esModule", { value: true }); +var delay_base_1 = require("../delay.base"); +var SkipFirstDelay = /** @class */ (function (_super) { + __extends(SkipFirstDelay, _super); + function SkipFirstDelay() { + return _super !== null && _super.apply(this, arguments) || this; + } + SkipFirstDelay.prototype.apply = function () { + return __awaiter(this, void 0, void 0, function () { + return __generator(this, function (_a) { + return [2 /*return*/, this.isFirstAttempt ? true : _super.prototype.apply.call(this)]; + }); + }); + }; + Object.defineProperty(SkipFirstDelay.prototype, "isFirstAttempt", { + get: function () { + return this.attempt === 0; + }, + enumerable: true, + configurable: true + }); + Object.defineProperty(SkipFirstDelay.prototype, "numOfDelayedAttempts", { + get: function () { + return this.attempt - 1; + }, + enumerable: true, + configurable: true + }); + return SkipFirstDelay; +}(delay_base_1.Delay)); +exports.SkipFirstDelay = SkipFirstDelay; +//# sourceMappingURL=skip-first.delay.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/exponential-backoff/dist/jitter/full/full.jitter.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/exponential-backoff/dist/jitter/full/full.jitter.js new file mode 100644 index 0000000000000000000000000000000000000000..16cee36bb5fa5d1e1cc216bbf19e319168764e3e --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/exponential-backoff/dist/jitter/full/full.jitter.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +function fullJitter(delay) { + var jitteredDelay = Math.random() * delay; + return Math.round(jitteredDelay); +} +exports.fullJitter = fullJitter; +//# sourceMappingURL=full.jitter.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/exponential-backoff/dist/jitter/jitter.factory.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/exponential-backoff/dist/jitter/jitter.factory.js new file mode 100644 index 0000000000000000000000000000000000000000..8aafe45f8fbb04f3841e97b89b985d08801fcbf9 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/exponential-backoff/dist/jitter/jitter.factory.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +var full_jitter_1 = require("./full/full.jitter"); +var no_jitter_1 = require("./no/no.jitter"); 
+function JitterFactory(options) { + switch (options.jitter) { + case "full": + return full_jitter_1.fullJitter; + case "none": + default: + return no_jitter_1.noJitter; + } +} +exports.JitterFactory = JitterFactory; +//# sourceMappingURL=jitter.factory.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/exponential-backoff/dist/jitter/no/no.jitter.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/exponential-backoff/dist/jitter/no/no.jitter.js new file mode 100644 index 0000000000000000000000000000000000000000..15a40bb2a7bd63960d10e44a7e1b8a52eb90e6ab --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/exponential-backoff/dist/jitter/no/no.jitter.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +function noJitter(delay) { + return delay; +} +exports.noJitter = noJitter; +//# sourceMappingURL=no.jitter.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/exponential-backoff/dist/options.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/exponential-backoff/dist/options.js new file mode 100644 index 0000000000000000000000000000000000000000..1d2ca1705dcfd4c62ed411a61a89bf4d488b078f --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/exponential-backoff/dist/options.js @@ -0,0 +1,31 @@ +"use strict"; +var __assign = (this && this.__assign) || function () { + __assign = Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; + }; + return __assign.apply(this, arguments); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +var defaultOptions = { + delayFirstAttempt: false, + jitter: "none", + maxDelay: Infinity, + numOfAttempts: 10, + retry: function () { return true; }, + startingDelay: 100, + timeMultiple: 2 +}; +function getSanitizedOptions(options) { + var sanitized = __assign(__assign({}, defaultOptions), options); + if (sanitized.numOfAttempts < 1) { + sanitized.numOfAttempts = 1; + } + return sanitized; +} +exports.getSanitizedOptions = getSanitizedOptions; +//# sourceMappingURL=options.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/fastest-levenshtein/LICENSE.md b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/fastest-levenshtein/LICENSE.md new file mode 100644 index 0000000000000000000000000000000000000000..0e62da309fa1ebd816df676419cfd7c2f454390e --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/fastest-levenshtein/LICENSE.md @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Kasper Unn Weihe + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/fastest-levenshtein/bench.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/fastest-levenshtein/bench.js new file mode 100644 index 0000000000000000000000000000000000000000..1fd420bd737e8d53ec5322e312773a807e63a941 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/fastest-levenshtein/bench.js @@ -0,0 +1,96 @@ +"use strict"; +exports.__esModule = true; +/* eslint-disable @typescript-eslint/no-var-requires */ +/* eslint-disable no-console */ +var Benchmark = require("benchmark"); +var mod_js_1 = require("./mod.js"); +var fast_levenshtein_1 = require("fast-levenshtein"); +var fs = require("fs"); +var jslevenshtein = require("js-levenshtein"); +var leven = require("leven"); +var levenshteinEditDistance = require("levenshtein-edit-distance"); +var suite = new Benchmark.Suite(); +var randomstring = function (length) { + var result = ""; + var characters = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; + var charactersLength = characters.length; + for (var i = 0; i < length; i++) { + result += characters.charAt(Math.floor(Math.random() * charactersLength)); + } + return result; +}; +var randomstringArr = function (stringSize, arraySize) { + var i = 0; + var arr = []; + for (i = 0; i < arraySize; i++) { + arr.push(randomstring(stringSize)); + } + return arr; +}; +var arrSize = 1000; +if (!fs.existsSync("data.json")) { + var data_1 = [ + randomstringArr(4, arrSize), + randomstringArr(8, arrSize), + randomstringArr(16, arrSize), + randomstringArr(32, arrSize), + randomstringArr(64, arrSize), + randomstringArr(128, arrSize), + randomstringArr(256, arrSize), + randomstringArr(512, arrSize), + randomstringArr(1024, arrSize), + ]; + fs.writeFileSync("data.json", JSON.stringify(data_1)); +} +var data = JSON.parse(fs.readFileSync("data.json", "utf8")); +var _loop_1 = function (i) { + var datapick = data[i]; + if (process.argv[2] !== "no") { + suite + .add("".concat(i, " - js-levenshtein"), function () { + for (var j = 0; j < arrSize - 1; j += 2) { + jslevenshtein(datapick[j], datapick[j + 1]); + } + }) + .add("".concat(i, " - leven"), function () { + for (var j = 0; j < arrSize - 1; j += 2) { + leven(datapick[j], datapick[j + 1]); + } + }) + .add("".concat(i, " - fast-levenshtein"), function () { + for (var j = 0; j < arrSize - 1; j += 2) { + (0, fast_levenshtein_1.get)(datapick[j], datapick[j + 1]); + } + }) + .add("".concat(i, " - levenshtein-edit-distance"), function () { + for (var j = 0; j < arrSize - 1; j += 2) { + levenshteinEditDistance(datapick[j], datapick[j + 1]); + } + }); + } + suite.add("".concat(i, " - fastest-levenshtein"), function () { + for (var j = 0; j < arrSize - 1; j += 2) { + (0, mod_js_1.distance)(datapick[j], datapick[j + 1]); + } + }); +}; +// BENCHMARKS +for (var i = 0; i < 9; i++) { + _loop_1(i); +} +var results = new Map(); +suite + .on("cycle", function (event) { + console.log(String(event.target)); + if 
(results.has(event.target.name[0])) { + results.get(event.target.name[0]).push(event.target.hz); + } + else { + results.set(event.target.name[0], [event.target.hz]); + } +}) + .on("complete", function () { + console.log(results); +}) + // run async + .run({ async: true }); diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/fastest-levenshtein/mod.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/fastest-levenshtein/mod.js new file mode 100644 index 0000000000000000000000000000000000000000..6bc27459399e6d5a7ef1d991aa8b3764176e36c2 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/fastest-levenshtein/mod.js @@ -0,0 +1,142 @@ +"use strict"; +exports.__esModule = true; +exports.distance = exports.closest = void 0; +var peq = new Uint32Array(0x10000); +var myers_32 = function (a, b) { + var n = a.length; + var m = b.length; + var lst = 1 << (n - 1); + var pv = -1; + var mv = 0; + var sc = n; + var i = n; + while (i--) { + peq[a.charCodeAt(i)] |= 1 << i; + } + for (i = 0; i < m; i++) { + var eq = peq[b.charCodeAt(i)]; + var xv = eq | mv; + eq |= ((eq & pv) + pv) ^ pv; + mv |= ~(eq | pv); + pv &= eq; + if (mv & lst) { + sc++; + } + if (pv & lst) { + sc--; + } + mv = (mv << 1) | 1; + pv = (pv << 1) | ~(xv | mv); + mv &= xv; + } + i = n; + while (i--) { + peq[a.charCodeAt(i)] = 0; + } + return sc; +}; +var myers_x = function (b, a) { + var n = a.length; + var m = b.length; + var mhc = []; + var phc = []; + var hsize = Math.ceil(n / 32); + var vsize = Math.ceil(m / 32); + for (var i = 0; i < hsize; i++) { + phc[i] = -1; + mhc[i] = 0; + } + var j = 0; + for (; j < vsize - 1; j++) { + var mv_1 = 0; + var pv_1 = -1; + var start_1 = j * 32; + var vlen_1 = Math.min(32, m) + start_1; + for (var k = start_1; k < vlen_1; k++) { + peq[b.charCodeAt(k)] |= 1 << k; + } + for (var i = 0; i < n; i++) { + var eq = peq[a.charCodeAt(i)]; + var pb = (phc[(i / 32) | 0] >>> i) & 1; + var mb = (mhc[(i / 32) | 0] >>> i) & 1; + var xv = eq | mv_1; + var xh = ((((eq | mb) & pv_1) + pv_1) ^ pv_1) | eq | mb; + var ph = mv_1 | ~(xh | pv_1); + var mh = pv_1 & xh; + if ((ph >>> 31) ^ pb) { + phc[(i / 32) | 0] ^= 1 << i; + } + if ((mh >>> 31) ^ mb) { + mhc[(i / 32) | 0] ^= 1 << i; + } + ph = (ph << 1) | pb; + mh = (mh << 1) | mb; + pv_1 = mh | ~(xv | ph); + mv_1 = ph & xv; + } + for (var k = start_1; k < vlen_1; k++) { + peq[b.charCodeAt(k)] = 0; + } + } + var mv = 0; + var pv = -1; + var start = j * 32; + var vlen = Math.min(32, m - start) + start; + for (var k = start; k < vlen; k++) { + peq[b.charCodeAt(k)] |= 1 << k; + } + var score = m; + for (var i = 0; i < n; i++) { + var eq = peq[a.charCodeAt(i)]; + var pb = (phc[(i / 32) | 0] >>> i) & 1; + var mb = (mhc[(i / 32) | 0] >>> i) & 1; + var xv = eq | mv; + var xh = ((((eq | mb) & pv) + pv) ^ pv) | eq | mb; + var ph = mv | ~(xh | pv); + var mh = pv & xh; + score += (ph >>> (m - 1)) & 1; + score -= (mh >>> (m - 1)) & 1; + if ((ph >>> 31) ^ pb) { + phc[(i / 32) | 0] ^= 1 << i; + } + if ((mh >>> 31) ^ mb) { + mhc[(i / 32) | 0] ^= 1 << i; + } + ph = (ph << 1) | pb; + mh = (mh << 1) | mb; + pv = mh | ~(xv | ph); + mv = ph & xv; + } + for (var k = start; k < vlen; k++) { + peq[b.charCodeAt(k)] = 0; + } + return score; +}; +var distance = function (a, b) { + if (a.length < b.length) { + var tmp = b; + b = a; + a = tmp; + } + if (b.length === 0) { + return a.length; + } + if (a.length <= 32) { + return myers_32(a, b); + } + return myers_x(a, b); +}; +exports.distance = distance; +var 
closest = function (str, arr) { + var min_distance = Infinity; + var min_index = 0; + for (var i = 0; i < arr.length; i++) { + var dist = distance(str, arr[i]); + if (dist < min_distance) { + min_distance = dist; + min_index = i; + } + } + return arr[min_index]; +}; +exports.closest = closest; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/fastest-levenshtein/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/fastest-levenshtein/package.json new file mode 100644 index 0000000000000000000000000000000000000000..c395b852d5d927e0d37771a67cb4814f7ef71e72 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/fastest-levenshtein/package.json @@ -0,0 +1,72 @@ +{ + "name": "fastest-levenshtein", + "version": "1.0.16", + "description": "Fastest Levenshtein distance implementation in JS.", + "main": "mod.js", + "types": "mod.d.ts", + "module": "./esm/mod.js", + "repository": { + "type": "git", + "url": "git+https://github.com/ka-weihe/fastest-levenshtein.git" + }, + "keywords": [ + "levenshtein", + "distance", + "fast", + "fastest", + "edit", + "string", + "similarity", + "algorithm", + "match", + "comparison", + "fuzzy", + "search", + "string", + "matching", + "similar", + "node", + "difference" + ], + "author": "Kasper U. Weihe", + "license": "MIT", + "bugs": { + "url": "https://github.com/ka-weihe/fastest-levenshtein/issues" + }, + "homepage": "https://github.com/ka-weihe/fastest-levenshtein#README", + "scripts": { + "build": "tsc mod.ts --declaration", + "build:esm": "tsc --declaration -p tsconfig.esm.json", + "prepare": "npm run build && npm run build:esm", + "bench": "npm run build && tsc bench.ts && node bench.js", + "test": "npm run build && tsc test.ts && jest test.js", + "test:coverage": "npm run build && jest --coverage", + "test:coveralls": "npm run build && jest --coverage --coverageReporters=text-lcov | coveralls" + }, + "devDependencies": { + "@types/benchmark": "^1.0.33", + "@types/jest": "^26.0.15", + "@typescript-eslint/eslint-plugin": "^4.7.0", + "@typescript-eslint/parser": "^4.7.0", + "benchmark": "^2.1.4", + "coveralls": "^3.1.0", + "eslint": "^7.13.0", + "eslint-config-node": "^4.1.0", + "eslint-config-prettier": "^6.15.0", + "eslint-plugin-import": "^2.22.1", + "eslint-plugin-node": "^11.1.0", + "eslint-plugin-prettier": "^3.1.4", + "fast-levenshtein": "^2.0.6", + "jest": "^26.6.3", + "js-levenshtein": "^1.1.6", + "leven": "^3.1.0", + "levenshtein-edit-distance": "^2.0.5", + "natural": "^2.1.5", + "prettier": "^2.1.2", + "talisman": "^1.1.3", + "typescript": "^4.0.5" + }, + "engines": { + "node": ">= 4.9.1" + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/fastest-levenshtein/test.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/fastest-levenshtein/test.js new file mode 100644 index 0000000000000000000000000000000000000000..475063390a81b25a466703ba94a9df4a3e9cd2d7 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/fastest-levenshtein/test.js @@ -0,0 +1,55 @@ +var _a = require("./mod.js"), closest = _a.closest, distance = _a.distance; +var levenshtein = function (a, b) { + if (a.length === 0) { + return b.length; + } + if (b.length === 0) { + return a.length; + } + if (a.length > b.length) { + var tmp = a; + a = b; + b = tmp; + } + var row = []; + for (var i = 0; i <= a.length; i++) { + row[i] = i; + } + for (var i = 1; i <= b.length; i++) { + var prev = i; + for 
(var j = 1; j <= a.length; j++) { + var val = 0; + if (b.charAt(i - 1) === a.charAt(j - 1)) { + val = row[j - 1]; + } + else { + val = Math.min(row[j - 1] + 1, prev + 1, row[j] + 1); + } + row[j - 1] = prev; + prev = val; + } + row[a.length] = prev; + } + return row[a.length]; +}; +var makeid = function (length) { + var result = ""; + var characters = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; + var charactersLength = characters.length; + for (var i = 0; i < length; i++) { + result += characters.charAt(Math.floor(Math.random() * charactersLength)); + } + return result; +}; +for (var i = 0; i < 10000; i++) { + var rnd_num1 = (Math.random() * 1000) | 0; + var rnd_num2 = (Math.random() * 1000) | 0; + var rnd_string1 = makeid(rnd_num1); + var rnd_string2 = makeid(rnd_num2); + var actual = distance(rnd_string1, rnd_string2); + var expected = levenshtein(rnd_string1, rnd_string2); + console.log(i); + if (actual !== expected) { + console.log("fail"); + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/foreground-child/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/foreground-child/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..2d80720fe669c96dd0cfec56a90939666086e701 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/foreground-child/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2015-2023 Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/foreground-child/dist/commonjs/all-signals.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/foreground-child/dist/commonjs/all-signals.js new file mode 100644 index 0000000000000000000000000000000000000000..b5ee4638bb567f671e7e011134126e54540e52a3 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/foreground-child/dist/commonjs/all-signals.js @@ -0,0 +1,58 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.allSignals = void 0; +const node_constants_1 = __importDefault(require("node:constants")); +exports.allSignals = +// this is the full list of signals that Node will let us do anything with +Object.keys(node_constants_1.default).filter(k => k.startsWith('SIG') && + // https://github.com/tapjs/signal-exit/issues/21 + k !== 'SIGPROF' && + // no sense trying to listen for SIGKILL, it's impossible + k !== 'SIGKILL'); +// These are some obscure signals that are reported by kill -l +// on macOS, Linux, or Windows, but which don't have any mapping +// in Node.js. 
No sense trying if they're just going to throw +// every time on every platform. +// +// 'SIGEMT', +// 'SIGLOST', +// 'SIGPOLL', +// 'SIGRTMAX', +// 'SIGRTMAX-1', +// 'SIGRTMAX-10', +// 'SIGRTMAX-11', +// 'SIGRTMAX-12', +// 'SIGRTMAX-13', +// 'SIGRTMAX-14', +// 'SIGRTMAX-15', +// 'SIGRTMAX-2', +// 'SIGRTMAX-3', +// 'SIGRTMAX-4', +// 'SIGRTMAX-5', +// 'SIGRTMAX-6', +// 'SIGRTMAX-7', +// 'SIGRTMAX-8', +// 'SIGRTMAX-9', +// 'SIGRTMIN', +// 'SIGRTMIN+1', +// 'SIGRTMIN+10', +// 'SIGRTMIN+11', +// 'SIGRTMIN+12', +// 'SIGRTMIN+13', +// 'SIGRTMIN+14', +// 'SIGRTMIN+15', +// 'SIGRTMIN+16', +// 'SIGRTMIN+2', +// 'SIGRTMIN+3', +// 'SIGRTMIN+4', +// 'SIGRTMIN+5', +// 'SIGRTMIN+6', +// 'SIGRTMIN+7', +// 'SIGRTMIN+8', +// 'SIGRTMIN+9', +// 'SIGSTKFLT', +// 'SIGUNUSED', +//# sourceMappingURL=all-signals.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/foreground-child/dist/commonjs/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/foreground-child/dist/commonjs/index.js new file mode 100644 index 0000000000000000000000000000000000000000..6db65c65dca62d2e3072b3fe5b1ec07ceaf64cf7 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/foreground-child/dist/commonjs/index.js @@ -0,0 +1,123 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.normalizeFgArgs = void 0; +exports.foregroundChild = foregroundChild; +const child_process_1 = require("child_process"); +const cross_spawn_1 = __importDefault(require("cross-spawn")); +const signal_exit_1 = require("signal-exit"); +const proxy_signals_js_1 = require("./proxy-signals.js"); +const watchdog_js_1 = require("./watchdog.js"); +/* c8 ignore start */ +const spawn = process?.platform === 'win32' ? cross_spawn_1.default : child_process_1.spawn; +/** + * Normalizes the arguments passed to `foregroundChild`. + * + * Exposed for testing. 
+ * + * @internal + */ +const normalizeFgArgs = (fgArgs) => { + let [program, args = [], spawnOpts = {}, cleanup = () => { }] = fgArgs; + if (typeof args === 'function') { + cleanup = args; + spawnOpts = {}; + args = []; + } + else if (!!args && typeof args === 'object' && !Array.isArray(args)) { + if (typeof spawnOpts === 'function') + cleanup = spawnOpts; + spawnOpts = args; + args = []; + } + else if (typeof spawnOpts === 'function') { + cleanup = spawnOpts; + spawnOpts = {}; + } + if (Array.isArray(program)) { + const [pp, ...pa] = program; + program = pp; + args = pa; + } + return [program, args, { ...spawnOpts }, cleanup]; +}; +exports.normalizeFgArgs = normalizeFgArgs; +function foregroundChild(...fgArgs) { + const [program, args, spawnOpts, cleanup] = (0, exports.normalizeFgArgs)(fgArgs); + spawnOpts.stdio = [0, 1, 2]; + if (process.send) { + spawnOpts.stdio.push('ipc'); + } + const child = spawn(program, args, spawnOpts); + const childHangup = () => { + try { + child.kill('SIGHUP'); + /* c8 ignore start */ + } + catch (_) { + // SIGHUP is weird on windows + child.kill('SIGTERM'); + } + /* c8 ignore stop */ + }; + const removeOnExit = (0, signal_exit_1.onExit)(childHangup); + (0, proxy_signals_js_1.proxySignals)(child); + const dog = (0, watchdog_js_1.watchdog)(child); + let done = false; + child.on('close', async (code, signal) => { + /* c8 ignore start */ + if (done) + return; + /* c8 ignore stop */ + done = true; + const result = cleanup(code, signal, { + watchdogPid: dog.pid, + }); + const res = isPromise(result) ? await result : result; + removeOnExit(); + if (res === false) + return; + else if (typeof res === 'string') { + signal = res; + code = null; + } + else if (typeof res === 'number') { + code = res; + signal = null; + } + if (signal) { + // If there is nothing else keeping the event loop alive, + // then there's a race between a graceful exit and getting + // the signal to this process. Put this timeout here to + // make sure we're still alive to get the signal, and thus + // exit with the intended signal code. 
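As an editorial aside (not part of the vendored file), here is a minimal usage sketch of the cleanup contract that this `close` handler implements; the inline `node -e` child command is an arbitrary example:

```js
const { foregroundChild } = require('foreground-child')

// spawn a child in the "foreground"; the parent exits when it exits
foregroundChild('node', ['-e', 'process.exit(3)'], (code, signal) => {
  // runs after the child closes, before the parent exits
  // return false      -> parent stays alive and does not exit
  // return 'SIGTERM'  -> parent re-raises that signal on itself
  // return 7          -> parent exits with code 7 instead
  // return undefined  -> parent mirrors the child's code/signal
  return code === 3 ? 0 : undefined
})
```

A promise returned from the cleanup is awaited first, matching the `isPromise` check in the handler.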
+ /* istanbul ignore next */ + setTimeout(() => { }, 2000); + try { + process.kill(process.pid, signal); + /* c8 ignore start */ + } + catch (_) { + process.kill(process.pid, 'SIGTERM'); + } + /* c8 ignore stop */ + } + else { + process.exit(code || 0); + } + }); + if (process.send) { + process.removeAllListeners('message'); + child.on('message', (message, sendHandle) => { + process.send?.(message, sendHandle); + }); + process.on('message', (message, sendHandle) => { + child.send(message, sendHandle); + }); + } + return child; +} +const isPromise = (o) => !!o && typeof o === 'object' && typeof o.then === 'function'; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/foreground-child/dist/commonjs/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/foreground-child/dist/commonjs/package.json new file mode 100644 index 0000000000000000000000000000000000000000..5bbefffbabee392d1855491b84dc0a716b6a3bf2 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/foreground-child/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/foreground-child/dist/commonjs/proxy-signals.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/foreground-child/dist/commonjs/proxy-signals.js new file mode 100644 index 0000000000000000000000000000000000000000..3913e7b45bce2dcdfba034878a05b3481aa3c7af --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/foreground-child/dist/commonjs/proxy-signals.js @@ -0,0 +1,38 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.proxySignals = void 0; +const all_signals_js_1 = require("./all-signals.js"); +/** + * Starts forwarding signals to `child` through `parent`. + */ +const proxySignals = (child) => { + const listeners = new Map(); + for (const sig of all_signals_js_1.allSignals) { + const listener = () => { + // some signals can only be received, not sent + try { + child.kill(sig); + /* c8 ignore start */ + } + catch (_) { } + /* c8 ignore stop */ + }; + try { + // if it's a signal this system doesn't recognize, skip it + process.on(sig, listener); + listeners.set(sig, listener); + /* c8 ignore start */ + } + catch (_) { } + /* c8 ignore stop */ + } + const unproxy = () => { + for (const [sig, listener] of listeners) { + process.removeListener(sig, listener); + } + }; + child.on('exit', unproxy); + return unproxy; +}; +exports.proxySignals = proxySignals; +//# sourceMappingURL=proxy-signals.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/foreground-child/dist/commonjs/watchdog.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/foreground-child/dist/commonjs/watchdog.js new file mode 100644 index 0000000000000000000000000000000000000000..514e234c2a0edfb42109464891567fb7980ee6d0 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/foreground-child/dist/commonjs/watchdog.js @@ -0,0 +1,50 @@ +"use strict"; +// this spawns a child process that listens for SIGHUP when the +// parent process exits, and after 200ms, sends a SIGKILL to the +// child, in case it did not terminate. 
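Before the implementation, a usage sketch (editorial, not part of the file): the watchdog is used internally by `foregroundChild` above, and is also reachable through the `foreground-child/watchdog` subpath export declared in the package.json later in this diff. The long-running child command is an arbitrary example.

```js
const { spawn } = require('child_process')
const { watchdog } = require('foreground-child/watchdog')

// a child that would otherwise keep running if this process died
const child = spawn(process.execPath, ['-e', 'setInterval(() => {}, 1000)'], {
  stdio: 'ignore',
})

// the watchdog process SIGKILLs the child shortly after this
// process goes away; it exits on its own if the child ends first
const dog = watchdog(child)
console.error('watchdog pid:', dog.pid)
```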
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.watchdog = void 0;
+const child_process_1 = require("child_process");
+const watchdogCode = String.raw `
+const pid = parseInt(process.argv[1], 10)
+process.title = 'node (foreground-child watchdog pid=' + pid + ')'
+if (!isNaN(pid)) {
+  let barked = false
+  // keepalive
+  const interval = setInterval(() => {}, 60000)
+  const bark = () => {
+    clearInterval(interval)
+    if (barked) return
+    barked = true
+    process.removeListener('SIGHUP', bark)
+    setTimeout(() => {
+      try {
+        process.kill(pid, 'SIGKILL')
+        setTimeout(() => process.exit(), 200)
+      } catch (_) {}
+    }, 500)
+  }
+  process.on('SIGHUP', bark)
+}
+`;
+/**
+ * Pass in a ChildProcess, and this will spawn a watchdog process that
+ * will make sure it exits if the parent does, thus preventing any
+ * dangling detached zombie processes.
+ *
+ * If the child ends before the parent, then the watchdog will terminate.
+ */
+const watchdog = (child) => {
+    let dogExited = false;
+    const dog = (0, child_process_1.spawn)(process.execPath, ['-e', watchdogCode, String(child.pid)], {
+        stdio: 'ignore',
+    });
+    dog.on('exit', () => (dogExited = true));
+    child.on('exit', () => {
+        if (!dogExited)
+            dog.kill('SIGKILL');
+    });
+    return dog;
+};
+exports.watchdog = watchdog;
+//# sourceMappingURL=watchdog.js.map
\ No newline at end of file
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/foreground-child/dist/esm/all-signals.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/foreground-child/dist/esm/all-signals.js
new file mode 100644
index 0000000000000000000000000000000000000000..6b24993d22fea4d3a9a17648e552ade5ee1b9818
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/foreground-child/dist/esm/all-signals.js
@@ -0,0 +1,52 @@
+import constants from 'node:constants';
+export const allSignals =
+// this is the full list of signals that Node will let us do anything with
+Object.keys(constants).filter(k => k.startsWith('SIG') &&
+    // https://github.com/tapjs/signal-exit/issues/21
+    k !== 'SIGPROF' &&
+    // no sense trying to listen for SIGKILL, it's impossible
+    k !== 'SIGKILL');
+// These are some obscure signals that are reported by kill -l
+// on macOS, Linux, or Windows, but which don't have any mapping
+// in Node.js. No sense trying if they're just going to throw
+// every time on every platform.
+// +// 'SIGEMT', +// 'SIGLOST', +// 'SIGPOLL', +// 'SIGRTMAX', +// 'SIGRTMAX-1', +// 'SIGRTMAX-10', +// 'SIGRTMAX-11', +// 'SIGRTMAX-12', +// 'SIGRTMAX-13', +// 'SIGRTMAX-14', +// 'SIGRTMAX-15', +// 'SIGRTMAX-2', +// 'SIGRTMAX-3', +// 'SIGRTMAX-4', +// 'SIGRTMAX-5', +// 'SIGRTMAX-6', +// 'SIGRTMAX-7', +// 'SIGRTMAX-8', +// 'SIGRTMAX-9', +// 'SIGRTMIN', +// 'SIGRTMIN+1', +// 'SIGRTMIN+10', +// 'SIGRTMIN+11', +// 'SIGRTMIN+12', +// 'SIGRTMIN+13', +// 'SIGRTMIN+14', +// 'SIGRTMIN+15', +// 'SIGRTMIN+16', +// 'SIGRTMIN+2', +// 'SIGRTMIN+3', +// 'SIGRTMIN+4', +// 'SIGRTMIN+5', +// 'SIGRTMIN+6', +// 'SIGRTMIN+7', +// 'SIGRTMIN+8', +// 'SIGRTMIN+9', +// 'SIGSTKFLT', +// 'SIGUNUSED', +//# sourceMappingURL=all-signals.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/foreground-child/dist/esm/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/foreground-child/dist/esm/index.js new file mode 100644 index 0000000000000000000000000000000000000000..6266b5848cceda255c5803c49d7bea5113139f87 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/foreground-child/dist/esm/index.js @@ -0,0 +1,115 @@ +import { spawn as nodeSpawn, } from 'child_process'; +import crossSpawn from 'cross-spawn'; +import { onExit } from 'signal-exit'; +import { proxySignals } from './proxy-signals.js'; +import { watchdog } from './watchdog.js'; +/* c8 ignore start */ +const spawn = process?.platform === 'win32' ? crossSpawn : nodeSpawn; +/** + * Normalizes the arguments passed to `foregroundChild`. + * + * Exposed for testing. + * + * @internal + */ +export const normalizeFgArgs = (fgArgs) => { + let [program, args = [], spawnOpts = {}, cleanup = () => { }] = fgArgs; + if (typeof args === 'function') { + cleanup = args; + spawnOpts = {}; + args = []; + } + else if (!!args && typeof args === 'object' && !Array.isArray(args)) { + if (typeof spawnOpts === 'function') + cleanup = spawnOpts; + spawnOpts = args; + args = []; + } + else if (typeof spawnOpts === 'function') { + cleanup = spawnOpts; + spawnOpts = {}; + } + if (Array.isArray(program)) { + const [pp, ...pa] = program; + program = pp; + args = pa; + } + return [program, args, { ...spawnOpts }, cleanup]; +}; +export function foregroundChild(...fgArgs) { + const [program, args, spawnOpts, cleanup] = normalizeFgArgs(fgArgs); + spawnOpts.stdio = [0, 1, 2]; + if (process.send) { + spawnOpts.stdio.push('ipc'); + } + const child = spawn(program, args, spawnOpts); + const childHangup = () => { + try { + child.kill('SIGHUP'); + /* c8 ignore start */ + } + catch (_) { + // SIGHUP is weird on windows + child.kill('SIGTERM'); + } + /* c8 ignore stop */ + }; + const removeOnExit = onExit(childHangup); + proxySignals(child); + const dog = watchdog(child); + let done = false; + child.on('close', async (code, signal) => { + /* c8 ignore start */ + if (done) + return; + /* c8 ignore stop */ + done = true; + const result = cleanup(code, signal, { + watchdogPid: dog.pid, + }); + const res = isPromise(result) ? await result : result; + removeOnExit(); + if (res === false) + return; + else if (typeof res === 'string') { + signal = res; + code = null; + } + else if (typeof res === 'number') { + code = res; + signal = null; + } + if (signal) { + // If there is nothing else keeping the event loop alive, + // then there's a race between a graceful exit and getting + // the signal to this process. 
Put this timeout here to + // make sure we're still alive to get the signal, and thus + // exit with the intended signal code. + /* istanbul ignore next */ + setTimeout(() => { }, 2000); + try { + process.kill(process.pid, signal); + /* c8 ignore start */ + } + catch (_) { + process.kill(process.pid, 'SIGTERM'); + } + /* c8 ignore stop */ + } + else { + process.exit(code || 0); + } + }); + if (process.send) { + process.removeAllListeners('message'); + child.on('message', (message, sendHandle) => { + process.send?.(message, sendHandle); + }); + process.on('message', (message, sendHandle) => { + child.send(message, sendHandle); + }); + } + return child; +} +const isPromise = (o) => !!o && typeof o === 'object' && typeof o.then === 'function'; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/foreground-child/dist/esm/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/foreground-child/dist/esm/package.json new file mode 100644 index 0000000000000000000000000000000000000000..3dbc1ca591c0557e35b6004aeba250e6a70b56e3 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/foreground-child/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/foreground-child/dist/esm/proxy-signals.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/foreground-child/dist/esm/proxy-signals.js new file mode 100644 index 0000000000000000000000000000000000000000..8e1efe3e301d66236a6b5e6c795a9f43872a669f --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/foreground-child/dist/esm/proxy-signals.js @@ -0,0 +1,34 @@ +import { allSignals } from './all-signals.js'; +/** + * Starts forwarding signals to `child` through `parent`. + */ +export const proxySignals = (child) => { + const listeners = new Map(); + for (const sig of allSignals) { + const listener = () => { + // some signals can only be received, not sent + try { + child.kill(sig); + /* c8 ignore start */ + } + catch (_) { } + /* c8 ignore stop */ + }; + try { + // if it's a signal this system doesn't recognize, skip it + process.on(sig, listener); + listeners.set(sig, listener); + /* c8 ignore start */ + } + catch (_) { } + /* c8 ignore stop */ + } + const unproxy = () => { + for (const [sig, listener] of listeners) { + process.removeListener(sig, listener); + } + }; + child.on('exit', unproxy); + return unproxy; +}; +//# sourceMappingURL=proxy-signals.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/foreground-child/dist/esm/watchdog.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/foreground-child/dist/esm/watchdog.js new file mode 100644 index 0000000000000000000000000000000000000000..7aa184ede4f5a0e95243adcf24e037bf86a58f78 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/foreground-child/dist/esm/watchdog.js @@ -0,0 +1,46 @@ +// this spawns a child process that listens for SIGHUP when the +// parent process exits, and after 200ms, sends a SIGKILL to the +// child, in case it did not terminate. 
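As with the CommonJS build, a brief usage sketch for the proxy-signals module above (editorial; the `foreground-child/proxy-signals` specifier comes from the package.json "exports" map shown later in this diff):

```js
import { spawn } from 'child_process'
import { proxySignals } from 'foreground-child/proxy-signals'

const child = spawn(process.execPath, ['-e', 'setInterval(() => {}, 1000)'], {
  stdio: 'inherit',
})

// forward every catchable signal (SIGINT, SIGTERM, ...) to the child
const unproxy = proxySignals(child)

// forwarding is removed automatically when the child exits,
// or it can be torn down manually:
setTimeout(unproxy, 10000)
```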
+import { spawn } from 'child_process';
+const watchdogCode = String.raw `
+const pid = parseInt(process.argv[1], 10)
+process.title = 'node (foreground-child watchdog pid=' + pid + ')'
+if (!isNaN(pid)) {
+  let barked = false
+  // keepalive
+  const interval = setInterval(() => {}, 60000)
+  const bark = () => {
+    clearInterval(interval)
+    if (barked) return
+    barked = true
+    process.removeListener('SIGHUP', bark)
+    setTimeout(() => {
+      try {
+        process.kill(pid, 'SIGKILL')
+        setTimeout(() => process.exit(), 200)
+      } catch (_) {}
+    }, 500)
+  }
+  process.on('SIGHUP', bark)
+}
+`;
+/**
+ * Pass in a ChildProcess, and this will spawn a watchdog process that
+ * will make sure it exits if the parent does, thus preventing any
+ * dangling detached zombie processes.
+ *
+ * If the child ends before the parent, then the watchdog will terminate.
+ */
+export const watchdog = (child) => {
+    let dogExited = false;
+    const dog = spawn(process.execPath, ['-e', watchdogCode, String(child.pid)], {
+        stdio: 'ignore',
+    });
+    dog.on('exit', () => (dogExited = true));
+    child.on('exit', () => {
+        if (!dogExited)
+            dog.kill('SIGKILL');
+    });
+    return dog;
+};
+//# sourceMappingURL=watchdog.js.map
\ No newline at end of file
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/foreground-child/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/foreground-child/package.json
new file mode 100644
index 0000000000000000000000000000000000000000..75f5b9969b282b0a269b976afd251ded58be17ed
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/foreground-child/package.json
@@ -0,0 +1,106 @@
+{
+  "name": "foreground-child",
+  "version": "3.3.1",
+  "description": "Run a child as if it's the foreground process. Give it stdio. Exit when it exits.",
+  "main": "./dist/commonjs/index.js",
+  "types": "./dist/commonjs/index.d.ts",
+  "exports": {
+    "./watchdog": {
+      "import": {
+        "types": "./dist/esm/watchdog.d.ts",
+        "default": "./dist/esm/watchdog.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/watchdog.d.ts",
+        "default": "./dist/commonjs/watchdog.js"
+      }
+    },
+    "./proxy-signals": {
+      "import": {
+        "types": "./dist/esm/proxy-signals.d.ts",
+        "default": "./dist/esm/proxy-signals.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/proxy-signals.d.ts",
+        "default": "./dist/commonjs/proxy-signals.js"
+      }
+    },
+    "./package.json": "./package.json",
+    ".": {
+      "import": {
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.js"
+      }
+    }
+  },
+  "files": [
+    "dist"
+  ],
+  "engines": {
+    "node": ">=14"
+  },
+  "dependencies": {
+    "cross-spawn": "^7.0.6",
+    "signal-exit": "^4.0.1"
+  },
+  "scripts": {
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "prepare": "tshy",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "tap",
+    "snap": "tap",
+    "format": "prettier --write .
--log-level warn", + "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts" + }, + "prettier": { + "experimentalTernaries": true, + "semi": false, + "printWidth": 75, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "tap": { + "typecheck": true + }, + "repository": { + "type": "git", + "url": "git+https://github.com/tapjs/foreground-child.git" + }, + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC", + "devDependencies": { + "@types/cross-spawn": "^6.0.2", + "@types/node": "^18.15.11", + "@types/tap": "^15.0.8", + "prettier": "^3.3.2", + "tap": "^21.1.0", + "tshy": "^3.0.2", + "typedoc": "^0.24.2", + "typescript": "^5.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "tshy": { + "exports": { + "./watchdog": "./src/watchdog.ts", + "./proxy-signals": "./src/proxy-signals.ts", + "./package.json": "./package.json", + ".": "./src/index.ts" + } + }, + "type": "module", + "module": "./dist/esm/index.js" +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/fs-minipass/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/fs-minipass/lib/index.js new file mode 100644 index 0000000000000000000000000000000000000000..3b84ff661448a023ea7902ea5dd6d63eb4557010 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/fs-minipass/lib/index.js @@ -0,0 +1,443 @@ +'use strict' +const { Minipass } = require('minipass') +const EE = require('events').EventEmitter +const fs = require('fs') + +const writev = fs.writev + +const _autoClose = Symbol('_autoClose') +const _close = Symbol('_close') +const _ended = Symbol('_ended') +const _fd = Symbol('_fd') +const _finished = Symbol('_finished') +const _flags = Symbol('_flags') +const _flush = Symbol('_flush') +const _handleChunk = Symbol('_handleChunk') +const _makeBuf = Symbol('_makeBuf') +const _mode = Symbol('_mode') +const _needDrain = Symbol('_needDrain') +const _onerror = Symbol('_onerror') +const _onopen = Symbol('_onopen') +const _onread = Symbol('_onread') +const _onwrite = Symbol('_onwrite') +const _open = Symbol('_open') +const _path = Symbol('_path') +const _pos = Symbol('_pos') +const _queue = Symbol('_queue') +const _read = Symbol('_read') +const _readSize = Symbol('_readSize') +const _reading = Symbol('_reading') +const _remain = Symbol('_remain') +const _size = Symbol('_size') +const _write = Symbol('_write') +const _writing = Symbol('_writing') +const _defaultFlag = Symbol('_defaultFlag') +const _errored = Symbol('_errored') + +class ReadStream extends Minipass { + constructor (path, opt) { + opt = opt || {} + super(opt) + + this.readable = true + this.writable = false + + if (typeof path !== 'string') { + throw new TypeError('path must be a string') + } + + this[_errored] = false + this[_fd] = typeof opt.fd === 'number' ? opt.fd : null + this[_path] = path + this[_readSize] = opt.readSize || 16 * 1024 * 1024 + this[_reading] = false + this[_size] = typeof opt.size === 'number' ? opt.size : Infinity + this[_remain] = this[_size] + this[_autoClose] = typeof opt.autoClose === 'boolean' ? 
+ opt.autoClose : true + + if (typeof this[_fd] === 'number') { + this[_read]() + } else { + this[_open]() + } + } + + get fd () { + return this[_fd] + } + + get path () { + return this[_path] + } + + write () { + throw new TypeError('this is a readable stream') + } + + end () { + throw new TypeError('this is a readable stream') + } + + [_open] () { + fs.open(this[_path], 'r', (er, fd) => this[_onopen](er, fd)) + } + + [_onopen] (er, fd) { + if (er) { + this[_onerror](er) + } else { + this[_fd] = fd + this.emit('open', fd) + this[_read]() + } + } + + [_makeBuf] () { + return Buffer.allocUnsafe(Math.min(this[_readSize], this[_remain])) + } + + [_read] () { + if (!this[_reading]) { + this[_reading] = true + const buf = this[_makeBuf]() + /* istanbul ignore if */ + if (buf.length === 0) { + return process.nextTick(() => this[_onread](null, 0, buf)) + } + fs.read(this[_fd], buf, 0, buf.length, null, (er, br, b) => + this[_onread](er, br, b)) + } + } + + [_onread] (er, br, buf) { + this[_reading] = false + if (er) { + this[_onerror](er) + } else if (this[_handleChunk](br, buf)) { + this[_read]() + } + } + + [_close] () { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd] + this[_fd] = null + fs.close(fd, er => er ? this.emit('error', er) : this.emit('close')) + } + } + + [_onerror] (er) { + this[_reading] = true + this[_close]() + this.emit('error', er) + } + + [_handleChunk] (br, buf) { + let ret = false + // no effect if infinite + this[_remain] -= br + if (br > 0) { + ret = super.write(br < buf.length ? buf.slice(0, br) : buf) + } + + if (br === 0 || this[_remain] <= 0) { + ret = false + this[_close]() + super.end() + } + + return ret + } + + emit (ev, data) { + switch (ev) { + case 'prefinish': + case 'finish': + break + + case 'drain': + if (typeof this[_fd] === 'number') { + this[_read]() + } + break + + case 'error': + if (this[_errored]) { + return + } + this[_errored] = true + return super.emit(ev, data) + + default: + return super.emit(ev, data) + } + } +} + +class ReadStreamSync extends ReadStream { + [_open] () { + let threw = true + try { + this[_onopen](null, fs.openSync(this[_path], 'r')) + threw = false + } finally { + if (threw) { + this[_close]() + } + } + } + + [_read] () { + let threw = true + try { + if (!this[_reading]) { + this[_reading] = true + do { + const buf = this[_makeBuf]() + /* istanbul ignore next */ + const br = buf.length === 0 ? 0 + : fs.readSync(this[_fd], buf, 0, buf.length, null) + if (!this[_handleChunk](br, buf)) { + break + } + } while (true) + this[_reading] = false + } + threw = false + } finally { + if (threw) { + this[_close]() + } + } + } + + [_close] () { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd] + this[_fd] = null + fs.closeSync(fd) + this.emit('close') + } + } +} + +class WriteStream extends EE { + constructor (path, opt) { + opt = opt || {} + super(opt) + this.readable = false + this.writable = true + this[_errored] = false + this[_writing] = false + this[_ended] = false + this[_needDrain] = false + this[_queue] = [] + this[_path] = path + this[_fd] = typeof opt.fd === 'number' ? opt.fd : null + this[_mode] = opt.mode === undefined ? 0o666 : opt.mode + this[_pos] = typeof opt.start === 'number' ? opt.start : null + this[_autoClose] = typeof opt.autoClose === 'boolean' ? + opt.autoClose : true + + // truncating makes no sense when writing into the middle + const defaultFlag = this[_pos] !== null ? 
'r+' : 'w' + this[_defaultFlag] = opt.flags === undefined + this[_flags] = this[_defaultFlag] ? defaultFlag : opt.flags + + if (this[_fd] === null) { + this[_open]() + } + } + + emit (ev, data) { + if (ev === 'error') { + if (this[_errored]) { + return + } + this[_errored] = true + } + return super.emit(ev, data) + } + + get fd () { + return this[_fd] + } + + get path () { + return this[_path] + } + + [_onerror] (er) { + this[_close]() + this[_writing] = true + this.emit('error', er) + } + + [_open] () { + fs.open(this[_path], this[_flags], this[_mode], + (er, fd) => this[_onopen](er, fd)) + } + + [_onopen] (er, fd) { + if (this[_defaultFlag] && + this[_flags] === 'r+' && + er && er.code === 'ENOENT') { + this[_flags] = 'w' + this[_open]() + } else if (er) { + this[_onerror](er) + } else { + this[_fd] = fd + this.emit('open', fd) + if (!this[_writing]) { + this[_flush]() + } + } + } + + end (buf, enc) { + if (buf) { + this.write(buf, enc) + } + + this[_ended] = true + + // synthetic after-write logic, where drain/finish live + if (!this[_writing] && !this[_queue].length && + typeof this[_fd] === 'number') { + this[_onwrite](null, 0) + } + return this + } + + write (buf, enc) { + if (typeof buf === 'string') { + buf = Buffer.from(buf, enc) + } + + if (this[_ended]) { + this.emit('error', new Error('write() after end()')) + return false + } + + if (this[_fd] === null || this[_writing] || this[_queue].length) { + this[_queue].push(buf) + this[_needDrain] = true + return false + } + + this[_writing] = true + this[_write](buf) + return true + } + + [_write] (buf) { + fs.write(this[_fd], buf, 0, buf.length, this[_pos], (er, bw) => + this[_onwrite](er, bw)) + } + + [_onwrite] (er, bw) { + if (er) { + this[_onerror](er) + } else { + if (this[_pos] !== null) { + this[_pos] += bw + } + if (this[_queue].length) { + this[_flush]() + } else { + this[_writing] = false + + if (this[_ended] && !this[_finished]) { + this[_finished] = true + this[_close]() + this.emit('finish') + } else if (this[_needDrain]) { + this[_needDrain] = false + this.emit('drain') + } + } + } + } + + [_flush] () { + if (this[_queue].length === 0) { + if (this[_ended]) { + this[_onwrite](null, 0) + } + } else if (this[_queue].length === 1) { + this[_write](this[_queue].pop()) + } else { + const iovec = this[_queue] + this[_queue] = [] + writev(this[_fd], iovec, this[_pos], + (er, bw) => this[_onwrite](er, bw)) + } + } + + [_close] () { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd] + this[_fd] = null + fs.close(fd, er => er ? this.emit('error', er) : this.emit('close')) + } + } +} + +class WriteStreamSync extends WriteStream { + [_open] () { + let fd + // only wrap in a try{} block if we know we'll retry, to avoid + // the rethrow obscuring the error's source frame in most cases. 
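Stepping back from the internals for a moment, a usage sketch for the four exports defined at the end of this file (editorial, not part of the vendored file; the paths are arbitrary POSIX examples):

```js
const { ReadStream, WriteStream } = require('fs-minipass')

// ReadStream is a Minipass stream, so it can be piped or collected;
// WriteStream is a bare EventEmitter with write()/end()
const src = new ReadStream('/tmp/in.txt', { readSize: 1024 })
const out = new WriteStream('/tmp/out.txt') // default flags 'w', mode 0o666

src.pipe(out)
out.on('close', () => console.error('copied'))

// a numeric `start` switches the default open flag from 'w' to 'r+',
// since truncating makes no sense when writing into the middle
const patch = new WriteStream('/tmp/out.txt', { start: 0 })
patch.end('X')
```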
+    if (this[_defaultFlag] && this[_flags] === 'r+') {
+      try {
+        fd = fs.openSync(this[_path], this[_flags], this[_mode])
+      } catch (er) {
+        if (er.code === 'ENOENT') {
+          this[_flags] = 'w'
+          return this[_open]()
+        } else {
+          throw er
+        }
+      }
+    } else {
+      fd = fs.openSync(this[_path], this[_flags], this[_mode])
+    }
+
+    this[_onopen](null, fd)
+  }
+
+  [_close] () {
+    if (this[_autoClose] && typeof this[_fd] === 'number') {
+      const fd = this[_fd]
+      this[_fd] = null
+      fs.closeSync(fd)
+      this.emit('close')
+    }
+  }
+
+  [_write] (buf) {
+    // throw the original, but try to close if it fails
+    let threw = true
+    try {
+      this[_onwrite](null,
+        fs.writeSync(this[_fd], buf, 0, buf.length, this[_pos]))
+      threw = false
+    } finally {
+      if (threw) {
+        try {
+          this[_close]()
+        } catch {
+          // ok error
+        }
+      }
+    }
+  }
+}
+
+exports.ReadStream = ReadStream
+exports.ReadStreamSync = ReadStreamSync
+
+exports.WriteStream = WriteStream
+exports.WriteStreamSync = WriteStreamSync
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/glob.d.ts b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/glob.d.ts
new file mode 100644
index 0000000000000000000000000000000000000000..314ad1f5ccd3ccdadff2f3cc5bba5136db5a0d03
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/glob.d.ts
@@ -0,0 +1,388 @@
+import { Minimatch } from 'minimatch';
+import { Minipass } from 'minipass';
+import { FSOption, Path, PathScurry } from 'path-scurry';
+import { IgnoreLike } from './ignore.js';
+import { Pattern } from './pattern.js';
+export type MatchSet = Minimatch['set'];
+export type GlobParts = Exclude<Minimatch['globParts'], undefined>;
+/**
+ * A `GlobOptions` object may be provided to any of the exported methods, and
+ * must be provided to the `Glob` constructor.
+ *
+ * All options are optional, boolean, and false by default, unless otherwise
+ * noted.
+ *
+ * All resolved options are added to the Glob object as properties.
+ *
+ * If you are running many `glob` operations, you can pass a Glob object as the
+ * `options` argument to a subsequent operation to share the previously loaded
+ * cache.
+ */
+export interface GlobOptions {
+    /**
+     * Set to `true` to always receive absolute paths for
+     * matched files. Set to `false` to always return relative paths.
+     *
+     * When this option is not set, absolute paths are returned for patterns
+     * that are absolute, and otherwise paths are returned that are relative
+     * to the `cwd` setting.
+     *
+     * This does _not_ make an extra system call to get
+     * the realpath, it only does string path resolution.
+     *
+     * Conflicts with {@link withFileTypes}
+     */
+    absolute?: boolean;
+    /**
+     * Set to false to enable {@link windowsPathsNoEscape}
+     *
+     * @deprecated
+     */
+    allowWindowsEscape?: boolean;
+    /**
+     * The current working directory in which to search. Defaults to
+     * `process.cwd()`.
+     *
+     * May be either a string path or a `file://` URL object or string.
+     */
+    cwd?: string | URL;
+    /**
+     * Include `.dot` files in normal matches and `globstar`
+     * matches. Note that an explicit dot in a portion of the pattern
+     * will always match dot files.
+     */
+    dot?: boolean;
+    /**
+     * Prepend all relative path strings with `./` (or `.\` on Windows).
+     *
+     * Without this option, returned relative paths are "bare", so instead of
+     * returning `'./foo/bar'`, they are returned as `'foo/bar'`.
+     *
+     * Relative patterns starting with `'../'` are not prepended with `./`, even
+     * if this option is set.
+ */ + dotRelative?: boolean; + /** + * Follow symlinked directories when expanding `**` + * patterns. This can result in a lot of duplicate references in + * the presence of cyclic links, and make performance quite bad. + * + * By default, a `**` in a pattern will follow 1 symbolic link if + * it is not the first item in the pattern, or none if it is the + * first item in the pattern, following the same behavior as Bash. + */ + follow?: boolean; + /** + * string or string[], or an object with `ignored` and `childrenIgnored` + * methods. + * + * If a string or string[] is provided, then this is treated as a glob + * pattern or array of glob patterns to exclude from matches. To ignore all + * children within a directory, as well as the entry itself, append `'/**'` + * to the ignore pattern. + * + * **Note** `ignore` patterns are _always_ in `dot:true` mode, regardless of + * any other settings. + * + * If an object is provided that has `ignored(path)` and/or + * `childrenIgnored(path)` methods, then these methods will be called to + * determine whether any Path is a match or if its children should be + * traversed, respectively. + */ + ignore?: string | string[] | IgnoreLike; + /** + * Treat brace expansion like `{a,b}` as a "magic" pattern. Has no + * effect if {@link nobrace} is set. + * + * Only has effect on the {@link hasMagic} function. + */ + magicalBraces?: boolean; + /** + * Add a `/` character to directory matches. Note that this requires + * additional stat calls in some cases. + */ + mark?: boolean; + /** + * Perform a basename-only match if the pattern does not contain any slash + * characters. That is, `*.js` would be treated as equivalent to + * `**\/*.js`, matching all js files in all directories. + */ + matchBase?: boolean; + /** + * Limit the directory traversal to a given depth below the cwd. + * Note that this does NOT prevent traversal to sibling folders, + * root patterns, and so on. It only limits the maximum folder depth + * that the walk will descend, relative to the cwd. + */ + maxDepth?: number; + /** + * Do not expand `{a,b}` and `{1..3}` brace sets. + */ + nobrace?: boolean; + /** + * Perform a case-insensitive match. This defaults to `true` on macOS and + * Windows systems, and `false` on all others. + * + * **Note** `nocase` should only be explicitly set when it is + * known that the filesystem's case sensitivity differs from the + * platform default. If set `true` on case-sensitive file + * systems, or `false` on case-insensitive file systems, then the + * walk may return more or less results than expected. + */ + nocase?: boolean; + /** + * Do not match directories, only files. (Note: to match + * _only_ directories, put a `/` at the end of the pattern.) + */ + nodir?: boolean; + /** + * Do not match "extglob" patterns such as `+(a|b)`. + */ + noext?: boolean; + /** + * Do not match `**` against multiple filenames. (Ie, treat it as a normal + * `*` instead.) + * + * Conflicts with {@link matchBase} + */ + noglobstar?: boolean; + /** + * Defaults to value of `process.platform` if available, or `'linux'` if + * not. Setting `platform:'win32'` on non-Windows systems may cause strange + * behavior. + */ + platform?: NodeJS.Platform; + /** + * Set to true to call `fs.realpath` on all of the + * results. In the case of an entry that cannot be resolved, the + * entry is omitted. This incurs a slight performance penalty, of + * course, because of the added system calls. 
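A short usage sketch tying several of the options documented above together (editorial, not part of the vendored file; `globSync` is the synchronous entry point exported by this package, and the patterns are arbitrary examples):

```js
import { globSync } from 'glob'

const files = globSync('**/*.js', {
  dot: true,                 // include dotfiles in * and ** matches
  follow: false,             // do not expand ** through symlinks
  ignore: 'node_modules/**', // ignore patterns are always dot:true
  maxDepth: 4,               // cap the walk at 4 levels below the cwd
  nodir: true,               // match files only
})
console.error(files.length, 'matches')
```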
+ */ + realpath?: boolean; + /** + * + * A string path resolved against the `cwd` option, which + * is used as the starting point for absolute patterns that start + * with `/`, (but not drive letters or UNC paths on Windows). + * + * Note that this _doesn't_ necessarily limit the walk to the + * `root` directory, and doesn't affect the cwd starting point for + * non-absolute patterns. A pattern containing `..` will still be + * able to traverse out of the root directory, if it is not an + * actual root directory on the filesystem, and any non-absolute + * patterns will be matched in the `cwd`. For example, the + * pattern `/../*` with `{root:'/some/path'}` will return all + * files in `/some`, not all files in `/some/path`. The pattern + * `*` with `{root:'/some/path'}` will return all the entries in + * the cwd, not the entries in `/some/path`. + * + * To start absolute and non-absolute patterns in the same + * path, you can use `{root:''}`. However, be aware that on + * Windows systems, a pattern like `x:/*` or `//host/share/*` will + * _always_ start in the `x:/` or `//host/share` directory, + * regardless of the `root` setting. + */ + root?: string; + /** + * A [PathScurry](http://npm.im/path-scurry) object used + * to traverse the file system. If the `nocase` option is set + * explicitly, then any provided `scurry` object must match this + * setting. + */ + scurry?: PathScurry; + /** + * Call `lstat()` on all entries, whether required or not to determine + * if it's a valid match. When used with {@link withFileTypes}, this means + * that matches will include data such as modified time, permissions, and + * so on. Note that this will incur a performance cost due to the added + * system calls. + */ + stat?: boolean; + /** + * An AbortSignal which will cancel the Glob walk when + * triggered. + */ + signal?: AbortSignal; + /** + * Use `\\` as a path separator _only_, and + * _never_ as an escape character. If set, all `\\` characters are + * replaced with `/` in the pattern. + * + * Note that this makes it **impossible** to match against paths + * containing literal glob pattern characters, but allows matching + * with patterns constructed using `path.join()` and + * `path.resolve()` on Windows platforms, mimicking the (buggy!) + * behavior of Glob v7 and before on Windows. Please use with + * caution, and be mindful of [the caveat below about Windows + * paths](#windows). (For legacy reasons, this is also set if + * `allowWindowsEscape` is set to the exact value `false`.) + */ + windowsPathsNoEscape?: boolean; + /** + * Return [PathScurry](http://npm.im/path-scurry) + * `Path` objects instead of strings. These are similar to a + * NodeJS `Dirent` object, but with additional methods and + * properties. + * + * Conflicts with {@link absolute} + */ + withFileTypes?: boolean; + /** + * An fs implementation to override some or all of the defaults. See + * http://npm.im/path-scurry for details about what can be overridden. + */ + fs?: FSOption; + /** + * Just passed along to Minimatch. Note that this makes all pattern + * matching operations slower and *extremely* noisy. + */ + debug?: boolean; + /** + * Return `/` delimited paths, even on Windows. + * + * On posix systems, this has no effect. But, on Windows, it means that + * paths will be `/` delimited, and absolute paths will be their full + * resolved UNC forms, eg instead of `'C:\\foo\\bar'`, it would return + * `'//?/C:/foo/bar'` + */ + posix?: boolean; + /** + * Do not match any children of any matches. 
For example, the pattern
+ * `**\/foo` would match `a/foo`, but not `a/foo/b/foo` in this mode.
+ *
+ * This is especially useful for cases like "find all `node_modules`
+ * folders, but not the ones in `node_modules`".
+ *
+ * In order to support this, the `Ignore` implementation must support an
+ * `add(pattern: string)` method. If using the default `Ignore` class, then
+ * this is fine, but if this is set to `false`, and a custom `Ignore` is
+ * provided that does not have an `add()` method, then it will throw an
+ * error.
+ *
+ * **Caveat** It *only* ignores matches that would be a descendant of a
+ * previous match, and only if that descendant is matched *after* the
+ * ancestor is encountered. Since the file system walk happens in
+ * indeterminate order, it's possible that a match will already be added
+ * before its ancestor, if multiple or braced patterns are used.
+ *
+ * For example:
+ *
+ * ```ts
+ * const results = await glob([
+ *   // likely to match first, since it's just a stat
+ *   'a/b/c/d/e/f',
+ *
+ *   // this pattern is more complicated! It must do various readdir()
+ *   // calls and test the results against a regular expression, and that
+ *   // is certainly going to take a little bit longer.
+ *   //
+ *   // So, later on, it encounters a match at 'a/b/c/d/e', but it's too
+ *   // late to ignore a/b/c/d/e/f, because it's already been emitted.
+ *   'a/[bdf]/?/[a-z]/*',
+ * ], { includeChildMatches: false })
+ * ```
+ *
+ * It's best to only set this to `false` if you can be reasonably sure that
+ * no components of the pattern will potentially match one another's file
+ * system descendants, or if the occasional included child entry will not
+ * cause problems.
+ *
+ * @default true
+ */
+    includeChildMatches?: boolean;
+}
+export type GlobOptionsWithFileTypesTrue = GlobOptions & {
+    withFileTypes: true;
+    absolute?: undefined;
+    mark?: undefined;
+    posix?: undefined;
+};
+export type GlobOptionsWithFileTypesFalse = GlobOptions & {
+    withFileTypes?: false;
+};
+export type GlobOptionsWithFileTypesUnset = GlobOptions & {
+    withFileTypes?: undefined;
+};
+export type Result<Opts> = Opts extends GlobOptionsWithFileTypesTrue ? Path : Opts extends GlobOptionsWithFileTypesFalse ? string : Opts extends GlobOptionsWithFileTypesUnset ? string : string | Path;
+export type Results<Opts> = Result<Opts>[];
+export type FileTypes<Opts> = Opts extends GlobOptionsWithFileTypesTrue ? true : Opts extends GlobOptionsWithFileTypesFalse ? false : Opts extends GlobOptionsWithFileTypesUnset ? false : boolean;
+/**
+ * An object that can perform glob pattern traversals.
+ */
+export declare class Glob<Opts extends GlobOptions> implements GlobOptions {
+    absolute?: boolean;
+    cwd: string;
+    root?: string;
+    dot: boolean;
+    dotRelative: boolean;
+    follow: boolean;
+    ignore?: string | string[] | IgnoreLike;
+    magicalBraces: boolean;
+    mark?: boolean;
+    matchBase: boolean;
+    maxDepth: number;
+    nobrace: boolean;
+    nocase: boolean;
+    nodir: boolean;
+    noext: boolean;
+    noglobstar: boolean;
+    pattern: string[];
+    platform: NodeJS.Platform;
+    realpath: boolean;
+    scurry: PathScurry;
+    stat: boolean;
+    signal?: AbortSignal;
+    windowsPathsNoEscape: boolean;
+    withFileTypes: FileTypes<Opts>;
+    includeChildMatches: boolean;
+    /**
+     * The options provided to the constructor.
+     */
+    opts: Opts;
+    /**
+     * An array of parsed immutable {@link Pattern} objects.
+     */
+    patterns: Pattern[];
+    /**
+     * All options are stored as properties on the `Glob` object.
+     *
+     * See {@link GlobOptions} for full options descriptions.
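Given the conditional types above, `withFileTypes: true` selects the `Path` branch of `Result`, so matches come back as path-scurry `Path` entries rather than strings. A sketch (editorial; assumes path-scurry's `fullpath()` and `isDirectory()` methods, and an arbitrary pattern):

```js
import { globSync } from 'glob'

// Result<typeof opts> resolves to Path here, not string
const entries = globSync('src/**', { withFileTypes: true })
for (const entry of entries) {
  console.error(entry.fullpath(), entry.isDirectory() ? 'dir' : 'file')
}
// note: combining withFileTypes:true with `absolute` is rejected,
// per the GlobOptionsWithFileTypesTrue shape above
```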
+ * + * Note that a previous `Glob` object can be passed as the + * `GlobOptions` to another `Glob` instantiation to re-use settings + * and caches with a new pattern. + * + * Traversal functions can be called multiple times to run the walk + * again. + */ + constructor(pattern: string | string[], opts: Opts); + /** + * Returns a Promise that resolves to the results array. + */ + walk(): Promise>; + /** + * synchronous {@link Glob.walk} + */ + walkSync(): Results; + /** + * Stream results asynchronously. + */ + stream(): Minipass, Result>; + /** + * Stream results synchronously. + */ + streamSync(): Minipass, Result>; + /** + * Default sync iteration function. Returns a Generator that + * iterates over the results. + */ + iterateSync(): Generator, void, void>; + [Symbol.iterator](): Generator, void, void>; + /** + * Default async iteration function. Returns an AsyncGenerator that + * iterates over the results. + */ + iterate(): AsyncGenerator, void, void>; + [Symbol.asyncIterator](): AsyncGenerator, void, void>; +} +//# sourceMappingURL=glob.d.ts.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/glob.d.ts.map b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/glob.d.ts.map new file mode 100644 index 0000000000000000000000000000000000000000..c32dc74c96774177b949cc137befa0edb6489e3f --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/glob.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"glob.d.ts","sourceRoot":"","sources":["../../src/glob.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAoB,MAAM,WAAW,CAAA;AACvD,OAAO,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AAEnC,OAAO,EACL,QAAQ,EACR,IAAI,EACJ,UAAU,EAIX,MAAM,aAAa,CAAA;AACpB,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AACxC,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAA;AAGtC,MAAM,MAAM,QAAQ,GAAG,SAAS,CAAC,KAAK,CAAC,CAAA;AACvC,MAAM,MAAM,SAAS,GAAG,OAAO,CAAC,SAAS,CAAC,WAAW,CAAC,EAAE,SAAS,CAAC,CAAA;AAalE;;;;;;;;;;;;GAYG;AACH,MAAM,WAAW,WAAW;IAC1B;;;;;;;;;;;;OAYG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAA;IAElB;;;;OAIG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAA;IAE5B;;;;;OAKG;IACH,GAAG,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;IAElB;;;;OAIG;IACH,GAAG,CAAC,EAAE,OAAO,CAAA;IAEb;;;;;;;;OAQG;IACH,WAAW,CAAC,EAAE,OAAO,CAAA;IAErB;;;;;;;;OAQG;IACH,MAAM,CAAC,EAAE,OAAO,CAAA;IAEhB;;;;;;;;;;;;;;;;OAgBG;IACH,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,UAAU,CAAA;IAEvC;;;;;OAKG;IACH,aAAa,CAAC,EAAE,OAAO,CAAA;IAEvB;;;OAGG;IACH,IAAI,CAAC,EAAE,OAAO,CAAA;IAEd;;;;OAIG;IACH,SAAS,CAAC,EAAE,OAAO,CAAA;IAEnB;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAA;IAEjB;;OAEG;IACH,OAAO,CAAC,EAAE,OAAO,CAAA;IAEjB;;;;;;;;;OASG;IACH,MAAM,CAAC,EAAE,OAAO,CAAA;IAEhB;;;OAGG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;OAEG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;;;;OAKG;IACH,UAAU,CAAC,EAAE,OAAO,CAAA;IAEpB;;;;OAIG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC,QAAQ,CAAA;IAE1B;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAA;IAElB;;;;;;;;;;;;;;;;;;;;;;OAsBG;IACH,IAAI,CAAC,EAAE,MAAM,CAAA;IAEb;;;;;OAKG;IACH,MAAM,CAAC,EAAE,UAAU,CAAA;IAEnB;;;;;;OAMG;IACH,IAAI,CAAC,EAAE,OAAO,CAAA;IAEd;;;OAGG;IACH,MAAM,CAAC,EAAE,WAAW,CAAA;IAEpB;;;;;;;;;;;;;OAaG;IACH,oBAAoB,CAAC,EAAE,OAAO,CAAA;IAE9B;;;;;;;OAOG;IACH,aAAa,CAAC,EAAE,OAAO,CAAA;IAEvB;;;OAGG;IACH,EAAE,CAAC,EAAE,QAAQ,CAAA;IAEb;;;OAGG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;;;;;;OAOG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA0CG;IACH,mBAAmB,CAAC,EAAE,OAAO,CAAA;CAC9B;AAED,MAAM,MAAM,4BAA4B,GAAG,WAAW,GAAG;IACvD,aAAa,EAAE,IAAI,CAAA;IAEnB,QAAQ,CAAC,EAAE,SAAS,CAAA;IACpB,IAAI,
CAAC,EAAE,SAAS,CAAA;IAChB,KAAK,CAAC,EAAE,SAAS,CAAA;CAClB,CAAA;AAED,MAAM,MAAM,6BAA6B,GAAG,WAAW,GAAG;IACxD,aAAa,CAAC,EAAE,KAAK,CAAA;CACtB,CAAA;AAED,MAAM,MAAM,6BAA6B,GAAG,WAAW,GAAG;IACxD,aAAa,CAAC,EAAE,SAAS,CAAA;CAC1B,CAAA;AAED,MAAM,MAAM,MAAM,CAAC,IAAI,IACrB,IAAI,SAAS,4BAA4B,GAAG,IAAI,GAC9C,IAAI,SAAS,6BAA6B,GAAG,MAAM,GACnD,IAAI,SAAS,6BAA6B,GAAG,MAAM,GACnD,MAAM,GAAG,IAAI,CAAA;AACjB,MAAM,MAAM,OAAO,CAAC,IAAI,IAAI,MAAM,CAAC,IAAI,CAAC,EAAE,CAAA;AAE1C,MAAM,MAAM,SAAS,CAAC,IAAI,IACxB,IAAI,SAAS,4BAA4B,GAAG,IAAI,GAC9C,IAAI,SAAS,6BAA6B,GAAG,KAAK,GAClD,IAAI,SAAS,6BAA6B,GAAG,KAAK,GAClD,OAAO,CAAA;AAEX;;GAEG;AACH,qBAAa,IAAI,CAAC,IAAI,SAAS,WAAW,CAAE,YAAW,WAAW;IAChE,QAAQ,CAAC,EAAE,OAAO,CAAA;IAClB,GAAG,EAAE,MAAM,CAAA;IACX,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,GAAG,EAAE,OAAO,CAAA;IACZ,WAAW,EAAE,OAAO,CAAA;IACpB,MAAM,EAAE,OAAO,CAAA;IACf,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,UAAU,CAAA;IACvC,aAAa,EAAE,OAAO,CAAA;IACtB,IAAI,CAAC,EAAE,OAAO,CAAA;IACd,SAAS,EAAE,OAAO,CAAA;IAClB,QAAQ,EAAE,MAAM,CAAA;IAChB,OAAO,EAAE,OAAO,CAAA;IAChB,MAAM,EAAE,OAAO,CAAA;IACf,KAAK,EAAE,OAAO,CAAA;IACd,KAAK,EAAE,OAAO,CAAA;IACd,UAAU,EAAE,OAAO,CAAA;IACnB,OAAO,EAAE,MAAM,EAAE,CAAA;IACjB,QAAQ,EAAE,MAAM,CAAC,QAAQ,CAAA;IACzB,QAAQ,EAAE,OAAO,CAAA;IACjB,MAAM,EAAE,UAAU,CAAA;IAClB,IAAI,EAAE,OAAO,CAAA;IACb,MAAM,CAAC,EAAE,WAAW,CAAA;IACpB,oBAAoB,EAAE,OAAO,CAAA;IAC7B,aAAa,EAAE,SAAS,CAAC,IAAI,CAAC,CAAA;IAC9B,mBAAmB,EAAE,OAAO,CAAA;IAE5B;;OAEG;IACH,IAAI,EAAE,IAAI,CAAA;IAEV;;OAEG;IACH,QAAQ,EAAE,OAAO,EAAE,CAAA;IAEnB;;;;;;;;;;;OAWG;gBACS,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI;IA2HlD;;OAEG;IACG,IAAI,IAAI,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;IAoBpC;;OAEG;IACH,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;IAgBzB;;OAEG;IACH,MAAM,IAAI,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC;IAc9C;;OAEG;IACH,UAAU,IAAI,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC;IAclD;;;OAGG;IACH,WAAW,IAAI,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC;IAGlD,CAAC,MAAM,CAAC,QAAQ,CAAC;IAIjB;;;OAGG;IACH,OAAO,IAAI,cAAc,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC;IAGnD,CAAC,MAAM,CAAC,aAAa,CAAC;CAGvB"} \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/glob.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/glob.js new file mode 100644 index 0000000000000000000000000000000000000000..e1339bbbcf57f3b158e89ff7c5fb8e21a6179c25 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/glob.js @@ -0,0 +1,247 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Glob = void 0; +const minimatch_1 = require("minimatch"); +const node_url_1 = require("node:url"); +const path_scurry_1 = require("path-scurry"); +const pattern_js_1 = require("./pattern.js"); +const walker_js_1 = require("./walker.js"); +// if no process global, just call it linux. +// so we default to case-sensitive, / separators +const defaultPlatform = (typeof process === 'object' && + process && + typeof process.platform === 'string') ? + process.platform + : 'linux'; +/** + * An object that can perform glob pattern traversals. + */ +class Glob { + absolute; + cwd; + root; + dot; + dotRelative; + follow; + ignore; + magicalBraces; + mark; + matchBase; + maxDepth; + nobrace; + nocase; + nodir; + noext; + noglobstar; + pattern; + platform; + realpath; + scurry; + stat; + signal; + windowsPathsNoEscape; + withFileTypes; + includeChildMatches; + /** + * The options provided to the constructor. 
+ */ + opts; + /** + * An array of parsed immutable {@link Pattern} objects. + */ + patterns; + /** + * All options are stored as properties on the `Glob` object. + * + * See {@link GlobOptions} for full options descriptions. + * + * Note that a previous `Glob` object can be passed as the + * `GlobOptions` to another `Glob` instantiation to re-use settings + * and caches with a new pattern. + * + * Traversal functions can be called multiple times to run the walk + * again. + */ + constructor(pattern, opts) { + /* c8 ignore start */ + if (!opts) + throw new TypeError('glob options required'); + /* c8 ignore stop */ + this.withFileTypes = !!opts.withFileTypes; + this.signal = opts.signal; + this.follow = !!opts.follow; + this.dot = !!opts.dot; + this.dotRelative = !!opts.dotRelative; + this.nodir = !!opts.nodir; + this.mark = !!opts.mark; + if (!opts.cwd) { + this.cwd = ''; + } + else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) { + opts.cwd = (0, node_url_1.fileURLToPath)(opts.cwd); + } + this.cwd = opts.cwd || ''; + this.root = opts.root; + this.magicalBraces = !!opts.magicalBraces; + this.nobrace = !!opts.nobrace; + this.noext = !!opts.noext; + this.realpath = !!opts.realpath; + this.absolute = opts.absolute; + this.includeChildMatches = opts.includeChildMatches !== false; + this.noglobstar = !!opts.noglobstar; + this.matchBase = !!opts.matchBase; + this.maxDepth = + typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity; + this.stat = !!opts.stat; + this.ignore = opts.ignore; + if (this.withFileTypes && this.absolute !== undefined) { + throw new Error('cannot set absolute and withFileTypes:true'); + } + if (typeof pattern === 'string') { + pattern = [pattern]; + } + this.windowsPathsNoEscape = + !!opts.windowsPathsNoEscape || + opts.allowWindowsEscape === + false; + if (this.windowsPathsNoEscape) { + pattern = pattern.map(p => p.replace(/\\/g, '/')); + } + if (this.matchBase) { + if (opts.noglobstar) { + throw new TypeError('base matching requires globstar'); + } + pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`)); + } + this.pattern = pattern; + this.platform = opts.platform || defaultPlatform; + this.opts = { ...opts, platform: this.platform }; + if (opts.scurry) { + this.scurry = opts.scurry; + if (opts.nocase !== undefined && + opts.nocase !== opts.scurry.nocase) { + throw new Error('nocase option contradicts provided scurry option'); + } + } + else { + const Scurry = opts.platform === 'win32' ? path_scurry_1.PathScurryWin32 + : opts.platform === 'darwin' ? path_scurry_1.PathScurryDarwin + : opts.platform ? path_scurry_1.PathScurryPosix + : path_scurry_1.PathScurry; + this.scurry = new Scurry(this.cwd, { + nocase: opts.nocase, + fs: opts.fs, + }); + } + this.nocase = this.scurry.nocase; + // If you do nocase:true on a case-sensitive file system, then + // we need to use regexps instead of strings for non-magic + // path portions, because statting `aBc` won't return results + // for the file `AbC` for example. 
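The cache-reuse behavior described in the constructor docs can be seen in a small sketch (editorial, not part of the vendored file; the patterns are arbitrary): passing an existing `Glob` instance as the options object lets a second walk share the first one's `PathScurry` cache.

```js
import { Glob } from 'glob'

const g1 = new Glob('**/*.js', { nodir: true })
const js = g1.walkSync()

// g2 re-uses g1's settings and its already-populated scurry cache
const g2 = new Glob('**/*.md', g1)
const md = g2.walkSync()

console.error(js.length, md.length)
```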
+ const nocaseMagicOnly = this.platform === 'darwin' || this.platform === 'win32'; + const mmo = { + // default nocase based on platform + ...opts, + dot: this.dot, + matchBase: this.matchBase, + nobrace: this.nobrace, + nocase: this.nocase, + nocaseMagicOnly, + nocomment: true, + noext: this.noext, + nonegate: true, + optimizationLevel: 2, + platform: this.platform, + windowsPathsNoEscape: this.windowsPathsNoEscape, + debug: !!this.opts.debug, + }; + const mms = this.pattern.map(p => new minimatch_1.Minimatch(p, mmo)); + const [matchSet, globParts] = mms.reduce((set, m) => { + set[0].push(...m.set); + set[1].push(...m.globParts); + return set; + }, [[], []]); + this.patterns = matchSet.map((set, i) => { + const g = globParts[i]; + /* c8 ignore start */ + if (!g) + throw new Error('invalid pattern object'); + /* c8 ignore stop */ + return new pattern_js_1.Pattern(set, g, 0, this.platform); + }); + } + async walk() { + // Walkers always return array of Path objects, so we just have to + // coerce them into the right shape. It will have already called + // realpath() if the option was set to do so, so we know that's cached. + // start out knowing the cwd, at least + return [ + ...(await new walker_js_1.GlobWalker(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity ? + this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + includeChildMatches: this.includeChildMatches, + }).walk()), + ]; + } + walkSync() { + return [ + ...new walker_js_1.GlobWalker(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity ? + this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + includeChildMatches: this.includeChildMatches, + }).walkSync(), + ]; + } + stream() { + return new walker_js_1.GlobStream(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity ? + this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + includeChildMatches: this.includeChildMatches, + }).stream(); + } + streamSync() { + return new walker_js_1.GlobStream(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity ? + this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + includeChildMatches: this.includeChildMatches, + }).streamSync(); + } + /** + * Default sync iteration function. Returns a Generator that + * iterates over the results. + */ + iterateSync() { + return this.streamSync()[Symbol.iterator](); + } + [Symbol.iterator]() { + return this.iterateSync(); + } + /** + * Default async iteration function. Returns an AsyncGenerator that + * iterates over the results. 
+ */ + iterate() { + return this.stream()[Symbol.asyncIterator](); + } + [Symbol.asyncIterator]() { + return this.iterate(); + } +} +exports.Glob = Glob; +//# sourceMappingURL=glob.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/glob.js.map b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/glob.js.map new file mode 100644 index 0000000000000000000000000000000000000000..551a9fc24f5b56c9f828a778fdca098cf0d5e352 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/glob.js.map @@ -0,0 +1 @@ +{"version":3,"file":"glob.js","sourceRoot":"","sources":["../../src/glob.ts"],"names":[],"mappings":";;;AAAA,yCAAuD;AAEvD,uCAAwC;AACxC,6CAOoB;AAEpB,6CAAsC;AACtC,2CAAoD;AAKpD,4CAA4C;AAC5C,gDAAgD;AAChD,MAAM,eAAe,GACnB,CACE,OAAO,OAAO,KAAK,QAAQ;IAC3B,OAAO;IACP,OAAO,OAAO,CAAC,QAAQ,KAAK,QAAQ,CACrC,CAAC,CAAC;IACD,OAAO,CAAC,QAAQ;IAClB,CAAC,CAAC,OAAO,CAAA;AAyVX;;GAEG;AACH,MAAa,IAAI;IACf,QAAQ,CAAU;IAClB,GAAG,CAAQ;IACX,IAAI,CAAS;IACb,GAAG,CAAS;IACZ,WAAW,CAAS;IACpB,MAAM,CAAS;IACf,MAAM,CAAiC;IACvC,aAAa,CAAS;IACtB,IAAI,CAAU;IACd,SAAS,CAAS;IAClB,QAAQ,CAAQ;IAChB,OAAO,CAAS;IAChB,MAAM,CAAS;IACf,KAAK,CAAS;IACd,KAAK,CAAS;IACd,UAAU,CAAS;IACnB,OAAO,CAAU;IACjB,QAAQ,CAAiB;IACzB,QAAQ,CAAS;IACjB,MAAM,CAAY;IAClB,IAAI,CAAS;IACb,MAAM,CAAc;IACpB,oBAAoB,CAAS;IAC7B,aAAa,CAAiB;IAC9B,mBAAmB,CAAS;IAE5B;;OAEG;IACH,IAAI,CAAM;IAEV;;OAEG;IACH,QAAQ,CAAW;IAEnB;;;;;;;;;;;OAWG;IACH,YAAY,OAA0B,EAAE,IAAU;QAChD,qBAAqB;QACrB,IAAI,CAAC,IAAI;YAAE,MAAM,IAAI,SAAS,CAAC,uBAAuB,CAAC,CAAA;QACvD,oBAAoB;QACpB,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC,IAAI,CAAC,aAAgC,CAAA;QAC5D,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QACzB,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,IAAI,CAAC,MAAM,CAAA;QAC3B,IAAI,CAAC,GAAG,GAAG,CAAC,CAAC,IAAI,CAAC,GAAG,CAAA;QACrB,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC,IAAI,CAAC,WAAW,CAAA;QACrC,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,IAAI,CAAC,KAAK,CAAA;QACzB,IAAI,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,CAAA;QACvB,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC;YACd,IAAI,CAAC,GAAG,GAAG,EAAE,CAAA;QACf,CAAC;aAAM,IAAI,IAAI,CAAC,GAAG,YAAY,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE,CAAC;YACrE,IAAI,CAAC,GAAG,GAAG,IAAA,wBAAa,EAAC,IAAI,CAAC,GAAG,CAAC,CAAA;QACpC,CAAC;QACD,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,IAAI,EAAE,CAAA;QACzB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,CAAA;QACrB,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC,IAAI,CAAC,aAAa,CAAA;QACzC,IAAI,CAAC,OAAO,GAAG,CAAC,CAAC,IAAI,CAAC,OAAO,CAAA;QAC7B,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,IAAI,CAAC,KAAK,CAAA;QACzB,IAAI,CAAC,QAAQ,GAAG,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAA;QAC/B,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAA;QAC7B,IAAI,CAAC,mBAAmB,GAAG,IAAI,CAAC,mBAAmB,KAAK,KAAK,CAAA;QAE7D,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC,IAAI,CAAC,UAAU,CAAA;QACnC,IAAI,CAAC,SAAS,GAAG,CAAC,CAAC,IAAI,CAAC,SAAS,CAAA;QACjC,IAAI,CAAC,QAAQ;YACX,OAAO,IAAI,CAAC,QAAQ,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAA;QAC9D,IAAI,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,CAAA;QACvB,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QAEzB,IAAI,IAAI,CAAC,aAAa,IAAI,IAAI,CAAC,QAAQ,KAAK,SAAS,EAAE,CAAC;YACtD,MAAM,IAAI,KAAK,CAAC,4CAA4C,CAAC,CAAA;QAC/D,CAAC;QAED,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE,CAAC;YAChC,OAAO,GAAG,CAAC,OAAO,CAAC,CAAA;QACrB,CAAC;QAED,IAAI,CAAC,oBAAoB;YACvB,CAAC,CAAC,IAAI,CAAC,oBAAoB;gBAC1B,IAAyC,CAAC,kBAAkB;oBAC3D,KAAK,CAAA;QAET,IAAI,IAAI,CAAC,oBAAoB,EAAE,CAAC;YAC9B,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAA;QACnD,CAAC;QAED,IAAI,IAAI,CAAC,SAAS,EAAE,CAAC;YACnB,IAAI,IAAI,CAAC,UAAU
,EAAE,CAAC;gBACpB,MAAM,IAAI,SAAS,CAAC,iCAAiC,CAAC,CAAA;YACxD,CAAC;YACD,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAA;QACjE,CAAC;QAED,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;QAEtB,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,eAAe,CAAA;QAChD,IAAI,CAAC,IAAI,GAAG,EAAE,GAAG,IAAI,EAAE,QAAQ,EAAE,IAAI,CAAC,QAAQ,EAAE,CAAA;QAChD,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC;YAChB,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;YACzB,IACE,IAAI,CAAC,MAAM,KAAK,SAAS;gBACzB,IAAI,CAAC,MAAM,KAAK,IAAI,CAAC,MAAM,CAAC,MAAM,EAClC,CAAC;gBACD,MAAM,IAAI,KAAK,CAAC,kDAAkD,CAAC,CAAA;YACrE,CAAC;QACH,CAAC;aAAM,CAAC;YACN,MAAM,MAAM,GACV,IAAI,CAAC,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,6BAAe;gBAC3C,CAAC,CAAC,IAAI,CAAC,QAAQ,KAAK,QAAQ,CAAC,CAAC,CAAC,8BAAgB;oBAC/C,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,6BAAe;wBACjC,CAAC,CAAC,wBAAU,CAAA;YACd,IAAI,CAAC,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE;gBACjC,MAAM,EAAE,IAAI,CAAC,MAAM;gBACnB,EAAE,EAAE,IAAI,CAAC,EAAE;aACZ,CAAC,CAAA;QACJ,CAAC;QACD,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAA;QAEhC,8DAA8D;QAC9D,0DAA0D;QAC1D,6DAA6D;QAC7D,kCAAkC;QAClC,MAAM,eAAe,GACnB,IAAI,CAAC,QAAQ,KAAK,QAAQ,IAAI,IAAI,CAAC,QAAQ,KAAK,OAAO,CAAA;QAEzD,MAAM,GAAG,GAAqB;YAC5B,mCAAmC;YACnC,GAAG,IAAI;YACP,GAAG,EAAE,IAAI,CAAC,GAAG;YACb,SAAS,EAAE,IAAI,CAAC,SAAS;YACzB,OAAO,EAAE,IAAI,CAAC,OAAO;YACrB,MAAM,EAAE,IAAI,CAAC,MAAM;YACnB,eAAe;YACf,SAAS,EAAE,IAAI;YACf,KAAK,EAAE,IAAI,CAAC,KAAK;YACjB,QAAQ,EAAE,IAAI;YACd,iBAAiB,EAAE,CAAC;YACpB,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,oBAAoB,EAAE,IAAI,CAAC,oBAAoB;YAC/C,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK;SACzB,CAAA;QAED,MAAM,GAAG,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,qBAAS,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAA;QACxD,MAAM,CAAC,QAAQ,EAAE,SAAS,CAAC,GAAG,GAAG,CAAC,MAAM,CACtC,CAAC,GAA0B,EAAE,CAAC,EAAE,EAAE;YAChC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAA;YACrB,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,SAAS,CAAC,CAAA;YAC3B,OAAO,GAAG,CAAA;QACZ,CAAC,EACD,CAAC,EAAE,EAAE,EAAE,CAAC,CACT,CAAA;QACD,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,CAAC,EAAE,EAAE;YACtC,MAAM,CAAC,GAAG,SAAS,CAAC,CAAC,CAAC,CAAA;YACtB,qBAAqB;YACrB,IAAI,CAAC,CAAC;gBAAE,MAAM,IAAI,KAAK,CAAC,wBAAwB,CAAC,CAAA;YACjD,oBAAoB;YACpB,OAAO,IAAI,oBAAO,CAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAA;QAC9C,CAAC,CAAC,CAAA;IACJ,CAAC;IAMD,KAAK,CAAC,IAAI;QACR,kEAAkE;QAClE,iEAAiE;QACjE,uEAAuE;QACvE,sCAAsC;QACtC,OAAO;YACL,GAAG,CAAC,MAAM,IAAI,sBAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;gBACvD,GAAG,IAAI,CAAC,IAAI;gBACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ,CAAC,CAAC;oBAC1B,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;oBACzC,CAAC,CAAC,QAAQ;gBACZ,QAAQ,EAAE,IAAI,CAAC,QAAQ;gBACvB,MAAM,EAAE,IAAI,CAAC,MAAM;gBACnB,mBAAmB,EAAE,IAAI,CAAC,mBAAmB;aAC9C,CAAC,CAAC,IAAI,EAAE,CAAC;SACX,CAAA;IACH,CAAC;IAMD,QAAQ;QACN,OAAO;YACL,GAAG,IAAI,sBAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;gBAChD,GAAG,IAAI,CAAC,IAAI;gBACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ,CAAC,CAAC;oBAC1B,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;oBACzC,CAAC,CAAC,QAAQ;gBACZ,QAAQ,EAAE,IAAI,CAAC,QAAQ;gBACvB,MAAM,EAAE,IAAI,CAAC,MAAM;gBACnB,mBAAmB,EAAE,IAAI,CAAC,mBAAmB;aAC9C,CAAC,CAAC,QAAQ,EAAE;SACd,CAAA;IACH,CAAC;IAMD,MAAM;QACJ,OAAO,IAAI,sBAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;YACpD,GAAG,IAAI,CAAC,IAAI;YACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ,CAAC,CAAC;gBAC1B,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;gBACzC,CAAC,CAAC,QAAQ;YACZ,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,MAAM,EAAE,IAAI,C
AAC,MAAM;YACnB,mBAAmB,EAAE,IAAI,CAAC,mBAAmB;SAC9C,CAAC,CAAC,MAAM,EAAE,CAAA;IACb,CAAC;IAMD,UAAU;QACR,OAAO,IAAI,sBAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;YACpD,GAAG,IAAI,CAAC,IAAI;YACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ,CAAC,CAAC;gBAC1B,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;gBACzC,CAAC,CAAC,QAAQ;YACZ,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,MAAM,EAAE,IAAI,CAAC,MAAM;YACnB,mBAAmB,EAAE,IAAI,CAAC,mBAAmB;SAC9C,CAAC,CAAC,UAAU,EAAE,CAAA;IACjB,CAAC;IAED;;;OAGG;IACH,WAAW;QACT,OAAO,IAAI,CAAC,UAAU,EAAE,CAAC,MAAM,CAAC,QAAQ,CAAC,EAAE,CAAA;IAC7C,CAAC;IACD,CAAC,MAAM,CAAC,QAAQ,CAAC;QACf,OAAO,IAAI,CAAC,WAAW,EAAE,CAAA;IAC3B,CAAC;IAED;;;OAGG;IACH,OAAO;QACL,OAAO,IAAI,CAAC,MAAM,EAAE,CAAC,MAAM,CAAC,aAAa,CAAC,EAAE,CAAA;IAC9C,CAAC;IACD,CAAC,MAAM,CAAC,aAAa,CAAC;QACpB,OAAO,IAAI,CAAC,OAAO,EAAE,CAAA;IACvB,CAAC;CACF;AA7QD,oBA6QC","sourcesContent":["import { Minimatch, MinimatchOptions } from 'minimatch'\nimport { Minipass } from 'minipass'\nimport { fileURLToPath } from 'node:url'\nimport {\n FSOption,\n Path,\n PathScurry,\n PathScurryDarwin,\n PathScurryPosix,\n PathScurryWin32,\n} from 'path-scurry'\nimport { IgnoreLike } from './ignore.js'\nimport { Pattern } from './pattern.js'\nimport { GlobStream, GlobWalker } from './walker.js'\n\nexport type MatchSet = Minimatch['set']\nexport type GlobParts = Exclude\n\n// if no process global, just call it linux.\n// so we default to case-sensitive, / separators\nconst defaultPlatform: NodeJS.Platform =\n (\n typeof process === 'object' &&\n process &&\n typeof process.platform === 'string'\n ) ?\n process.platform\n : 'linux'\n\n/**\n * A `GlobOptions` object may be provided to any of the exported methods, and\n * must be provided to the `Glob` constructor.\n *\n * All options are optional, boolean, and false by default, unless otherwise\n * noted.\n *\n * All resolved options are added to the Glob object as properties.\n *\n * If you are running many `glob` operations, you can pass a Glob object as the\n * `options` argument to a subsequent operation to share the previously loaded\n * cache.\n */\nexport interface GlobOptions {\n /**\n * Set to `true` to always receive absolute paths for\n * matched files. Set to `false` to always return relative paths.\n *\n * When this option is not set, absolute paths are returned for patterns\n * that are absolute, and otherwise paths are returned that are relative\n * to the `cwd` setting.\n *\n * This does _not_ make an extra system call to get\n * the realpath, it only does string path resolution.\n *\n * Conflicts with {@link withFileTypes}\n */\n absolute?: boolean\n\n /**\n * Set to false to enable {@link windowsPathsNoEscape}\n *\n * @deprecated\n */\n allowWindowsEscape?: boolean\n\n /**\n * The current working directory in which to search. Defaults to\n * `process.cwd()`.\n *\n * May be eiher a string path or a `file://` URL object or string.\n */\n cwd?: string | URL\n\n /**\n * Include `.dot` files in normal matches and `globstar`\n * matches. Note that an explicit dot in a portion of the pattern\n * will always match dot files.\n */\n dot?: boolean\n\n /**\n * Prepend all relative path strings with `./` (or `.\\` on Windows).\n *\n * Without this option, returned relative paths are \"bare\", so instead of\n * returning `'./foo/bar'`, they are returned as `'foo/bar'`.\n *\n * Relative patterns starting with `'../'` are not prepended with `./`, even\n * if this option is set.\n */\n dotRelative?: boolean\n\n /**\n * Follow symlinked directories when expanding `**`\n * patterns. 
This can result in a lot of duplicate references in\n * the presence of cyclic links, and make performance quite bad.\n *\n * By default, a `**` in a pattern will follow 1 symbolic link if\n * it is not the first item in the pattern, or none if it is the\n * first item in the pattern, following the same behavior as Bash.\n */\n follow?: boolean\n\n /**\n * string or string[], or an object with `ignored` and `childrenIgnored`\n * methods.\n *\n * If a string or string[] is provided, then this is treated as a glob\n * pattern or array of glob patterns to exclude from matches. To ignore all\n * children within a directory, as well as the entry itself, append `'/**'`\n * to the ignore pattern.\n *\n * **Note** `ignore` patterns are _always_ in `dot:true` mode, regardless of\n * any other settings.\n *\n * If an object is provided that has `ignored(path)` and/or\n * `childrenIgnored(path)` methods, then these methods will be called to\n * determine whether any Path is a match or if its children should be\n * traversed, respectively.\n */\n ignore?: string | string[] | IgnoreLike\n\n /**\n * Treat brace expansion like `{a,b}` as a \"magic\" pattern. Has no\n * effect if {@link nobrace} is set.\n *\n * Only has effect on the {@link hasMagic} function.\n */\n magicalBraces?: boolean\n\n /**\n * Add a `/` character to directory matches. Note that this requires\n * additional stat calls in some cases.\n */\n mark?: boolean\n\n /**\n * Perform a basename-only match if the pattern does not contain any slash\n * characters. That is, `*.js` would be treated as equivalent to\n * `**\\/*.js`, matching all js files in all directories.\n */\n matchBase?: boolean\n\n /**\n * Limit the directory traversal to a given depth below the cwd.\n * Note that this does NOT prevent traversal to sibling folders,\n * root patterns, and so on. It only limits the maximum folder depth\n * that the walk will descend, relative to the cwd.\n */\n maxDepth?: number\n\n /**\n * Do not expand `{a,b}` and `{1..3}` brace sets.\n */\n nobrace?: boolean\n\n /**\n * Perform a case-insensitive match. This defaults to `true` on macOS and\n * Windows systems, and `false` on all others.\n *\n * **Note** `nocase` should only be explicitly set when it is\n * known that the filesystem's case sensitivity differs from the\n * platform default. If set `true` on case-sensitive file\n * systems, or `false` on case-insensitive file systems, then the\n * walk may return more or less results than expected.\n */\n nocase?: boolean\n\n /**\n * Do not match directories, only files. (Note: to match\n * _only_ directories, put a `/` at the end of the pattern.)\n */\n nodir?: boolean\n\n /**\n * Do not match \"extglob\" patterns such as `+(a|b)`.\n */\n noext?: boolean\n\n /**\n * Do not match `**` against multiple filenames. (Ie, treat it as a normal\n * `*` instead.)\n *\n * Conflicts with {@link matchBase}\n */\n noglobstar?: boolean\n\n /**\n * Defaults to value of `process.platform` if available, or `'linux'` if\n * not. Setting `platform:'win32'` on non-Windows systems may cause strange\n * behavior.\n */\n platform?: NodeJS.Platform\n\n /**\n * Set to true to call `fs.realpath` on all of the\n * results. In the case of an entry that cannot be resolved, the\n * entry is omitted. 
This incurs a slight performance penalty, of\n * course, because of the added system calls.\n */\n realpath?: boolean\n\n /**\n *\n * A string path resolved against the `cwd` option, which\n * is used as the starting point for absolute patterns that start\n * with `/`, (but not drive letters or UNC paths on Windows).\n *\n * Note that this _doesn't_ necessarily limit the walk to the\n * `root` directory, and doesn't affect the cwd starting point for\n * non-absolute patterns. A pattern containing `..` will still be\n * able to traverse out of the root directory, if it is not an\n * actual root directory on the filesystem, and any non-absolute\n * patterns will be matched in the `cwd`. For example, the\n * pattern `/../*` with `{root:'/some/path'}` will return all\n * files in `/some`, not all files in `/some/path`. The pattern\n * `*` with `{root:'/some/path'}` will return all the entries in\n * the cwd, not the entries in `/some/path`.\n *\n * To start absolute and non-absolute patterns in the same\n * path, you can use `{root:''}`. However, be aware that on\n * Windows systems, a pattern like `x:/*` or `//host/share/*` will\n * _always_ start in the `x:/` or `//host/share` directory,\n * regardless of the `root` setting.\n */\n root?: string\n\n /**\n * A [PathScurry](http://npm.im/path-scurry) object used\n * to traverse the file system. If the `nocase` option is set\n * explicitly, then any provided `scurry` object must match this\n * setting.\n */\n scurry?: PathScurry\n\n /**\n * Call `lstat()` on all entries, whether required or not to determine\n * if it's a valid match. When used with {@link withFileTypes}, this means\n * that matches will include data such as modified time, permissions, and\n * so on. Note that this will incur a performance cost due to the added\n * system calls.\n */\n stat?: boolean\n\n /**\n * An AbortSignal which will cancel the Glob walk when\n * triggered.\n */\n signal?: AbortSignal\n\n /**\n * Use `\\\\` as a path separator _only_, and\n * _never_ as an escape character. If set, all `\\\\` characters are\n * replaced with `/` in the pattern.\n *\n * Note that this makes it **impossible** to match against paths\n * containing literal glob pattern characters, but allows matching\n * with patterns constructed using `path.join()` and\n * `path.resolve()` on Windows platforms, mimicking the (buggy!)\n * behavior of Glob v7 and before on Windows. Please use with\n * caution, and be mindful of [the caveat below about Windows\n * paths](#windows). (For legacy reasons, this is also set if\n * `allowWindowsEscape` is set to the exact value `false`.)\n */\n windowsPathsNoEscape?: boolean\n\n /**\n * Return [PathScurry](http://npm.im/path-scurry)\n * `Path` objects instead of strings. These are similar to a\n * NodeJS `Dirent` object, but with additional methods and\n * properties.\n *\n * Conflicts with {@link absolute}\n */\n withFileTypes?: boolean\n\n /**\n * An fs implementation to override some or all of the defaults. See\n * http://npm.im/path-scurry for details about what can be overridden.\n */\n fs?: FSOption\n\n /**\n * Just passed along to Minimatch. Note that this makes all pattern\n * matching operations slower and *extremely* noisy.\n */\n debug?: boolean\n\n /**\n * Return `/` delimited paths, even on Windows.\n *\n * On posix systems, this has no effect. 
But, on Windows, it means that\n * paths will be `/` delimited, and absolute paths will be their full\n * resolved UNC forms, eg instead of `'C:\\\\foo\\\\bar'`, it would return\n * `'//?/C:/foo/bar'`\n */\n posix?: boolean\n\n /**\n * Do not match any children of any matches. For example, the pattern\n * `**\\/foo` would match `a/foo`, but not `a/foo/b/foo` in this mode.\n *\n * This is especially useful for cases like \"find all `node_modules`\n * folders, but not the ones in `node_modules`\".\n *\n * In order to support this, the `Ignore` implementation must support an\n * `add(pattern: string)` method. If using the default `Ignore` class, then\n * this is fine, but if this is set to `false`, and a custom `Ignore` is\n * provided that does not have an `add()` method, then it will throw an\n * error.\n *\n * **Caveat** It *only* ignores matches that would be a descendant of a\n * previous match, and only if that descendant is matched *after* the\n * ancestor is encountered. Since the file system walk happens in\n * indeterminate order, it's possible that a match will already be added\n * before its ancestor, if multiple or braced patterns are used.\n *\n * For example:\n *\n * ```ts\n * const results = await glob([\n * // likely to match first, since it's just a stat\n * 'a/b/c/d/e/f',\n *\n * // this pattern is more complicated! It must to various readdir()\n * // calls and test the results against a regular expression, and that\n * // is certainly going to take a little bit longer.\n * //\n * // So, later on, it encounters a match at 'a/b/c/d/e', but it's too\n * // late to ignore a/b/c/d/e/f, because it's already been emitted.\n * 'a/[bdf]/?/[a-z]/*',\n * ], { includeChildMatches: false })\n * ```\n *\n * It's best to only set this to `false` if you can be reasonably sure that\n * no components of the pattern will potentially match one another's file\n * system descendants, or if the occasional included child entry will not\n * cause problems.\n *\n * @default true\n */\n includeChildMatches?: boolean\n}\n\nexport type GlobOptionsWithFileTypesTrue = GlobOptions & {\n withFileTypes: true\n // string options not relevant if returning Path objects.\n absolute?: undefined\n mark?: undefined\n posix?: undefined\n}\n\nexport type GlobOptionsWithFileTypesFalse = GlobOptions & {\n withFileTypes?: false\n}\n\nexport type GlobOptionsWithFileTypesUnset = GlobOptions & {\n withFileTypes?: undefined\n}\n\nexport type Result =\n Opts extends GlobOptionsWithFileTypesTrue ? Path\n : Opts extends GlobOptionsWithFileTypesFalse ? string\n : Opts extends GlobOptionsWithFileTypesUnset ? string\n : string | Path\nexport type Results = Result[]\n\nexport type FileTypes =\n Opts extends GlobOptionsWithFileTypesTrue ? true\n : Opts extends GlobOptionsWithFileTypesFalse ? false\n : Opts extends GlobOptionsWithFileTypesUnset ? 
false\n : boolean\n\n/**\n * An object that can perform glob pattern traversals.\n */\nexport class Glob implements GlobOptions {\n absolute?: boolean\n cwd: string\n root?: string\n dot: boolean\n dotRelative: boolean\n follow: boolean\n ignore?: string | string[] | IgnoreLike\n magicalBraces: boolean\n mark?: boolean\n matchBase: boolean\n maxDepth: number\n nobrace: boolean\n nocase: boolean\n nodir: boolean\n noext: boolean\n noglobstar: boolean\n pattern: string[]\n platform: NodeJS.Platform\n realpath: boolean\n scurry: PathScurry\n stat: boolean\n signal?: AbortSignal\n windowsPathsNoEscape: boolean\n withFileTypes: FileTypes\n includeChildMatches: boolean\n\n /**\n * The options provided to the constructor.\n */\n opts: Opts\n\n /**\n * An array of parsed immutable {@link Pattern} objects.\n */\n patterns: Pattern[]\n\n /**\n * All options are stored as properties on the `Glob` object.\n *\n * See {@link GlobOptions} for full options descriptions.\n *\n * Note that a previous `Glob` object can be passed as the\n * `GlobOptions` to another `Glob` instantiation to re-use settings\n * and caches with a new pattern.\n *\n * Traversal functions can be called multiple times to run the walk\n * again.\n */\n constructor(pattern: string | string[], opts: Opts) {\n /* c8 ignore start */\n if (!opts) throw new TypeError('glob options required')\n /* c8 ignore stop */\n this.withFileTypes = !!opts.withFileTypes as FileTypes\n this.signal = opts.signal\n this.follow = !!opts.follow\n this.dot = !!opts.dot\n this.dotRelative = !!opts.dotRelative\n this.nodir = !!opts.nodir\n this.mark = !!opts.mark\n if (!opts.cwd) {\n this.cwd = ''\n } else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) {\n opts.cwd = fileURLToPath(opts.cwd)\n }\n this.cwd = opts.cwd || ''\n this.root = opts.root\n this.magicalBraces = !!opts.magicalBraces\n this.nobrace = !!opts.nobrace\n this.noext = !!opts.noext\n this.realpath = !!opts.realpath\n this.absolute = opts.absolute\n this.includeChildMatches = opts.includeChildMatches !== false\n\n this.noglobstar = !!opts.noglobstar\n this.matchBase = !!opts.matchBase\n this.maxDepth =\n typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity\n this.stat = !!opts.stat\n this.ignore = opts.ignore\n\n if (this.withFileTypes && this.absolute !== undefined) {\n throw new Error('cannot set absolute and withFileTypes:true')\n }\n\n if (typeof pattern === 'string') {\n pattern = [pattern]\n }\n\n this.windowsPathsNoEscape =\n !!opts.windowsPathsNoEscape ||\n (opts as { allowWindowsEscape?: boolean }).allowWindowsEscape ===\n false\n\n if (this.windowsPathsNoEscape) {\n pattern = pattern.map(p => p.replace(/\\\\/g, '/'))\n }\n\n if (this.matchBase) {\n if (opts.noglobstar) {\n throw new TypeError('base matching requires globstar')\n }\n pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`))\n }\n\n this.pattern = pattern\n\n this.platform = opts.platform || defaultPlatform\n this.opts = { ...opts, platform: this.platform }\n if (opts.scurry) {\n this.scurry = opts.scurry\n if (\n opts.nocase !== undefined &&\n opts.nocase !== opts.scurry.nocase\n ) {\n throw new Error('nocase option contradicts provided scurry option')\n }\n } else {\n const Scurry =\n opts.platform === 'win32' ? PathScurryWin32\n : opts.platform === 'darwin' ? PathScurryDarwin\n : opts.platform ? 
PathScurryPosix\n : PathScurry\n this.scurry = new Scurry(this.cwd, {\n nocase: opts.nocase,\n fs: opts.fs,\n })\n }\n this.nocase = this.scurry.nocase\n\n // If you do nocase:true on a case-sensitive file system, then\n // we need to use regexps instead of strings for non-magic\n // path portions, because statting `aBc` won't return results\n // for the file `AbC` for example.\n const nocaseMagicOnly =\n this.platform === 'darwin' || this.platform === 'win32'\n\n const mmo: MinimatchOptions = {\n // default nocase based on platform\n ...opts,\n dot: this.dot,\n matchBase: this.matchBase,\n nobrace: this.nobrace,\n nocase: this.nocase,\n nocaseMagicOnly,\n nocomment: true,\n noext: this.noext,\n nonegate: true,\n optimizationLevel: 2,\n platform: this.platform,\n windowsPathsNoEscape: this.windowsPathsNoEscape,\n debug: !!this.opts.debug,\n }\n\n const mms = this.pattern.map(p => new Minimatch(p, mmo))\n const [matchSet, globParts] = mms.reduce(\n (set: [MatchSet, GlobParts], m) => {\n set[0].push(...m.set)\n set[1].push(...m.globParts)\n return set\n },\n [[], []],\n )\n this.patterns = matchSet.map((set, i) => {\n const g = globParts[i]\n /* c8 ignore start */\n if (!g) throw new Error('invalid pattern object')\n /* c8 ignore stop */\n return new Pattern(set, g, 0, this.platform)\n })\n }\n\n /**\n * Returns a Promise that resolves to the results array.\n */\n async walk(): Promise>\n async walk(): Promise<(string | Path)[]> {\n // Walkers always return array of Path objects, so we just have to\n // coerce them into the right shape. It will have already called\n // realpath() if the option was set to do so, so we know that's cached.\n // start out knowing the cwd, at least\n return [\n ...(await new GlobWalker(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity ?\n this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n includeChildMatches: this.includeChildMatches,\n }).walk()),\n ]\n }\n\n /**\n * synchronous {@link Glob.walk}\n */\n walkSync(): Results\n walkSync(): (string | Path)[] {\n return [\n ...new GlobWalker(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity ?\n this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n includeChildMatches: this.includeChildMatches,\n }).walkSync(),\n ]\n }\n\n /**\n * Stream results asynchronously.\n */\n stream(): Minipass, Result>\n stream(): Minipass {\n return new GlobStream(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity ?\n this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n includeChildMatches: this.includeChildMatches,\n }).stream()\n }\n\n /**\n * Stream results synchronously.\n */\n streamSync(): Minipass, Result>\n streamSync(): Minipass {\n return new GlobStream(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity ?\n this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n includeChildMatches: this.includeChildMatches,\n }).streamSync()\n }\n\n /**\n * Default sync iteration function. Returns a Generator that\n * iterates over the results.\n */\n iterateSync(): Generator, void, void> {\n return this.streamSync()[Symbol.iterator]()\n }\n [Symbol.iterator]() {\n return this.iterateSync()\n }\n\n /**\n * Default async iteration function. 
Returns an AsyncGenerator that\n * iterates over the results.\n */\n iterate(): AsyncGenerator, void, void> {\n return this.stream()[Symbol.asyncIterator]()\n }\n [Symbol.asyncIterator]() {\n return this.iterate()\n }\n}\n"]} \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/has-magic.d.ts b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/has-magic.d.ts new file mode 100644 index 0000000000000000000000000000000000000000..8aec3bd9725175d9c72be14476fea2a117ca8b09 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/has-magic.d.ts @@ -0,0 +1,14 @@ +import { GlobOptions } from './glob.js'; +/** + * Return true if the patterns provided contain any magic glob characters, + * given the options provided. + * + * Brace expansion is not considered "magic" unless the `magicalBraces` option + * is set, as brace expansion just turns one string into an array of strings. + * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and + * `'xby'` both do not contain any magic glob characters, and it's treated the + * same as if you had called it on `['xay', 'xby']`. When `magicalBraces:true` + * is in the options, brace expansion _is_ treated as a pattern having magic. + */ +export declare const hasMagic: (pattern: string | string[], options?: GlobOptions) => boolean; +//# sourceMappingURL=has-magic.d.ts.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/has-magic.d.ts.map b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/has-magic.d.ts.map new file mode 100644 index 0000000000000000000000000000000000000000..b24dd4ec47e0bbc37be06a58f4622cc183b710af --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/has-magic.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"has-magic.d.ts","sourceRoot":"","sources":["../../src/has-magic.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,WAAW,EAAE,MAAM,WAAW,CAAA;AAEvC;;;;;;;;;;GAUG;AACH,eAAO,MAAM,QAAQ,YACV,MAAM,GAAG,MAAM,EAAE,YACjB,WAAW,KACnB,OAQF,CAAA"} \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/has-magic.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/has-magic.js new file mode 100644 index 0000000000000000000000000000000000000000..0918bd57e0f1c2af51957ade7288a1fd75c7e23d --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/has-magic.js @@ -0,0 +1,27 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.hasMagic = void 0; +const minimatch_1 = require("minimatch"); +/** + * Return true if the patterns provided contain any magic glob characters, + * given the options provided. + * + * Brace expansion is not considered "magic" unless the `magicalBraces` option + * is set, as brace expansion just turns one string into an array of strings. + * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and + * `'xby'` both do not contain any magic glob characters, and it's treated the + * same as if you had called it on `['xay', 'xby']`. When `magicalBraces:true` + * is in the options, brace expansion _is_ treated as a pattern having magic. 
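+ *
+ * A small illustration (editor's sketch, not part of the upstream docs;
+ * the results shown follow from the rules above):
+ *
+ * ```ts
+ * import { hasMagic } from 'glob'
+ *
+ * hasMagic('x{a,b}y')                          // false: expands to plain strings
+ * hasMagic('x{a,b}y', { magicalBraces: true }) // true: braces count as magic here
+ * hasMagic('src/*.js')                         // true: `*` is a magic character
+ * ```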
+ */ +const hasMagic = (pattern, options = {}) => { + if (!Array.isArray(pattern)) { + pattern = [pattern]; + } + for (const p of pattern) { + if (new minimatch_1.Minimatch(p, options).hasMagic()) + return true; + } + return false; +}; +exports.hasMagic = hasMagic; +//# sourceMappingURL=has-magic.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/has-magic.js.map b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/has-magic.js.map new file mode 100644 index 0000000000000000000000000000000000000000..44deab290582768bc18722a0f6cf0d4b03312097 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/has-magic.js.map @@ -0,0 +1 @@ +{"version":3,"file":"has-magic.js","sourceRoot":"","sources":["../../src/has-magic.ts"],"names":[],"mappings":";;;AAAA,yCAAqC;AAGrC;;;;;;;;;;GAUG;AACI,MAAM,QAAQ,GAAG,CACtB,OAA0B,EAC1B,UAAuB,EAAE,EAChB,EAAE;IACX,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,OAAO,GAAG,CAAC,OAAO,CAAC,CAAA;IACrB,CAAC;IACD,KAAK,MAAM,CAAC,IAAI,OAAO,EAAE,CAAC;QACxB,IAAI,IAAI,qBAAS,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,QAAQ,EAAE;YAAE,OAAO,IAAI,CAAA;IACvD,CAAC;IACD,OAAO,KAAK,CAAA;AACd,CAAC,CAAA;AAXY,QAAA,QAAQ,YAWpB","sourcesContent":["import { Minimatch } from 'minimatch'\nimport { GlobOptions } from './glob.js'\n\n/**\n * Return true if the patterns provided contain any magic glob characters,\n * given the options provided.\n *\n * Brace expansion is not considered \"magic\" unless the `magicalBraces` option\n * is set, as brace expansion just turns one string into an array of strings.\n * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and\n * `'xby'` both do not contain any magic glob characters, and it's treated the\n * same as if you had called it on `['xay', 'xby']`. 
When `magicalBraces:true`\n * is in the options, brace expansion _is_ treated as a pattern having magic.\n */\nexport const hasMagic = (\n pattern: string | string[],\n options: GlobOptions = {},\n): boolean => {\n if (!Array.isArray(pattern)) {\n pattern = [pattern]\n }\n for (const p of pattern) {\n if (new Minimatch(p, options).hasMagic()) return true\n }\n return false\n}\n"]} \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/ignore.d.ts b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/ignore.d.ts new file mode 100644 index 0000000000000000000000000000000000000000..1893b16df877c9b50c071ee7b066715f6e58c43e --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/ignore.d.ts @@ -0,0 +1,24 @@ +import { Minimatch, MinimatchOptions } from 'minimatch'; +import { Path } from 'path-scurry'; +import { GlobWalkerOpts } from './walker.js'; +export interface IgnoreLike { + ignored?: (p: Path) => boolean; + childrenIgnored?: (p: Path) => boolean; + add?: (ignore: string) => void; +} +/** + * Class used to process ignored patterns + */ +export declare class Ignore implements IgnoreLike { + relative: Minimatch[]; + relativeChildren: Minimatch[]; + absolute: Minimatch[]; + absoluteChildren: Minimatch[]; + platform: NodeJS.Platform; + mmopts: MinimatchOptions; + constructor(ignored: string[], { nobrace, nocase, noext, noglobstar, platform, }: GlobWalkerOpts); + add(ign: string): void; + ignored(p: Path): boolean; + childrenIgnored(p: Path): boolean; +} +//# sourceMappingURL=ignore.d.ts.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/ignore.d.ts.map b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/ignore.d.ts.map new file mode 100644 index 0000000000000000000000000000000000000000..57d6ab6153d770397a5acb7881ccf52a048dee89 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/ignore.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"ignore.d.ts","sourceRoot":"","sources":["../../src/ignore.ts"],"names":[],"mappings":"AAKA,OAAO,EAAE,SAAS,EAAE,gBAAgB,EAAE,MAAM,WAAW,CAAA;AACvD,OAAO,EAAE,IAAI,EAAE,MAAM,aAAa,CAAA;AAElC,OAAO,EAAE,cAAc,EAAE,MAAM,aAAa,CAAA;AAE5C,MAAM,WAAW,UAAU;IACzB,OAAO,CAAC,EAAE,CAAC,CAAC,EAAE,IAAI,KAAK,OAAO,CAAA;IAC9B,eAAe,CAAC,EAAE,CAAC,CAAC,EAAE,IAAI,KAAK,OAAO,CAAA;IACtC,GAAG,CAAC,EAAE,CAAC,MAAM,EAAE,MAAM,KAAK,IAAI,CAAA;CAC/B;AAWD;;GAEG;AACH,qBAAa,MAAO,YAAW,UAAU;IACvC,QAAQ,EAAE,SAAS,EAAE,CAAA;IACrB,gBAAgB,EAAE,SAAS,EAAE,CAAA;IAC7B,QAAQ,EAAE,SAAS,EAAE,CAAA;IACrB,gBAAgB,EAAE,SAAS,EAAE,CAAA;IAC7B,QAAQ,EAAE,MAAM,CAAC,QAAQ,CAAA;IACzB,MAAM,EAAE,gBAAgB,CAAA;gBAGtB,OAAO,EAAE,MAAM,EAAE,EACjB,EACE,OAAO,EACP,MAAM,EACN,KAAK,EACL,UAAU,EACV,QAA0B,GAC3B,EAAE,cAAc;IAqBnB,GAAG,CAAC,GAAG,EAAE,MAAM;IAyCf,OAAO,CAAC,CAAC,EAAE,IAAI,GAAG,OAAO;IAczB,eAAe,CAAC,CAAC,EAAE,IAAI,GAAG,OAAO;CAWlC"} \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/ignore.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/ignore.js new file mode 100644 index 0000000000000000000000000000000000000000..5f1fde0680dea3736262381efa776c7f8268a70d --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/ignore.js @@ -0,0 
+1,119 @@ +"use strict"; +// give it a pattern, and it'll be able to tell you if +// a given path should be ignored. +// Ignoring a path ignores its children if the pattern ends in /** +// Ignores are always parsed in dot:true mode +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Ignore = void 0; +const minimatch_1 = require("minimatch"); +const pattern_js_1 = require("./pattern.js"); +const defaultPlatform = (typeof process === 'object' && + process && + typeof process.platform === 'string') ? + process.platform + : 'linux'; +/** + * Class used to process ignored patterns + */ +class Ignore { + relative; + relativeChildren; + absolute; + absoluteChildren; + platform; + mmopts; + constructor(ignored, { nobrace, nocase, noext, noglobstar, platform = defaultPlatform, }) { + this.relative = []; + this.absolute = []; + this.relativeChildren = []; + this.absoluteChildren = []; + this.platform = platform; + this.mmopts = { + dot: true, + nobrace, + nocase, + noext, + noglobstar, + optimizationLevel: 2, + platform, + nocomment: true, + nonegate: true, + }; + for (const ign of ignored) + this.add(ign); + } + add(ign) { + // this is a little weird, but it gives us a clean set of optimized + // minimatch matchers, without getting tripped up if one of them + // ends in /** inside a brace section, and it's only inefficient at + // the start of the walk, not along it. + // It'd be nice if the Pattern class just had a .test() method, but + // handling globstars is a bit of a pita, and that code already lives + // in minimatch anyway. + // Another way would be if maybe Minimatch could take its set/globParts + // as an option, and then we could at least just use Pattern to test + // for absolute-ness. + // Yet another way, Minimatch could take an array of glob strings, and + // a cwd option, and do the right thing. + const mm = new minimatch_1.Minimatch(ign, this.mmopts); + for (let i = 0; i < mm.set.length; i++) { + const parsed = mm.set[i]; + const globParts = mm.globParts[i]; + /* c8 ignore start */ + if (!parsed || !globParts) { + throw new Error('invalid pattern object'); + } + // strip off leading ./ portions + // https://github.com/isaacs/node-glob/issues/570 + while (parsed[0] === '.' 
&& globParts[0] === '.') { + parsed.shift(); + globParts.shift(); + } + /* c8 ignore stop */ + const p = new pattern_js_1.Pattern(parsed, globParts, 0, this.platform); + const m = new minimatch_1.Minimatch(p.globString(), this.mmopts); + const children = globParts[globParts.length - 1] === '**'; + const absolute = p.isAbsolute(); + if (absolute) + this.absolute.push(m); + else + this.relative.push(m); + if (children) { + if (absolute) + this.absoluteChildren.push(m); + else + this.relativeChildren.push(m); + } + } + } + ignored(p) { + const fullpath = p.fullpath(); + const fullpaths = `${fullpath}/`; + const relative = p.relative() || '.'; + const relatives = `${relative}/`; + for (const m of this.relative) { + if (m.match(relative) || m.match(relatives)) + return true; + } + for (const m of this.absolute) { + if (m.match(fullpath) || m.match(fullpaths)) + return true; + } + return false; + } + childrenIgnored(p) { + const fullpath = p.fullpath() + '/'; + const relative = (p.relative() || '.') + '/'; + for (const m of this.relativeChildren) { + if (m.match(relative)) + return true; + } + for (const m of this.absoluteChildren) { + if (m.match(fullpath)) + return true; + } + return false; + } +} +exports.Ignore = Ignore; +//# sourceMappingURL=ignore.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/ignore.js.map b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/ignore.js.map new file mode 100644 index 0000000000000000000000000000000000000000..d9dfdfa34ab5c0e734952abd3c8070f804efcb1f --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/ignore.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ignore.js","sourceRoot":"","sources":["../../src/ignore.ts"],"names":[],"mappings":";AAAA,sDAAsD;AACtD,kCAAkC;AAClC,kEAAkE;AAClE,6CAA6C;;;AAE7C,yCAAuD;AAEvD,6CAAsC;AAStC,MAAM,eAAe,GACnB,CACE,OAAO,OAAO,KAAK,QAAQ;IAC3B,OAAO;IACP,OAAO,OAAO,CAAC,QAAQ,KAAK,QAAQ,CACrC,CAAC,CAAC;IACD,OAAO,CAAC,QAAQ;IAClB,CAAC,CAAC,OAAO,CAAA;AAEX;;GAEG;AACH,MAAa,MAAM;IACjB,QAAQ,CAAa;IACrB,gBAAgB,CAAa;IAC7B,QAAQ,CAAa;IACrB,gBAAgB,CAAa;IAC7B,QAAQ,CAAiB;IACzB,MAAM,CAAkB;IAExB,YACE,OAAiB,EACjB,EACE,OAAO,EACP,MAAM,EACN,KAAK,EACL,UAAU,EACV,QAAQ,GAAG,eAAe,GACX;QAEjB,IAAI,CAAC,QAAQ,GAAG,EAAE,CAAA;QAClB,IAAI,CAAC,QAAQ,GAAG,EAAE,CAAA;QAClB,IAAI,CAAC,gBAAgB,GAAG,EAAE,CAAA;QAC1B,IAAI,CAAC,gBAAgB,GAAG,EAAE,CAAA;QAC1B,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;QACxB,IAAI,CAAC,MAAM,GAAG;YACZ,GAAG,EAAE,IAAI;YACT,OAAO;YACP,MAAM;YACN,KAAK;YACL,UAAU;YACV,iBAAiB,EAAE,CAAC;YACpB,QAAQ;YACR,SAAS,EAAE,IAAI;YACf,QAAQ,EAAE,IAAI;SACf,CAAA;QACD,KAAK,MAAM,GAAG,IAAI,OAAO;YAAE,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,CAAA;IAC1C,CAAC;IAED,GAAG,CAAC,GAAW;QACb,mEAAmE;QACnE,gEAAgE;QAChE,mEAAmE;QACnE,uCAAuC;QACvC,mEAAmE;QACnE,qEAAqE;QACrE,uBAAuB;QACvB,uEAAuE;QACvE,oEAAoE;QACpE,qBAAqB;QACrB,sEAAsE;QACtE,wCAAwC;QACxC,MAAM,EAAE,GAAG,IAAI,qBAAS,CAAC,GAAG,EAAE,IAAI,CAAC,MAAM,CAAC,CAAA;QAC1C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,EAAE,CAAC,GAAG,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;YACvC,MAAM,MAAM,GAAG,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;YACxB,MAAM,SAAS,GAAG,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC,CAAA;YACjC,qBAAqB;YACrB,IAAI,CAAC,MAAM,IAAI,CAAC,SAAS,EAAE,CAAC;gBAC1B,MAAM,IAAI,KAAK,CAAC,wBAAwB,CAAC,CAAA;YAC3C,CAAC;YACD,gCAAgC;YAChC,iDAAiD;YACjD,OAAO,MAAM,CAAC,CAAC,CAAC,KAAK,GAAG,IAAI,SAAS,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE,CAAC;gBACjD,MAAM,CAAC,KAAK,EAAE,CAAA;gBACd,SAAS,CAAC,KAAK,EAAE,CAAA;YACnB,CAAC;YACD,oBAAoB;YACpB,MAAM,CAAC,GAAG,IAA
I,oBAAO,CAAC,MAAM,EAAE,SAAS,EAAE,CAAC,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAA;YAC1D,MAAM,CAAC,GAAG,IAAI,qBAAS,CAAC,CAAC,CAAC,UAAU,EAAE,EAAE,IAAI,CAAC,MAAM,CAAC,CAAA;YACpD,MAAM,QAAQ,GAAG,SAAS,CAAC,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC,KAAK,IAAI,CAAA;YACzD,MAAM,QAAQ,GAAG,CAAC,CAAC,UAAU,EAAE,CAAA;YAC/B,IAAI,QAAQ;gBAAE,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;;gBAC9B,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;YAC1B,IAAI,QAAQ,EAAE,CAAC;gBACb,IAAI,QAAQ;oBAAE,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;;oBACtC,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;YACpC,CAAC;QACH,CAAC;IACH,CAAC;IAED,OAAO,CAAC,CAAO;QACb,MAAM,QAAQ,GAAG,CAAC,CAAC,QAAQ,EAAE,CAAA;QAC7B,MAAM,SAAS,GAAG,GAAG,QAAQ,GAAG,CAAA;QAChC,MAAM,QAAQ,GAAG,CAAC,CAAC,QAAQ,EAAE,IAAI,GAAG,CAAA;QACpC,MAAM,SAAS,GAAG,GAAG,QAAQ,GAAG,CAAA;QAChC,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC9B,IAAI,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,SAAS,CAAC;gBAAE,OAAO,IAAI,CAAA;QAC1D,CAAC;QACD,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC9B,IAAI,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,SAAS,CAAC;gBAAE,OAAO,IAAI,CAAA;QAC1D,CAAC;QACD,OAAO,KAAK,CAAA;IACd,CAAC;IAED,eAAe,CAAC,CAAO;QACrB,MAAM,QAAQ,GAAG,CAAC,CAAC,QAAQ,EAAE,GAAG,GAAG,CAAA;QACnC,MAAM,QAAQ,GAAG,CAAC,CAAC,CAAC,QAAQ,EAAE,IAAI,GAAG,CAAC,GAAG,GAAG,CAAA;QAC5C,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,gBAAgB,EAAE,CAAC;YACtC,IAAI,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC;gBAAE,OAAO,IAAI,CAAA;QACpC,CAAC;QACD,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,gBAAgB,EAAE,CAAC;YACtC,IAAI,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC;gBAAE,OAAO,IAAI,CAAA;QACpC,CAAC;QACD,OAAO,KAAK,CAAA;IACd,CAAC;CACF;AAvGD,wBAuGC","sourcesContent":["// give it a pattern, and it'll be able to tell you if\n// a given path should be ignored.\n// Ignoring a path ignores its children if the pattern ends in /**\n// Ignores are always parsed in dot:true mode\n\nimport { Minimatch, MinimatchOptions } from 'minimatch'\nimport { Path } from 'path-scurry'\nimport { Pattern } from './pattern.js'\nimport { GlobWalkerOpts } from './walker.js'\n\nexport interface IgnoreLike {\n ignored?: (p: Path) => boolean\n childrenIgnored?: (p: Path) => boolean\n add?: (ignore: string) => void\n}\n\nconst defaultPlatform: NodeJS.Platform =\n (\n typeof process === 'object' &&\n process &&\n typeof process.platform === 'string'\n ) ?\n process.platform\n : 'linux'\n\n/**\n * Class used to process ignored patterns\n */\nexport class Ignore implements IgnoreLike {\n relative: Minimatch[]\n relativeChildren: Minimatch[]\n absolute: Minimatch[]\n absoluteChildren: Minimatch[]\n platform: NodeJS.Platform\n mmopts: MinimatchOptions\n\n constructor(\n ignored: string[],\n {\n nobrace,\n nocase,\n noext,\n noglobstar,\n platform = defaultPlatform,\n }: GlobWalkerOpts,\n ) {\n this.relative = []\n this.absolute = []\n this.relativeChildren = []\n this.absoluteChildren = []\n this.platform = platform\n this.mmopts = {\n dot: true,\n nobrace,\n nocase,\n noext,\n noglobstar,\n optimizationLevel: 2,\n platform,\n nocomment: true,\n nonegate: true,\n }\n for (const ign of ignored) this.add(ign)\n }\n\n add(ign: string) {\n // this is a little weird, but it gives us a clean set of optimized\n // minimatch matchers, without getting tripped up if one of them\n // ends in /** inside a brace section, and it's only inefficient at\n // the start of the walk, not along it.\n // It'd be nice if the Pattern class just had a .test() method, but\n // handling globstars is a bit of a pita, and that code already lives\n // in minimatch anyway.\n // Another way would be if maybe Minimatch could take its set/globParts\n // 
as an option, and then we could at least just use Pattern to test\n // for absolute-ness.\n // Yet another way, Minimatch could take an array of glob strings, and\n // a cwd option, and do the right thing.\n const mm = new Minimatch(ign, this.mmopts)\n for (let i = 0; i < mm.set.length; i++) {\n const parsed = mm.set[i]\n const globParts = mm.globParts[i]\n /* c8 ignore start */\n if (!parsed || !globParts) {\n throw new Error('invalid pattern object')\n }\n // strip off leading ./ portions\n // https://github.com/isaacs/node-glob/issues/570\n while (parsed[0] === '.' && globParts[0] === '.') {\n parsed.shift()\n globParts.shift()\n }\n /* c8 ignore stop */\n const p = new Pattern(parsed, globParts, 0, this.platform)\n const m = new Minimatch(p.globString(), this.mmopts)\n const children = globParts[globParts.length - 1] === '**'\n const absolute = p.isAbsolute()\n if (absolute) this.absolute.push(m)\n else this.relative.push(m)\n if (children) {\n if (absolute) this.absoluteChildren.push(m)\n else this.relativeChildren.push(m)\n }\n }\n }\n\n ignored(p: Path): boolean {\n const fullpath = p.fullpath()\n const fullpaths = `${fullpath}/`\n const relative = p.relative() || '.'\n const relatives = `${relative}/`\n for (const m of this.relative) {\n if (m.match(relative) || m.match(relatives)) return true\n }\n for (const m of this.absolute) {\n if (m.match(fullpath) || m.match(fullpaths)) return true\n }\n return false\n }\n\n childrenIgnored(p: Path): boolean {\n const fullpath = p.fullpath() + '/'\n const relative = (p.relative() || '.') + '/'\n for (const m of this.relativeChildren) {\n if (m.match(relative)) return true\n }\n for (const m of this.absoluteChildren) {\n if (m.match(fullpath)) return true\n }\n return false\n }\n}\n"]} \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/index.d.ts b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/index.d.ts new file mode 100644 index 0000000000000000000000000000000000000000..9c326ddc895b6184475510d700a583732535a635 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/index.d.ts @@ -0,0 +1,97 @@ +import { Minipass } from 'minipass'; +import { Path } from 'path-scurry'; +import type { GlobOptions, GlobOptionsWithFileTypesFalse, GlobOptionsWithFileTypesTrue, GlobOptionsWithFileTypesUnset } from './glob.js'; +import { Glob } from './glob.js'; +export { escape, unescape } from 'minimatch'; +export type { FSOption, Path, WalkOptions, WalkOptionsWithFileTypesTrue, WalkOptionsWithFileTypesUnset, } from 'path-scurry'; +export { Glob } from './glob.js'; +export type { GlobOptions, GlobOptionsWithFileTypesFalse, GlobOptionsWithFileTypesTrue, GlobOptionsWithFileTypesUnset, } from './glob.js'; +export { hasMagic } from './has-magic.js'; +export { Ignore } from './ignore.js'; +export type { IgnoreLike } from './ignore.js'; +export type { MatchStream } from './walker.js'; +/** + * Syncronous form of {@link globStream}. Will read all the matches as fast as + * you consume them, even all in a single tick if you consume them immediately, + * but will still respond to backpressure if they're not consumed immediately. 
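+ *
+ * A brief usage sketch (editor's illustration, not part of the upstream
+ * docs; the pattern and handler shown are hypothetical):
+ *
+ * ```ts
+ * import { globStreamSync } from 'glob'
+ *
+ * // emits each matching path as a string, and honors backpressure
+ * globStreamSync('src/**\/*.js', { nodir: true })
+ *   .on('data', path => console.log(path))
+ * ```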
+ */
+export declare function globStreamSync(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): Minipass<Path, Path>;
+export declare function globStreamSync(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): Minipass<string, string>;
+export declare function globStreamSync(pattern: string | string[], options: GlobOptionsWithFileTypesUnset): Minipass<string, string>;
+export declare function globStreamSync(pattern: string | string[], options: GlobOptions): Minipass<Path, Path> | Minipass<string, string>;
+/**
+ * Return a stream that emits all the strings or `Path` objects and
+ * then emits `end` when completed.
+ */
+export declare function globStream(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): Minipass<string, string>;
+export declare function globStream(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): Minipass<Path, Path>;
+export declare function globStream(pattern: string | string[], options?: GlobOptionsWithFileTypesUnset | undefined): Minipass<string, string>;
+export declare function globStream(pattern: string | string[], options: GlobOptions): Minipass<Path, Path> | Minipass<string, string>;
+/**
+ * Synchronous form of {@link glob}
+ */
+export declare function globSync(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): string[];
+export declare function globSync(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): Path[];
+export declare function globSync(pattern: string | string[], options?: GlobOptionsWithFileTypesUnset | undefined): string[];
+export declare function globSync(pattern: string | string[], options: GlobOptions): Path[] | string[];
+/**
+ * Perform an asynchronous glob search for the pattern(s) specified. Returns
+ * [Path](https://isaacs.github.io/path-scurry/classes/PathBase) objects if the
+ * {@link withFileTypes} option is set to `true`. See {@link GlobOptions} for
+ * full option descriptions.
+ */
+declare function glob_(pattern: string | string[], options?: GlobOptionsWithFileTypesUnset | undefined): Promise<string[]>;
+declare function glob_(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): Promise<Path[]>;
+declare function glob_(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): Promise<string[]>;
+declare function glob_(pattern: string | string[], options: GlobOptions): Promise<Path[] | string[]>;
+/**
+ * Return a sync iterator for walking glob pattern matches.
+ */
+export declare function globIterateSync(pattern: string | string[], options?: GlobOptionsWithFileTypesUnset | undefined): Generator<string, void, void>;
+export declare function globIterateSync(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): Generator<Path, void, void>;
+export declare function globIterateSync(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): Generator<string, void, void>;
+export declare function globIterateSync(pattern: string | string[], options: GlobOptions): Generator<Path, void, void> | Generator<string, void, void>;
+/**
+ * Return an async iterator for walking glob pattern matches.
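+ *
+ * For example (editor's sketch; assumes it runs inside an async function):
+ *
+ * ```ts
+ * import { globIterate } from 'glob'
+ *
+ * for await (const match of globIterate('**\/*.md')) {
+ *   // matches are strings by default, Path objects with withFileTypes:true
+ *   console.log(match)
+ * }
+ * ```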
+ */ +export declare function globIterate(pattern: string | string[], options?: GlobOptionsWithFileTypesUnset | undefined): AsyncGenerator; +export declare function globIterate(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): AsyncGenerator; +export declare function globIterate(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): AsyncGenerator; +export declare function globIterate(pattern: string | string[], options: GlobOptions): AsyncGenerator | AsyncGenerator; +export declare const streamSync: typeof globStreamSync; +export declare const stream: typeof globStream & { + sync: typeof globStreamSync; +}; +export declare const iterateSync: typeof globIterateSync; +export declare const iterate: typeof globIterate & { + sync: typeof globIterateSync; +}; +export declare const sync: typeof globSync & { + stream: typeof globStreamSync; + iterate: typeof globIterateSync; +}; +export declare const glob: typeof glob_ & { + glob: typeof glob_; + globSync: typeof globSync; + sync: typeof globSync & { + stream: typeof globStreamSync; + iterate: typeof globIterateSync; + }; + globStream: typeof globStream; + stream: typeof globStream & { + sync: typeof globStreamSync; + }; + globStreamSync: typeof globStreamSync; + streamSync: typeof globStreamSync; + globIterate: typeof globIterate; + iterate: typeof globIterate & { + sync: typeof globIterateSync; + }; + globIterateSync: typeof globIterateSync; + iterateSync: typeof globIterateSync; + Glob: typeof Glob; + hasMagic: (pattern: string | string[], options?: GlobOptions) => boolean; + escape: (s: string, { windowsPathsNoEscape, }?: Pick) => string; + unescape: (s: string, { windowsPathsNoEscape, }?: Pick) => string; +}; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/index.d.ts.map b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/index.d.ts.map new file mode 100644 index 0000000000000000000000000000000000000000..5fb32252b63747526a971d82bf6ab2ee61b53631 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/index.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AACnC,OAAO,EAAE,IAAI,EAAE,MAAM,aAAa,CAAA;AAClC,OAAO,KAAK,EACV,WAAW,EACX,6BAA6B,EAC7B,4BAA4B,EAC5B,6BAA6B,EAC9B,MAAM,WAAW,CAAA;AAClB,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAA;AAGhC,OAAO,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,WAAW,CAAA;AAC5C,YAAY,EACV,QAAQ,EACR,IAAI,EACJ,WAAW,EACX,4BAA4B,EAC5B,6BAA6B,GAC9B,MAAM,aAAa,CAAA;AACpB,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAA;AAChC,YAAY,EACV,WAAW,EACX,6BAA6B,EAC7B,4BAA4B,EAC5B,6BAA6B,GAC9B,MAAM,WAAW,CAAA;AAClB,OAAO,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAA;AACzC,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AACpC,YAAY,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AAC7C,YAAY,EAAE,WAAW,EAAE,MAAM,aAAa,CAAA;AAE9C;;;;GAIG;AACH,wBAAgB,cAAc,CAC5B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;AACvB,wBAAgB,cAAc,CAC5B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAC3B,wBAAgB,cAAc,CAC5B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAC3B,wBAAgB,cAAc,CAC5B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAQlD;;;GAGG;AACH,wBAAgB,UAAU,CACxB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAC3B,wBAAgB,UAAU,CACxB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;AACvB,wBAAgB,UAAU,CACxB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,GAClD,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAC3B,wBAAgB,UAAU,CACxB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAQlD;;GAEG;AACH,wBAAgB,QAAQ,CACtB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,MAAM,EAAE,CAAA;AACX,wBAAgB,QAAQ,CACtB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,IAAI,EAAE,CAAA;AACT,wBAAgB,QAAQ,CACtB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,GAClD,MAAM,EAAE,CAAA;AACX,wBAAgB,QAAQ,CACtB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,IAAI,EAAE,GAAG,MAAM,EAAE,CAAA;AAQpB;;;;;GAKG;AACH,iBAAe,KAAK,CAClB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,GAClD,OAAO,CAAC,MAAM,EAAE,CAAC,CAAA;AACpB,iBAAe,KAAK,CAClB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,OAAO,CAAC,IAAI,EAAE,CAAC,CAAA;AAClB,iBAAe,KAAK,CAClB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,OAAO,CAAC,MAAM,EAAE,CAAC,CAAA;AACpB,iBAAe,KAAK,CAClB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,OAAO,CAAC,IAAI,EAAE,GAAG,MAAM,EAAE,CAAC,CAAA;AAQ7B;;GAEG;AACH,wBAAgB,eAAe,CAC7B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,GAClD,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AAChC,wBAAgB,eAAe,CAC7B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,SAAS,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AAC9B,wBAAgB,eAAe,CAC7B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AAChC,wBAAgB,eAAe,CAC7B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,SAAS,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AAQ9D;;GAEG;AACH,wBAAgB,WAAW,CACzB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,GAClD,cAAc,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AACrC,wBAAgB,WAAW,CACzB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,cAAc,CAAC,IAAI,EAAE,IAAI,EAAE,I
AAI,CAAC,CAAA;AACnC,wBAAgB,WAAW,CACzB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,cAAc,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AACrC,wBAAgB,WAAW,CACzB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,cAAc,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,GAAG,cAAc,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AASxE,eAAO,MAAM,UAAU,uBAAiB,CAAA;AACxC,eAAO,MAAM,MAAM;;CAAsD,CAAA;AACzE,eAAO,MAAM,WAAW,wBAAkB,CAAA;AAC1C,eAAO,MAAM,OAAO;;CAElB,CAAA;AACF,eAAO,MAAM,IAAI;;;CAGf,CAAA;AAEF,eAAO,MAAM,IAAI;;;;;;;;;;;;;;;;;;;;;;;CAgBf,CAAA"} \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/index.js new file mode 100644 index 0000000000000000000000000000000000000000..151495d170efa2ddefa9d40819abf488999f7037 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/index.js @@ -0,0 +1,68 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.glob = exports.sync = exports.iterate = exports.iterateSync = exports.stream = exports.streamSync = exports.Ignore = exports.hasMagic = exports.Glob = exports.unescape = exports.escape = void 0; +exports.globStreamSync = globStreamSync; +exports.globStream = globStream; +exports.globSync = globSync; +exports.globIterateSync = globIterateSync; +exports.globIterate = globIterate; +const minimatch_1 = require("minimatch"); +const glob_js_1 = require("./glob.js"); +const has_magic_js_1 = require("./has-magic.js"); +var minimatch_2 = require("minimatch"); +Object.defineProperty(exports, "escape", { enumerable: true, get: function () { return minimatch_2.escape; } }); +Object.defineProperty(exports, "unescape", { enumerable: true, get: function () { return minimatch_2.unescape; } }); +var glob_js_2 = require("./glob.js"); +Object.defineProperty(exports, "Glob", { enumerable: true, get: function () { return glob_js_2.Glob; } }); +var has_magic_js_2 = require("./has-magic.js"); +Object.defineProperty(exports, "hasMagic", { enumerable: true, get: function () { return has_magic_js_2.hasMagic; } }); +var ignore_js_1 = require("./ignore.js"); +Object.defineProperty(exports, "Ignore", { enumerable: true, get: function () { return ignore_js_1.Ignore; } }); +function globStreamSync(pattern, options = {}) { + return new glob_js_1.Glob(pattern, options).streamSync(); +} +function globStream(pattern, options = {}) { + return new glob_js_1.Glob(pattern, options).stream(); +} +function globSync(pattern, options = {}) { + return new glob_js_1.Glob(pattern, options).walkSync(); +} +async function glob_(pattern, options = {}) { + return new glob_js_1.Glob(pattern, options).walk(); +} +function globIterateSync(pattern, options = {}) { + return new glob_js_1.Glob(pattern, options).iterateSync(); +} +function globIterate(pattern, options = {}) { + return new glob_js_1.Glob(pattern, options).iterate(); +} +// aliases: glob.sync.stream() glob.stream.sync() glob.sync() etc +exports.streamSync = globStreamSync; +exports.stream = Object.assign(globStream, { sync: globStreamSync }); +exports.iterateSync = globIterateSync; +exports.iterate = Object.assign(globIterate, { + sync: globIterateSync, +}); +exports.sync = Object.assign(globSync, { + stream: globStreamSync, + iterate: globIterateSync, +}); +exports.glob = Object.assign(glob_, { + glob: glob_, + globSync, + sync: exports.sync, + globStream, + stream: exports.stream, + 
globStreamSync, + streamSync: exports.streamSync, + globIterate, + iterate: exports.iterate, + globIterateSync, + iterateSync: exports.iterateSync, + Glob: glob_js_1.Glob, + hasMagic: has_magic_js_1.hasMagic, + escape: minimatch_1.escape, + unescape: minimatch_1.unescape, +}); +exports.glob.glob = exports.glob; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/index.js.map b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/index.js.map new file mode 100644 index 0000000000000000000000000000000000000000..e648b1d01939bc921393d446606768c86edfba71 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;AAqDA,wCAKC;AAsBD,gCAKC;AAqBD,4BAKC;AAkDD,0CAKC;AAqBD,kCAKC;AAhMD,yCAA4C;AAS5C,uCAAgC;AAChC,iDAAyC;AAEzC,uCAA4C;AAAnC,mGAAA,MAAM,OAAA;AAAE,qGAAA,QAAQ,OAAA;AAQzB,qCAAgC;AAAvB,+FAAA,IAAI,OAAA;AAOb,+CAAyC;AAAhC,wGAAA,QAAQ,OAAA;AACjB,yCAAoC;AAA3B,mGAAA,MAAM,OAAA;AAyBf,SAAgB,cAAc,CAC5B,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,cAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,UAAU,EAAE,CAAA;AAChD,CAAC;AAsBD,SAAgB,UAAU,CACxB,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,cAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;AAC5C,CAAC;AAqBD,SAAgB,QAAQ,CACtB,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,cAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,QAAQ,EAAE,CAAA;AAC9C,CAAC;AAwBD,KAAK,UAAU,KAAK,CAClB,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,cAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,IAAI,EAAE,CAAA;AAC1C,CAAC;AAqBD,SAAgB,eAAe,CAC7B,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,cAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,WAAW,EAAE,CAAA;AACjD,CAAC;AAqBD,SAAgB,WAAW,CACzB,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,cAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,OAAO,EAAE,CAAA;AAC7C,CAAC;AAED,iEAAiE;AACpD,QAAA,UAAU,GAAG,cAAc,CAAA;AAC3B,QAAA,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,IAAI,EAAE,cAAc,EAAE,CAAC,CAAA;AAC5D,QAAA,WAAW,GAAG,eAAe,CAAA;AAC7B,QAAA,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,WAAW,EAAE;IAChD,IAAI,EAAE,eAAe;CACtB,CAAC,CAAA;AACW,QAAA,IAAI,GAAG,MAAM,CAAC,MAAM,CAAC,QAAQ,EAAE;IAC1C,MAAM,EAAE,cAAc;IACtB,OAAO,EAAE,eAAe;CACzB,CAAC,CAAA;AAEW,QAAA,IAAI,GAAG,MAAM,CAAC,MAAM,CAAC,KAAK,EAAE;IACvC,IAAI,EAAE,KAAK;IACX,QAAQ;IACR,IAAI,EAAJ,YAAI;IACJ,UAAU;IACV,MAAM,EAAN,cAAM;IACN,cAAc;IACd,UAAU,EAAV,kBAAU;IACV,WAAW;IACX,OAAO,EAAP,eAAO;IACP,eAAe;IACf,WAAW,EAAX,mBAAW;IACX,IAAI,EAAJ,cAAI;IACJ,QAAQ,EAAR,uBAAQ;IACR,MAAM,EAAN,kBAAM;IACN,QAAQ,EAAR,oBAAQ;CACT,CAAC,CAAA;AACF,YAAI,CAAC,IAAI,GAAG,YAAI,CAAA","sourcesContent":["import { escape, unescape } from 'minimatch'\nimport { Minipass } from 'minipass'\nimport { Path } from 'path-scurry'\nimport type {\n GlobOptions,\n GlobOptionsWithFileTypesFalse,\n GlobOptionsWithFileTypesTrue,\n GlobOptionsWithFileTypesUnset,\n} from './glob.js'\nimport { Glob } from './glob.js'\nimport { hasMagic } from './has-magic.js'\n\nexport { escape, unescape } from 'minimatch'\nexport type {\n FSOption,\n Path,\n WalkOptions,\n WalkOptionsWithFileTypesTrue,\n WalkOptionsWithFileTypesUnset,\n} from 'path-scurry'\nexport { Glob } from './glob.js'\nexport type {\n GlobOptions,\n GlobOptionsWithFileTypesFalse,\n GlobOptionsWithFileTypesTrue,\n GlobOptionsWithFileTypesUnset,\n} from './glob.js'\nexport { hasMagic } from './has-magic.js'\nexport { Ignore } from './ignore.js'\nexport type { IgnoreLike } from './ignore.js'\nexport type { MatchStream } 
from './walker.js'\n\n/**\n * Syncronous form of {@link globStream}. Will read all the matches as fast as\n * you consume them, even all in a single tick if you consume them immediately,\n * but will still respond to backpressure if they're not consumed immediately.\n */\nexport function globStreamSync(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesTrue,\n): Minipass\nexport function globStreamSync(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesFalse,\n): Minipass\nexport function globStreamSync(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesUnset,\n): Minipass\nexport function globStreamSync(\n pattern: string | string[],\n options: GlobOptions,\n): Minipass | Minipass\nexport function globStreamSync(\n pattern: string | string[],\n options: GlobOptions = {},\n) {\n return new Glob(pattern, options).streamSync()\n}\n\n/**\n * Return a stream that emits all the strings or `Path` objects and\n * then emits `end` when completed.\n */\nexport function globStream(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesFalse,\n): Minipass\nexport function globStream(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesTrue,\n): Minipass\nexport function globStream(\n pattern: string | string[],\n options?: GlobOptionsWithFileTypesUnset | undefined,\n): Minipass\nexport function globStream(\n pattern: string | string[],\n options: GlobOptions,\n): Minipass | Minipass\nexport function globStream(\n pattern: string | string[],\n options: GlobOptions = {},\n) {\n return new Glob(pattern, options).stream()\n}\n\n/**\n * Synchronous form of {@link glob}\n */\nexport function globSync(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesFalse,\n): string[]\nexport function globSync(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesTrue,\n): Path[]\nexport function globSync(\n pattern: string | string[],\n options?: GlobOptionsWithFileTypesUnset | undefined,\n): string[]\nexport function globSync(\n pattern: string | string[],\n options: GlobOptions,\n): Path[] | string[]\nexport function globSync(\n pattern: string | string[],\n options: GlobOptions = {},\n) {\n return new Glob(pattern, options).walkSync()\n}\n\n/**\n * Perform an asynchronous glob search for the pattern(s) specified. Returns\n * [Path](https://isaacs.github.io/path-scurry/classes/PathBase) objects if the\n * {@link withFileTypes} option is set to `true`. 
See {@link GlobOptions} for\n * full option descriptions.\n */\nasync function glob_(\n pattern: string | string[],\n options?: GlobOptionsWithFileTypesUnset | undefined,\n): Promise\nasync function glob_(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesTrue,\n): Promise\nasync function glob_(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesFalse,\n): Promise\nasync function glob_(\n pattern: string | string[],\n options: GlobOptions,\n): Promise\nasync function glob_(\n pattern: string | string[],\n options: GlobOptions = {},\n) {\n return new Glob(pattern, options).walk()\n}\n\n/**\n * Return a sync iterator for walking glob pattern matches.\n */\nexport function globIterateSync(\n pattern: string | string[],\n options?: GlobOptionsWithFileTypesUnset | undefined,\n): Generator\nexport function globIterateSync(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesTrue,\n): Generator\nexport function globIterateSync(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesFalse,\n): Generator\nexport function globIterateSync(\n pattern: string | string[],\n options: GlobOptions,\n): Generator | Generator\nexport function globIterateSync(\n pattern: string | string[],\n options: GlobOptions = {},\n) {\n return new Glob(pattern, options).iterateSync()\n}\n\n/**\n * Return an async iterator for walking glob pattern matches.\n */\nexport function globIterate(\n pattern: string | string[],\n options?: GlobOptionsWithFileTypesUnset | undefined,\n): AsyncGenerator\nexport function globIterate(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesTrue,\n): AsyncGenerator\nexport function globIterate(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesFalse,\n): AsyncGenerator\nexport function globIterate(\n pattern: string | string[],\n options: GlobOptions,\n): AsyncGenerator | AsyncGenerator\nexport function globIterate(\n pattern: string | string[],\n options: GlobOptions = {},\n) {\n return new Glob(pattern, options).iterate()\n}\n\n// aliases: glob.sync.stream() glob.stream.sync() glob.sync() etc\nexport const streamSync = globStreamSync\nexport const stream = Object.assign(globStream, { sync: globStreamSync })\nexport const iterateSync = globIterateSync\nexport const iterate = Object.assign(globIterate, {\n sync: globIterateSync,\n})\nexport const sync = Object.assign(globSync, {\n stream: globStreamSync,\n iterate: globIterateSync,\n})\n\nexport const glob = Object.assign(glob_, {\n glob: glob_,\n globSync,\n sync,\n globStream,\n stream,\n globStreamSync,\n streamSync,\n globIterate,\n iterate,\n globIterateSync,\n iterateSync,\n Glob,\n hasMagic,\n escape,\n unescape,\n})\nglob.glob = glob\n"]} \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/package.json new file mode 100644 index 0000000000000000000000000000000000000000..5bbefffbabee392d1855491b84dc0a716b6a3bf2 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/pattern.d.ts b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/pattern.d.ts new file mode 100644 index 
0000000000000000000000000000000000000000..9636df3b54df2912790cdb6d4551c3ebe6e4d42e --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/pattern.d.ts @@ -0,0 +1,76 @@ +import { GLOBSTAR } from 'minimatch'; +export type MMPattern = string | RegExp | typeof GLOBSTAR; +export type PatternList = [p: MMPattern, ...rest: MMPattern[]]; +export type UNCPatternList = [ + p0: '', + p1: '', + p2: string, + p3: string, + ...rest: MMPattern[] +]; +export type DrivePatternList = [p0: string, ...rest: MMPattern[]]; +export type AbsolutePatternList = [p0: '', ...rest: MMPattern[]]; +export type GlobList = [p: string, ...rest: string[]]; +/** + * An immutable-ish view on an array of glob parts and their parsed + * results + */ +export declare class Pattern { + #private; + readonly length: number; + constructor(patternList: MMPattern[], globList: string[], index: number, platform: NodeJS.Platform); + /** + * The first entry in the parsed list of patterns + */ + pattern(): MMPattern; + /** + * true of if pattern() returns a string + */ + isString(): boolean; + /** + * true of if pattern() returns GLOBSTAR + */ + isGlobstar(): boolean; + /** + * true if pattern() returns a regexp + */ + isRegExp(): boolean; + /** + * The /-joined set of glob parts that make up this pattern + */ + globString(): string; + /** + * true if there are more pattern parts after this one + */ + hasMore(): boolean; + /** + * The rest of the pattern after this part, or null if this is the end + */ + rest(): Pattern | null; + /** + * true if the pattern represents a //unc/path/ on windows + */ + isUNC(): boolean; + /** + * True if the pattern starts with a drive letter on Windows + */ + isDrive(): boolean; + /** + * True if the pattern is rooted on an absolute path + */ + isAbsolute(): boolean; + /** + * consume the root of the pattern, and return it + */ + root(): string; + /** + * Check to see if the current globstar pattern is allowed to follow + * a symbolic link. 
+ */ + checkFollowGlobstar(): boolean; + /** + * Mark that the current globstar pattern is following a symbolic link + */ + markFollowGlobstar(): boolean; +} +//# sourceMappingURL=pattern.d.ts.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/pattern.d.ts.map b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/pattern.d.ts.map new file mode 100644 index 0000000000000000000000000000000000000000..cdf322346317d8a12efa4c8fa613b693d2bf8bbe --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/pattern.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"pattern.d.ts","sourceRoot":"","sources":["../../src/pattern.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,QAAQ,EAAE,MAAM,WAAW,CAAA;AACpC,MAAM,MAAM,SAAS,GAAG,MAAM,GAAG,MAAM,GAAG,OAAO,QAAQ,CAAA;AAGzD,MAAM,MAAM,WAAW,GAAG,CAAC,CAAC,EAAE,SAAS,EAAE,GAAG,IAAI,EAAE,SAAS,EAAE,CAAC,CAAA;AAC9D,MAAM,MAAM,cAAc,GAAG;IAC3B,EAAE,EAAE,EAAE;IACN,EAAE,EAAE,EAAE;IACN,EAAE,EAAE,MAAM;IACV,EAAE,EAAE,MAAM;IACV,GAAG,IAAI,EAAE,SAAS,EAAE;CACrB,CAAA;AACD,MAAM,MAAM,gBAAgB,GAAG,CAAC,EAAE,EAAE,MAAM,EAAE,GAAG,IAAI,EAAE,SAAS,EAAE,CAAC,CAAA;AACjE,MAAM,MAAM,mBAAmB,GAAG,CAAC,EAAE,EAAE,EAAE,EAAE,GAAG,IAAI,EAAE,SAAS,EAAE,CAAC,CAAA;AAChE,MAAM,MAAM,QAAQ,GAAG,CAAC,CAAC,EAAE,MAAM,EAAE,GAAG,IAAI,EAAE,MAAM,EAAE,CAAC,CAAA;AAMrD;;;GAGG;AACH,qBAAa,OAAO;;IAIlB,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAA;gBAUrB,WAAW,EAAE,SAAS,EAAE,EACxB,QAAQ,EAAE,MAAM,EAAE,EAClB,KAAK,EAAE,MAAM,EACb,QAAQ,EAAE,MAAM,CAAC,QAAQ;IA6D3B;;OAEG;IACH,OAAO,IAAI,SAAS;IAIpB;;OAEG;IACH,QAAQ,IAAI,OAAO;IAGnB;;OAEG;IACH,UAAU,IAAI,OAAO;IAGrB;;OAEG;IACH,QAAQ,IAAI,OAAO;IAInB;;OAEG;IACH,UAAU,IAAI,MAAM;IAUpB;;OAEG;IACH,OAAO,IAAI,OAAO;IAIlB;;OAEG;IACH,IAAI,IAAI,OAAO,GAAG,IAAI;IAetB;;OAEG;IACH,KAAK,IAAI,OAAO;IAoBhB;;OAEG;IACH,OAAO,IAAI,OAAO;IAelB;;OAEG;IACH,UAAU,IAAI,OAAO;IAUrB;;OAEG;IACH,IAAI,IAAI,MAAM;IASd;;;OAGG;IACH,mBAAmB,IAAI,OAAO;IAQ9B;;OAEG;IACH,kBAAkB,IAAI,OAAO;CAM9B"} \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/pattern.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/pattern.js new file mode 100644 index 0000000000000000000000000000000000000000..f0de35fb5bed9d89d2dd3cecae31704b9153234b --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/pattern.js @@ -0,0 +1,219 @@ +"use strict"; +// this is just a very light wrapper around 2 arrays with an offset index +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Pattern = void 0; +const minimatch_1 = require("minimatch"); +const isPatternList = (pl) => pl.length >= 1; +const isGlobList = (gl) => gl.length >= 1; +/** + * An immutable-ish view on an array of glob parts and their parsed + * results + */ +class Pattern { + #patternList; + #globList; + #index; + length; + #platform; + #rest; + #globString; + #isDrive; + #isUNC; + #isAbsolute; + #followGlobstar = true; + constructor(patternList, globList, index, platform) { + if (!isPatternList(patternList)) { + throw new TypeError('empty pattern list'); + } + if (!isGlobList(globList)) { + throw new TypeError('empty glob list'); + } + if (globList.length !== patternList.length) { + throw new TypeError('mismatched pattern list and glob list lengths'); + } + this.length = patternList.length; + if (index < 0 || index >= this.length) { + throw new TypeError('index out of range'); 
+ } + this.#patternList = patternList; + this.#globList = globList; + this.#index = index; + this.#platform = platform; + // normalize root entries of absolute patterns on initial creation. + if (this.#index === 0) { + // c: => ['c:/'] + // C:/ => ['C:/'] + // C:/x => ['C:/', 'x'] + // //host/share => ['//host/share/'] + // //host/share/ => ['//host/share/'] + // //host/share/x => ['//host/share/', 'x'] + // /etc => ['/', 'etc'] + // / => ['/'] + if (this.isUNC()) { + // '' / '' / 'host' / 'share' + const [p0, p1, p2, p3, ...prest] = this.#patternList; + const [g0, g1, g2, g3, ...grest] = this.#globList; + if (prest[0] === '') { + // ends in / + prest.shift(); + grest.shift(); + } + const p = [p0, p1, p2, p3, ''].join('/'); + const g = [g0, g1, g2, g3, ''].join('/'); + this.#patternList = [p, ...prest]; + this.#globList = [g, ...grest]; + this.length = this.#patternList.length; + } + else if (this.isDrive() || this.isAbsolute()) { + const [p1, ...prest] = this.#patternList; + const [g1, ...grest] = this.#globList; + if (prest[0] === '') { + // ends in / + prest.shift(); + grest.shift(); + } + const p = p1 + '/'; + const g = g1 + '/'; + this.#patternList = [p, ...prest]; + this.#globList = [g, ...grest]; + this.length = this.#patternList.length; + } + } + } + /** + * The first entry in the parsed list of patterns + */ + pattern() { + return this.#patternList[this.#index]; + } + /** + * true of if pattern() returns a string + */ + isString() { + return typeof this.#patternList[this.#index] === 'string'; + } + /** + * true of if pattern() returns GLOBSTAR + */ + isGlobstar() { + return this.#patternList[this.#index] === minimatch_1.GLOBSTAR; + } + /** + * true if pattern() returns a regexp + */ + isRegExp() { + return this.#patternList[this.#index] instanceof RegExp; + } + /** + * The /-joined set of glob parts that make up this pattern + */ + globString() { + return (this.#globString = + this.#globString || + (this.#index === 0 ? + this.isAbsolute() ? + this.#globList[0] + this.#globList.slice(1).join('/') + : this.#globList.join('/') + : this.#globList.slice(this.#index).join('/'))); + } + /** + * true if there are more pattern parts after this one + */ + hasMore() { + return this.length > this.#index + 1; + } + /** + * The rest of the pattern after this part, or null if this is the end + */ + rest() { + if (this.#rest !== undefined) + return this.#rest; + if (!this.hasMore()) + return (this.#rest = null); + this.#rest = new Pattern(this.#patternList, this.#globList, this.#index + 1, this.#platform); + this.#rest.#isAbsolute = this.#isAbsolute; + this.#rest.#isUNC = this.#isUNC; + this.#rest.#isDrive = this.#isDrive; + return this.#rest; + } + /** + * true if the pattern represents a //unc/path/ on windows + */ + isUNC() { + const pl = this.#patternList; + return this.#isUNC !== undefined ? + this.#isUNC + : (this.#isUNC = + this.#platform === 'win32' && + this.#index === 0 && + pl[0] === '' && + pl[1] === '' && + typeof pl[2] === 'string' && + !!pl[2] && + typeof pl[3] === 'string' && + !!pl[3]); + } + // pattern like C:/... + // split = ['C:', ...] + // XXX: would be nice to handle patterns like `c:*` to test the cwd + // in c: for *, but I don't know of a way to even figure out what that + // cwd is without actually chdir'ing into it? + /** + * True if the pattern starts with a drive letter on Windows + */ + isDrive() { + const pl = this.#patternList; + return this.#isDrive !== undefined ? 
+ this.#isDrive + : (this.#isDrive = + this.#platform === 'win32' && + this.#index === 0 && + this.length > 1 && + typeof pl[0] === 'string' && + /^[a-z]:$/i.test(pl[0])); + } + // pattern = '/' or '/...' or '/x/...' + // split = ['', ''] or ['', ...] or ['', 'x', ...] + // Drive and UNC both considered absolute on windows + /** + * True if the pattern is rooted on an absolute path + */ + isAbsolute() { + const pl = this.#patternList; + return this.#isAbsolute !== undefined ? + this.#isAbsolute + : (this.#isAbsolute = + (pl[0] === '' && pl.length > 1) || + this.isDrive() || + this.isUNC()); + } + /** + * consume the root of the pattern, and return it + */ + root() { + const p = this.#patternList[0]; + return (typeof p === 'string' && this.isAbsolute() && this.#index === 0) ? + p + : ''; + } + /** + * Check to see if the current globstar pattern is allowed to follow + * a symbolic link. + */ + checkFollowGlobstar() { + return !(this.#index === 0 || + !this.isGlobstar() || + !this.#followGlobstar); + } + /** + * Mark that the current globstar pattern is following a symbolic link + */ + markFollowGlobstar() { + if (this.#index === 0 || !this.isGlobstar() || !this.#followGlobstar) + return false; + this.#followGlobstar = false; + return true; + } +} +exports.Pattern = Pattern; +//# sourceMappingURL=pattern.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/pattern.js.map b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/pattern.js.map new file mode 100644 index 0000000000000000000000000000000000000000..fc10ea5d6c4ef4d274e2e1b73171f38beebab3e3 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/pattern.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"pattern.js","sourceRoot":"","sources":["../../src/pattern.ts"],"names":[],"mappings":";AAAA,yEAAyE;;;AAEzE,yCAAoC;AAgBpC,MAAM,aAAa,GAAG,CAAC,EAAe,EAAqB,EAAE,CAC3D,EAAE,CAAC,MAAM,IAAI,CAAC,CAAA;AAChB,MAAM,UAAU,GAAG,CAAC,EAAY,EAAkB,EAAE,CAAC,EAAE,CAAC,MAAM,IAAI,CAAC,CAAA;AAEnE;;;GAGG;AACH,MAAa,OAAO;IACT,YAAY,CAAa;IACzB,SAAS,CAAU;IACnB,MAAM,CAAQ;IACd,MAAM,CAAQ;IACd,SAAS,CAAiB;IACnC,KAAK,CAAiB;IACtB,WAAW,CAAS;IACpB,QAAQ,CAAU;IAClB,MAAM,CAAU;IAChB,WAAW,CAAU;IACrB,eAAe,GAAY,IAAI,CAAA;IAE/B,YACE,WAAwB,EACxB,QAAkB,EAClB,KAAa,EACb,QAAyB;QAEzB,IAAI,CAAC,aAAa,CAAC,WAAW,CAAC,EAAE,CAAC;YAChC,MAAM,IAAI,SAAS,CAAC,oBAAoB,CAAC,CAAA;QAC3C,CAAC;QACD,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE,CAAC;YAC1B,MAAM,IAAI,SAAS,CAAC,iBAAiB,CAAC,CAAA;QACxC,CAAC;QACD,IAAI,QAAQ,CAAC,MAAM,KAAK,WAAW,CAAC,MAAM,EAAE,CAAC;YAC3C,MAAM,IAAI,SAAS,CAAC,+CAA+C,CAAC,CAAA;QACtE,CAAC;QACD,IAAI,CAAC,MAAM,GAAG,WAAW,CAAC,MAAM,CAAA;QAChC,IAAI,KAAK,GAAG,CAAC,IAAI,KAAK,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC;YACtC,MAAM,IAAI,SAAS,CAAC,oBAAoB,CAAC,CAAA;QAC3C,CAAC;QACD,IAAI,CAAC,YAAY,GAAG,WAAW,CAAA;QAC/B,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAA;QACzB,IAAI,CAAC,MAAM,GAAG,KAAK,CAAA;QACnB,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAA;QAEzB,mEAAmE;QACnE,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YACtB,gBAAgB;YAChB,iBAAiB;YACjB,uBAAuB;YACvB,oCAAoC;YACpC,qCAAqC;YACrC,2CAA2C;YAC3C,uBAAuB;YACvB,aAAa;YACb,IAAI,IAAI,CAAC,KAAK,EAAE,EAAE,CAAC;gBACjB,6BAA6B;gBAC7B,MAAM,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,GAAG,KAAK,CAAC,GAAG,IAAI,CAAC,YAAY,CAAA;gBACpD,MAAM,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,GAAG,KAAK,CAAC,GAAG,IAAI,CAAC,SAAS,CAAA;gBACjD,IAAI,KAAK,CAAC,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC;oBACpB,YAAY;oBACZ,KAAK,CAAC,KAAK,EAAE,CAAA;oBACb,KAAK,CAAC,KAAK,EAAE,CAAA;gBACf,CAAC;gBACD,MAAM,CAAC,GAAG,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;gBACxC,MAAM,CAAC,GAAG,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;gBACxC,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC,EAAE,GAAG,KAAK,CAAC,CAAA;gBACjC,IAAI,CAAC,SAAS,GAAG,CAAC,CAAC,EAAE,GAAG,KAAK,CAAC,CAAA;gBAC9B,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,YAAY,CAAC,MAAM,CAAA;YACxC,CAAC;iBAAM,IAAI,IAAI,CAAC,OAAO,EAAE,IAAI,IAAI,CAAC,UAAU,EAAE,EAAE,CAAC;gBAC/C,MAAM,CAAC,EAAE,EAAE,GAAG,KAAK,CAAC,GAAG,IAAI,CAAC,YAAY,CAAA;gBACxC,MAAM,CAAC,EAAE,EAAE,GAAG,KAAK,CAAC,GAAG,IAAI,CAAC,SAAS,CAAA;gBACrC,IAAI,KAAK,CAAC,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC;oBACpB,YAAY;oBACZ,KAAK,CAAC,KAAK,EAAE,CAAA;oBACb,KAAK,CAAC,KAAK,EAAE,CAAA;gBACf,CAAC;gBACD,MAAM,CAAC,GAAI,EAAa,GAAG,GAAG,CAAA;gBAC9B,MAAM,CAAC,GAAG,EAAE,GAAG,GAAG,CAAA;gBAClB,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC,EAAE,GAAG,KAAK,CAAC,CAAA;gBACjC,IAAI,CAAC,SAAS,GAAG,CAAC,CAAC,EAAE,GAAG,KAAK,CAAC,CAAA;gBAC9B,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,YAAY,CAAC,MAAM,CAAA;YACxC,CAAC;QACH,CAAC;IACH,CAAC;IAED;;OAEG;IACH,OAAO;QACL,OAAO,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,MAAM,CAAc,CAAA;IACpD,CAAC;IAED;;OAEG;IACH,QAAQ;QACN,OAAO,OAAO,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,QAAQ,CAAA;IAC3D,CAAC;IACD;;OAEG;IACH,UAAU;QACR,OAAO,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,oBAAQ,CAAA;IACpD,CAAC;IACD;;OAEG;IACH,QAAQ;QACN,OAAO,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,MAAM,CAAC,YAAY,MAAM,CAAA;IACzD,CAAC;IAED;;OAEG;IACH,UAAU;QACR,OAAO,CAAC,IAAI,CAAC,WAAW;YACtB,IAAI,CAAC,WAAW;gBAChB,CAAC,IAAI,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC;oBAClB,IAAI,CAAC,UAAU,EAAE,CAAC,CAAC;wBACjB,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC;wBACvD,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC;oBAC5B,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC
,CAAC,CAAA;IACnD,CAAC;IAED;;OAEG;IACH,OAAO;QACL,OAAO,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,GAAG,CAAC,CAAA;IACtC,CAAC;IAED;;OAEG;IACH,IAAI;QACF,IAAI,IAAI,CAAC,KAAK,KAAK,SAAS;YAAE,OAAO,IAAI,CAAC,KAAK,CAAA;QAC/C,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE;YAAE,OAAO,CAAC,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,CAAA;QAC/C,IAAI,CAAC,KAAK,GAAG,IAAI,OAAO,CACtB,IAAI,CAAC,YAAY,EACjB,IAAI,CAAC,SAAS,EACd,IAAI,CAAC,MAAM,GAAG,CAAC,EACf,IAAI,CAAC,SAAS,CACf,CAAA;QACD,IAAI,CAAC,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,CAAA;QACzC,IAAI,CAAC,KAAK,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QAC/B,IAAI,CAAC,KAAK,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAA;QACnC,OAAO,IAAI,CAAC,KAAK,CAAA;IACnB,CAAC;IAED;;OAEG;IACH,KAAK;QACH,MAAM,EAAE,GAAG,IAAI,CAAC,YAAY,CAAA;QAC5B,OAAO,IAAI,CAAC,MAAM,KAAK,SAAS,CAAC,CAAC;YAC9B,IAAI,CAAC,MAAM;YACb,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM;gBACV,IAAI,CAAC,SAAS,KAAK,OAAO;oBAC1B,IAAI,CAAC,MAAM,KAAK,CAAC;oBACjB,EAAE,CAAC,CAAC,CAAC,KAAK,EAAE;oBACZ,EAAE,CAAC,CAAC,CAAC,KAAK,EAAE;oBACZ,OAAO,EAAE,CAAC,CAAC,CAAC,KAAK,QAAQ;oBACzB,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;oBACP,OAAO,EAAE,CAAC,CAAC,CAAC,KAAK,QAAQ;oBACzB,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAA;IAChB,CAAC;IAED,sBAAsB;IACtB,sBAAsB;IACtB,mEAAmE;IACnE,sEAAsE;IACtE,6CAA6C;IAC7C;;OAEG;IACH,OAAO;QACL,MAAM,EAAE,GAAG,IAAI,CAAC,YAAY,CAAA;QAC5B,OAAO,IAAI,CAAC,QAAQ,KAAK,SAAS,CAAC,CAAC;YAChC,IAAI,CAAC,QAAQ;YACf,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ;gBACZ,IAAI,CAAC,SAAS,KAAK,OAAO;oBAC1B,IAAI,CAAC,MAAM,KAAK,CAAC;oBACjB,IAAI,CAAC,MAAM,GAAG,CAAC;oBACf,OAAO,EAAE,CAAC,CAAC,CAAC,KAAK,QAAQ;oBACzB,WAAW,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;IAChC,CAAC;IAED,sCAAsC;IACtC,kDAAkD;IAClD,oDAAoD;IACpD;;OAEG;IACH,UAAU;QACR,MAAM,EAAE,GAAG,IAAI,CAAC,YAAY,CAAA;QAC5B,OAAO,IAAI,CAAC,WAAW,KAAK,SAAS,CAAC,CAAC;YACnC,IAAI,CAAC,WAAW;YAClB,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW;gBACf,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,EAAE,IAAI,EAAE,CAAC,MAAM,GAAG,CAAC,CAAC;oBAC/B,IAAI,CAAC,OAAO,EAAE;oBACd,IAAI,CAAC,KAAK,EAAE,CAAC,CAAA;IACrB,CAAC;IAED;;OAEG;IACH,IAAI;QACF,MAAM,CAAC,GAAG,IAAI,CAAC,YAAY,CAAC,CAAC,CAAC,CAAA;QAC9B,OAAO,CACH,OAAO,CAAC,KAAK,QAAQ,IAAI,IAAI,CAAC,UAAU,EAAE,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,CAChE,CAAC,CAAC;YACD,CAAC;YACH,CAAC,CAAC,EAAE,CAAA;IACR,CAAC;IAED;;;OAGG;IACH,mBAAmB;QACjB,OAAO,CAAC,CACN,IAAI,CAAC,MAAM,KAAK,CAAC;YACjB,CAAC,IAAI,CAAC,UAAU,EAAE;YAClB,CAAC,IAAI,CAAC,eAAe,CACtB,CAAA;IACH,CAAC;IAED;;OAEG;IACH,kBAAkB;QAChB,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,IAAI,CAAC,eAAe;YAClE,OAAO,KAAK,CAAA;QACd,IAAI,CAAC,eAAe,GAAG,KAAK,CAAA;QAC5B,OAAO,IAAI,CAAA;IACb,CAAC;CACF;AArOD,0BAqOC","sourcesContent":["// this is just a very light wrapper around 2 arrays with an offset index\n\nimport { GLOBSTAR } from 'minimatch'\nexport type MMPattern = string | RegExp | typeof GLOBSTAR\n\n// an array of length >= 1\nexport type PatternList = [p: MMPattern, ...rest: MMPattern[]]\nexport type UNCPatternList = [\n p0: '',\n p1: '',\n p2: string,\n p3: string,\n ...rest: MMPattern[],\n]\nexport type DrivePatternList = [p0: string, ...rest: MMPattern[]]\nexport type AbsolutePatternList = [p0: '', ...rest: MMPattern[]]\nexport type GlobList = [p: string, ...rest: string[]]\n\nconst isPatternList = (pl: MMPattern[]): pl is PatternList =>\n pl.length >= 1\nconst isGlobList = (gl: string[]): gl is GlobList => gl.length >= 1\n\n/**\n * An immutable-ish view on an array of glob parts and their parsed\n * results\n */\nexport class Pattern {\n readonly #patternList: PatternList\n readonly #globList: GlobList\n readonly #index: number\n readonly length: number\n readonly #platform: NodeJS.Platform\n #rest?: Pattern | null\n #globString?: string\n 
#isDrive?: boolean\n #isUNC?: boolean\n #isAbsolute?: boolean\n #followGlobstar: boolean = true\n\n constructor(\n patternList: MMPattern[],\n globList: string[],\n index: number,\n platform: NodeJS.Platform,\n ) {\n if (!isPatternList(patternList)) {\n throw new TypeError('empty pattern list')\n }\n if (!isGlobList(globList)) {\n throw new TypeError('empty glob list')\n }\n if (globList.length !== patternList.length) {\n throw new TypeError('mismatched pattern list and glob list lengths')\n }\n this.length = patternList.length\n if (index < 0 || index >= this.length) {\n throw new TypeError('index out of range')\n }\n this.#patternList = patternList\n this.#globList = globList\n this.#index = index\n this.#platform = platform\n\n // normalize root entries of absolute patterns on initial creation.\n if (this.#index === 0) {\n // c: => ['c:/']\n // C:/ => ['C:/']\n // C:/x => ['C:/', 'x']\n // //host/share => ['//host/share/']\n // //host/share/ => ['//host/share/']\n // //host/share/x => ['//host/share/', 'x']\n // /etc => ['/', 'etc']\n // / => ['/']\n if (this.isUNC()) {\n // '' / '' / 'host' / 'share'\n const [p0, p1, p2, p3, ...prest] = this.#patternList\n const [g0, g1, g2, g3, ...grest] = this.#globList\n if (prest[0] === '') {\n // ends in /\n prest.shift()\n grest.shift()\n }\n const p = [p0, p1, p2, p3, ''].join('/')\n const g = [g0, g1, g2, g3, ''].join('/')\n this.#patternList = [p, ...prest]\n this.#globList = [g, ...grest]\n this.length = this.#patternList.length\n } else if (this.isDrive() || this.isAbsolute()) {\n const [p1, ...prest] = this.#patternList\n const [g1, ...grest] = this.#globList\n if (prest[0] === '') {\n // ends in /\n prest.shift()\n grest.shift()\n }\n const p = (p1 as string) + '/'\n const g = g1 + '/'\n this.#patternList = [p, ...prest]\n this.#globList = [g, ...grest]\n this.length = this.#patternList.length\n }\n }\n }\n\n /**\n * The first entry in the parsed list of patterns\n */\n pattern(): MMPattern {\n return this.#patternList[this.#index] as MMPattern\n }\n\n /**\n * true of if pattern() returns a string\n */\n isString(): boolean {\n return typeof this.#patternList[this.#index] === 'string'\n }\n /**\n * true of if pattern() returns GLOBSTAR\n */\n isGlobstar(): boolean {\n return this.#patternList[this.#index] === GLOBSTAR\n }\n /**\n * true if pattern() returns a regexp\n */\n isRegExp(): boolean {\n return this.#patternList[this.#index] instanceof RegExp\n }\n\n /**\n * The /-joined set of glob parts that make up this pattern\n */\n globString(): string {\n return (this.#globString =\n this.#globString ||\n (this.#index === 0 ?\n this.isAbsolute() ?\n this.#globList[0] + this.#globList.slice(1).join('/')\n : this.#globList.join('/')\n : this.#globList.slice(this.#index).join('/')))\n }\n\n /**\n * true if there are more pattern parts after this one\n */\n hasMore(): boolean {\n return this.length > this.#index + 1\n }\n\n /**\n * The rest of the pattern after this part, or null if this is the end\n */\n rest(): Pattern | null {\n if (this.#rest !== undefined) return this.#rest\n if (!this.hasMore()) return (this.#rest = null)\n this.#rest = new Pattern(\n this.#patternList,\n this.#globList,\n this.#index + 1,\n this.#platform,\n )\n this.#rest.#isAbsolute = this.#isAbsolute\n this.#rest.#isUNC = this.#isUNC\n this.#rest.#isDrive = this.#isDrive\n return this.#rest\n }\n\n /**\n * true if the pattern represents a //unc/path/ on windows\n */\n isUNC(): boolean {\n const pl = this.#patternList\n return this.#isUNC !== undefined ?\n 
this.#isUNC\n : (this.#isUNC =\n this.#platform === 'win32' &&\n this.#index === 0 &&\n pl[0] === '' &&\n pl[1] === '' &&\n typeof pl[2] === 'string' &&\n !!pl[2] &&\n typeof pl[3] === 'string' &&\n !!pl[3])\n }\n\n // pattern like C:/...\n // split = ['C:', ...]\n // XXX: would be nice to handle patterns like `c:*` to test the cwd\n // in c: for *, but I don't know of a way to even figure out what that\n // cwd is without actually chdir'ing into it?\n /**\n * True if the pattern starts with a drive letter on Windows\n */\n isDrive(): boolean {\n const pl = this.#patternList\n return this.#isDrive !== undefined ?\n this.#isDrive\n : (this.#isDrive =\n this.#platform === 'win32' &&\n this.#index === 0 &&\n this.length > 1 &&\n typeof pl[0] === 'string' &&\n /^[a-z]:$/i.test(pl[0]))\n }\n\n // pattern = '/' or '/...' or '/x/...'\n // split = ['', ''] or ['', ...] or ['', 'x', ...]\n // Drive and UNC both considered absolute on windows\n /**\n * True if the pattern is rooted on an absolute path\n */\n isAbsolute(): boolean {\n const pl = this.#patternList\n return this.#isAbsolute !== undefined ?\n this.#isAbsolute\n : (this.#isAbsolute =\n (pl[0] === '' && pl.length > 1) ||\n this.isDrive() ||\n this.isUNC())\n }\n\n /**\n * consume the root of the pattern, and return it\n */\n root(): string {\n const p = this.#patternList[0]\n return (\n typeof p === 'string' && this.isAbsolute() && this.#index === 0\n ) ?\n p\n : ''\n }\n\n /**\n * Check to see if the current globstar pattern is allowed to follow\n * a symbolic link.\n */\n checkFollowGlobstar(): boolean {\n return !(\n this.#index === 0 ||\n !this.isGlobstar() ||\n !this.#followGlobstar\n )\n }\n\n /**\n * Mark that the current globstar pattern is following a symbolic link\n */\n markFollowGlobstar(): boolean {\n if (this.#index === 0 || !this.isGlobstar() || !this.#followGlobstar)\n return false\n this.#followGlobstar = false\n return true\n }\n}\n"]} \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/processor.d.ts b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/processor.d.ts new file mode 100644 index 0000000000000000000000000000000000000000..ccedfbf2820f7d51167574666a80785ca1b91b07 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/processor.d.ts @@ -0,0 +1,59 @@ +import { MMRegExp } from 'minimatch'; +import { Path } from 'path-scurry'; +import { Pattern } from './pattern.js'; +import { GlobWalkerOpts } from './walker.js'; +/** + * A cache of which patterns have been processed for a given Path + */ +export declare class HasWalkedCache { + store: Map>; + constructor(store?: Map>); + copy(): HasWalkedCache; + hasWalked(target: Path, pattern: Pattern): boolean | undefined; + storeWalked(target: Path, pattern: Pattern): void; +} +/** + * A record of which paths have been matched in a given walk step, + * and whether they only are considered a match if they are a directory, + * and whether their absolute or relative path should be returned. + */ +export declare class MatchRecord { + store: Map; + add(target: Path, absolute: boolean, ifDir: boolean): void; + entries(): [Path, boolean, boolean][]; +} +/** + * A collection of patterns that must be processed in a subsequent step + * for a given path. 
+ */ +export declare class SubWalks { + store: Map; + add(target: Path, pattern: Pattern): void; + get(target: Path): Pattern[]; + entries(): [Path, Pattern[]][]; + keys(): Path[]; +} +/** + * The class that processes patterns for a given path. + * + * Handles child entry filtering, and determining whether a path's + * directory contents must be read. + */ +export declare class Processor { + hasWalkedCache: HasWalkedCache; + matches: MatchRecord; + subwalks: SubWalks; + patterns?: Pattern[]; + follow: boolean; + dot: boolean; + opts: GlobWalkerOpts; + constructor(opts: GlobWalkerOpts, hasWalkedCache?: HasWalkedCache); + processPatterns(target: Path, patterns: Pattern[]): this; + subwalkTargets(): Path[]; + child(): Processor; + filterEntries(parent: Path, entries: Path[]): Processor; + testGlobstar(e: Path, pattern: Pattern, rest: Pattern | null, absolute: boolean): void; + testRegExp(e: Path, p: MMRegExp, rest: Pattern | null, absolute: boolean): void; + testString(e: Path, p: string, rest: Pattern | null, absolute: boolean): void; +} +//# sourceMappingURL=processor.d.ts.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/processor.d.ts.map b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/processor.d.ts.map new file mode 100644 index 0000000000000000000000000000000000000000..aa266fee4a0544a0a8f7fddceb347d7d059643e0 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/processor.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"processor.d.ts","sourceRoot":"","sources":["../../src/processor.ts"],"names":[],"mappings":"AAEA,OAAO,EAAY,QAAQ,EAAE,MAAM,WAAW,CAAA;AAC9C,OAAO,EAAE,IAAI,EAAE,MAAM,aAAa,CAAA;AAClC,OAAO,EAAa,OAAO,EAAE,MAAM,cAAc,CAAA;AACjD,OAAO,EAAE,cAAc,EAAE,MAAM,aAAa,CAAA;AAE5C;;GAEG;AACH,qBAAa,cAAc;IACzB,KAAK,EAAE,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,MAAM,CAAC,CAAC,CAAA;gBACnB,KAAK,GAAE,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,MAAM,CAAC,CAAa;IAGvD,IAAI;IAGJ,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO;IAGxC,WAAW,CAAC,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO;CAM3C;AAED;;;;GAIG;AACH,qBAAa,WAAW;IACtB,KAAK,EAAE,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,CAAY;IACpC,GAAG,CAAC,MAAM,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,KAAK,EAAE,OAAO;IAMnD,OAAO,IAAI,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE;CAOtC;AAED;;;GAGG;AACH,qBAAa,QAAQ;IACnB,KAAK,EAAE,GAAG,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC,CAAY;IACvC,GAAG,CAAC,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO;IAWlC,GAAG,CAAC,MAAM,EAAE,IAAI,GAAG,OAAO,EAAE;IAS5B,OAAO,IAAI,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC,EAAE;IAG9B,IAAI,IAAI,IAAI,EAAE;CAGf;AAED;;;;;GAKG;AACH,qBAAa,SAAS;IACpB,cAAc,EAAE,cAAc,CAAA;IAC9B,OAAO,cAAoB;IAC3B,QAAQ,WAAiB;IACzB,QAAQ,CAAC,EAAE,OAAO,EAAE,CAAA;IACpB,MAAM,EAAE,OAAO,CAAA;IACf,GAAG,EAAE,OAAO,CAAA;IACZ,IAAI,EAAE,cAAc,CAAA;gBAER,IAAI,EAAE,cAAc,EAAE,cAAc,CAAC,EAAE,cAAc;IAQjE,eAAe,CAAC,MAAM,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE;IAmGjD,cAAc,IAAI,IAAI,EAAE;IAIxB,KAAK;IAQL,aAAa,CAAC,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,IAAI,EAAE,GAAG,SAAS;IAqBvD,YAAY,CACV,CAAC,EAAE,IAAI,EACP,OAAO,EAAE,OAAO,EAChB,IAAI,EAAE,OAAO,GAAG,IAAI,EACpB,QAAQ,EAAE,OAAO;IA8CnB,UAAU,CACR,CAAC,EAAE,IAAI,EACP,CAAC,EAAE,QAAQ,EACX,IAAI,EAAE,OAAO,GAAG,IAAI,EACpB,QAAQ,EAAE,OAAO;IAUnB,UAAU,CAAC,CAAC,EAAE,IAAI,EAAE,CAAC,EAAE,MAAM,EAAE,IAAI,EAAE,OAAO,GAAG,IAAI,EAAE,QAAQ,EAAE,OAAO;CASvE"} \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/processor.js 
b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/processor.js new file mode 100644 index 0000000000000000000000000000000000000000..ee3bb4397e0b2d3aaff9b1e2053ac13a2af9f31b --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/processor.js @@ -0,0 +1,301 @@ +"use strict"; +// synchronous utility for filtering entries and calculating subwalks +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Processor = exports.SubWalks = exports.MatchRecord = exports.HasWalkedCache = void 0; +const minimatch_1 = require("minimatch"); +/** + * A cache of which patterns have been processed for a given Path + */ +class HasWalkedCache { + store; + constructor(store = new Map()) { + this.store = store; + } + copy() { + return new HasWalkedCache(new Map(this.store)); + } + hasWalked(target, pattern) { + return this.store.get(target.fullpath())?.has(pattern.globString()); + } + storeWalked(target, pattern) { + const fullpath = target.fullpath(); + const cached = this.store.get(fullpath); + if (cached) + cached.add(pattern.globString()); + else + this.store.set(fullpath, new Set([pattern.globString()])); + } +} +exports.HasWalkedCache = HasWalkedCache; +/** + * A record of which paths have been matched in a given walk step, + * and whether they only are considered a match if they are a directory, + * and whether their absolute or relative path should be returned. + */ +class MatchRecord { + store = new Map(); + add(target, absolute, ifDir) { + const n = (absolute ? 2 : 0) | (ifDir ? 1 : 0); + const current = this.store.get(target); + this.store.set(target, current === undefined ? n : n & current); + } + // match, absolute, ifdir + entries() { + return [...this.store.entries()].map(([path, n]) => [ + path, + !!(n & 2), + !!(n & 1), + ]); + } +} +exports.MatchRecord = MatchRecord; +/** + * A collection of patterns that must be processed in a subsequent step + * for a given path. + */ +class SubWalks { + store = new Map(); + add(target, pattern) { + if (!target.canReaddir()) { + return; + } + const subs = this.store.get(target); + if (subs) { + if (!subs.find(p => p.globString() === pattern.globString())) { + subs.push(pattern); + } + } + else + this.store.set(target, [pattern]); + } + get(target) { + const subs = this.store.get(target); + /* c8 ignore start */ + if (!subs) { + throw new Error('attempting to walk unknown path'); + } + /* c8 ignore stop */ + return subs; + } + entries() { + return this.keys().map(k => [k, this.store.get(k)]); + } + keys() { + return [...this.store.keys()].filter(t => t.canReaddir()); + } +} +exports.SubWalks = SubWalks; +/** + * The class that processes patterns for a given path. + * + * Handles child entry filtering, and determining whether a path's + * directory contents must be read. + */ +class Processor { + hasWalkedCache; + matches = new MatchRecord(); + subwalks = new SubWalks(); + patterns; + follow; + dot; + opts; + constructor(opts, hasWalkedCache) { + this.opts = opts; + this.follow = !!opts.follow; + this.dot = !!opts.dot; + this.hasWalkedCache = + hasWalkedCache ? 
hasWalkedCache.copy() : new HasWalkedCache(); + } + processPatterns(target, patterns) { + this.patterns = patterns; + const processingSet = patterns.map(p => [target, p]); + // map of paths to the magic-starting subwalks they need to walk + // first item in patterns is the filter + for (let [t, pattern] of processingSet) { + this.hasWalkedCache.storeWalked(t, pattern); + const root = pattern.root(); + const absolute = pattern.isAbsolute() && this.opts.absolute !== false; + // start absolute patterns at root + if (root) { + t = t.resolve(root === '/' && this.opts.root !== undefined ? + this.opts.root + : root); + const rest = pattern.rest(); + if (!rest) { + this.matches.add(t, true, false); + continue; + } + else { + pattern = rest; + } + } + if (t.isENOENT()) + continue; + let p; + let rest; + let changed = false; + while (typeof (p = pattern.pattern()) === 'string' && + (rest = pattern.rest())) { + const c = t.resolve(p); + t = c; + pattern = rest; + changed = true; + } + p = pattern.pattern(); + rest = pattern.rest(); + if (changed) { + if (this.hasWalkedCache.hasWalked(t, pattern)) + continue; + this.hasWalkedCache.storeWalked(t, pattern); + } + // now we have either a final string for a known entry, + // more strings for an unknown entry, + // or a pattern starting with magic, mounted on t. + if (typeof p === 'string') { + // must not be final entry, otherwise we would have + // concatenated it earlier. + const ifDir = p === '..' || p === '' || p === '.'; + this.matches.add(t.resolve(p), absolute, ifDir); + continue; + } + else if (p === minimatch_1.GLOBSTAR) { + // if no rest, match and subwalk pattern + // if rest, process rest and subwalk pattern + // if it's a symlink, but we didn't get here by way of a + // globstar match (meaning it's the first time THIS globstar + // has traversed a symlink), then we follow it. Otherwise, stop. + if (!t.isSymbolicLink() || + this.follow || + pattern.checkFollowGlobstar()) { + this.subwalks.add(t, pattern); + } + const rp = rest?.pattern(); + const rrest = rest?.rest(); + if (!rest || ((rp === '' || rp === '.') && !rrest)) { + // only HAS to be a dir if it ends in **/ or **/. + // but ending in ** will match files as well. + this.matches.add(t, absolute, rp === '' || rp === '.'); + } + else { + if (rp === '..') { + // this would mean you're matching **/.. at the fs root, + // and no thanks, I'm not gonna test that specific case. 
+ /* c8 ignore start */ + const tp = t.parent || t; + /* c8 ignore stop */ + if (!rrest) + this.matches.add(tp, absolute, true); + else if (!this.hasWalkedCache.hasWalked(tp, rrest)) { + this.subwalks.add(tp, rrest); + } + } + } + } + else if (p instanceof RegExp) { + this.subwalks.add(t, pattern); + } + } + return this; + } + subwalkTargets() { + return this.subwalks.keys(); + } + child() { + return new Processor(this.opts, this.hasWalkedCache); + } + // return a new Processor containing the subwalks for each + // child entry, and a set of matches, and + // a hasWalkedCache that's a copy of this one + // then we're going to call + filterEntries(parent, entries) { + const patterns = this.subwalks.get(parent); + // put matches and entry walks into the results processor + const results = this.child(); + for (const e of entries) { + for (const pattern of patterns) { + const absolute = pattern.isAbsolute(); + const p = pattern.pattern(); + const rest = pattern.rest(); + if (p === minimatch_1.GLOBSTAR) { + results.testGlobstar(e, pattern, rest, absolute); + } + else if (p instanceof RegExp) { + results.testRegExp(e, p, rest, absolute); + } + else { + results.testString(e, p, rest, absolute); + } + } + } + return results; + } + testGlobstar(e, pattern, rest, absolute) { + if (this.dot || !e.name.startsWith('.')) { + if (!pattern.hasMore()) { + this.matches.add(e, absolute, false); + } + if (e.canReaddir()) { + // if we're in follow mode or it's not a symlink, just keep + // testing the same pattern. If there's more after the globstar, + // then this symlink consumes the globstar. If not, then we can + // follow at most ONE symlink along the way, so we mark it, which + // also checks to ensure that it wasn't already marked. + if (this.follow || !e.isSymbolicLink()) { + this.subwalks.add(e, pattern); + } + else if (e.isSymbolicLink()) { + if (rest && pattern.checkFollowGlobstar()) { + this.subwalks.add(e, rest); + } + else if (pattern.markFollowGlobstar()) { + this.subwalks.add(e, pattern); + } + } + } + } + // if the NEXT thing matches this entry, then also add + // the rest. + if (rest) { + const rp = rest.pattern(); + if (typeof rp === 'string' && + // dots and empty were handled already + rp !== '..' && + rp !== '' && + rp !== '.') { + this.testString(e, rp, rest.rest(), absolute); + } + else if (rp === '..') { + /* c8 ignore start */ + const ep = e.parent || e; + /* c8 ignore stop */ + this.subwalks.add(ep, rest); + } + else if (rp instanceof RegExp) { + this.testRegExp(e, rp, rest.rest(), absolute); + } + } + } + testRegExp(e, p, rest, absolute) { + if (!p.test(e.name)) + return; + if (!rest) { + this.matches.add(e, absolute, false); + } + else { + this.subwalks.add(e, rest); + } + } + testString(e, p, rest, absolute) { + // should never happen? 
+ if (!e.isNamed(p)) + return; + if (!rest) { + this.matches.add(e, absolute, false); + } + else { + this.subwalks.add(e, rest); + } + } +} +exports.Processor = Processor; +//# sourceMappingURL=processor.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/processor.js.map b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/processor.js.map new file mode 100644 index 0000000000000000000000000000000000000000..58a70882e9462f7c0746a03edf0fe37e625230ce --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/processor.js.map @@ -0,0 +1 @@ +{"version":3,"file":"processor.js","sourceRoot":"","sources":["../../src/processor.ts"],"names":[],"mappings":";AAAA,qEAAqE;;;AAErE,yCAA8C;AAK9C;;GAEG;AACH,MAAa,cAAc;IACzB,KAAK,CAA0B;IAC/B,YAAY,QAAkC,IAAI,GAAG,EAAE;QACrD,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;IACD,IAAI;QACF,OAAO,IAAI,cAAc,CAAC,IAAI,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAA;IAChD,CAAC;IACD,SAAS,CAAC,MAAY,EAAE,OAAgB;QACtC,OAAO,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC,EAAE,GAAG,CAAC,OAAO,CAAC,UAAU,EAAE,CAAC,CAAA;IACrE,CAAC;IACD,WAAW,CAAC,MAAY,EAAE,OAAgB;QACxC,MAAM,QAAQ,GAAG,MAAM,CAAC,QAAQ,EAAE,CAAA;QAClC,MAAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAA;QACvC,IAAI,MAAM;YAAE,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,UAAU,EAAE,CAAC,CAAA;;YACvC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,QAAQ,EAAE,IAAI,GAAG,CAAC,CAAC,OAAO,CAAC,UAAU,EAAE,CAAC,CAAC,CAAC,CAAA;IAChE,CAAC;CACF;AAjBD,wCAiBC;AAED;;;;GAIG;AACH,MAAa,WAAW;IACtB,KAAK,GAAsB,IAAI,GAAG,EAAE,CAAA;IACpC,GAAG,CAAC,MAAY,EAAE,QAAiB,EAAE,KAAc;QACjD,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;QAC9C,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,CAAA;QACtC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,EAAE,OAAO,KAAK,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,CAAA;IACjE,CAAC;IACD,yBAAyB;IACzB,OAAO;QACL,OAAO,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC;YAClD,IAAI;YACJ,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;YACT,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;SACV,CAAC,CAAA;IACJ,CAAC;CACF;AAfD,kCAeC;AAED;;;GAGG;AACH,MAAa,QAAQ;IACnB,KAAK,GAAyB,IAAI,GAAG,EAAE,CAAA;IACvC,GAAG,CAAC,MAAY,EAAE,OAAgB;QAChC,IAAI,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,CAAC;YACzB,OAAM;QACR,CAAC;QACD,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,CAAA;QACnC,IAAI,IAAI,EAAE,CAAC;YACT,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,UAAU,EAAE,KAAK,OAAO,CAAC,UAAU,EAAE,CAAC,EAAE,CAAC;gBAC7D,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;YACpB,CAAC;QACH,CAAC;;YAAM,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,EAAE,CAAC,OAAO,CAAC,CAAC,CAAA;IAC1C,CAAC;IACD,GAAG,CAAC,MAAY;QACd,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,CAAA;QACnC,qBAAqB;QACrB,IAAI,CAAC,IAAI,EAAE,CAAC;YACV,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAA;QACpD,CAAC;QACD,oBAAoB;QACpB,OAAO,IAAI,CAAA;IACb,CAAC;IACD,OAAO;QACL,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAc,CAAC,CAAC,CAAA;IAClE,CAAC;IACD,IAAI;QACF,OAAO,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,UAAU,EAAE,CAAC,CAAA;IAC3D,CAAC;CACF;AA5BD,4BA4BC;AAED;;;;;GAKG;AACH,MAAa,SAAS;IACpB,cAAc,CAAgB;IAC9B,OAAO,GAAG,IAAI,WAAW,EAAE,CAAA;IAC3B,QAAQ,GAAG,IAAI,QAAQ,EAAE,CAAA;IACzB,QAAQ,CAAY;IACpB,MAAM,CAAS;IACf,GAAG,CAAS;IACZ,IAAI,CAAgB;IAEpB,YAAY,IAAoB,EAAE,cAA+B;
QAC/D,IAAI,CAAC,IAAI,GAAG,IAAI,CAAA;QAChB,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,IAAI,CAAC,MAAM,CAAA;QAC3B,IAAI,CAAC,GAAG,GAAG,CAAC,CAAC,IAAI,CAAC,GAAG,CAAA;QACrB,IAAI,CAAC,cAAc;YACjB,cAAc,CAAC,CAAC,CAAC,cAAc,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,IAAI,cAAc,EAAE,CAAA;IACjE,CAAC;IAED,eAAe,CAAC,MAAY,EAAE,QAAmB;QAC/C,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;QACxB,MAAM,aAAa,GAAsB,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAA;QAEvE,gEAAgE;QAChE,uCAAuC;QAEvC,KAAK,IAAI,CAAC,CAAC,EAAE,OAAO,CAAC,IAAI,aAAa,EAAE,CAAC;YACvC,IAAI,CAAC,cAAc,CAAC,WAAW,CAAC,CAAC,EAAE,OAAO,CAAC,CAAA;YAE3C,MAAM,IAAI,GAAG,OAAO,CAAC,IAAI,EAAE,CAAA;YAC3B,MAAM,QAAQ,GAAG,OAAO,CAAC,UAAU,EAAE,IAAI,IAAI,CAAC,IAAI,CAAC,QAAQ,KAAK,KAAK,CAAA;YAErE,kCAAkC;YAClC,IAAI,IAAI,EAAE,CAAC;gBACT,CAAC,GAAG,CAAC,CAAC,OAAO,CACX,IAAI,KAAK,GAAG,IAAI,IAAI,CAAC,IAAI,CAAC,IAAI,KAAK,SAAS,CAAC,CAAC;oBAC5C,IAAI,CAAC,IAAI,CAAC,IAAI;oBAChB,CAAC,CAAC,IAAI,CACP,CAAA;gBACD,MAAM,IAAI,GAAG,OAAO,CAAC,IAAI,EAAE,CAAA;gBAC3B,IAAI,CAAC,IAAI,EAAE,CAAC;oBACV,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,EAAE,KAAK,CAAC,CAAA;oBAChC,SAAQ;gBACV,CAAC;qBAAM,CAAC;oBACN,OAAO,GAAG,IAAI,CAAA;gBAChB,CAAC;YACH,CAAC;YAED,IAAI,CAAC,CAAC,QAAQ,EAAE;gBAAE,SAAQ;YAE1B,IAAI,CAAY,CAAA;YAChB,IAAI,IAAoB,CAAA;YACxB,IAAI,OAAO,GAAG,KAAK,CAAA;YACnB,OACE,OAAO,CAAC,CAAC,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC,KAAK,QAAQ;gBAC3C,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,EAAE,CAAC,EACvB,CAAC;gBACD,MAAM,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAA;gBACtB,CAAC,GAAG,CAAC,CAAA;gBACL,OAAO,GAAG,IAAI,CAAA;gBACd,OAAO,GAAG,IAAI,CAAA;YAChB,CAAC;YACD,CAAC,GAAG,OAAO,CAAC,OAAO,EAAE,CAAA;YACrB,IAAI,GAAG,OAAO,CAAC,IAAI,EAAE,CAAA;YACrB,IAAI,OAAO,EAAE,CAAC;gBACZ,IAAI,IAAI,CAAC,cAAc,CAAC,SAAS,CAAC,CAAC,EAAE,OAAO,CAAC;oBAAE,SAAQ;gBACvD,IAAI,CAAC,cAAc,CAAC,WAAW,CAAC,CAAC,EAAE,OAAO,CAAC,CAAA;YAC7C,CAAC;YAED,uDAAuD;YACvD,qCAAqC;YACrC,kDAAkD;YAClD,IAAI,OAAO,CAAC,KAAK,QAAQ,EAAE,CAAC;gBAC1B,mDAAmD;gBACnD,2BAA2B;gBAC3B,MAAM,KAAK,GAAG,CAAC,KAAK,IAAI,IAAI,CAAC,KAAK,EAAE,IAAI,CAAC,KAAK,GAAG,CAAA;gBACjD,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAA;gBAC/C,SAAQ;YACV,CAAC;iBAAM,IAAI,CAAC,KAAK,oBAAQ,EAAE,CAAC;gBAC1B,wCAAwC;gBACxC,4CAA4C;gBAC5C,wDAAwD;gBACxD,4DAA4D;gBAC5D,gEAAgE;gBAChE,IACE,CAAC,CAAC,CAAC,cAAc,EAAE;oBACnB,IAAI,CAAC,MAAM;oBACX,OAAO,CAAC,mBAAmB,EAAE,EAC7B,CAAC;oBACD,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,CAAC,CAAA;gBAC/B,CAAC;gBACD,MAAM,EAAE,GAAG,IAAI,EAAE,OAAO,EAAE,CAAA;gBAC1B,MAAM,KAAK,GAAG,IAAI,EAAE,IAAI,EAAE,CAAA;gBAC1B,IAAI,CAAC,IAAI,IAAI,CAAC,CAAC,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC;oBACnD,iDAAiD;oBACjD,6CAA6C;oBAC7C,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,EAAE,QAAQ,EAAE,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,GAAG,CAAC,CAAA;gBACxD,CAAC;qBAAM,CAAC;oBACN,IAAI,EAAE,KAAK,IAAI,EAAE,CAAC;wBAChB,wDAAwD;wBACxD,wDAAwD;wBACxD,qBAAqB;wBACrB,MAAM,EAAE,GAAG,CAAC,CAAC,MAAM,IAAI,CAAC,CAAA;wBACxB,oBAAoB;wBACpB,IAAI,CAAC,KAAK;4BAAE,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE,EAAE,QAAQ,EAAE,IAAI,CAAC,CAAA;6BAC3C,IAAI,CAAC,IAAI,CAAC,cAAc,CAAC,SAAS,CAAC,EAAE,EAAE,KAAK,CAAC,EAAE,CAAC;4BACnD,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE,EAAE,KAAK,CAAC,CAAA;wBAC9B,CAAC;oBACH,CAAC;gBACH,CAAC;YACH,CAAC;iBAAM,IAAI,CAAC,YAAY,MAAM,EAAE,CAAC;gBAC/B,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,CAAC,CAAA;YAC/B,CAAC;QACH,CAAC;QAED,OAAO,IAAI,CAAA;IACb,CAAC;IAED,cAAc;QACZ,OAAO,IAAI,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAA;IAC7B,CAAC;IAED,KAAK;QACH,OAAO,IAAI,SAAS,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,cAAc,CAAC,CAAA;IACtD,CAAC;IAED,0DAA0D;IAC1D,yCAAyC;IACzC,6CAA6C;IAC7C,2BAA2B;IAC3B,aAAa,CAAC,MAAY,EAAE,OAAe;QACzC,MAAM,QAAQ,GAAG,IAAI,CAAC,
QAAQ,CAAC,GAAG,CAAC,MAAM,CAAC,CAAA;QAC1C,yDAAyD;QACzD,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,EAAE,CAAA;QAC5B,KAAK,MAAM,CAAC,IAAI,OAAO,EAAE,CAAC;YACxB,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE,CAAC;gBAC/B,MAAM,QAAQ,GAAG,OAAO,CAAC,UAAU,EAAE,CAAA;gBACrC,MAAM,CAAC,GAAG,OAAO,CAAC,OAAO,EAAE,CAAA;gBAC3B,MAAM,IAAI,GAAG,OAAO,CAAC,IAAI,EAAE,CAAA;gBAC3B,IAAI,CAAC,KAAK,oBAAQ,EAAE,CAAC;oBACnB,OAAO,CAAC,YAAY,CAAC,CAAC,EAAE,OAAO,EAAE,IAAI,EAAE,QAAQ,CAAC,CAAA;gBAClD,CAAC;qBAAM,IAAI,CAAC,YAAY,MAAM,EAAE,CAAC;oBAC/B,OAAO,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,EAAE,QAAQ,CAAC,CAAA;gBAC1C,CAAC;qBAAM,CAAC;oBACN,OAAO,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,EAAE,QAAQ,CAAC,CAAA;gBAC1C,CAAC;YACH,CAAC;QACH,CAAC;QACD,OAAO,OAAO,CAAA;IAChB,CAAC;IAED,YAAY,CACV,CAAO,EACP,OAAgB,EAChB,IAAoB,EACpB,QAAiB;QAEjB,IAAI,IAAI,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE,CAAC;YACxC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,EAAE,CAAC;gBACvB,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAA;YACtC,CAAC;YACD,IAAI,CAAC,CAAC,UAAU,EAAE,EAAE,CAAC;gBACnB,2DAA2D;gBAC3D,gEAAgE;gBAChE,+DAA+D;gBAC/D,iEAAiE;gBACjE,uDAAuD;gBACvD,IAAI,IAAI,CAAC,MAAM,IAAI,CAAC,CAAC,CAAC,cAAc,EAAE,EAAE,CAAC;oBACvC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,CAAC,CAAA;gBAC/B,CAAC;qBAAM,IAAI,CAAC,CAAC,cAAc,EAAE,EAAE,CAAC;oBAC9B,IAAI,IAAI,IAAI,OAAO,CAAC,mBAAmB,EAAE,EAAE,CAAC;wBAC1C,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,CAAA;oBAC5B,CAAC;yBAAM,IAAI,OAAO,CAAC,kBAAkB,EAAE,EAAE,CAAC;wBACxC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,CAAC,CAAA;oBAC/B,CAAC;gBACH,CAAC;YACH,CAAC;QACH,CAAC;QACD,sDAAsD;QACtD,YAAY;QACZ,IAAI,IAAI,EAAE,CAAC;YACT,MAAM,EAAE,GAAG,IAAI,CAAC,OAAO,EAAE,CAAA;YACzB,IACE,OAAO,EAAE,KAAK,QAAQ;gBACtB,sCAAsC;gBACtC,EAAE,KAAK,IAAI;gBACX,EAAE,KAAK,EAAE;gBACT,EAAE,KAAK,GAAG,EACV,CAAC;gBACD,IAAI,CAAC,UAAU,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,IAAI,EAAE,EAAE,QAAQ,CAAC,CAAA;YAC/C,CAAC;iBAAM,IAAI,EAAE,KAAK,IAAI,EAAE,CAAC;gBACvB,qBAAqB;gBACrB,MAAM,EAAE,GAAG,CAAC,CAAC,MAAM,IAAI,CAAC,CAAA;gBACxB,oBAAoB;gBACpB,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE,EAAE,IAAI,CAAC,CAAA;YAC7B,CAAC;iBAAM,IAAI,EAAE,YAAY,MAAM,EAAE,CAAC;gBAChC,IAAI,CAAC,UAAU,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,IAAI,EAAE,EAAE,QAAQ,CAAC,CAAA;YAC/C,CAAC;QACH,CAAC;IACH,CAAC;IAED,UAAU,CACR,CAAO,EACP,CAAW,EACX,IAAoB,EACpB,QAAiB;QAEjB,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC;YAAE,OAAM;QAC3B,IAAI,CAAC,IAAI,EAAE,CAAC;YACV,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAA;QACtC,CAAC;aAAM,CAAC;YACN,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,CAAA;QAC5B,CAAC;IACH,CAAC;IAED,UAAU,CAAC,CAAO,EAAE,CAAS,EAAE,IAAoB,EAAE,QAAiB;QACpE,uBAAuB;QACvB,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC;YAAE,OAAM;QACzB,IAAI,CAAC,IAAI,EAAE,CAAC;YACV,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAA;QACtC,CAAC;aAAM,CAAC;YACN,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,CAAA;QAC5B,CAAC;IACH,CAAC;CACF;AA9ND,8BA8NC","sourcesContent":["// synchronous utility for filtering entries and calculating subwalks\n\nimport { GLOBSTAR, MMRegExp } from 'minimatch'\nimport { Path } from 'path-scurry'\nimport { MMPattern, Pattern } from './pattern.js'\nimport { GlobWalkerOpts } from './walker.js'\n\n/**\n * A cache of which patterns have been processed for a given Path\n */\nexport class HasWalkedCache {\n store: Map>\n constructor(store: Map> = new Map()) {\n this.store = store\n }\n copy() {\n return new HasWalkedCache(new Map(this.store))\n }\n hasWalked(target: Path, pattern: Pattern) {\n return this.store.get(target.fullpath())?.has(pattern.globString())\n }\n storeWalked(target: Path, pattern: Pattern) {\n 
const fullpath = target.fullpath()\n const cached = this.store.get(fullpath)\n if (cached) cached.add(pattern.globString())\n else this.store.set(fullpath, new Set([pattern.globString()]))\n }\n}\n\n/**\n * A record of which paths have been matched in a given walk step,\n * and whether they only are considered a match if they are a directory,\n * and whether their absolute or relative path should be returned.\n */\nexport class MatchRecord {\n store: Map = new Map()\n add(target: Path, absolute: boolean, ifDir: boolean) {\n const n = (absolute ? 2 : 0) | (ifDir ? 1 : 0)\n const current = this.store.get(target)\n this.store.set(target, current === undefined ? n : n & current)\n }\n // match, absolute, ifdir\n entries(): [Path, boolean, boolean][] {\n return [...this.store.entries()].map(([path, n]) => [\n path,\n !!(n & 2),\n !!(n & 1),\n ])\n }\n}\n\n/**\n * A collection of patterns that must be processed in a subsequent step\n * for a given path.\n */\nexport class SubWalks {\n store: Map = new Map()\n add(target: Path, pattern: Pattern) {\n if (!target.canReaddir()) {\n return\n }\n const subs = this.store.get(target)\n if (subs) {\n if (!subs.find(p => p.globString() === pattern.globString())) {\n subs.push(pattern)\n }\n } else this.store.set(target, [pattern])\n }\n get(target: Path): Pattern[] {\n const subs = this.store.get(target)\n /* c8 ignore start */\n if (!subs) {\n throw new Error('attempting to walk unknown path')\n }\n /* c8 ignore stop */\n return subs\n }\n entries(): [Path, Pattern[]][] {\n return this.keys().map(k => [k, this.store.get(k) as Pattern[]])\n }\n keys(): Path[] {\n return [...this.store.keys()].filter(t => t.canReaddir())\n }\n}\n\n/**\n * The class that processes patterns for a given path.\n *\n * Handles child entry filtering, and determining whether a path's\n * directory contents must be read.\n */\nexport class Processor {\n hasWalkedCache: HasWalkedCache\n matches = new MatchRecord()\n subwalks = new SubWalks()\n patterns?: Pattern[]\n follow: boolean\n dot: boolean\n opts: GlobWalkerOpts\n\n constructor(opts: GlobWalkerOpts, hasWalkedCache?: HasWalkedCache) {\n this.opts = opts\n this.follow = !!opts.follow\n this.dot = !!opts.dot\n this.hasWalkedCache =\n hasWalkedCache ? 
hasWalkedCache.copy() : new HasWalkedCache()\n }\n\n processPatterns(target: Path, patterns: Pattern[]) {\n this.patterns = patterns\n const processingSet: [Path, Pattern][] = patterns.map(p => [target, p])\n\n // map of paths to the magic-starting subwalks they need to walk\n // first item in patterns is the filter\n\n for (let [t, pattern] of processingSet) {\n this.hasWalkedCache.storeWalked(t, pattern)\n\n const root = pattern.root()\n const absolute = pattern.isAbsolute() && this.opts.absolute !== false\n\n // start absolute patterns at root\n if (root) {\n t = t.resolve(\n root === '/' && this.opts.root !== undefined ?\n this.opts.root\n : root,\n )\n const rest = pattern.rest()\n if (!rest) {\n this.matches.add(t, true, false)\n continue\n } else {\n pattern = rest\n }\n }\n\n if (t.isENOENT()) continue\n\n let p: MMPattern\n let rest: Pattern | null\n let changed = false\n while (\n typeof (p = pattern.pattern()) === 'string' &&\n (rest = pattern.rest())\n ) {\n const c = t.resolve(p)\n t = c\n pattern = rest\n changed = true\n }\n p = pattern.pattern()\n rest = pattern.rest()\n if (changed) {\n if (this.hasWalkedCache.hasWalked(t, pattern)) continue\n this.hasWalkedCache.storeWalked(t, pattern)\n }\n\n // now we have either a final string for a known entry,\n // more strings for an unknown entry,\n // or a pattern starting with magic, mounted on t.\n if (typeof p === 'string') {\n // must not be final entry, otherwise we would have\n // concatenated it earlier.\n const ifDir = p === '..' || p === '' || p === '.'\n this.matches.add(t.resolve(p), absolute, ifDir)\n continue\n } else if (p === GLOBSTAR) {\n // if no rest, match and subwalk pattern\n // if rest, process rest and subwalk pattern\n // if it's a symlink, but we didn't get here by way of a\n // globstar match (meaning it's the first time THIS globstar\n // has traversed a symlink), then we follow it. Otherwise, stop.\n if (\n !t.isSymbolicLink() ||\n this.follow ||\n pattern.checkFollowGlobstar()\n ) {\n this.subwalks.add(t, pattern)\n }\n const rp = rest?.pattern()\n const rrest = rest?.rest()\n if (!rest || ((rp === '' || rp === '.') && !rrest)) {\n // only HAS to be a dir if it ends in **/ or **/.\n // but ending in ** will match files as well.\n this.matches.add(t, absolute, rp === '' || rp === '.')\n } else {\n if (rp === '..') {\n // this would mean you're matching **/.. 
at the fs root,\n // and no thanks, I'm not gonna test that specific case.\n /* c8 ignore start */\n const tp = t.parent || t\n /* c8 ignore stop */\n if (!rrest) this.matches.add(tp, absolute, true)\n else if (!this.hasWalkedCache.hasWalked(tp, rrest)) {\n this.subwalks.add(tp, rrest)\n }\n }\n }\n } else if (p instanceof RegExp) {\n this.subwalks.add(t, pattern)\n }\n }\n\n return this\n }\n\n subwalkTargets(): Path[] {\n return this.subwalks.keys()\n }\n\n child() {\n return new Processor(this.opts, this.hasWalkedCache)\n }\n\n // return a new Processor containing the subwalks for each\n // child entry, and a set of matches, and\n // a hasWalkedCache that's a copy of this one\n // then we're going to call\n filterEntries(parent: Path, entries: Path[]): Processor {\n const patterns = this.subwalks.get(parent)\n // put matches and entry walks into the results processor\n const results = this.child()\n for (const e of entries) {\n for (const pattern of patterns) {\n const absolute = pattern.isAbsolute()\n const p = pattern.pattern()\n const rest = pattern.rest()\n if (p === GLOBSTAR) {\n results.testGlobstar(e, pattern, rest, absolute)\n } else if (p instanceof RegExp) {\n results.testRegExp(e, p, rest, absolute)\n } else {\n results.testString(e, p, rest, absolute)\n }\n }\n }\n return results\n }\n\n testGlobstar(\n e: Path,\n pattern: Pattern,\n rest: Pattern | null,\n absolute: boolean,\n ) {\n if (this.dot || !e.name.startsWith('.')) {\n if (!pattern.hasMore()) {\n this.matches.add(e, absolute, false)\n }\n if (e.canReaddir()) {\n // if we're in follow mode or it's not a symlink, just keep\n // testing the same pattern. If there's more after the globstar,\n // then this symlink consumes the globstar. If not, then we can\n // follow at most ONE symlink along the way, so we mark it, which\n // also checks to ensure that it wasn't already marked.\n if (this.follow || !e.isSymbolicLink()) {\n this.subwalks.add(e, pattern)\n } else if (e.isSymbolicLink()) {\n if (rest && pattern.checkFollowGlobstar()) {\n this.subwalks.add(e, rest)\n } else if (pattern.markFollowGlobstar()) {\n this.subwalks.add(e, pattern)\n }\n }\n }\n }\n // if the NEXT thing matches this entry, then also add\n // the rest.\n if (rest) {\n const rp = rest.pattern()\n if (\n typeof rp === 'string' &&\n // dots and empty were handled already\n rp !== '..' 
&&\n rp !== '' &&\n rp !== '.'\n ) {\n this.testString(e, rp, rest.rest(), absolute)\n } else if (rp === '..') {\n /* c8 ignore start */\n const ep = e.parent || e\n /* c8 ignore stop */\n this.subwalks.add(ep, rest)\n } else if (rp instanceof RegExp) {\n this.testRegExp(e, rp, rest.rest(), absolute)\n }\n }\n }\n\n testRegExp(\n e: Path,\n p: MMRegExp,\n rest: Pattern | null,\n absolute: boolean,\n ) {\n if (!p.test(e.name)) return\n if (!rest) {\n this.matches.add(e, absolute, false)\n } else {\n this.subwalks.add(e, rest)\n }\n }\n\n testString(e: Path, p: string, rest: Pattern | null, absolute: boolean) {\n // should never happen?\n if (!e.isNamed(p)) return\n if (!rest) {\n this.matches.add(e, absolute, false)\n } else {\n this.subwalks.add(e, rest)\n }\n }\n}\n"]} \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/walker.d.ts b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/walker.d.ts new file mode 100644 index 0000000000000000000000000000000000000000..499c8f4933857a8720e77b7b96d9842bbd3248ae --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/walker.d.ts @@ -0,0 +1,97 @@ +/** + * Single-use utility classes to provide functionality to the {@link Glob} + * methods. + * + * @module + */ +import { Minipass } from 'minipass'; +import { Path } from 'path-scurry'; +import { IgnoreLike } from './ignore.js'; +import { Pattern } from './pattern.js'; +import { Processor } from './processor.js'; +export interface GlobWalkerOpts { + absolute?: boolean; + allowWindowsEscape?: boolean; + cwd?: string | URL; + dot?: boolean; + dotRelative?: boolean; + follow?: boolean; + ignore?: string | string[] | IgnoreLike; + mark?: boolean; + matchBase?: boolean; + maxDepth?: number; + nobrace?: boolean; + nocase?: boolean; + nodir?: boolean; + noext?: boolean; + noglobstar?: boolean; + platform?: NodeJS.Platform; + posix?: boolean; + realpath?: boolean; + root?: string; + stat?: boolean; + signal?: AbortSignal; + windowsPathsNoEscape?: boolean; + withFileTypes?: boolean; + includeChildMatches?: boolean; +} +export type GWOFileTypesTrue = GlobWalkerOpts & { + withFileTypes: true; +}; +export type GWOFileTypesFalse = GlobWalkerOpts & { + withFileTypes: false; +}; +export type GWOFileTypesUnset = GlobWalkerOpts & { + withFileTypes?: undefined; +}; +export type Result = O extends GWOFileTypesTrue ? Path : O extends GWOFileTypesFalse ? string : O extends GWOFileTypesUnset ? string : Path | string; +export type Matches = O extends GWOFileTypesTrue ? Set : O extends GWOFileTypesFalse ? Set : O extends GWOFileTypesUnset ? 
Set<string> : Set<Path | string>; +export type MatchStream<O extends GlobWalkerOpts> = Minipass<Result<O>, Result<O>>; +/** + * basic walking utilities that all the glob walker types use + */ +export declare abstract class GlobUtil<O extends GlobWalkerOpts = GlobWalkerOpts> { + #private; + path: Path; + patterns: Pattern[]; + opts: O; + seen: Set<Path>; + paused: boolean; + aborted: boolean; + signal?: AbortSignal; + maxDepth: number; + includeChildMatches: boolean; + constructor(patterns: Pattern[], path: Path, opts: O); + pause(): void; + resume(): void; + onResume(fn: () => any): void; + matchCheck(e: Path, ifDir: boolean): Promise<Path | undefined>; + matchCheckTest(e: Path | undefined, ifDir: boolean): Path | undefined; + matchCheckSync(e: Path, ifDir: boolean): Path | undefined; + abstract matchEmit(p: Result<O>): void; + abstract matchEmit(p: string | Path): void; + matchFinish(e: Path, absolute: boolean): void; + match(e: Path, absolute: boolean, ifDir: boolean): Promise<void>; + matchSync(e: Path, absolute: boolean, ifDir: boolean): void; + walkCB(target: Path, patterns: Pattern[], cb: () => any): void; + walkCB2(target: Path, patterns: Pattern[], processor: Processor, cb: () => any): any; + walkCB3(target: Path, entries: Path[], processor: Processor, cb: () => any): void; + walkCBSync(target: Path, patterns: Pattern[], cb: () => any): void; + walkCB2Sync(target: Path, patterns: Pattern[], processor: Processor, cb: () => any): any; + walkCB3Sync(target: Path, entries: Path[], processor: Processor, cb: () => any): void; +} +export declare class GlobWalker<O extends GlobWalkerOpts = GlobWalkerOpts> extends GlobUtil<O> { + matches: Set<Result<O>>; + constructor(patterns: Pattern[], path: Path, opts: O); + matchEmit(e: Result<O>): void; + walk(): Promise<Set<Result<O>>>; + walkSync(): Set<Result<O>>; +} +export declare class GlobStream<O extends GlobWalkerOpts = GlobWalkerOpts> extends GlobUtil<O> { + results: Minipass<Result<O>, Result<O>>; + constructor(patterns: Pattern[], path: Path, opts: O); + matchEmit(e: Result<O>): void; + stream(): MatchStream<O>; + streamSync(): MatchStream<O>; +} +//# sourceMappingURL=walker.d.ts.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/walker.d.ts.map b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/walker.d.ts.map new file mode 100644 index 0000000000000000000000000000000000000000..769957bd59bb1ce67cdf28134be59ca86946c405 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/walker.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"walker.d.ts","sourceRoot":"","sources":["../../src/walker.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AACH,OAAO,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AACnC,OAAO,EAAE,IAAI,EAAE,MAAM,aAAa,CAAA;AAClC,OAAO,EAAU,UAAU,EAAE,MAAM,aAAa,CAAA;AAOhD,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAA;AACtC,OAAO,EAAE,SAAS,EAAE,MAAM,gBAAgB,CAAA;AAE1C,MAAM,WAAW,cAAc;IAC7B,QAAQ,CAAC,EAAE,OAAO,CAAA;IAClB,kBAAkB,CAAC,EAAE,OAAO,CAAA;IAC5B,GAAG,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;IAClB,GAAG,CAAC,EAAE,OAAO,CAAA;IACb,WAAW,CAAC,EAAE,OAAO,CAAA;IACrB,MAAM,CAAC,EAAE,OAAO,CAAA;IAChB,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,UAAU,CAAA;IACvC,IAAI,CAAC,EAAE,OAAO,CAAA;IACd,SAAS,CAAC,EAAE,OAAO,CAAA;IAGnB,QAAQ,CAAC,EAAE,MAAM,CAAA;IACjB,OAAO,CAAC,EAAE,OAAO,CAAA;IACjB,MAAM,CAAC,EAAE,OAAO,CAAA;IAChB,KAAK,CAAC,EAAE,OAAO,CAAA;IACf,KAAK,CAAC,EAAE,OAAO,CAAA;IACf,UAAU,CAAC,EAAE,OAAO,CAAA;IACpB,QAAQ,CAAC,EAAE,MAAM,CAAC,QAAQ,CAAA;IAC1B,KAAK,CAAC,EAAE,OAAO,CAAA;IACf,QAAQ,CAAC,EAAE,OAAO,CAAA;IAClB,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,IAAI,CAAC,EAAE,OAAO,CAAA;IACd,MAAM,CAAC,EAAE,WAAW,CAAA;IACpB,oBAAoB,CAAC,EAAE,OAAO,CAAA;IAC9B,aAAa,CAAC,EAAE,OAAO,CAAA;IACvB,mBAAmB,CAAC,EAAE,OAAO,CAAA;CAC9B;AAED,MAAM,MAAM,gBAAgB,GAAG,cAAc,GAAG;IAC9C,aAAa,EAAE,IAAI,CAAA;CACpB,CAAA;AACD,MAAM,MAAM,iBAAiB,GAAG,cAAc,GAAG;IAC/C,aAAa,EAAE,KAAK,CAAA;CACrB,CAAA;AACD,MAAM,MAAM,iBAAiB,GAAG,cAAc,GAAG;IAC/C,aAAa,CAAC,EAAE,SAAS,CAAA;CAC1B,CAAA;AAED,MAAM,MAAM,MAAM,CAAC,CAAC,SAAS,cAAc,IACzC,CAAC,SAAS,gBAAgB,GAAG,IAAI,GAC/B,CAAC,SAAS,iBAAiB,GAAG,MAAM,GACpC,CAAC,SAAS,iBAAiB,GAAG,MAAM,GACpC,IAAI,GAAG,MAAM,CAAA;AAEjB,MAAM,MAAM,OAAO,CAAC,CAAC,SAAS,cAAc,IAC1C,CAAC,SAAS,gBAAgB,GAAG,GAAG,CAAC,IAAI,CAAC,GACpC,CAAC,SAAS,iBAAiB,GAAG,GAAG,CAAC,MAAM,CAAC,GACzC,CAAC,SAAS,iBAAiB,GAAG,GAAG,CAAC,MAAM,CAAC,GACzC,GAAG,CAAC,IAAI,GAAG,MAAM,CAAC,CAAA;AAEtB,MAAM,MAAM,WAAW,CAAC,CAAC,SAAS,cAAc,IAAI,QAAQ,CAC1D,MAAM,CAAC,CAAC,CAAC,EACT,MAAM,CAAC,CAAC,CAAC,CACV,CAAA;AAUD;;GAEG;AACH,8BAAsB,QAAQ,CAAC,CAAC,SAAS,cAAc,GAAG,cAAc;;IACtE,IAAI,EAAE,IAAI,CAAA;IACV,QAAQ,EAAE,OAAO,EAAE,CAAA;IACnB,IAAI,EAAE,CAAC,CAAA;IACP,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,CAAkB;IACjC,MAAM,EAAE,OAAO,CAAQ;IACvB,OAAO,EAAE,OAAO,CAAQ;IAIxB,MAAM,CAAC,EAAE,WAAW,CAAA;IACpB,QAAQ,EAAE,MAAM,CAAA;IAChB,mBAAmB,EAAE,OAAO,CAAA;gBAEhB,QAAQ,EAAE,OAAO,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;IAsCpD,KAAK;IAGL,MAAM;IAUN,QAAQ,CAAC,EAAE,EAAE,MAAM,GAAG;IAahB,UAAU,CAAC,CAAC,EAAE,IAAI,EAAE,KAAK,EAAE,OAAO,GAAG,OAAO,CAAC,IAAI,GAAG,SAAS,CAAC;IAqBpE,cAAc,CAAC,CAAC,EAAE,IAAI,GAAG,SAAS,EAAE,KAAK,EAAE,OAAO,GAAG,IAAI,GAAG,SAAS;IAgBrE,cAAc,CAAC,CAAC,EAAE,IAAI,EAAE,KAAK,EAAE,OAAO,GAAG,IAAI,GAAG,SAAS;IAmBzD,QAAQ,CAAC,SAAS,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,GAAG,IAAI;IACtC,QAAQ,CAAC,SAAS,CAAC,CAAC,EAAE,MAAM,GAAG,IAAI,GAAG,IAAI;IAE1C,WAAW,CAAC,CAAC,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO;IA2BhC,KAAK,CAAC,CAAC,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,KAAK,EAAE,OAAO,GAAG,OAAO,CAAC,IAAI,CAAC;IAKtE,SAAS,CAAC,CAAC,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,KAAK,EAAE,OAAO,GAAG,IAAI;IAK3D,MAAM,CAAC,MAAM,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,EAAE,EAAE,EAAE,MAAM,GAAG;IAOvD,OAAO,CACL,MAAM,EAAE,IAAI,EACZ,QAAQ,EAAE,OAAO,EAAE,EACnB,SAAS,EAAE,SAAS,EACpB,EAAE,EAAE,MAAM,GAAG;IA2Cf,OAAO,CACL,MAAM,EAAE,IAAI,EACZ,OAAO,EAAE,IAAI,EAAE,EACf,SAAS,EAAE,SAAS,EACpB,EAAE,EAAE,MAAM,GAAG;IAsBf,UAAU,CAAC,MAAM,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,EAAE,EAAE,EAAE,MAAM,GAAG;IAO3D,WAAW,CACT,MAAM,EAAE,IAAI,EACZ,QAAQ,EAAE,OAAO,EAAE,EACnB,SAAS,EAAE,SAAS,EACpB,EAAE,EAAE,MAAM,GAAG;IAqCf,WAAW,CACT,MAAM,EAAE,IAAI,EACZ,OAAO,EAAE,IAAI,EAAE,EACf,SAAS,EAAE,SAAS,EACpB,EAAE,EAAE,MAAM,GAAG;CAoBhB;AAED,qBAAa,UAAU,CACrB,CAAC,SAAS,cAAc,GAAG,cAAc,CACzC,SAAQ,QAAQ,
CAAC,CAAC,CAAC;IACnB,OAAO,iBAAuB;gBAElB,QAAQ,EAAE,OAAO,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;IAIpD,SAAS,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,GAAG,IAAI;IAIvB,IAAI,IAAI,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;IAiBrC,QAAQ,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;CAW3B;AAED,qBAAa,UAAU,CACrB,CAAC,SAAS,cAAc,GAAG,cAAc,CACzC,SAAQ,QAAQ,CAAC,CAAC,CAAC;IACnB,OAAO,EAAE,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,CAAA;gBAE3B,QAAQ,EAAE,OAAO,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;IAUpD,SAAS,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,GAAG,IAAI;IAK7B,MAAM,IAAI,WAAW,CAAC,CAAC,CAAC;IAYxB,UAAU,IAAI,WAAW,CAAC,CAAC,CAAC;CAO7B"} \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/walker.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/walker.js new file mode 100644 index 0000000000000000000000000000000000000000..cb15946d9a852cf8147b2c53f4021c796df36ef9 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/walker.js @@ -0,0 +1,387 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.GlobStream = exports.GlobWalker = exports.GlobUtil = void 0; +/** + * Single-use utility classes to provide functionality to the {@link Glob} + * methods. + * + * @module + */ +const minipass_1 = require("minipass"); +const ignore_js_1 = require("./ignore.js"); +const processor_js_1 = require("./processor.js"); +const makeIgnore = (ignore, opts) => typeof ignore === 'string' ? new ignore_js_1.Ignore([ignore], opts) + : Array.isArray(ignore) ? new ignore_js_1.Ignore(ignore, opts) + : ignore; +/** + * basic walking utilities that all the glob walker types use + */ +class GlobUtil { + path; + patterns; + opts; + seen = new Set(); + paused = false; + aborted = false; + #onResume = []; + #ignore; + #sep; + signal; + maxDepth; + includeChildMatches; + constructor(patterns, path, opts) { + this.patterns = patterns; + this.path = path; + this.opts = opts; + this.#sep = !opts.posix && opts.platform === 'win32' ? '\\' : '/'; + this.includeChildMatches = opts.includeChildMatches !== false; + if (opts.ignore || !this.includeChildMatches) { + this.#ignore = makeIgnore(opts.ignore ?? 
[], opts); + if (!this.includeChildMatches && + typeof this.#ignore.add !== 'function') { + const m = 'cannot ignore child matches, ignore lacks add() method.'; + throw new Error(m); + } + } + // ignore, always set with maxDepth, but it's optional on the + // GlobOptions type + /* c8 ignore start */ + this.maxDepth = opts.maxDepth || Infinity; + /* c8 ignore stop */ + if (opts.signal) { + this.signal = opts.signal; + this.signal.addEventListener('abort', () => { + this.#onResume.length = 0; + }); + } + } + #ignored(path) { + return this.seen.has(path) || !!this.#ignore?.ignored?.(path); + } + #childrenIgnored(path) { + return !!this.#ignore?.childrenIgnored?.(path); + } + // backpressure mechanism + pause() { + this.paused = true; + } + resume() { + /* c8 ignore start */ + if (this.signal?.aborted) + return; + /* c8 ignore stop */ + this.paused = false; + let fn = undefined; + while (!this.paused && (fn = this.#onResume.shift())) { + fn(); + } + } + onResume(fn) { + if (this.signal?.aborted) + return; + /* c8 ignore start */ + if (!this.paused) { + fn(); + } + else { + /* c8 ignore stop */ + this.#onResume.push(fn); + } + } + // do the requisite realpath/stat checking, and return the path + // to add or undefined to filter it out. + async matchCheck(e, ifDir) { + if (ifDir && this.opts.nodir) + return undefined; + let rpc; + if (this.opts.realpath) { + rpc = e.realpathCached() || (await e.realpath()); + if (!rpc) + return undefined; + e = rpc; + } + const needStat = e.isUnknown() || this.opts.stat; + const s = needStat ? await e.lstat() : e; + if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) { + const target = await s.realpath(); + /* c8 ignore start */ + if (target && (target.isUnknown() || this.opts.stat)) { + await target.lstat(); + } + /* c8 ignore stop */ + } + return this.matchCheckTest(s, ifDir); + } + matchCheckTest(e, ifDir) { + return (e && + (this.maxDepth === Infinity || e.depth() <= this.maxDepth) && + (!ifDir || e.canReaddir()) && + (!this.opts.nodir || !e.isDirectory()) && + (!this.opts.nodir || + !this.opts.follow || + !e.isSymbolicLink() || + !e.realpathCached()?.isDirectory()) && + !this.#ignored(e)) ? + e + : undefined; + } + matchCheckSync(e, ifDir) { + if (ifDir && this.opts.nodir) + return undefined; + let rpc; + if (this.opts.realpath) { + rpc = e.realpathCached() || e.realpathSync(); + if (!rpc) + return undefined; + e = rpc; + } + const needStat = e.isUnknown() || this.opts.stat; + const s = needStat ? e.lstatSync() : e; + if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) { + const target = s.realpathSync(); + if (target && (target?.isUnknown() || this.opts.stat)) { + target.lstatSync(); + } + } + return this.matchCheckTest(s, ifDir); + } + matchFinish(e, absolute) { + if (this.#ignored(e)) + return; + // we know we have an ignore if this is false, but TS doesn't + if (!this.includeChildMatches && this.#ignore?.add) { + const ign = `${e.relativePosix()}/**`; + this.#ignore.add(ign); + } + const abs = this.opts.absolute === undefined ? absolute : this.opts.absolute; + this.seen.add(e); + const mark = this.opts.mark && e.isDirectory() ? this.#sep : ''; + // ok, we have what we need! + if (this.opts.withFileTypes) { + this.matchEmit(e); + } + else if (abs) { + const abs = this.opts.posix ? e.fullpathPosix() : e.fullpath(); + this.matchEmit(abs + mark); + } + else { + const rel = this.opts.posix ? e.relativePosix() : e.relative(); + const pre = this.opts.dotRelative && !rel.startsWith('..' + this.#sep) ? + '.' 
+ this.#sep + : ''; + this.matchEmit(!rel ? '.' + mark : pre + rel + mark); + } + } + async match(e, absolute, ifDir) { + const p = await this.matchCheck(e, ifDir); + if (p) + this.matchFinish(p, absolute); + } + matchSync(e, absolute, ifDir) { + const p = this.matchCheckSync(e, ifDir); + if (p) + this.matchFinish(p, absolute); + } + walkCB(target, patterns, cb) { + /* c8 ignore start */ + if (this.signal?.aborted) + cb(); + /* c8 ignore stop */ + this.walkCB2(target, patterns, new processor_js_1.Processor(this.opts), cb); + } + walkCB2(target, patterns, processor, cb) { + if (this.#childrenIgnored(target)) + return cb(); + if (this.signal?.aborted) + cb(); + if (this.paused) { + this.onResume(() => this.walkCB2(target, patterns, processor, cb)); + return; + } + processor.processPatterns(target, patterns); + // done processing. all of the above is sync, can be abstracted out. + // subwalks is a map of paths to the entry filters they need + // matches is a map of paths to [absolute, ifDir] tuples. + let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + tasks++; + this.match(m, absolute, ifDir).then(() => next()); + } + for (const t of processor.subwalkTargets()) { + if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) { + continue; + } + tasks++; + const childrenCached = t.readdirCached(); + if (t.calledReaddir()) + this.walkCB3(t, childrenCached, processor, next); + else { + t.readdirCB((_, entries) => this.walkCB3(t, entries, processor, next), true); + } + } + next(); + } + walkCB3(target, entries, processor, cb) { + processor = processor.filterEntries(target, entries); + let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + tasks++; + this.match(m, absolute, ifDir).then(() => next()); + } + for (const [target, patterns] of processor.subwalks.entries()) { + tasks++; + this.walkCB2(target, patterns, processor.child(), next); + } + next(); + } + walkCBSync(target, patterns, cb) { + /* c8 ignore start */ + if (this.signal?.aborted) + cb(); + /* c8 ignore stop */ + this.walkCB2Sync(target, patterns, new processor_js_1.Processor(this.opts), cb); + } + walkCB2Sync(target, patterns, processor, cb) { + if (this.#childrenIgnored(target)) + return cb(); + if (this.signal?.aborted) + cb(); + if (this.paused) { + this.onResume(() => this.walkCB2Sync(target, patterns, processor, cb)); + return; + } + processor.processPatterns(target, patterns); + // done processing. all of the above is sync, can be abstracted out. + // subwalks is a map of paths to the entry filters they need + // matches is a map of paths to [absolute, ifDir] tuples. 
+ let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + this.matchSync(m, absolute, ifDir); + } + for (const t of processor.subwalkTargets()) { + if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) { + continue; + } + tasks++; + const children = t.readdirSync(); + this.walkCB3Sync(t, children, processor, next); + } + next(); + } + walkCB3Sync(target, entries, processor, cb) { + processor = processor.filterEntries(target, entries); + let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + this.matchSync(m, absolute, ifDir); + } + for (const [target, patterns] of processor.subwalks.entries()) { + tasks++; + this.walkCB2Sync(target, patterns, processor.child(), next); + } + next(); + } +} +exports.GlobUtil = GlobUtil; +class GlobWalker extends GlobUtil { + matches = new Set(); + constructor(patterns, path, opts) { + super(patterns, path, opts); + } + matchEmit(e) { + this.matches.add(e); + } + async walk() { + if (this.signal?.aborted) + throw this.signal.reason; + if (this.path.isUnknown()) { + await this.path.lstat(); + } + await new Promise((res, rej) => { + this.walkCB(this.path, this.patterns, () => { + if (this.signal?.aborted) { + rej(this.signal.reason); + } + else { + res(this.matches); + } + }); + }); + return this.matches; + } + walkSync() { + if (this.signal?.aborted) + throw this.signal.reason; + if (this.path.isUnknown()) { + this.path.lstatSync(); + } + // nothing for the callback to do, because this never pauses + this.walkCBSync(this.path, this.patterns, () => { + if (this.signal?.aborted) + throw this.signal.reason; + }); + return this.matches; + } +} +exports.GlobWalker = GlobWalker; +class GlobStream extends GlobUtil { + results; + constructor(patterns, path, opts) { + super(patterns, path, opts); + this.results = new minipass_1.Minipass({ + signal: this.signal, + objectMode: true, + }); + this.results.on('drain', () => this.resume()); + this.results.on('resume', () => this.resume()); + } + matchEmit(e) { + this.results.write(e); + if (!this.results.flowing) + this.pause(); + } + stream() { + const target = this.path; + if (target.isUnknown()) { + target.lstat().then(() => { + this.walkCB(target, this.patterns, () => this.results.end()); + }); + } + else { + this.walkCB(target, this.patterns, () => this.results.end()); + } + return this.results; + } + streamSync() { + if (this.path.isUnknown()) { + this.path.lstatSync(); + } + this.walkCBSync(this.path, this.patterns, () => this.results.end()); + return this.results; + } +} +exports.GlobStream = GlobStream; +//# sourceMappingURL=walker.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/walker.js.map b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/walker.js.map new file mode 100644 index 0000000000000000000000000000000000000000..49b013864d534bfba5b0357f69223f32f4ffd37b --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/commonjs/walker.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"walker.js","sourceRoot":"","sources":["../../src/walker.ts"],"names":[],"mappings":";;;AAAA;;;;;GAKG;AACH,uCAAmC;AAEnC,2CAAgD;AAQhD,iDAA0C;AA0D1C,MAAM,UAAU,GAAG,CACjB,MAAsC,EACtC,IAAoB,EACR,EAAE,CACd,OAAO,MAAM,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,kBAAM,CAAC,CAAC,MAAM,CAAC,EAAE,IAAI,CAAC;IACvD,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,IAAI,kBAAM,CAAC,MAAM,EAAE,IAAI,CAAC;QAClD,CAAC,CAAC,MAAM,CAAA;AAEV;;GAEG;AACH,MAAsB,QAAQ;IAC5B,IAAI,CAAM;IACV,QAAQ,CAAW;IACnB,IAAI,CAAG;IACP,IAAI,GAAc,IAAI,GAAG,EAAQ,CAAA;IACjC,MAAM,GAAY,KAAK,CAAA;IACvB,OAAO,GAAY,KAAK,CAAA;IACxB,SAAS,GAAkB,EAAE,CAAA;IAC7B,OAAO,CAAa;IACpB,IAAI,CAAY;IAChB,MAAM,CAAc;IACpB,QAAQ,CAAQ;IAChB,mBAAmB,CAAS;IAG5B,YAAY,QAAmB,EAAE,IAAU,EAAE,IAAO;QAClD,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;QACxB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAA;QAChB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAA;QAChB,IAAI,CAAC,IAAI,GAAG,CAAC,IAAI,CAAC,KAAK,IAAI,IAAI,CAAC,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAA;QACjE,IAAI,CAAC,mBAAmB,GAAG,IAAI,CAAC,mBAAmB,KAAK,KAAK,CAAA;QAC7D,IAAI,IAAI,CAAC,MAAM,IAAI,CAAC,IAAI,CAAC,mBAAmB,EAAE,CAAC;YAC7C,IAAI,CAAC,OAAO,GAAG,UAAU,CAAC,IAAI,CAAC,MAAM,IAAI,EAAE,EAAE,IAAI,CAAC,CAAA;YAClD,IACE,CAAC,IAAI,CAAC,mBAAmB;gBACzB,OAAO,IAAI,CAAC,OAAO,CAAC,GAAG,KAAK,UAAU,EACtC,CAAC;gBACD,MAAM,CAAC,GAAG,yDAAyD,CAAA;gBACnE,MAAM,IAAI,KAAK,CAAC,CAAC,CAAC,CAAA;YACpB,CAAC;QACH,CAAC;QACD,6DAA6D;QAC7D,mBAAmB;QACnB,qBAAqB;QACrB,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,QAAQ,CAAA;QACzC,oBAAoB;QACpB,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC;YAChB,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;YACzB,IAAI,CAAC,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE;gBACzC,IAAI,CAAC,SAAS,CAAC,MAAM,GAAG,CAAC,CAAA;YAC3B,CAAC,CAAC,CAAA;QACJ,CAAC;IACH,CAAC;IAED,QAAQ,CAAC,IAAU;QACjB,OAAO,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,EAAE,CAAC,IAAI,CAAC,CAAA;IAC/D,CAAC;IACD,gBAAgB,CAAC,IAAU;QACzB,OAAO,CAAC,CAAC,IAAI,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,IAAI,CAAC,CAAA;IAChD,CAAC;IAED,yBAAyB;IACzB,KAAK;QACH,IAAI,CAAC,MAAM,GAAG,IAAI,CAAA;IACpB,CAAC;IACD,MAAM;QACJ,qBAAqB;QACrB,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO;YAAE,OAAM;QAChC,oBAAoB;QACpB,IAAI,CAAC,MAAM,GAAG,KAAK,CAAA;QACnB,IAAI,EAAE,GAA4B,SAAS,CAAA;QAC3C,OAAO,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,EAAE,GAAG,IAAI,CAAC,SAAS,CAAC,KAAK,EAAE,CAAC,EAAE,CAAC;YACrD,EAAE,EAAE,CAAA;QACN,CAAC;IACH,CAAC;IACD,QAAQ,CAAC,EAAa;QACpB,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO;YAAE,OAAM;QAChC,qBAAqB;QACrB,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC;YACjB,EAAE,EAAE,CAAA;QACN,CAAC;aAAM,CAAC;YACN,oBAAoB;YACpB,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE,CAAC,CAAA;QACzB,CAAC;IACH,CAAC;IAED,+DAA+D;IAC/D,wCAAwC;IACxC,KAAK,CAAC,UAAU,CAAC,CAAO,EAAE,KAAc;QACtC,IAAI,KAAK,IAAI,IAAI,CAAC,IAAI,CAAC,KAAK;YAAE,OAAO,SAAS,CAAA;QAC9C,IAAI,GAAqB,CAAA;QACzB,IAAI,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC;YACvB,GAAG,GAAG,CAAC,CAAC,cAAc,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAA;YAChD,IAAI,CAAC,GAAG;gBAAE,OAAO,SAAS,CAAA;YAC1B,CAAC,GAAG,GAAG,CAAA;QACT,CAAC;QACD,MAAM,QAAQ,GAAG,CAAC,CAAC,SAAS,EAAE,IAAI,IAAI,CAAC,IAAI,CAAC,IAAI,CAAA;QAChD,MAAM,CAAC,GAAG,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,CAAA;QACxC,IAAI,IAAI,CAAC,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,IAAI,CAAC,KAAK,IAAI,CAAC,EAAE,cAAc,EAAE,EAAE,CAAC;YAC/D,MAAM,MAAM,GAAG,MAAM,CAAC,CAAC,QAAQ,EAAE,CAAA;YACjC,qBAAqB;YACrB,IAAI,MAAM,IAAI,CAAC,MAAM,CAAC,SAAS,EAAE,IAAI,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;gBACrD,MAAM,MAAM,CAAC,KAAK,EAAE,CAAA;YACtB,CAAC;YACD,oBAAoB;QACtB,CAAC;QACD,OAAO,IAAI,CAAC,cAAc,CAAC,CAAC,EAAE,KAAK,CAAC,CAAA;IACtC,CAAC;IAED,cAAc,CAAC,CAAmB,EAAE,KAAc;QAChD,OAAO,CACH,CAAC;YACC,CAAC,IAAI,CAAC,QAAQ,KAAK,QAAQ,IAAI,CAAC,C
AAC,KAAK,EAAE,IAAI,IAAI,CAAC,QAAQ,CAAC;YAC1D,CAAC,CAAC,KAAK,IAAI,CAAC,CAAC,UAAU,EAAE,CAAC;YAC1B,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,IAAI,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC;YACtC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK;gBACf,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM;gBACjB,CAAC,CAAC,CAAC,cAAc,EAAE;gBACnB,CAAC,CAAC,CAAC,cAAc,EAAE,EAAE,WAAW,EAAE,CAAC;YACrC,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CACpB,CAAC,CAAC;YACD,CAAC;YACH,CAAC,CAAC,SAAS,CAAA;IACf,CAAC;IAED,cAAc,CAAC,CAAO,EAAE,KAAc;QACpC,IAAI,KAAK,IAAI,IAAI,CAAC,IAAI,CAAC,KAAK;YAAE,OAAO,SAAS,CAAA;QAC9C,IAAI,GAAqB,CAAA;QACzB,IAAI,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC;YACvB,GAAG,GAAG,CAAC,CAAC,cAAc,EAAE,IAAI,CAAC,CAAC,YAAY,EAAE,CAAA;YAC5C,IAAI,CAAC,GAAG;gBAAE,OAAO,SAAS,CAAA;YAC1B,CAAC,GAAG,GAAG,CAAA;QACT,CAAC;QACD,MAAM,QAAQ,GAAG,CAAC,CAAC,SAAS,EAAE,IAAI,IAAI,CAAC,IAAI,CAAC,IAAI,CAAA;QAChD,MAAM,CAAC,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAA;QACtC,IAAI,IAAI,CAAC,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,IAAI,CAAC,KAAK,IAAI,CAAC,EAAE,cAAc,EAAE,EAAE,CAAC;YAC/D,MAAM,MAAM,GAAG,CAAC,CAAC,YAAY,EAAE,CAAA;YAC/B,IAAI,MAAM,IAAI,CAAC,MAAM,EAAE,SAAS,EAAE,IAAI,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;gBACtD,MAAM,CAAC,SAAS,EAAE,CAAA;YACpB,CAAC;QACH,CAAC;QACD,OAAO,IAAI,CAAC,cAAc,CAAC,CAAC,EAAE,KAAK,CAAC,CAAA;IACtC,CAAC;IAKD,WAAW,CAAC,CAAO,EAAE,QAAiB;QACpC,IAAI,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC;YAAE,OAAM;QAC5B,6DAA6D;QAC7D,IAAI,CAAC,IAAI,CAAC,mBAAmB,IAAI,IAAI,CAAC,OAAO,EAAE,GAAG,EAAE,CAAC;YACnD,MAAM,GAAG,GAAG,GAAG,CAAC,CAAC,aAAa,EAAE,KAAK,CAAA;YACrC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,CAAA;QACvB,CAAC;QACD,MAAM,GAAG,GACP,IAAI,CAAC,IAAI,CAAC,QAAQ,KAAK,SAAS,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAA;QAClE,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;QAChB,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAA;QAC/D,4BAA4B;QAC5B,IAAI,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,CAAC;YAC5B,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAA;QACnB,CAAC;aAAM,IAAI,GAAG,EAAE,CAAC;YACf,MAAM,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,aAAa,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAA;YAC9D,IAAI,CAAC,SAAS,CAAC,GAAG,GAAG,IAAI,CAAC,CAAA;QAC5B,CAAC;aAAM,CAAC;YACN,MAAM,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,aAAa,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAA;YAC9D,MAAM,GAAG,GACP,IAAI,CAAC,IAAI,CAAC,WAAW,IAAI,CAAC,GAAG,CAAC,UAAU,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;gBAC1D,GAAG,GAAG,IAAI,CAAC,IAAI;gBACjB,CAAC,CAAC,EAAE,CAAA;YACN,IAAI,CAAC,SAAS,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,GAAG,IAAI,CAAC,CAAC,CAAC,GAAG,GAAG,GAAG,GAAG,IAAI,CAAC,CAAA;QACtD,CAAC;IACH,CAAC;IAED,KAAK,CAAC,KAAK,CAAC,CAAO,EAAE,QAAiB,EAAE,KAAc;QACpD,MAAM,CAAC,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,CAAC,EAAE,KAAK,CAAC,CAAA;QACzC,IAAI,CAAC;YAAE,IAAI,CAAC,WAAW,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAA;IACtC,CAAC;IAED,SAAS,CAAC,CAAO,EAAE,QAAiB,EAAE,KAAc;QAClD,MAAM,CAAC,GAAG,IAAI,CAAC,cAAc,CAAC,CAAC,EAAE,KAAK,CAAC,CAAA;QACvC,IAAI,CAAC;YAAE,IAAI,CAAC,WAAW,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAA;IACtC,CAAC;IAED,MAAM,CAAC,MAAY,EAAE,QAAmB,EAAE,EAAa;QACrD,qBAAqB;QACrB,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO;YAAE,EAAE,EAAE,CAAA;QAC9B,oBAAoB;QACpB,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,QAAQ,EAAE,IAAI,wBAAS,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,EAAE,CAAC,CAAA;IAC9D,CAAC;IAED,OAAO,CACL,MAAY,EACZ,QAAmB,EACnB,SAAoB,EACpB,EAAa;QAEb,IAAI,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC;YAAE,OAAO,EAAE,EAAE,CAAA;QAC9C,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO;YAAE,EAAE,EAAE,CAAA;QAC9B,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC;YAChB,IAAI,CAAC,QAAQ,CAAC,GAAG,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,QAAQ,EAAE,SAAS,EAAE,EAAE,CAAC,CAAC,CAAA;YAClE,OAAM;QACR
,CAAC;QACD,SAAS,CAAC,eAAe,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QAE3C,qEAAqE;QACrE,4DAA4D;QAC5D,yDAAyD;QACzD,IAAI,KAAK,GAAG,CAAC,CAAA;QACb,MAAM,IAAI,GAAG,GAAG,EAAE;YAChB,IAAI,EAAE,KAAK,KAAK,CAAC;gBAAE,EAAE,EAAE,CAAA;QACzB,CAAC,CAAA;QAED,KAAK,MAAM,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,IAAI,SAAS,CAAC,OAAO,CAAC,OAAO,EAAE,EAAE,CAAC;YAC/D,IAAI,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC;gBAAE,SAAQ;YAC9B,KAAK,EAAE,CAAA;YACP,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,IAAI,EAAE,CAAC,CAAA;QACnD,CAAC;QAED,KAAK,MAAM,CAAC,IAAI,SAAS,CAAC,cAAc,EAAE,EAAE,CAAC;YAC3C,IAAI,IAAI,CAAC,QAAQ,KAAK,QAAQ,IAAI,CAAC,CAAC,KAAK,EAAE,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;gBAC7D,SAAQ;YACV,CAAC;YACD,KAAK,EAAE,CAAA;YACP,MAAM,cAAc,GAAG,CAAC,CAAC,aAAa,EAAE,CAAA;YACxC,IAAI,CAAC,CAAC,aAAa,EAAE;gBACnB,IAAI,CAAC,OAAO,CAAC,CAAC,EAAE,cAAc,EAAE,SAAS,EAAE,IAAI,CAAC,CAAA;iBAC7C,CAAC;gBACJ,CAAC,CAAC,SAAS,CACT,CAAC,CAAC,EAAE,OAAO,EAAE,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,EAAE,OAAO,EAAE,SAAS,EAAE,IAAI,CAAC,EACzD,IAAI,CACL,CAAA;YACH,CAAC;QACH,CAAC;QAED,IAAI,EAAE,CAAA;IACR,CAAC;IAED,OAAO,CACL,MAAY,EACZ,OAAe,EACf,SAAoB,EACpB,EAAa;QAEb,SAAS,GAAG,SAAS,CAAC,aAAa,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA;QAEpD,IAAI,KAAK,GAAG,CAAC,CAAA;QACb,MAAM,IAAI,GAAG,GAAG,EAAE;YAChB,IAAI,EAAE,KAAK,KAAK,CAAC;gBAAE,EAAE,EAAE,CAAA;QACzB,CAAC,CAAA;QAED,KAAK,MAAM,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,IAAI,SAAS,CAAC,OAAO,CAAC,OAAO,EAAE,EAAE,CAAC;YAC/D,IAAI,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC;gBAAE,SAAQ;YAC9B,KAAK,EAAE,CAAA;YACP,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,IAAI,EAAE,CAAC,CAAA;QACnD,CAAC;QACD,KAAK,MAAM,CAAC,MAAM,EAAE,QAAQ,CAAC,IAAI,SAAS,CAAC,QAAQ,CAAC,OAAO,EAAE,EAAE,CAAC;YAC9D,KAAK,EAAE,CAAA;YACP,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,QAAQ,EAAE,SAAS,CAAC,KAAK,EAAE,EAAE,IAAI,CAAC,CAAA;QACzD,CAAC;QAED,IAAI,EAAE,CAAA;IACR,CAAC;IAED,UAAU,CAAC,MAAY,EAAE,QAAmB,EAAE,EAAa;QACzD,qBAAqB;QACrB,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO;YAAE,EAAE,EAAE,CAAA;QAC9B,oBAAoB;QACpB,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,QAAQ,EAAE,IAAI,wBAAS,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,EAAE,CAAC,CAAA;IAClE,CAAC;IAED,WAAW,CACT,MAAY,EACZ,QAAmB,EACnB,SAAoB,EACpB,EAAa;QAEb,IAAI,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC;YAAE,OAAO,EAAE,EAAE,CAAA;QAC9C,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO;YAAE,EAAE,EAAE,CAAA;QAC9B,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC;YAChB,IAAI,CAAC,QAAQ,CAAC,GAAG,EAAE,CACjB,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,QAAQ,EAAE,SAAS,EAAE,EAAE,CAAC,CAClD,CAAA;YACD,OAAM;QACR,CAAC;QACD,SAAS,CAAC,eAAe,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QAE3C,qEAAqE;QACrE,4DAA4D;QAC5D,yDAAyD;QACzD,IAAI,KAAK,GAAG,CAAC,CAAA;QACb,MAAM,IAAI,GAAG,GAAG,EAAE;YAChB,IAAI,EAAE,KAAK,KAAK,CAAC;gBAAE,EAAE,EAAE,CAAA;QACzB,CAAC,CAAA;QAED,KAAK,MAAM,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,IAAI,SAAS,CAAC,OAAO,CAAC,OAAO,EAAE,EAAE,CAAC;YAC/D,IAAI,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC;gBAAE,SAAQ;YAC9B,IAAI,CAAC,SAAS,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAA;QACpC,CAAC;QAED,KAAK,MAAM,CAAC,IAAI,SAAS,CAAC,cAAc,EAAE,EAAE,CAAC;YAC3C,IAAI,IAAI,CAAC,QAAQ,KAAK,QAAQ,IAAI,CAAC,CAAC,KAAK,EAAE,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;gBAC7D,SAAQ;YACV,CAAC;YACD,KAAK,EAAE,CAAA;YACP,MAAM,QAAQ,GAAG,CAAC,CAAC,WAAW,EAAE,CAAA;YAChC,IAAI,CAAC,WAAW,CAAC,CAAC,EAAE,QAAQ,EAAE,SAAS,EAAE,IAAI,CAAC,CAAA;QAChD,CAAC;QAED,IAAI,EAAE,CAAA;IACR,CAAC;IAED,WAAW,CACT,MAAY,EACZ,OAAe,EACf,SAAoB,EACpB,EAAa;QAEb,SAAS,GAAG,SAAS,CAAC,aAAa,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA;QAEpD,IAAI,KAAK,GAAG,CAAC,CAAA;QACb,MAAM,IAAI,GAAG,GAAG,EAAE;YAChB,IAAI,EAAE,KAAK,KAAK,CAAC;gBAAE,EAAE,EAAE,CAAA;QACzB,CAAC,CAAA;QAED,KAAK,MAAM,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,IAAI,SAAS,CAAC,OAAO,CAAC,OAAO,EAAE,EAAE,CAAC;YAC/D,IAAI,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC;gBAAE,SAA
Q;YAC9B,IAAI,CAAC,SAAS,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAA;QACpC,CAAC;QACD,KAAK,MAAM,CAAC,MAAM,EAAE,QAAQ,CAAC,IAAI,SAAS,CAAC,QAAQ,CAAC,OAAO,EAAE,EAAE,CAAC;YAC9D,KAAK,EAAE,CAAA;YACP,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,QAAQ,EAAE,SAAS,CAAC,KAAK,EAAE,EAAE,IAAI,CAAC,CAAA;QAC7D,CAAC;QAED,IAAI,EAAE,CAAA;IACR,CAAC;CACF;AAtUD,4BAsUC;AAED,MAAa,UAEX,SAAQ,QAAW;IACnB,OAAO,GAAG,IAAI,GAAG,EAAa,CAAA;IAE9B,YAAY,QAAmB,EAAE,IAAU,EAAE,IAAO;QAClD,KAAK,CAAC,QAAQ,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;IAC7B,CAAC;IAED,SAAS,CAAC,CAAY;QACpB,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;IACrB,CAAC;IAED,KAAK,CAAC,IAAI;QACR,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO;YAAE,MAAM,IAAI,CAAC,MAAM,CAAC,MAAM,CAAA;QAClD,IAAI,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE,CAAC;YAC1B,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,CAAA;QACzB,CAAC;QACD,MAAM,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE;YAC7B,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,QAAQ,EAAE,GAAG,EAAE;gBACzC,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO,EAAE,CAAC;oBACzB,GAAG,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,CAAA;gBACzB,CAAC;qBAAM,CAAC;oBACN,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;gBACnB,CAAC;YACH,CAAC,CAAC,CAAA;QACJ,CAAC,CAAC,CAAA;QACF,OAAO,IAAI,CAAC,OAAO,CAAA;IACrB,CAAC;IAED,QAAQ;QACN,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO;YAAE,MAAM,IAAI,CAAC,MAAM,CAAC,MAAM,CAAA;QAClD,IAAI,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE,CAAC;YAC1B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,CAAA;QACvB,CAAC;QACD,4DAA4D;QAC5D,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,QAAQ,EAAE,GAAG,EAAE;YAC7C,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO;gBAAE,MAAM,IAAI,CAAC,MAAM,CAAC,MAAM,CAAA;QACpD,CAAC,CAAC,CAAA;QACF,OAAO,IAAI,CAAC,OAAO,CAAA;IACrB,CAAC;CACF;AAzCD,gCAyCC;AAED,MAAa,UAEX,SAAQ,QAAW;IACnB,OAAO,CAAgC;IAEvC,YAAY,QAAmB,EAAE,IAAU,EAAE,IAAO;QAClD,KAAK,CAAC,QAAQ,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;QAC3B,IAAI,CAAC,OAAO,GAAG,IAAI,mBAAQ,CAAuB;YAChD,MAAM,EAAE,IAAI,CAAC,MAAM;YACnB,UAAU,EAAE,IAAI;SACjB,CAAC,CAAA;QACF,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,OAAO,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAA;QAC7C,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,QAAQ,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAA;IAChD,CAAC;IAED,SAAS,CAAC,CAAY;QACpB,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,CAAA;QACrB,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO;YAAE,IAAI,CAAC,KAAK,EAAE,CAAA;IACzC,CAAC;IAED,MAAM;QACJ,MAAM,MAAM,GAAG,IAAI,CAAC,IAAI,CAAA;QACxB,IAAI,MAAM,CAAC,SAAS,EAAE,EAAE,CAAC;YACvB,MAAM,CAAC,KAAK,EAAE,CAAC,IAAI,CAAC,GAAG,EAAE;gBACvB,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,IAAI,CAAC,QAAQ,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,CAAA;YAC9D,CAAC,CAAC,CAAA;QACJ,CAAC;aAAM,CAAC;YACN,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,IAAI,CAAC,QAAQ,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,CAAA;QAC9D,CAAC;QACD,OAAO,IAAI,CAAC,OAAO,CAAA;IACrB,CAAC;IAED,UAAU;QACR,IAAI,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE,CAAC;YAC1B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,CAAA;QACvB,CAAC;QACD,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,QAAQ,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,CAAA;QACnE,OAAO,IAAI,CAAC,OAAO,CAAA;IACrB,CAAC;CACF;AAvCD,gCAuCC","sourcesContent":["/**\n * Single-use utility classes to provide functionality to the {@link Glob}\n * methods.\n *\n * @module\n */\nimport { Minipass } from 'minipass'\nimport { Path } from 'path-scurry'\nimport { Ignore, IgnoreLike } from './ignore.js'\n\n// XXX can we somehow make it so that it NEVER processes a given path more than\n// once, enough that the match set tracking is no longer needed? that'd speed\n// things up a lot. 
Or maybe bring back nounique, and skip it in that case?\n\n// a single minimatch set entry with 1 or more parts\nimport { Pattern } from './pattern.js'\nimport { Processor } from './processor.js'\n\nexport interface GlobWalkerOpts {\n absolute?: boolean\n allowWindowsEscape?: boolean\n cwd?: string | URL\n dot?: boolean\n dotRelative?: boolean\n follow?: boolean\n ignore?: string | string[] | IgnoreLike\n mark?: boolean\n matchBase?: boolean\n // Note: maxDepth here means \"maximum actual Path.depth()\",\n // not \"maximum depth beyond cwd\"\n maxDepth?: number\n nobrace?: boolean\n nocase?: boolean\n nodir?: boolean\n noext?: boolean\n noglobstar?: boolean\n platform?: NodeJS.Platform\n posix?: boolean\n realpath?: boolean\n root?: string\n stat?: boolean\n signal?: AbortSignal\n windowsPathsNoEscape?: boolean\n withFileTypes?: boolean\n includeChildMatches?: boolean\n}\n\nexport type GWOFileTypesTrue = GlobWalkerOpts & {\n withFileTypes: true\n}\nexport type GWOFileTypesFalse = GlobWalkerOpts & {\n withFileTypes: false\n}\nexport type GWOFileTypesUnset = GlobWalkerOpts & {\n withFileTypes?: undefined\n}\n\nexport type Result =\n O extends GWOFileTypesTrue ? Path\n : O extends GWOFileTypesFalse ? string\n : O extends GWOFileTypesUnset ? string\n : Path | string\n\nexport type Matches =\n O extends GWOFileTypesTrue ? Set\n : O extends GWOFileTypesFalse ? Set\n : O extends GWOFileTypesUnset ? Set\n : Set\n\nexport type MatchStream = Minipass<\n Result,\n Result\n>\n\nconst makeIgnore = (\n ignore: string | string[] | IgnoreLike,\n opts: GlobWalkerOpts,\n): IgnoreLike =>\n typeof ignore === 'string' ? new Ignore([ignore], opts)\n : Array.isArray(ignore) ? new Ignore(ignore, opts)\n : ignore\n\n/**\n * basic walking utilities that all the glob walker types use\n */\nexport abstract class GlobUtil {\n path: Path\n patterns: Pattern[]\n opts: O\n seen: Set = new Set()\n paused: boolean = false\n aborted: boolean = false\n #onResume: (() => any)[] = []\n #ignore?: IgnoreLike\n #sep: '\\\\' | '/'\n signal?: AbortSignal\n maxDepth: number\n includeChildMatches: boolean\n\n constructor(patterns: Pattern[], path: Path, opts: O)\n constructor(patterns: Pattern[], path: Path, opts: O) {\n this.patterns = patterns\n this.path = path\n this.opts = opts\n this.#sep = !opts.posix && opts.platform === 'win32' ? '\\\\' : '/'\n this.includeChildMatches = opts.includeChildMatches !== false\n if (opts.ignore || !this.includeChildMatches) {\n this.#ignore = makeIgnore(opts.ignore ?? 
[], opts)\n if (\n !this.includeChildMatches &&\n typeof this.#ignore.add !== 'function'\n ) {\n const m = 'cannot ignore child matches, ignore lacks add() method.'\n throw new Error(m)\n }\n }\n // ignore, always set with maxDepth, but it's optional on the\n // GlobOptions type\n /* c8 ignore start */\n this.maxDepth = opts.maxDepth || Infinity\n /* c8 ignore stop */\n if (opts.signal) {\n this.signal = opts.signal\n this.signal.addEventListener('abort', () => {\n this.#onResume.length = 0\n })\n }\n }\n\n #ignored(path: Path): boolean {\n return this.seen.has(path) || !!this.#ignore?.ignored?.(path)\n }\n #childrenIgnored(path: Path): boolean {\n return !!this.#ignore?.childrenIgnored?.(path)\n }\n\n // backpressure mechanism\n pause() {\n this.paused = true\n }\n resume() {\n /* c8 ignore start */\n if (this.signal?.aborted) return\n /* c8 ignore stop */\n this.paused = false\n let fn: (() => any) | undefined = undefined\n while (!this.paused && (fn = this.#onResume.shift())) {\n fn()\n }\n }\n onResume(fn: () => any) {\n if (this.signal?.aborted) return\n /* c8 ignore start */\n if (!this.paused) {\n fn()\n } else {\n /* c8 ignore stop */\n this.#onResume.push(fn)\n }\n }\n\n // do the requisite realpath/stat checking, and return the path\n // to add or undefined to filter it out.\n async matchCheck(e: Path, ifDir: boolean): Promise {\n if (ifDir && this.opts.nodir) return undefined\n let rpc: Path | undefined\n if (this.opts.realpath) {\n rpc = e.realpathCached() || (await e.realpath())\n if (!rpc) return undefined\n e = rpc\n }\n const needStat = e.isUnknown() || this.opts.stat\n const s = needStat ? await e.lstat() : e\n if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) {\n const target = await s.realpath()\n /* c8 ignore start */\n if (target && (target.isUnknown() || this.opts.stat)) {\n await target.lstat()\n }\n /* c8 ignore stop */\n }\n return this.matchCheckTest(s, ifDir)\n }\n\n matchCheckTest(e: Path | undefined, ifDir: boolean): Path | undefined {\n return (\n e &&\n (this.maxDepth === Infinity || e.depth() <= this.maxDepth) &&\n (!ifDir || e.canReaddir()) &&\n (!this.opts.nodir || !e.isDirectory()) &&\n (!this.opts.nodir ||\n !this.opts.follow ||\n !e.isSymbolicLink() ||\n !e.realpathCached()?.isDirectory()) &&\n !this.#ignored(e)\n ) ?\n e\n : undefined\n }\n\n matchCheckSync(e: Path, ifDir: boolean): Path | undefined {\n if (ifDir && this.opts.nodir) return undefined\n let rpc: Path | undefined\n if (this.opts.realpath) {\n rpc = e.realpathCached() || e.realpathSync()\n if (!rpc) return undefined\n e = rpc\n }\n const needStat = e.isUnknown() || this.opts.stat\n const s = needStat ? e.lstatSync() : e\n if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) {\n const target = s.realpathSync()\n if (target && (target?.isUnknown() || this.opts.stat)) {\n target.lstatSync()\n }\n }\n return this.matchCheckTest(s, ifDir)\n }\n\n abstract matchEmit(p: Result): void\n abstract matchEmit(p: string | Path): void\n\n matchFinish(e: Path, absolute: boolean) {\n if (this.#ignored(e)) return\n // we know we have an ignore if this is false, but TS doesn't\n if (!this.includeChildMatches && this.#ignore?.add) {\n const ign = `${e.relativePosix()}/**`\n this.#ignore.add(ign)\n }\n const abs =\n this.opts.absolute === undefined ? absolute : this.opts.absolute\n this.seen.add(e)\n const mark = this.opts.mark && e.isDirectory() ? 
this.#sep : ''\n // ok, we have what we need!\n if (this.opts.withFileTypes) {\n this.matchEmit(e)\n } else if (abs) {\n const abs = this.opts.posix ? e.fullpathPosix() : e.fullpath()\n this.matchEmit(abs + mark)\n } else {\n const rel = this.opts.posix ? e.relativePosix() : e.relative()\n const pre =\n this.opts.dotRelative && !rel.startsWith('..' + this.#sep) ?\n '.' + this.#sep\n : ''\n this.matchEmit(!rel ? '.' + mark : pre + rel + mark)\n }\n }\n\n async match(e: Path, absolute: boolean, ifDir: boolean): Promise {\n const p = await this.matchCheck(e, ifDir)\n if (p) this.matchFinish(p, absolute)\n }\n\n matchSync(e: Path, absolute: boolean, ifDir: boolean): void {\n const p = this.matchCheckSync(e, ifDir)\n if (p) this.matchFinish(p, absolute)\n }\n\n walkCB(target: Path, patterns: Pattern[], cb: () => any) {\n /* c8 ignore start */\n if (this.signal?.aborted) cb()\n /* c8 ignore stop */\n this.walkCB2(target, patterns, new Processor(this.opts), cb)\n }\n\n walkCB2(\n target: Path,\n patterns: Pattern[],\n processor: Processor,\n cb: () => any,\n ) {\n if (this.#childrenIgnored(target)) return cb()\n if (this.signal?.aborted) cb()\n if (this.paused) {\n this.onResume(() => this.walkCB2(target, patterns, processor, cb))\n return\n }\n processor.processPatterns(target, patterns)\n\n // done processing. all of the above is sync, can be abstracted out.\n // subwalks is a map of paths to the entry filters they need\n // matches is a map of paths to [absolute, ifDir] tuples.\n let tasks = 1\n const next = () => {\n if (--tasks === 0) cb()\n }\n\n for (const [m, absolute, ifDir] of processor.matches.entries()) {\n if (this.#ignored(m)) continue\n tasks++\n this.match(m, absolute, ifDir).then(() => next())\n }\n\n for (const t of processor.subwalkTargets()) {\n if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) {\n continue\n }\n tasks++\n const childrenCached = t.readdirCached()\n if (t.calledReaddir())\n this.walkCB3(t, childrenCached, processor, next)\n else {\n t.readdirCB(\n (_, entries) => this.walkCB3(t, entries, processor, next),\n true,\n )\n }\n }\n\n next()\n }\n\n walkCB3(\n target: Path,\n entries: Path[],\n processor: Processor,\n cb: () => any,\n ) {\n processor = processor.filterEntries(target, entries)\n\n let tasks = 1\n const next = () => {\n if (--tasks === 0) cb()\n }\n\n for (const [m, absolute, ifDir] of processor.matches.entries()) {\n if (this.#ignored(m)) continue\n tasks++\n this.match(m, absolute, ifDir).then(() => next())\n }\n for (const [target, patterns] of processor.subwalks.entries()) {\n tasks++\n this.walkCB2(target, patterns, processor.child(), next)\n }\n\n next()\n }\n\n walkCBSync(target: Path, patterns: Pattern[], cb: () => any) {\n /* c8 ignore start */\n if (this.signal?.aborted) cb()\n /* c8 ignore stop */\n this.walkCB2Sync(target, patterns, new Processor(this.opts), cb)\n }\n\n walkCB2Sync(\n target: Path,\n patterns: Pattern[],\n processor: Processor,\n cb: () => any,\n ) {\n if (this.#childrenIgnored(target)) return cb()\n if (this.signal?.aborted) cb()\n if (this.paused) {\n this.onResume(() =>\n this.walkCB2Sync(target, patterns, processor, cb),\n )\n return\n }\n processor.processPatterns(target, patterns)\n\n // done processing. 
all of the above is sync, can be abstracted out.\n // subwalks is a map of paths to the entry filters they need\n // matches is a map of paths to [absolute, ifDir] tuples.\n let tasks = 1\n const next = () => {\n if (--tasks === 0) cb()\n }\n\n for (const [m, absolute, ifDir] of processor.matches.entries()) {\n if (this.#ignored(m)) continue\n this.matchSync(m, absolute, ifDir)\n }\n\n for (const t of processor.subwalkTargets()) {\n if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) {\n continue\n }\n tasks++\n const children = t.readdirSync()\n this.walkCB3Sync(t, children, processor, next)\n }\n\n next()\n }\n\n walkCB3Sync(\n target: Path,\n entries: Path[],\n processor: Processor,\n cb: () => any,\n ) {\n processor = processor.filterEntries(target, entries)\n\n let tasks = 1\n const next = () => {\n if (--tasks === 0) cb()\n }\n\n for (const [m, absolute, ifDir] of processor.matches.entries()) {\n if (this.#ignored(m)) continue\n this.matchSync(m, absolute, ifDir)\n }\n for (const [target, patterns] of processor.subwalks.entries()) {\n tasks++\n this.walkCB2Sync(target, patterns, processor.child(), next)\n }\n\n next()\n }\n}\n\nexport class GlobWalker<\n O extends GlobWalkerOpts = GlobWalkerOpts,\n> extends GlobUtil {\n matches = new Set>()\n\n constructor(patterns: Pattern[], path: Path, opts: O) {\n super(patterns, path, opts)\n }\n\n matchEmit(e: Result): void {\n this.matches.add(e)\n }\n\n async walk(): Promise>> {\n if (this.signal?.aborted) throw this.signal.reason\n if (this.path.isUnknown()) {\n await this.path.lstat()\n }\n await new Promise((res, rej) => {\n this.walkCB(this.path, this.patterns, () => {\n if (this.signal?.aborted) {\n rej(this.signal.reason)\n } else {\n res(this.matches)\n }\n })\n })\n return this.matches\n }\n\n walkSync(): Set> {\n if (this.signal?.aborted) throw this.signal.reason\n if (this.path.isUnknown()) {\n this.path.lstatSync()\n }\n // nothing for the callback to do, because this never pauses\n this.walkCBSync(this.path, this.patterns, () => {\n if (this.signal?.aborted) throw this.signal.reason\n })\n return this.matches\n }\n}\n\nexport class GlobStream<\n O extends GlobWalkerOpts = GlobWalkerOpts,\n> extends GlobUtil {\n results: Minipass, Result>\n\n constructor(patterns: Pattern[], path: Path, opts: O) {\n super(patterns, path, opts)\n this.results = new Minipass, Result>({\n signal: this.signal,\n objectMode: true,\n })\n this.results.on('drain', () => this.resume())\n this.results.on('resume', () => this.resume())\n }\n\n matchEmit(e: Result): void {\n this.results.write(e)\n if (!this.results.flowing) this.pause()\n }\n\n stream(): MatchStream {\n const target = this.path\n if (target.isUnknown()) {\n target.lstat().then(() => {\n this.walkCB(target, this.patterns, () => this.results.end())\n })\n } else {\n this.walkCB(target, this.patterns, () => this.results.end())\n }\n return this.results\n }\n\n streamSync(): MatchStream {\n if (this.path.isUnknown()) {\n this.path.lstatSync()\n }\n this.walkCBSync(this.path, this.patterns, () => this.results.end())\n return this.results\n }\n}\n"]} \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/bin.d.mts b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/bin.d.mts new file mode 100644 index 0000000000000000000000000000000000000000..77298e477081756086e9db739440c9422f97e7ee --- /dev/null +++ 
b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/bin.d.mts @@ -0,0 +1,3 @@ +#!/usr/bin/env node +export {}; +//# sourceMappingURL=bin.d.mts.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/bin.d.mts.map b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/bin.d.mts.map new file mode 100644 index 0000000000000000000000000000000000000000..ec64bdda861bc9d0632eb010ac3a572aa8a5e50b --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/bin.d.mts.map @@ -0,0 +1 @@ +{"version":3,"file":"bin.d.mts","sourceRoot":"","sources":["../../src/bin.mts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/bin.mjs b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/bin.mjs new file mode 100644 index 0000000000000000000000000000000000000000..553bb79303d9018965b1f54b135611c7aa1980dd --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/bin.mjs @@ -0,0 +1,276 @@ +#!/usr/bin/env node +import { foregroundChild } from 'foreground-child'; +import { existsSync } from 'fs'; +import { jack } from 'jackspeak'; +import { loadPackageJson } from 'package-json-from-dist'; +import { join } from 'path'; +import { globStream } from './index.js'; +const { version } = loadPackageJson(import.meta.url, '../package.json'); +const j = jack({ + usage: 'glob [options] [<pattern> [<pattern> ...]]', +}) + .description(` + Glob v${version} + + Expand the positional glob expression arguments into any matching file + system paths found. + `) + .opt({ + cmd: { + short: 'c', + hint: 'command', + description: `Run the command provided, passing the glob expression + matches as arguments.`, + }, +}) + .opt({ + default: { + short: 'p', + hint: 'pattern', + description: `If no positional arguments are provided, glob will use + this pattern`, + }, +}) + .flag({ + all: { + short: 'A', + description: `By default, the glob cli command will not expand any + arguments that are an exact match to a file on disk. + + This prevents double-expanding, in case the shell expands + an argument whose filename is a glob expression. + + For example, if 'app/*.ts' would match 'app/[id].ts', then + on Windows powershell or cmd.exe, 'glob app/*.ts' will + expand to 'app/[id].ts', as expected. However, in posix + shells such as bash or zsh, the shell will first expand + 'app/*.ts' to a list of filenames. Then glob will look + for a file matching 'app/[id].ts' (ie, 'app/i.ts' or + 'app/d.ts'), which is unexpected. + + Setting '--all' prevents this behavior, causing glob + to treat ALL patterns as glob expressions to be expanded, + even if they are an exact match to a file on disk. + + When setting this option, be sure to enquote arguments + so that the shell will not expand them prior to passing + them to the glob command process. + `, + }, + absolute: { + short: 'a', + description: 'Expand to absolute paths', + }, + 'dot-relative': { + short: 'd', + description: `Prepend './' on relative matches`, + }, + mark: { + short: 'm', + description: `Append a / on any directories matched`, + }, + posix: { + short: 'x', + description: `Always resolve to posix style paths, using '/' as the + directory separator, even on Windows. 
Drive letter + absolute matches on Windows will be expanded to their + full resolved UNC paths, eg instead of 'C:\\foo\\bar', + it will expand to '//?/C:/foo/bar'. + `, + }, + follow: { + short: 'f', + description: `Follow symlinked directories when expanding '**'`, + }, + realpath: { + short: 'R', + description: `Call 'fs.realpath' on all of the results. In the case + of an entry that cannot be resolved, the entry is + omitted. This incurs a slight performance penalty, of + course, because of the added system calls.`, + }, + stat: { + short: 's', + description: `Call 'fs.lstat' on all entries, whether required or not + to determine if it's a valid match.`, + }, + 'match-base': { + short: 'b', + description: `Perform a basename-only match if the pattern does not + contain any slash characters. That is, '*.js' would be + treated as equivalent to '**/*.js', matching js files + in all directories. + `, + }, + dot: { + description: `Allow patterns to match files/directories that start + with '.', even if the pattern does not start with '.' + `, + }, + nobrace: { + description: 'Do not expand {...} patterns', + }, + nocase: { + description: `Perform a case-insensitive match. This defaults to + 'true' on macOS and Windows platforms, and false on + all others. + + Note: 'nocase' should only be explicitly set when it is + known that the filesystem's case sensitivity differs + from the platform default. If set 'true' on + case-insensitive file systems, then the walk may return + more or less results than expected. + `, + }, + nodir: { + description: `Do not match directories, only files. + + Note: to *only* match directories, append a '/' at the + end of the pattern. + `, + }, + noext: { + description: `Do not expand extglob patterns, such as '+(a|b)'`, + }, + noglobstar: { + description: `Do not expand '**' against multiple path portions. + Ie, treat it as a normal '*' instead.`, + }, + 'windows-path-no-escape': { + description: `Use '\\' as a path separator *only*, and *never* as an + escape character. If set, all '\\' characters are + replaced with '/' in the pattern.`, + }, +}) + .num({ + 'max-depth': { + short: 'D', + description: `Maximum depth to traverse from the current + working directory`, + }, +}) + .opt({ + cwd: { + short: 'C', + description: 'Current working directory to execute/match in', + default: process.cwd(), + }, + root: { + short: 'r', + description: `A string path resolved against the 'cwd', which is + used as the starting point for absolute patterns that + start with '/' (but not drive letters or UNC paths + on Windows). + + Note that this *doesn't* necessarily limit the walk to + the 'root' directory, and doesn't affect the cwd + starting point for non-absolute patterns. A pattern + containing '..' will still be able to traverse out of + the root directory, if it is not an actual root directory + on the filesystem, and any non-absolute patterns will + still be matched in the 'cwd'. + + To start absolute and non-absolute patterns in the same + path, you can use '--root=' to set it to the empty + string. However, be aware that on Windows systems, a + pattern like 'x:/*' or '//host/share/*' will *always* + start in the 'x:/' or '//host/share/' directory, + regardless of the --root setting. + `, + }, + platform: { + description: `Defaults to the value of 'process.platform' if + available, or 'linux' if not.
Setting --platform=win32 + on non-Windows systems may cause strange behavior!`, + validOptions: [ + 'aix', + 'android', + 'darwin', + 'freebsd', + 'haiku', + 'linux', + 'openbsd', + 'sunos', + 'win32', + 'cygwin', + 'netbsd', + ], + }, +}) + .optList({ + ignore: { + short: 'i', + description: `Glob patterns to ignore`, + }, +}) + .flag({ + debug: { + short: 'v', + description: `Output a huge amount of noisy debug information about + patterns as they are parsed and used to match files.`, + }, + version: { + short: 'V', + description: `Output the version (${version})`, + }, + help: { + short: 'h', + description: 'Show this usage information', + }, +}); +try { + const { positionals, values } = j.parse(); + if (values.version) { + console.log(version); + process.exit(0); + } + if (values.help) { + console.log(j.usage()); + process.exit(0); + } + if (positionals.length === 0 && !values.default) + throw 'No patterns provided'; + if (positionals.length === 0 && values.default) + positionals.push(values.default); + const patterns = values.all ? positionals : positionals.filter(p => !existsSync(p)); + const matches = values.all ? + [] + : positionals.filter(p => existsSync(p)).map(p => join(p)); + const stream = globStream(patterns, { + absolute: values.absolute, + cwd: values.cwd, + dot: values.dot, + dotRelative: values['dot-relative'], + follow: values.follow, + ignore: values.ignore, + mark: values.mark, + matchBase: values['match-base'], + maxDepth: values['max-depth'], + nobrace: values.nobrace, + nocase: values.nocase, + nodir: values.nodir, + noext: values.noext, + noglobstar: values.noglobstar, + platform: values.platform, + realpath: values.realpath, + root: values.root, + stat: values.stat, + debug: values.debug, + posix: values.posix, + }); + const cmd = values.cmd; + if (!cmd) { + matches.forEach(m => console.log(m)); + stream.on('data', f => console.log(f)); + } + else { + stream.on('data', f => matches.push(f)); + stream.on('end', () => foregroundChild(cmd, matches, { shell: true })); + } +} +catch (e) { + console.error(j.usage()); + console.error(e instanceof Error ? 
e.message : String(e)); + process.exit(1); +} +//# sourceMappingURL=bin.mjs.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/bin.mjs.map b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/bin.mjs.map new file mode 100644 index 0000000000000000000000000000000000000000..a08cfb7e443dd48526955733da8b580de0a30bc4 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/bin.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"bin.mjs","sourceRoot":"","sources":["../../src/bin.mts"],"names":[],"mappings":";AACA,OAAO,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAA;AAClD,OAAO,EAAE,UAAU,EAAE,MAAM,IAAI,CAAA;AAC/B,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAA;AAChC,OAAO,EAAE,eAAe,EAAE,MAAM,wBAAwB,CAAA;AACxD,OAAO,EAAE,IAAI,EAAE,MAAM,MAAM,CAAA;AAC3B,OAAO,EAAE,UAAU,EAAE,MAAM,YAAY,CAAA;AAEvC,MAAM,EAAE,OAAO,EAAE,GAAG,eAAe,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,iBAAiB,CAAC,CAAA;AAEvE,MAAM,CAAC,GAAG,IAAI,CAAC;IACb,KAAK,EAAE,4CAA4C;CACpD,CAAC;KACC,WAAW,CACV;YACQ,OAAO;;;;GAIhB,CACA;KACA,GAAG,CAAC;IACH,GAAG,EAAE;QACH,KAAK,EAAE,GAAG;QACV,IAAI,EAAE,SAAS;QACf,WAAW,EAAE;0CACuB;KACrC;CACF,CAAC;KACD,GAAG,CAAC;IACH,OAAO,EAAE;QACP,KAAK,EAAE,GAAG;QACV,IAAI,EAAE,SAAS;QACf,WAAW,EAAE;iCACc;KAC5B;CACF,CAAC;KACD,IAAI,CAAC;IACJ,GAAG,EAAE;QACH,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;;;;;;;;;;;;;;;;;;;OAqBZ;KACF;IACD,QAAQ,EAAE;QACR,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,0BAA0B;KACxC;IACD,cAAc,EAAE;QACd,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,kCAAkC;KAChD;IACD,IAAI,EAAE;QACJ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,uCAAuC;KACrD;IACD,KAAK,EAAE;QACL,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;;;OAKZ;KACF;IAED,MAAM,EAAE;QACN,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,kDAAkD;KAChE;IACD,QAAQ,EAAE;QACR,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;+DAG4C;KAC1D;IACD,IAAI,EAAE;QACJ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;wDACqC;KACnD;IACD,YAAY,EAAE;QACZ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;;OAIZ;KACF;IAED,GAAG,EAAE;QACH,WAAW,EAAE;;OAEZ;KACF;IACD,OAAO,EAAE;QACP,WAAW,EAAE,8BAA8B;KAC5C;IACD,MAAM,EAAE;QACN,WAAW,EAAE;;;;;;;;;OASZ;KACF;IACD,KAAK,EAAE;QACL,WAAW,EAAE;;;;OAIZ;KACF;IACD,KAAK,EAAE;QACL,WAAW,EAAE,kDAAkD;KAChE;IACD,UAAU,EAAE;QACV,WAAW,EAAE;0DACuC;KACrD;IACD,wBAAwB,EAAE;QACxB,WAAW,EAAE;;sDAEmC;KACjD;CACF,CAAC;KACD,GAAG,CAAC;IACH,WAAW,EAAE;QACX,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;sCACmB;KACjC;CACF,CAAC;KACD,GAAG,CAAC;IACH,GAAG,EAAE;QACH,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,+CAA+C;QAC5D,OAAO,EAAE,OAAO,CAAC,GAAG,EAAE;KACvB;IACD,IAAI,EAAE;QACJ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;;;;;;;;;;;;;;;;;;;OAmBZ;KACF;IACD,QAAQ,EAAE;QACR,WAAW,EAAE;;uEAEoD;QACjE,YAAY,EAAE;YACZ,KAAK;YACL,SAAS;YACT,QAAQ;YACR,SAAS;YACT,OAAO;YACP,OAAO;YACP,SAAS;YACT,OAAO;YACP,OAAO;YACP,QAAQ;YACR,QAAQ;SACT;KACF;CACF,CAAC;KACD,OAAO,CAAC;IACP,MAAM,EAAE;QACN,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,yBAAyB;KACvC;CACF,CAAC;KACD,IAAI,CAAC;IACJ,KAAK,EAAE;QACL,KAAK,EAAE,GAAG;QACV,WAAW,EAAE;yEACsD;KACpE;IACD,OAAO,EAAE;QACP,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,uBAAuB,OAAO,GAAG;KAC/C;IACD,IAAI,EAAE;QACJ,KAAK,EAAE,GAAG;QACV,WAAW,EAAE,6BAA6B;KAC3C;CACF,CAAC,CAAA;AAEJ,IAAI,CAAC;IACH,MAAM,EAAE,WAAW,EAAE,MAAM,EAAE,GAAG,CAAC,CAAC,KAAK,EAAE,CAAA;IACzC,IAAI,MAAM,CAAC,OAAO,EAAE,CAAC;QACnB,OAAO,CAAC,GAAG,CAAC,OAAO,CAAC,CAAA;QACpB,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;IACjB,CAAC;IACD,IAAI,MAAM,CAAC,IAAI,EAAE,CAAC;QAChB,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAA;QACtB,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;IACjB,CAAC;IACD,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO;QAC7C,MAAM,sBAAsB,CAAA;IAC9B,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,IAAI,MAAM,CAAC,OAAO;QAC5C,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC
,CAAA;IAClC,MAAM,QAAQ,GACZ,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAA;IACpE,MAAM,OAAO,GACX,MAAM,CAAC,GAAG,CAAC,CAAC;QACV,EAAE;QACJ,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAA;IAC5D,MAAM,MAAM,GAAG,UAAU,CAAC,QAAQ,EAAE;QAClC,QAAQ,EAAE,MAAM,CAAC,QAAQ;QACzB,GAAG,EAAE,MAAM,CAAC,GAAG;QACf,GAAG,EAAE,MAAM,CAAC,GAAG;QACf,WAAW,EAAE,MAAM,CAAC,cAAc,CAAC;QACnC,MAAM,EAAE,MAAM,CAAC,MAAM;QACrB,MAAM,EAAE,MAAM,CAAC,MAAM;QACrB,IAAI,EAAE,MAAM,CAAC,IAAI;QACjB,SAAS,EAAE,MAAM,CAAC,YAAY,CAAC;QAC/B,QAAQ,EAAE,MAAM,CAAC,WAAW,CAAC;QAC7B,OAAO,EAAE,MAAM,CAAC,OAAO;QACvB,MAAM,EAAE,MAAM,CAAC,MAAM;QACrB,KAAK,EAAE,MAAM,CAAC,KAAK;QACnB,KAAK,EAAE,MAAM,CAAC,KAAK;QACnB,UAAU,EAAE,MAAM,CAAC,UAAU;QAC7B,QAAQ,EAAE,MAAM,CAAC,QAAuC;QACxD,QAAQ,EAAE,MAAM,CAAC,QAAQ;QACzB,IAAI,EAAE,MAAM,CAAC,IAAI;QACjB,IAAI,EAAE,MAAM,CAAC,IAAI;QACjB,KAAK,EAAE,MAAM,CAAC,KAAK;QACnB,KAAK,EAAE,MAAM,CAAC,KAAK;KACpB,CAAC,CAAA;IAEF,MAAM,GAAG,GAAG,MAAM,CAAC,GAAG,CAAA;IACtB,IAAI,CAAC,GAAG,EAAE,CAAC;QACT,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAA;QACpC,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAA;IACxC,CAAC;SAAM,CAAC;QACN,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAA;QACvC,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE,CAAC,eAAe,CAAC,GAAG,EAAE,OAAO,EAAE,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC,CAAA;IACxE,CAAC;AACH,CAAC;AAAC,OAAO,CAAC,EAAE,CAAC;IACX,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAA;IACxB,OAAO,CAAC,KAAK,CAAC,CAAC,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAA;IACzD,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;AACjB,CAAC","sourcesContent":["#!/usr/bin/env node\nimport { foregroundChild } from 'foreground-child'\nimport { existsSync } from 'fs'\nimport { jack } from 'jackspeak'\nimport { loadPackageJson } from 'package-json-from-dist'\nimport { join } from 'path'\nimport { globStream } from './index.js'\n\nconst { version } = loadPackageJson(import.meta.url, '../package.json')\n\nconst j = jack({\n usage: 'glob [options] [ [ ...]]',\n})\n .description(\n `\n Glob v${version}\n\n Expand the positional glob expression arguments into any matching file\n system paths found.\n `,\n )\n .opt({\n cmd: {\n short: 'c',\n hint: 'command',\n description: `Run the command provided, passing the glob expression\n matches as arguments.`,\n },\n })\n .opt({\n default: {\n short: 'p',\n hint: 'pattern',\n description: `If no positional arguments are provided, glob will use\n this pattern`,\n },\n })\n .flag({\n all: {\n short: 'A',\n description: `By default, the glob cli command will not expand any\n arguments that are an exact match to a file on disk.\n\n This prevents double-expanding, in case the shell expands\n an argument whose filename is a glob expression.\n\n For example, if 'app/*.ts' would match 'app/[id].ts', then\n on Windows powershell or cmd.exe, 'glob app/*.ts' will\n expand to 'app/[id].ts', as expected. However, in posix\n shells such as bash or zsh, the shell will first expand\n 'app/*.ts' to a list of filenames. 
Then glob will look\n for a file matching 'app/[id].ts' (ie, 'app/i.ts' or\n 'app/d.ts'), which is unexpected.\n\n Setting '--all' prevents this behavior, causing glob\n to treat ALL patterns as glob expressions to be expanded,\n even if they are an exact match to a file on disk.\n\n When setting this option, be sure to enquote arguments\n so that the shell will not expand them prior to passing\n them to the glob command process.\n `,\n },\n absolute: {\n short: 'a',\n description: 'Expand to absolute paths',\n },\n 'dot-relative': {\n short: 'd',\n description: `Prepend './' on relative matches`,\n },\n mark: {\n short: 'm',\n description: `Append a / on any directories matched`,\n },\n posix: {\n short: 'x',\n description: `Always resolve to posix style paths, using '/' as the\n directory separator, even on Windows. Drive letter\n absolute matches on Windows will be expanded to their\n full resolved UNC paths, eg instead of 'C:\\\\foo\\\\bar',\n it will expand to '//?/C:/foo/bar'.\n `,\n },\n\n follow: {\n short: 'f',\n description: `Follow symlinked directories when expanding '**'`,\n },\n realpath: {\n short: 'R',\n description: `Call 'fs.realpath' on all of the results. In the case\n of an entry that cannot be resolved, the entry is\n omitted. This incurs a slight performance penalty, of\n course, because of the added system calls.`,\n },\n stat: {\n short: 's',\n description: `Call 'fs.lstat' on all entries, whether required or not\n to determine if it's a valid match.`,\n },\n 'match-base': {\n short: 'b',\n description: `Perform a basename-only match if the pattern does not\n contain any slash characters. That is, '*.js' would be\n treated as equivalent to '**/*.js', matching js files\n in all directories.\n `,\n },\n\n dot: {\n description: `Allow patterns to match files/directories that start\n with '.', even if the pattern does not start with '.'\n `,\n },\n nobrace: {\n description: 'Do not expand {...} patterns',\n },\n nocase: {\n description: `Perform a case-insensitive match. This defaults to\n 'true' on macOS and Windows platforms, and false on\n all others.\n\n Note: 'nocase' should only be explicitly set when it is\n known that the filesystem's case sensitivity differs\n from the platform default. If set 'true' on\n case-insensitive file systems, then the walk may return\n more or less results than expected.\n `,\n },\n nodir: {\n description: `Do not match directories, only files.\n\n Note: to *only* match directories, append a '/' at the\n end of the pattern.\n `,\n },\n noext: {\n description: `Do not expand extglob patterns, such as '+(a|b)'`,\n },\n noglobstar: {\n description: `Do not expand '**' against multiple path portions.\n Ie, treat it as a normal '*' instead.`,\n },\n 'windows-path-no-escape': {\n description: `Use '\\\\' as a path separator *only*, and *never* as an\n escape character.
If set, all '\\\\' characters are\n replaced with '/' in the pattern.`,\n },\n })\n .num({\n 'max-depth': {\n short: 'D',\n description: `Maximum depth to traverse from the current\n working directory`,\n },\n })\n .opt({\n cwd: {\n short: 'C',\n description: 'Current working directory to execute/match in',\n default: process.cwd(),\n },\n root: {\n short: 'r',\n description: `A string path resolved against the 'cwd', which is\n used as the starting point for absolute patterns that\n start with '/' (but not drive letters or UNC paths\n on Windows).\n\n Note that this *doesn't* necessarily limit the walk to\n the 'root' directory, and doesn't affect the cwd\n starting point for non-absolute patterns. A pattern\n containing '..' will still be able to traverse out of\n the root directory, if it is not an actual root directory\n on the filesystem, and any non-absolute patterns will\n still be matched in the 'cwd'.\n\n To start absolute and non-absolute patterns in the same\n path, you can use '--root=' to set it to the empty\n string. However, be aware that on Windows systems, a\n pattern like 'x:/*' or '//host/share/*' will *always*\n start in the 'x:/' or '//host/share/' directory,\n regardless of the --root setting.\n `,\n },\n platform: {\n description: `Defaults to the value of 'process.platform' if\n available, or 'linux' if not. Setting --platform=win32\n on non-Windows systems may cause strange behavior!`,\n validOptions: [\n 'aix',\n 'android',\n 'darwin',\n 'freebsd',\n 'haiku',\n 'linux',\n 'openbsd',\n 'sunos',\n 'win32',\n 'cygwin',\n 'netbsd',\n ],\n },\n })\n .optList({\n ignore: {\n short: 'i',\n description: `Glob patterns to ignore`,\n },\n })\n .flag({\n debug: {\n short: 'v',\n description: `Output a huge amount of noisy debug information about\n patterns as they are parsed and used to match files.`,\n },\n version: {\n short: 'V',\n description: `Output the version (${version})`,\n },\n help: {\n short: 'h',\n description: 'Show this usage information',\n },\n })\n\ntry {\n const { positionals, values } = j.parse()\n if (values.version) {\n console.log(version)\n process.exit(0)\n }\n if (values.help) {\n console.log(j.usage())\n process.exit(0)\n }\n if (positionals.length === 0 && !values.default)\n throw 'No patterns provided'\n if (positionals.length === 0 && values.default)\n positionals.push(values.default)\n const patterns =\n values.all ? positionals : positionals.filter(p => !existsSync(p))\n const matches =\n values.all ?\n []\n : positionals.filter(p => existsSync(p)).map(p => join(p))\n const stream = globStream(patterns, {\n absolute: values.absolute,\n cwd: values.cwd,\n dot: values.dot,\n dotRelative: values['dot-relative'],\n follow: values.follow,\n ignore: values.ignore,\n mark: values.mark,\n matchBase: values['match-base'],\n maxDepth: values['max-depth'],\n nobrace: values.nobrace,\n nocase: values.nocase,\n nodir: values.nodir,\n noext: values.noext,\n noglobstar: values.noglobstar,\n platform: values.platform as undefined | NodeJS.Platform,\n realpath: values.realpath,\n root: values.root,\n stat: values.stat,\n debug: values.debug,\n posix: values.posix,\n })\n\n const cmd = values.cmd\n if (!cmd) {\n matches.forEach(m => console.log(m))\n stream.on('data', f => console.log(f))\n } else {\n stream.on('data', f => matches.push(f))\n stream.on('end', () => foregroundChild(cmd, matches, { shell: true }))\n }\n} catch (e) {\n console.error(j.usage())\n console.error(e instanceof Error ? 
e.message : String(e))\n process.exit(1)\n}\n"]} \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/glob.d.ts b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/glob.d.ts new file mode 100644 index 0000000000000000000000000000000000000000..314ad1f5ccd3ccdadff2f3cc5bba5136db5a0d03 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/glob.d.ts @@ -0,0 +1,388 @@ +import { Minimatch } from 'minimatch'; +import { Minipass } from 'minipass'; +import { FSOption, Path, PathScurry } from 'path-scurry'; +import { IgnoreLike } from './ignore.js'; +import { Pattern } from './pattern.js'; +export type MatchSet = Minimatch['set']; +export type GlobParts = Exclude<Minimatch['globParts'], undefined>; +/** + * A `GlobOptions` object may be provided to any of the exported methods, and + * must be provided to the `Glob` constructor. + * + * All options are optional, boolean, and false by default, unless otherwise + * noted. + * + * All resolved options are added to the Glob object as properties. + * + * If you are running many `glob` operations, you can pass a Glob object as the + * `options` argument to a subsequent operation to share the previously loaded + * cache. + */ +export interface GlobOptions { + /** + * Set to `true` to always receive absolute paths for + * matched files. Set to `false` to always return relative paths. + * + * When this option is not set, absolute paths are returned for patterns + * that are absolute, and otherwise paths are returned that are relative + * to the `cwd` setting. + * + * This does _not_ make an extra system call to get + * the realpath, it only does string path resolution. + * + * Conflicts with {@link withFileTypes} + */ + absolute?: boolean; + /** + * Set to false to enable {@link windowsPathsNoEscape} + * + * @deprecated + */ + allowWindowsEscape?: boolean; + /** + * The current working directory in which to search. Defaults to + * `process.cwd()`. + * + * May be either a string path or a `file://` URL object or string. + */ + cwd?: string | URL; + /** + * Include `.dot` files in normal matches and `globstar` + * matches. Note that an explicit dot in a portion of the pattern + * will always match dot files. + */ + dot?: boolean; + /** + * Prepend all relative path strings with `./` (or `.\` on Windows). + * + * Without this option, returned relative paths are "bare", so instead of + * returning `'./foo/bar'`, they are returned as `'foo/bar'`. + * + * Relative patterns starting with `'../'` are not prepended with `./`, even + * if this option is set. + */ + dotRelative?: boolean; + /** + * Follow symlinked directories when expanding `**` + * patterns. This can result in a lot of duplicate references in + * the presence of cyclic links, and make performance quite bad. + * + * By default, a `**` in a pattern will follow 1 symbolic link if + * it is not the first item in the pattern, or none if it is the + * first item in the pattern, following the same behavior as Bash. + */ + follow?: boolean; + /** + * string or string[], or an object with `ignored` and `childrenIgnored` + * methods. + * + * If a string or string[] is provided, then this is treated as a glob + * pattern or array of glob patterns to exclude from matches. To ignore all + * children within a directory, as well as the entry itself, append `'/**'` + * to the ignore pattern. + * + * **Note** `ignore` patterns are _always_ in `dot:true` mode, regardless of + * any other settings.
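+ *
+ * A brief illustrative sketch of the string form (the pattern and ignore
+ * values here are examples only, not defaults of this module):
+ *
+ * ```ts
+ * // match js files, but skip node_modules and everything inside it
+ * const results = await glob('*.js', { ignore: ['node_modules/**'] })
+ * ```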
+ * + * If an object is provided that has `ignored(path)` and/or + * `childrenIgnored(path)` methods, then these methods will be called to + * determine whether any Path is a match or if its children should be + * traversed, respectively. + */ + ignore?: string | string[] | IgnoreLike; + /** + * Treat brace expansion like `{a,b}` as a "magic" pattern. Has no + * effect if {@link nobrace} is set. + * + * Only has effect on the {@link hasMagic} function. + */ + magicalBraces?: boolean; + /** + * Add a `/` character to directory matches. Note that this requires + * additional stat calls in some cases. + */ + mark?: boolean; + /** + * Perform a basename-only match if the pattern does not contain any slash + * characters. That is, `*.js` would be treated as equivalent to + * `**\/*.js`, matching all js files in all directories. + */ + matchBase?: boolean; + /** + * Limit the directory traversal to a given depth below the cwd. + * Note that this does NOT prevent traversal to sibling folders, + * root patterns, and so on. It only limits the maximum folder depth + * that the walk will descend, relative to the cwd. + */ + maxDepth?: number; + /** + * Do not expand `{a,b}` and `{1..3}` brace sets. + */ + nobrace?: boolean; + /** + * Perform a case-insensitive match. This defaults to `true` on macOS and + * Windows systems, and `false` on all others. + * + * **Note** `nocase` should only be explicitly set when it is + * known that the filesystem's case sensitivity differs from the + * platform default. If set `true` on case-sensitive file + * systems, or `false` on case-insensitive file systems, then the + * walk may return more or less results than expected. + */ + nocase?: boolean; + /** + * Do not match directories, only files. (Note: to match + * _only_ directories, put a `/` at the end of the pattern.) + */ + nodir?: boolean; + /** + * Do not match "extglob" patterns such as `+(a|b)`. + */ + noext?: boolean; + /** + * Do not match `**` against multiple filenames. (Ie, treat it as a normal + * `*` instead.) + * + * Conflicts with {@link matchBase} + */ + noglobstar?: boolean; + /** + * Defaults to value of `process.platform` if available, or `'linux'` if + * not. Setting `platform:'win32'` on non-Windows systems may cause strange + * behavior. + */ + platform?: NodeJS.Platform; + /** + * Set to true to call `fs.realpath` on all of the + * results. In the case of an entry that cannot be resolved, the + * entry is omitted. This incurs a slight performance penalty, of + * course, because of the added system calls. + */ + realpath?: boolean; + /** + * + * A string path resolved against the `cwd` option, which + * is used as the starting point for absolute patterns that start + * with `/`, (but not drive letters or UNC paths on Windows). + * + * Note that this _doesn't_ necessarily limit the walk to the + * `root` directory, and doesn't affect the cwd starting point for + * non-absolute patterns. A pattern containing `..` will still be + * able to traverse out of the root directory, if it is not an + * actual root directory on the filesystem, and any non-absolute + * patterns will be matched in the `cwd`. For example, the + * pattern `/../*` with `{root:'/some/path'}` will return all + * files in `/some`, not all files in `/some/path`. The pattern + * `*` with `{root:'/some/path'}` will return all the entries in + * the cwd, not the entries in `/some/path`. + * + * To start absolute and non-absolute patterns in the same + * path, you can use `{root:''}`. 
However, be aware that on + * Windows systems, a pattern like `x:/*` or `//host/share/*` will + * _always_ start in the `x:/` or `//host/share` directory, + * regardless of the `root` setting. + */ + root?: string; + /** + * A [PathScurry](http://npm.im/path-scurry) object used + * to traverse the file system. If the `nocase` option is set + * explicitly, then any provided `scurry` object must match this + * setting. + */ + scurry?: PathScurry; + /** + * Call `lstat()` on all entries, whether required or not to determine + * if it's a valid match. When used with {@link withFileTypes}, this means + * that matches will include data such as modified time, permissions, and + * so on. Note that this will incur a performance cost due to the added + * system calls. + */ + stat?: boolean; + /** + * An AbortSignal which will cancel the Glob walk when + * triggered. + */ + signal?: AbortSignal; + /** + * Use `\\` as a path separator _only_, and + * _never_ as an escape character. If set, all `\\` characters are + * replaced with `/` in the pattern. + * + * Note that this makes it **impossible** to match against paths + * containing literal glob pattern characters, but allows matching + * with patterns constructed using `path.join()` and + * `path.resolve()` on Windows platforms, mimicking the (buggy!) + * behavior of Glob v7 and before on Windows. Please use with + * caution, and be mindful of [the caveat below about Windows + * paths](#windows). (For legacy reasons, this is also set if + * `allowWindowsEscape` is set to the exact value `false`.) + */ + windowsPathsNoEscape?: boolean; + /** + * Return [PathScurry](http://npm.im/path-scurry) + * `Path` objects instead of strings. These are similar to a + * NodeJS `Dirent` object, but with additional methods and + * properties. + * + * Conflicts with {@link absolute} + */ + withFileTypes?: boolean; + /** + * An fs implementation to override some or all of the defaults. See + * http://npm.im/path-scurry for details about what can be overridden. + */ + fs?: FSOption; + /** + * Just passed along to Minimatch. Note that this makes all pattern + * matching operations slower and *extremely* noisy. + */ + debug?: boolean; + /** + * Return `/` delimited paths, even on Windows. + * + * On posix systems, this has no effect. But, on Windows, it means that + * paths will be `/` delimited, and absolute paths will be their full + * resolved UNC forms, eg instead of `'C:\\foo\\bar'`, it would return + * `'//?/C:/foo/bar'` + */ + posix?: boolean; + /** + * Do not match any children of any matches. For example, the pattern + * `**\/foo` would match `a/foo`, but not `a/foo/b/foo` in this mode. + * + * This is especially useful for cases like "find all `node_modules` + * folders, but not the ones in `node_modules`". + * + * In order to support this, the `Ignore` implementation must support an + * `add(pattern: string)` method. If using the default `Ignore` class, then + * this is fine, but if this is set to `false`, and a custom `Ignore` is + * provided that does not have an `add()` method, then it will throw an + * error. + * + * **Caveat** It *only* ignores matches that would be a descendant of a + * previous match, and only if that descendant is matched *after* the + * ancestor is encountered. Since the file system walk happens in + * indeterminate order, it's possible that a match will already be added + * before its ancestor, if multiple or braced patterns are used. 
+ * + * For example: + * + * ```ts + * const results = await glob([ + * // likely to match first, since it's just a stat + * 'a/b/c/d/e/f', + * + * // this pattern is more complicated! It must do various readdir() + * // calls and test the results against a regular expression, and that + * // is certainly going to take a little bit longer. + * // + * // So, later on, it encounters a match at 'a/b/c/d/e', but it's too + * // late to ignore a/b/c/d/e/f, because it's already been emitted. + * 'a/[bdf]/?/[a-z]/*', + * ], { includeChildMatches: false }) + * ``` + * + * It's best to only set this to `false` if you can be reasonably sure that + * no components of the pattern will potentially match one another's file + * system descendants, or if the occasional included child entry will not + * cause problems. + * + * @default true + */ + includeChildMatches?: boolean; +} +export type GlobOptionsWithFileTypesTrue = GlobOptions & { + withFileTypes: true; + absolute?: undefined; + mark?: undefined; + posix?: undefined; +}; +export type GlobOptionsWithFileTypesFalse = GlobOptions & { + withFileTypes?: false; +}; +export type GlobOptionsWithFileTypesUnset = GlobOptions & { + withFileTypes?: undefined; +}; +export type Result<Opts> = Opts extends GlobOptionsWithFileTypesTrue ? Path : Opts extends GlobOptionsWithFileTypesFalse ? string : Opts extends GlobOptionsWithFileTypesUnset ? string : string | Path; +export type Results<Opts> = Result<Opts>[]; +export type FileTypes<Opts> = Opts extends GlobOptionsWithFileTypesTrue ? true : Opts extends GlobOptionsWithFileTypesFalse ? false : Opts extends GlobOptionsWithFileTypesUnset ? false : boolean; +/** + * An object that can perform glob pattern traversals. + */ +export declare class Glob<Opts extends GlobOptions> implements GlobOptions { + absolute?: boolean; + cwd: string; + root?: string; + dot: boolean; + dotRelative: boolean; + follow: boolean; + ignore?: string | string[] | IgnoreLike; + magicalBraces: boolean; + mark?: boolean; + matchBase: boolean; + maxDepth: number; + nobrace: boolean; + nocase: boolean; + nodir: boolean; + noext: boolean; + noglobstar: boolean; + pattern: string[]; + platform: NodeJS.Platform; + realpath: boolean; + scurry: PathScurry; + stat: boolean; + signal?: AbortSignal; + windowsPathsNoEscape: boolean; + withFileTypes: FileTypes<Opts>; + includeChildMatches: boolean; + /** + * The options provided to the constructor. + */ + opts: Opts; + /** + * An array of parsed immutable {@link Pattern} objects. + */ + patterns: Pattern[]; + /** + * All options are stored as properties on the `Glob` object. + * + * See {@link GlobOptions} for full options descriptions. + * + * Note that a previous `Glob` object can be passed as the + * `GlobOptions` to another `Glob` instantiation to re-use settings + * and caches with a new pattern. + * + * Traversal functions can be called multiple times to run the walk + * again. + */ + constructor(pattern: string | string[], opts: Opts); + /** + * Returns a Promise that resolves to the results array. + */ + walk(): Promise<Results<Opts>>; + /** + * synchronous {@link Glob.walk} + */ + walkSync(): Results<Opts>; + /** + * Stream results asynchronously. + */ + stream(): Minipass<Result<Opts>, Result<Opts>>; + /** + * Stream results synchronously. + */ + streamSync(): Minipass<Result<Opts>, Result<Opts>>; + /** + * Default sync iteration function. Returns a Generator that + * iterates over the results. + */ + iterateSync(): Generator<Result<Opts>, void, void>; + [Symbol.iterator](): Generator<Result<Opts>, void, void>; + /** + * Default async iteration function. Returns an AsyncGenerator that + * iterates over the results.
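+ *
+ * A minimal sketch of async iteration (the pattern is illustrative):
+ *
+ * ```ts
+ * const g = new Glob('./src/*.js', {})
+ * for await (const match of g) console.log(match)
+ * ```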
+ */ + iterate(): AsyncGenerator, void, void>; + [Symbol.asyncIterator](): AsyncGenerator, void, void>; +} +//# sourceMappingURL=glob.d.ts.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/glob.d.ts.map b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/glob.d.ts.map new file mode 100644 index 0000000000000000000000000000000000000000..c32dc74c96774177b949cc137befa0edb6489e3f --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/glob.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"glob.d.ts","sourceRoot":"","sources":["../../src/glob.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAoB,MAAM,WAAW,CAAA;AACvD,OAAO,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AAEnC,OAAO,EACL,QAAQ,EACR,IAAI,EACJ,UAAU,EAIX,MAAM,aAAa,CAAA;AACpB,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AACxC,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAA;AAGtC,MAAM,MAAM,QAAQ,GAAG,SAAS,CAAC,KAAK,CAAC,CAAA;AACvC,MAAM,MAAM,SAAS,GAAG,OAAO,CAAC,SAAS,CAAC,WAAW,CAAC,EAAE,SAAS,CAAC,CAAA;AAalE;;;;;;;;;;;;GAYG;AACH,MAAM,WAAW,WAAW;IAC1B;;;;;;;;;;;;OAYG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAA;IAElB;;;;OAIG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAA;IAE5B;;;;;OAKG;IACH,GAAG,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;IAElB;;;;OAIG;IACH,GAAG,CAAC,EAAE,OAAO,CAAA;IAEb;;;;;;;;OAQG;IACH,WAAW,CAAC,EAAE,OAAO,CAAA;IAErB;;;;;;;;OAQG;IACH,MAAM,CAAC,EAAE,OAAO,CAAA;IAEhB;;;;;;;;;;;;;;;;OAgBG;IACH,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,UAAU,CAAA;IAEvC;;;;;OAKG;IACH,aAAa,CAAC,EAAE,OAAO,CAAA;IAEvB;;;OAGG;IACH,IAAI,CAAC,EAAE,OAAO,CAAA;IAEd;;;;OAIG;IACH,SAAS,CAAC,EAAE,OAAO,CAAA;IAEnB;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAA;IAEjB;;OAEG;IACH,OAAO,CAAC,EAAE,OAAO,CAAA;IAEjB;;;;;;;;;OASG;IACH,MAAM,CAAC,EAAE,OAAO,CAAA;IAEhB;;;OAGG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;OAEG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;;;;OAKG;IACH,UAAU,CAAC,EAAE,OAAO,CAAA;IAEpB;;;;OAIG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC,QAAQ,CAAA;IAE1B;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAA;IAElB;;;;;;;;;;;;;;;;;;;;;;OAsBG;IACH,IAAI,CAAC,EAAE,MAAM,CAAA;IAEb;;;;;OAKG;IACH,MAAM,CAAC,EAAE,UAAU,CAAA;IAEnB;;;;;;OAMG;IACH,IAAI,CAAC,EAAE,OAAO,CAAA;IAEd;;;OAGG;IACH,MAAM,CAAC,EAAE,WAAW,CAAA;IAEpB;;;;;;;;;;;;;OAaG;IACH,oBAAoB,CAAC,EAAE,OAAO,CAAA;IAE9B;;;;;;;OAOG;IACH,aAAa,CAAC,EAAE,OAAO,CAAA;IAEvB;;;OAGG;IACH,EAAE,CAAC,EAAE,QAAQ,CAAA;IAEb;;;OAGG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;;;;;;OAOG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA0CG;IACH,mBAAmB,CAAC,EAAE,OAAO,CAAA;CAC9B;AAED,MAAM,MAAM,4BAA4B,GAAG,WAAW,GAAG;IACvD,aAAa,EAAE,IAAI,CAAA;IAEnB,QAAQ,CAAC,EAAE,SAAS,CAAA;IACpB,IAAI,CAAC,EAAE,SAAS,CAAA;IAChB,KAAK,CAAC,EAAE,SAAS,CAAA;CAClB,CAAA;AAED,MAAM,MAAM,6BAA6B,GAAG,WAAW,GAAG;IACxD,aAAa,CAAC,EAAE,KAAK,CAAA;CACtB,CAAA;AAED,MAAM,MAAM,6BAA6B,GAAG,WAAW,GAAG;IACxD,aAAa,CAAC,EAAE,SAAS,CAAA;CAC1B,CAAA;AAED,MAAM,MAAM,MAAM,CAAC,IAAI,IACrB,IAAI,SAAS,4BAA4B,GAAG,IAAI,GAC9C,IAAI,SAAS,6BAA6B,GAAG,MAAM,GACnD,IAAI,SAAS,6BAA6B,GAAG,MAAM,GACnD,MAAM,GAAG,IAAI,CAAA;AACjB,MAAM,MAAM,OAAO,CAAC,IAAI,IAAI,MAAM,CAAC,IAAI,CAAC,EAAE,CAAA;AAE1C,MAAM,MAAM,SAAS,CAAC,IAAI,IACxB,IAAI,SAAS,4BAA4B,GAAG,IAAI,GAC9C,IAAI,SAAS,6BAA6B,GAAG,KAAK,GAClD,IAAI,SAAS,6BAA6B,GAAG,KAAK,GAClD,OAAO,CAAA;AAEX;;GAEG;AACH,qBAAa,IAAI,CAAC,IAAI,SAAS,WAAW,CAAE,YAAW,WAAW;IAChE,QAAQ,CAAC,EAAE,OAAO,CAAA;IAClB,GAAG,EAAE,MAAM,CAAA;IACX,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,GAAG,EAAE,OAAO,CAAA;IACZ,WAAW,EAAE,OAAO,CAAA;IACpB,MAAM,EAAE,OAAO,CAAA;IACf,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,UAAU,CAAA;IACvC,aAAa,EAAE,OAAO,CAAA;IACtB,IAAI,CAAC,EAAE,OAAO,CAAA;IACd,SAAS,EAAE,
OAAO,CAAA;IAClB,QAAQ,EAAE,MAAM,CAAA;IAChB,OAAO,EAAE,OAAO,CAAA;IAChB,MAAM,EAAE,OAAO,CAAA;IACf,KAAK,EAAE,OAAO,CAAA;IACd,KAAK,EAAE,OAAO,CAAA;IACd,UAAU,EAAE,OAAO,CAAA;IACnB,OAAO,EAAE,MAAM,EAAE,CAAA;IACjB,QAAQ,EAAE,MAAM,CAAC,QAAQ,CAAA;IACzB,QAAQ,EAAE,OAAO,CAAA;IACjB,MAAM,EAAE,UAAU,CAAA;IAClB,IAAI,EAAE,OAAO,CAAA;IACb,MAAM,CAAC,EAAE,WAAW,CAAA;IACpB,oBAAoB,EAAE,OAAO,CAAA;IAC7B,aAAa,EAAE,SAAS,CAAC,IAAI,CAAC,CAAA;IAC9B,mBAAmB,EAAE,OAAO,CAAA;IAE5B;;OAEG;IACH,IAAI,EAAE,IAAI,CAAA;IAEV;;OAEG;IACH,QAAQ,EAAE,OAAO,EAAE,CAAA;IAEnB;;;;;;;;;;;OAWG;gBACS,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI;IA2HlD;;OAEG;IACG,IAAI,IAAI,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;IAoBpC;;OAEG;IACH,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;IAgBzB;;OAEG;IACH,MAAM,IAAI,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC;IAc9C;;OAEG;IACH,UAAU,IAAI,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC;IAclD;;;OAGG;IACH,WAAW,IAAI,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC;IAGlD,CAAC,MAAM,CAAC,QAAQ,CAAC;IAIjB;;;OAGG;IACH,OAAO,IAAI,cAAc,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC;IAGnD,CAAC,MAAM,CAAC,aAAa,CAAC;CAGvB"} \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/glob.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/glob.js new file mode 100644 index 0000000000000000000000000000000000000000..c9ff3b0036d9455bf4106c247e40cb207b84fba4 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/glob.js @@ -0,0 +1,243 @@ +import { Minimatch } from 'minimatch'; +import { fileURLToPath } from 'node:url'; +import { PathScurry, PathScurryDarwin, PathScurryPosix, PathScurryWin32, } from 'path-scurry'; +import { Pattern } from './pattern.js'; +import { GlobStream, GlobWalker } from './walker.js'; +// if no process global, just call it linux. +// so we default to case-sensitive, / separators +const defaultPlatform = (typeof process === 'object' && + process && + typeof process.platform === 'string') ? + process.platform + : 'linux'; +/** + * An object that can perform glob pattern traversals. + */ +export class Glob { + absolute; + cwd; + root; + dot; + dotRelative; + follow; + ignore; + magicalBraces; + mark; + matchBase; + maxDepth; + nobrace; + nocase; + nodir; + noext; + noglobstar; + pattern; + platform; + realpath; + scurry; + stat; + signal; + windowsPathsNoEscape; + withFileTypes; + includeChildMatches; + /** + * The options provided to the constructor. + */ + opts; + /** + * An array of parsed immutable {@link Pattern} objects. + */ + patterns; + /** + * All options are stored as properties on the `Glob` object. + * + * See {@link GlobOptions} for full options descriptions. + * + * Note that a previous `Glob` object can be passed as the + * `GlobOptions` to another `Glob` instantiation to re-use settings + * and caches with a new pattern. + * + * Traversal functions can be called multiple times to run the walk + * again. 
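+ *
+ * A brief sketch of that re-use (the patterns and cwd here are
+ * illustrative, not defaults):
+ *
+ * ```ts
+ * const g1 = new Glob('*.js', { cwd: './src' })
+ * // passing g1 as the options shares its settings and caches
+ * const g2 = new Glob('*.ts', g1)
+ * ```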
+ */ + constructor(pattern, opts) { + /* c8 ignore start */ + if (!opts) + throw new TypeError('glob options required'); + /* c8 ignore stop */ + this.withFileTypes = !!opts.withFileTypes; + this.signal = opts.signal; + this.follow = !!opts.follow; + this.dot = !!opts.dot; + this.dotRelative = !!opts.dotRelative; + this.nodir = !!opts.nodir; + this.mark = !!opts.mark; + if (!opts.cwd) { + this.cwd = ''; + } + else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) { + opts.cwd = fileURLToPath(opts.cwd); + } + this.cwd = opts.cwd || ''; + this.root = opts.root; + this.magicalBraces = !!opts.magicalBraces; + this.nobrace = !!opts.nobrace; + this.noext = !!opts.noext; + this.realpath = !!opts.realpath; + this.absolute = opts.absolute; + this.includeChildMatches = opts.includeChildMatches !== false; + this.noglobstar = !!opts.noglobstar; + this.matchBase = !!opts.matchBase; + this.maxDepth = + typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity; + this.stat = !!opts.stat; + this.ignore = opts.ignore; + if (this.withFileTypes && this.absolute !== undefined) { + throw new Error('cannot set absolute and withFileTypes:true'); + } + if (typeof pattern === 'string') { + pattern = [pattern]; + } + this.windowsPathsNoEscape = + !!opts.windowsPathsNoEscape || + opts.allowWindowsEscape === + false; + if (this.windowsPathsNoEscape) { + pattern = pattern.map(p => p.replace(/\\/g, '/')); + } + if (this.matchBase) { + if (opts.noglobstar) { + throw new TypeError('base matching requires globstar'); + } + pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`)); + } + this.pattern = pattern; + this.platform = opts.platform || defaultPlatform; + this.opts = { ...opts, platform: this.platform }; + if (opts.scurry) { + this.scurry = opts.scurry; + if (opts.nocase !== undefined && + opts.nocase !== opts.scurry.nocase) { + throw new Error('nocase option contradicts provided scurry option'); + } + } + else { + const Scurry = opts.platform === 'win32' ? PathScurryWin32 + : opts.platform === 'darwin' ? PathScurryDarwin + : opts.platform ? PathScurryPosix + : PathScurry; + this.scurry = new Scurry(this.cwd, { + nocase: opts.nocase, + fs: opts.fs, + }); + } + this.nocase = this.scurry.nocase; + // If you do nocase:true on a case-sensitive file system, then + // we need to use regexps instead of strings for non-magic + // path portions, because statting `aBc` won't return results + // for the file `AbC` for example. + const nocaseMagicOnly = this.platform === 'darwin' || this.platform === 'win32'; + const mmo = { + // default nocase based on platform + ...opts, + dot: this.dot, + matchBase: this.matchBase, + nobrace: this.nobrace, + nocase: this.nocase, + nocaseMagicOnly, + nocomment: true, + noext: this.noext, + nonegate: true, + optimizationLevel: 2, + platform: this.platform, + windowsPathsNoEscape: this.windowsPathsNoEscape, + debug: !!this.opts.debug, + }; + const mms = this.pattern.map(p => new Minimatch(p, mmo)); + const [matchSet, globParts] = mms.reduce((set, m) => { + set[0].push(...m.set); + set[1].push(...m.globParts); + return set; + }, [[], []]); + this.patterns = matchSet.map((set, i) => { + const g = globParts[i]; + /* c8 ignore start */ + if (!g) + throw new Error('invalid pattern object'); + /* c8 ignore stop */ + return new Pattern(set, g, 0, this.platform); + }); + } + async walk() { + // Walkers always return array of Path objects, so we just have to + // coerce them into the right shape. 
It will have already called + // realpath() if the option was set to do so, so we know that's cached. + // start out knowing the cwd, at least + return [ + ...(await new GlobWalker(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity ? + this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + includeChildMatches: this.includeChildMatches, + }).walk()), + ]; + } + walkSync() { + return [ + ...new GlobWalker(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity ? + this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + includeChildMatches: this.includeChildMatches, + }).walkSync(), + ]; + } + stream() { + return new GlobStream(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity ? + this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + includeChildMatches: this.includeChildMatches, + }).stream(); + } + streamSync() { + return new GlobStream(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity ? + this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + includeChildMatches: this.includeChildMatches, + }).streamSync(); + } + /** + * Default sync iteration function. Returns a Generator that + * iterates over the results. + */ + iterateSync() { + return this.streamSync()[Symbol.iterator](); + } + [Symbol.iterator]() { + return this.iterateSync(); + } + /** + * Default async iteration function. Returns an AsyncGenerator that + * iterates over the results. + */ + iterate() { + return this.stream()[Symbol.asyncIterator](); + } + [Symbol.asyncIterator]() { + return this.iterate(); + } +} +//# sourceMappingURL=glob.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/glob.js.map b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/glob.js.map new file mode 100644 index 0000000000000000000000000000000000000000..a431736271e441302a2a41da0c5599f08f99750e --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/glob.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"glob.js","sourceRoot":"","sources":["../../src/glob.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAoB,MAAM,WAAW,CAAA;AAEvD,OAAO,EAAE,aAAa,EAAE,MAAM,UAAU,CAAA;AACxC,OAAO,EAGL,UAAU,EACV,gBAAgB,EAChB,eAAe,EACf,eAAe,GAChB,MAAM,aAAa,CAAA;AAEpB,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAA;AACtC,OAAO,EAAE,UAAU,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AAKpD,4CAA4C;AAC5C,gDAAgD;AAChD,MAAM,eAAe,GACnB,CACE,OAAO,OAAO,KAAK,QAAQ;IAC3B,OAAO;IACP,OAAO,OAAO,CAAC,QAAQ,KAAK,QAAQ,CACrC,CAAC,CAAC;IACD,OAAO,CAAC,QAAQ;IAClB,CAAC,CAAC,OAAO,CAAA;AAyVX;;GAEG;AACH,MAAM,OAAO,IAAI;IACf,QAAQ,CAAU;IAClB,GAAG,CAAQ;IACX,IAAI,CAAS;IACb,GAAG,CAAS;IACZ,WAAW,CAAS;IACpB,MAAM,CAAS;IACf,MAAM,CAAiC;IACvC,aAAa,CAAS;IACtB,IAAI,CAAU;IACd,SAAS,CAAS;IAClB,QAAQ,CAAQ;IAChB,OAAO,CAAS;IAChB,MAAM,CAAS;IACf,KAAK,CAAS;IACd,KAAK,CAAS;IACd,UAAU,CAAS;IACnB,OAAO,CAAU;IACjB,QAAQ,CAAiB;IACzB,QAAQ,CAAS;IACjB,MAAM,CAAY;IAClB,IAAI,CAAS;IACb,MAAM,CAAc;IACpB,oBAAoB,CAAS;IAC7B,aAAa,CAAiB;IAC9B,mBAAmB,CAAS;IAE5B;;OAEG;IACH,IAAI,CAAM;IAEV;;OAEG;IACH,QAAQ,CAAW;IAEnB;;;;;;;;;;;OAWG;IACH,YAAY,OAA0B,EAAE,IAAU;QAChD,qBAAqB;QACrB,IAAI,CAAC,IAAI;YAAE,MAAM,IAAI,SAAS,CAAC,uBAAuB,CAAC,CAAA;QACvD,oBAAoB;QACpB,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC,IAAI,CAAC,aAAgC,CAAA;QAC5D,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QACzB,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,IAAI,CAAC,MAAM,CAAA;QAC3B,IAAI,CAAC,GAAG,GAAG,CAAC,CAAC,IAAI,CAAC,GAAG,CAAA;QACrB,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC,IAAI,CAAC,WAAW,CAAA;QACrC,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,IAAI,CAAC,KAAK,CAAA;QACzB,IAAI,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,CAAA;QACvB,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC;YACd,IAAI,CAAC,GAAG,GAAG,EAAE,CAAA;QACf,CAAC;aAAM,IAAI,IAAI,CAAC,GAAG,YAAY,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE,CAAC;YACrE,IAAI,CAAC,GAAG,GAAG,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;QACpC,CAAC;QACD,IAAI,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,IAAI,EAAE,CAAA;QACzB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,CAAA;QACrB,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC,IAAI,CAAC,aAAa,CAAA;QACzC,IAAI,CAAC,OAAO,GAAG,CAAC,CAAC,IAAI,CAAC,OAAO,CAAA;QAC7B,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,IAAI,CAAC,KAAK,CAAA;QACzB,IAAI,CAAC,QAAQ,GAAG,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAA;QAC/B,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAA;QAC7B,IAAI,CAAC,mBAAmB,GAAG,IAAI,CAAC,mBAAmB,KAAK,KAAK,CAAA;QAE7D,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC,IAAI,CAAC,UAAU,CAAA;QACnC,IAAI,CAAC,SAAS,GAAG,CAAC,CAAC,IAAI,CAAC,SAAS,CAAA;QACjC,IAAI,CAAC,QAAQ;YACX,OAAO,IAAI,CAAC,QAAQ,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAA;QAC9D,IAAI,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,CAAA;QACvB,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QAEzB,IAAI,IAAI,CAAC,aAAa,IAAI,IAAI,CAAC,QAAQ,KAAK,SAAS,EAAE,CAAC;YACtD,MAAM,IAAI,KAAK,CAAC,4CAA4C,CAAC,CAAA;QAC/D,CAAC;QAED,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE,CAAC;YAChC,OAAO,GAAG,CAAC,OAAO,CAAC,CAAA;QACrB,CAAC;QAED,IAAI,CAAC,oBAAoB;YACvB,CAAC,CAAC,IAAI,CAAC,oBAAoB;gBAC1B,IAAyC,CAAC,kBAAkB;oBAC3D,KAAK,CAAA;QAET,IAAI,IAAI,CAAC,oBAAoB,EAAE,CAAC;YAC9B,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAA;QACnD,CAAC;QAED,IAAI,IAAI,CAAC,SAAS,EAAE,CAAC;YACnB,IAAI,IAAI,CAAC,UAAU,EAAE,CAAC;gBACpB,MAAM,IAAI,SAAS,CAAC,iCAAiC,CAAC,CAAA;YACxD,CAAC;YACD,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAA;QACjE,CAAC;QAED,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;QAEtB,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,eAAe,CAAA;QAChD,IAAI,CAAC,IAAI,GAAG,EAAE,GAAG,IAAI,EAAE,QAAQ,EAAE,IAAI,CAAC,QAAQ,EAAE,CAAA;QAChD,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC;YAChB,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;YACzB,IACE,IAA
I,CAAC,MAAM,KAAK,SAAS;gBACzB,IAAI,CAAC,MAAM,KAAK,IAAI,CAAC,MAAM,CAAC,MAAM,EAClC,CAAC;gBACD,MAAM,IAAI,KAAK,CAAC,kDAAkD,CAAC,CAAA;YACrE,CAAC;QACH,CAAC;aAAM,CAAC;YACN,MAAM,MAAM,GACV,IAAI,CAAC,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,eAAe;gBAC3C,CAAC,CAAC,IAAI,CAAC,QAAQ,KAAK,QAAQ,CAAC,CAAC,CAAC,gBAAgB;oBAC/C,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,eAAe;wBACjC,CAAC,CAAC,UAAU,CAAA;YACd,IAAI,CAAC,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE;gBACjC,MAAM,EAAE,IAAI,CAAC,MAAM;gBACnB,EAAE,EAAE,IAAI,CAAC,EAAE;aACZ,CAAC,CAAA;QACJ,CAAC;QACD,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAA;QAEhC,8DAA8D;QAC9D,0DAA0D;QAC1D,6DAA6D;QAC7D,kCAAkC;QAClC,MAAM,eAAe,GACnB,IAAI,CAAC,QAAQ,KAAK,QAAQ,IAAI,IAAI,CAAC,QAAQ,KAAK,OAAO,CAAA;QAEzD,MAAM,GAAG,GAAqB;YAC5B,mCAAmC;YACnC,GAAG,IAAI;YACP,GAAG,EAAE,IAAI,CAAC,GAAG;YACb,SAAS,EAAE,IAAI,CAAC,SAAS;YACzB,OAAO,EAAE,IAAI,CAAC,OAAO;YACrB,MAAM,EAAE,IAAI,CAAC,MAAM;YACnB,eAAe;YACf,SAAS,EAAE,IAAI;YACf,KAAK,EAAE,IAAI,CAAC,KAAK;YACjB,QAAQ,EAAE,IAAI;YACd,iBAAiB,EAAE,CAAC;YACpB,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,oBAAoB,EAAE,IAAI,CAAC,oBAAoB;YAC/C,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK;SACzB,CAAA;QAED,MAAM,GAAG,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,SAAS,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAA;QACxD,MAAM,CAAC,QAAQ,EAAE,SAAS,CAAC,GAAG,GAAG,CAAC,MAAM,CACtC,CAAC,GAA0B,EAAE,CAAC,EAAE,EAAE;YAChC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAA;YACrB,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,SAAS,CAAC,CAAA;YAC3B,OAAO,GAAG,CAAA;QACZ,CAAC,EACD,CAAC,EAAE,EAAE,EAAE,CAAC,CACT,CAAA;QACD,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,CAAC,EAAE,EAAE;YACtC,MAAM,CAAC,GAAG,SAAS,CAAC,CAAC,CAAC,CAAA;YACtB,qBAAqB;YACrB,IAAI,CAAC,CAAC;gBAAE,MAAM,IAAI,KAAK,CAAC,wBAAwB,CAAC,CAAA;YACjD,oBAAoB;YACpB,OAAO,IAAI,OAAO,CAAC,GAAG,EAAE,CAAC,EAAE,CAAC,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAA;QAC9C,CAAC,CAAC,CAAA;IACJ,CAAC;IAMD,KAAK,CAAC,IAAI;QACR,kEAAkE;QAClE,iEAAiE;QACjE,uEAAuE;QACvE,sCAAsC;QACtC,OAAO;YACL,GAAG,CAAC,MAAM,IAAI,UAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;gBACvD,GAAG,IAAI,CAAC,IAAI;gBACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ,CAAC,CAAC;oBAC1B,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;oBACzC,CAAC,CAAC,QAAQ;gBACZ,QAAQ,EAAE,IAAI,CAAC,QAAQ;gBACvB,MAAM,EAAE,IAAI,CAAC,MAAM;gBACnB,mBAAmB,EAAE,IAAI,CAAC,mBAAmB;aAC9C,CAAC,CAAC,IAAI,EAAE,CAAC;SACX,CAAA;IACH,CAAC;IAMD,QAAQ;QACN,OAAO;YACL,GAAG,IAAI,UAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;gBAChD,GAAG,IAAI,CAAC,IAAI;gBACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ,CAAC,CAAC;oBAC1B,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;oBACzC,CAAC,CAAC,QAAQ;gBACZ,QAAQ,EAAE,IAAI,CAAC,QAAQ;gBACvB,MAAM,EAAE,IAAI,CAAC,MAAM;gBACnB,mBAAmB,EAAE,IAAI,CAAC,mBAAmB;aAC9C,CAAC,CAAC,QAAQ,EAAE;SACd,CAAA;IACH,CAAC;IAMD,MAAM;QACJ,OAAO,IAAI,UAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;YACpD,GAAG,IAAI,CAAC,IAAI;YACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ,CAAC,CAAC;gBAC1B,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;gBACzC,CAAC,CAAC,QAAQ;YACZ,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,MAAM,EAAE,IAAI,CAAC,MAAM;YACnB,mBAAmB,EAAE,IAAI,CAAC,mBAAmB;SAC9C,CAAC,CAAC,MAAM,EAAE,CAAA;IACb,CAAC;IAMD,UAAU;QACR,OAAO,IAAI,UAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE;YACpD,GAAG,IAAI,CAAC,IAAI;YACZ,QAAQ,EACN,IAAI,CAAC,QAAQ,KAAK,QAAQ,CAAC,CAAC;gBAC1B,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE;gBACzC,CAAC,CAAC,QAAQ;YACZ,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,MAAM,EAAE,IAAI,CAAC,MAAM;YACnB,mBAAmB,EAAE,IAAI,CAAC,mBAAmB;SAC9C,CAAC,CAAC,UAAU,EAAE,CAAA;IACjB,CAAC;IAED;;;OAGG;IACH,WAAW;QACT,OAAO,IAA
I,CAAC,UAAU,EAAE,CAAC,MAAM,CAAC,QAAQ,CAAC,EAAE,CAAA;IAC7C,CAAC;IACD,CAAC,MAAM,CAAC,QAAQ,CAAC;QACf,OAAO,IAAI,CAAC,WAAW,EAAE,CAAA;IAC3B,CAAC;IAED;;;OAGG;IACH,OAAO;QACL,OAAO,IAAI,CAAC,MAAM,EAAE,CAAC,MAAM,CAAC,aAAa,CAAC,EAAE,CAAA;IAC9C,CAAC;IACD,CAAC,MAAM,CAAC,aAAa,CAAC;QACpB,OAAO,IAAI,CAAC,OAAO,EAAE,CAAA;IACvB,CAAC;CACF","sourcesContent":["import { Minimatch, MinimatchOptions } from 'minimatch'\nimport { Minipass } from 'minipass'\nimport { fileURLToPath } from 'node:url'\nimport {\n FSOption,\n Path,\n PathScurry,\n PathScurryDarwin,\n PathScurryPosix,\n PathScurryWin32,\n} from 'path-scurry'\nimport { IgnoreLike } from './ignore.js'\nimport { Pattern } from './pattern.js'\nimport { GlobStream, GlobWalker } from './walker.js'\n\nexport type MatchSet = Minimatch['set']\nexport type GlobParts = Exclude\n\n// if no process global, just call it linux.\n// so we default to case-sensitive, / separators\nconst defaultPlatform: NodeJS.Platform =\n (\n typeof process === 'object' &&\n process &&\n typeof process.platform === 'string'\n ) ?\n process.platform\n : 'linux'\n\n/**\n * A `GlobOptions` object may be provided to any of the exported methods, and\n * must be provided to the `Glob` constructor.\n *\n * All options are optional, boolean, and false by default, unless otherwise\n * noted.\n *\n * All resolved options are added to the Glob object as properties.\n *\n * If you are running many `glob` operations, you can pass a Glob object as the\n * `options` argument to a subsequent operation to share the previously loaded\n * cache.\n */\nexport interface GlobOptions {\n /**\n * Set to `true` to always receive absolute paths for\n * matched files. Set to `false` to always return relative paths.\n *\n * When this option is not set, absolute paths are returned for patterns\n * that are absolute, and otherwise paths are returned that are relative\n * to the `cwd` setting.\n *\n * This does _not_ make an extra system call to get\n * the realpath, it only does string path resolution.\n *\n * Conflicts with {@link withFileTypes}\n */\n absolute?: boolean\n\n /**\n * Set to false to enable {@link windowsPathsNoEscape}\n *\n * @deprecated\n */\n allowWindowsEscape?: boolean\n\n /**\n * The current working directory in which to search. Defaults to\n * `process.cwd()`.\n *\n * May be eiher a string path or a `file://` URL object or string.\n */\n cwd?: string | URL\n\n /**\n * Include `.dot` files in normal matches and `globstar`\n * matches. Note that an explicit dot in a portion of the pattern\n * will always match dot files.\n */\n dot?: boolean\n\n /**\n * Prepend all relative path strings with `./` (or `.\\` on Windows).\n *\n * Without this option, returned relative paths are \"bare\", so instead of\n * returning `'./foo/bar'`, they are returned as `'foo/bar'`.\n *\n * Relative patterns starting with `'../'` are not prepended with `./`, even\n * if this option is set.\n */\n dotRelative?: boolean\n\n /**\n * Follow symlinked directories when expanding `**`\n * patterns. 
This can result in a lot of duplicate references in\n * the presence of cyclic links, and make performance quite bad.\n *\n * By default, a `**` in a pattern will follow 1 symbolic link if\n * it is not the first item in the pattern, or none if it is the\n * first item in the pattern, following the same behavior as Bash.\n */\n follow?: boolean\n\n /**\n * string or string[], or an object with `ignored` and `childrenIgnored`\n * methods.\n *\n * If a string or string[] is provided, then this is treated as a glob\n * pattern or array of glob patterns to exclude from matches. To ignore all\n * children within a directory, as well as the entry itself, append `'/**'`\n * to the ignore pattern.\n *\n * **Note** `ignore` patterns are _always_ in `dot:true` mode, regardless of\n * any other settings.\n *\n * If an object is provided that has `ignored(path)` and/or\n * `childrenIgnored(path)` methods, then these methods will be called to\n * determine whether any Path is a match or if its children should be\n * traversed, respectively.\n */\n ignore?: string | string[] | IgnoreLike\n\n /**\n * Treat brace expansion like `{a,b}` as a \"magic\" pattern. Has no\n * effect if {@link nobrace} is set.\n *\n * Only has effect on the {@link hasMagic} function.\n */\n magicalBraces?: boolean\n\n /**\n * Add a `/` character to directory matches. Note that this requires\n * additional stat calls in some cases.\n */\n mark?: boolean\n\n /**\n * Perform a basename-only match if the pattern does not contain any slash\n * characters. That is, `*.js` would be treated as equivalent to\n * `**\\/*.js`, matching all js files in all directories.\n */\n matchBase?: boolean\n\n /**\n * Limit the directory traversal to a given depth below the cwd.\n * Note that this does NOT prevent traversal to sibling folders,\n * root patterns, and so on. It only limits the maximum folder depth\n * that the walk will descend, relative to the cwd.\n */\n maxDepth?: number\n\n /**\n * Do not expand `{a,b}` and `{1..3}` brace sets.\n */\n nobrace?: boolean\n\n /**\n * Perform a case-insensitive match. This defaults to `true` on macOS and\n * Windows systems, and `false` on all others.\n *\n * **Note** `nocase` should only be explicitly set when it is\n * known that the filesystem's case sensitivity differs from the\n * platform default. If set `true` on case-sensitive file\n * systems, or `false` on case-insensitive file systems, then the\n * walk may return more or less results than expected.\n */\n nocase?: boolean\n\n /**\n * Do not match directories, only files. (Note: to match\n * _only_ directories, put a `/` at the end of the pattern.)\n */\n nodir?: boolean\n\n /**\n * Do not match \"extglob\" patterns such as `+(a|b)`.\n */\n noext?: boolean\n\n /**\n * Do not match `**` against multiple filenames. (Ie, treat it as a normal\n * `*` instead.)\n *\n * Conflicts with {@link matchBase}\n */\n noglobstar?: boolean\n\n /**\n * Defaults to value of `process.platform` if available, or `'linux'` if\n * not. Setting `platform:'win32'` on non-Windows systems may cause strange\n * behavior.\n */\n platform?: NodeJS.Platform\n\n /**\n * Set to true to call `fs.realpath` on all of the\n * results. In the case of an entry that cannot be resolved, the\n * entry is omitted. 
This incurs a slight performance penalty, of\n * course, because of the added system calls.\n */\n realpath?: boolean\n\n /**\n *\n * A string path resolved against the `cwd` option, which\n * is used as the starting point for absolute patterns that start\n * with `/`, (but not drive letters or UNC paths on Windows).\n *\n * Note that this _doesn't_ necessarily limit the walk to the\n * `root` directory, and doesn't affect the cwd starting point for\n * non-absolute patterns. A pattern containing `..` will still be\n * able to traverse out of the root directory, if it is not an\n * actual root directory on the filesystem, and any non-absolute\n * patterns will be matched in the `cwd`. For example, the\n * pattern `/../*` with `{root:'/some/path'}` will return all\n * files in `/some`, not all files in `/some/path`. The pattern\n * `*` with `{root:'/some/path'}` will return all the entries in\n * the cwd, not the entries in `/some/path`.\n *\n * To start absolute and non-absolute patterns in the same\n * path, you can use `{root:''}`. However, be aware that on\n * Windows systems, a pattern like `x:/*` or `//host/share/*` will\n * _always_ start in the `x:/` or `//host/share` directory,\n * regardless of the `root` setting.\n */\n root?: string\n\n /**\n * A [PathScurry](http://npm.im/path-scurry) object used\n * to traverse the file system. If the `nocase` option is set\n * explicitly, then any provided `scurry` object must match this\n * setting.\n */\n scurry?: PathScurry\n\n /**\n * Call `lstat()` on all entries, whether required or not to determine\n * if it's a valid match. When used with {@link withFileTypes}, this means\n * that matches will include data such as modified time, permissions, and\n * so on. Note that this will incur a performance cost due to the added\n * system calls.\n */\n stat?: boolean\n\n /**\n * An AbortSignal which will cancel the Glob walk when\n * triggered.\n */\n signal?: AbortSignal\n\n /**\n * Use `\\\\` as a path separator _only_, and\n * _never_ as an escape character. If set, all `\\\\` characters are\n * replaced with `/` in the pattern.\n *\n * Note that this makes it **impossible** to match against paths\n * containing literal glob pattern characters, but allows matching\n * with patterns constructed using `path.join()` and\n * `path.resolve()` on Windows platforms, mimicking the (buggy!)\n * behavior of Glob v7 and before on Windows. Please use with\n * caution, and be mindful of [the caveat below about Windows\n * paths](#windows). (For legacy reasons, this is also set if\n * `allowWindowsEscape` is set to the exact value `false`.)\n */\n windowsPathsNoEscape?: boolean\n\n /**\n * Return [PathScurry](http://npm.im/path-scurry)\n * `Path` objects instead of strings. These are similar to a\n * NodeJS `Dirent` object, but with additional methods and\n * properties.\n *\n * Conflicts with {@link absolute}\n */\n withFileTypes?: boolean\n\n /**\n * An fs implementation to override some or all of the defaults. See\n * http://npm.im/path-scurry for details about what can be overridden.\n */\n fs?: FSOption\n\n /**\n * Just passed along to Minimatch. Note that this makes all pattern\n * matching operations slower and *extremely* noisy.\n */\n debug?: boolean\n\n /**\n * Return `/` delimited paths, even on Windows.\n *\n * On posix systems, this has no effect. 
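[Editorial aside] Illustrative only: `withFileTypes`, `stat`, and `signal` in combination, using the exports declared in this diff (patterns and timeout invented). The results are path-scurry `Path` objects, so Dirent-style methods are available:

```ts
import { glob } from 'glob'

const ac = new AbortController()
setTimeout(() => ac.abort(), 1000) // cancel the walk after 1s

// `stat: true` lstats every entry (extra syscalls), so each Path
// carries type and mtime information when iterated.
const entries = await glob('**', {
  withFileTypes: true,
  stat: true,
  signal: ac.signal,
})
for (const e of entries) {
  if (e.isDirectory()) console.log('dir:', e.fullpath())
}
```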
But, on Windows, it means that\n * paths will be `/` delimited, and absolute paths will be their full\n * resolved UNC forms, eg instead of `'C:\\\\foo\\\\bar'`, it would return\n * `'//?/C:/foo/bar'`\n */\n posix?: boolean\n\n /**\n * Do not match any children of any matches. For example, the pattern\n * `**\\/foo` would match `a/foo`, but not `a/foo/b/foo` in this mode.\n *\n * This is especially useful for cases like \"find all `node_modules`\n * folders, but not the ones in `node_modules`\".\n *\n * In order to support this, the `Ignore` implementation must support an\n * `add(pattern: string)` method. If using the default `Ignore` class, then\n * this is fine, but if this is set to `false`, and a custom `Ignore` is\n * provided that does not have an `add()` method, then it will throw an\n * error.\n *\n * **Caveat** It *only* ignores matches that would be a descendant of a\n * previous match, and only if that descendant is matched *after* the\n * ancestor is encountered. Since the file system walk happens in\n * indeterminate order, it's possible that a match will already be added\n * before its ancestor, if multiple or braced patterns are used.\n *\n * For example:\n *\n * ```ts\n * const results = await glob([\n * // likely to match first, since it's just a stat\n * 'a/b/c/d/e/f',\n *\n * // this pattern is more complicated! It must to various readdir()\n * // calls and test the results against a regular expression, and that\n * // is certainly going to take a little bit longer.\n * //\n * // So, later on, it encounters a match at 'a/b/c/d/e', but it's too\n * // late to ignore a/b/c/d/e/f, because it's already been emitted.\n * 'a/[bdf]/?/[a-z]/*',\n * ], { includeChildMatches: false })\n * ```\n *\n * It's best to only set this to `false` if you can be reasonably sure that\n * no components of the pattern will potentially match one another's file\n * system descendants, or if the occasional included child entry will not\n * cause problems.\n *\n * @default true\n */\n includeChildMatches?: boolean\n}\n\nexport type GlobOptionsWithFileTypesTrue = GlobOptions & {\n withFileTypes: true\n // string options not relevant if returning Path objects.\n absolute?: undefined\n mark?: undefined\n posix?: undefined\n}\n\nexport type GlobOptionsWithFileTypesFalse = GlobOptions & {\n withFileTypes?: false\n}\n\nexport type GlobOptionsWithFileTypesUnset = GlobOptions & {\n withFileTypes?: undefined\n}\n\nexport type Result =\n Opts extends GlobOptionsWithFileTypesTrue ? Path\n : Opts extends GlobOptionsWithFileTypesFalse ? string\n : Opts extends GlobOptionsWithFileTypesUnset ? string\n : string | Path\nexport type Results = Result[]\n\nexport type FileTypes =\n Opts extends GlobOptionsWithFileTypesTrue ? true\n : Opts extends GlobOptionsWithFileTypesFalse ? false\n : Opts extends GlobOptionsWithFileTypesUnset ? 
false\n : boolean\n\n/**\n * An object that can perform glob pattern traversals.\n */\nexport class Glob implements GlobOptions {\n absolute?: boolean\n cwd: string\n root?: string\n dot: boolean\n dotRelative: boolean\n follow: boolean\n ignore?: string | string[] | IgnoreLike\n magicalBraces: boolean\n mark?: boolean\n matchBase: boolean\n maxDepth: number\n nobrace: boolean\n nocase: boolean\n nodir: boolean\n noext: boolean\n noglobstar: boolean\n pattern: string[]\n platform: NodeJS.Platform\n realpath: boolean\n scurry: PathScurry\n stat: boolean\n signal?: AbortSignal\n windowsPathsNoEscape: boolean\n withFileTypes: FileTypes\n includeChildMatches: boolean\n\n /**\n * The options provided to the constructor.\n */\n opts: Opts\n\n /**\n * An array of parsed immutable {@link Pattern} objects.\n */\n patterns: Pattern[]\n\n /**\n * All options are stored as properties on the `Glob` object.\n *\n * See {@link GlobOptions} for full options descriptions.\n *\n * Note that a previous `Glob` object can be passed as the\n * `GlobOptions` to another `Glob` instantiation to re-use settings\n * and caches with a new pattern.\n *\n * Traversal functions can be called multiple times to run the walk\n * again.\n */\n constructor(pattern: string | string[], opts: Opts) {\n /* c8 ignore start */\n if (!opts) throw new TypeError('glob options required')\n /* c8 ignore stop */\n this.withFileTypes = !!opts.withFileTypes as FileTypes\n this.signal = opts.signal\n this.follow = !!opts.follow\n this.dot = !!opts.dot\n this.dotRelative = !!opts.dotRelative\n this.nodir = !!opts.nodir\n this.mark = !!opts.mark\n if (!opts.cwd) {\n this.cwd = ''\n } else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) {\n opts.cwd = fileURLToPath(opts.cwd)\n }\n this.cwd = opts.cwd || ''\n this.root = opts.root\n this.magicalBraces = !!opts.magicalBraces\n this.nobrace = !!opts.nobrace\n this.noext = !!opts.noext\n this.realpath = !!opts.realpath\n this.absolute = opts.absolute\n this.includeChildMatches = opts.includeChildMatches !== false\n\n this.noglobstar = !!opts.noglobstar\n this.matchBase = !!opts.matchBase\n this.maxDepth =\n typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity\n this.stat = !!opts.stat\n this.ignore = opts.ignore\n\n if (this.withFileTypes && this.absolute !== undefined) {\n throw new Error('cannot set absolute and withFileTypes:true')\n }\n\n if (typeof pattern === 'string') {\n pattern = [pattern]\n }\n\n this.windowsPathsNoEscape =\n !!opts.windowsPathsNoEscape ||\n (opts as { allowWindowsEscape?: boolean }).allowWindowsEscape ===\n false\n\n if (this.windowsPathsNoEscape) {\n pattern = pattern.map(p => p.replace(/\\\\/g, '/'))\n }\n\n if (this.matchBase) {\n if (opts.noglobstar) {\n throw new TypeError('base matching requires globstar')\n }\n pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`))\n }\n\n this.pattern = pattern\n\n this.platform = opts.platform || defaultPlatform\n this.opts = { ...opts, platform: this.platform }\n if (opts.scurry) {\n this.scurry = opts.scurry\n if (\n opts.nocase !== undefined &&\n opts.nocase !== opts.scurry.nocase\n ) {\n throw new Error('nocase option contradicts provided scurry option')\n }\n } else {\n const Scurry =\n opts.platform === 'win32' ? PathScurryWin32\n : opts.platform === 'darwin' ? PathScurryDarwin\n : opts.platform ? 
PathScurryPosix\n : PathScurry\n this.scurry = new Scurry(this.cwd, {\n nocase: opts.nocase,\n fs: opts.fs,\n })\n }\n this.nocase = this.scurry.nocase\n\n // If you do nocase:true on a case-sensitive file system, then\n // we need to use regexps instead of strings for non-magic\n // path portions, because statting `aBc` won't return results\n // for the file `AbC` for example.\n const nocaseMagicOnly =\n this.platform === 'darwin' || this.platform === 'win32'\n\n const mmo: MinimatchOptions = {\n // default nocase based on platform\n ...opts,\n dot: this.dot,\n matchBase: this.matchBase,\n nobrace: this.nobrace,\n nocase: this.nocase,\n nocaseMagicOnly,\n nocomment: true,\n noext: this.noext,\n nonegate: true,\n optimizationLevel: 2,\n platform: this.platform,\n windowsPathsNoEscape: this.windowsPathsNoEscape,\n debug: !!this.opts.debug,\n }\n\n const mms = this.pattern.map(p => new Minimatch(p, mmo))\n const [matchSet, globParts] = mms.reduce(\n (set: [MatchSet, GlobParts], m) => {\n set[0].push(...m.set)\n set[1].push(...m.globParts)\n return set\n },\n [[], []],\n )\n this.patterns = matchSet.map((set, i) => {\n const g = globParts[i]\n /* c8 ignore start */\n if (!g) throw new Error('invalid pattern object')\n /* c8 ignore stop */\n return new Pattern(set, g, 0, this.platform)\n })\n }\n\n /**\n * Returns a Promise that resolves to the results array.\n */\n async walk(): Promise>\n async walk(): Promise<(string | Path)[]> {\n // Walkers always return array of Path objects, so we just have to\n // coerce them into the right shape. It will have already called\n // realpath() if the option was set to do so, so we know that's cached.\n // start out knowing the cwd, at least\n return [\n ...(await new GlobWalker(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity ?\n this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n includeChildMatches: this.includeChildMatches,\n }).walk()),\n ]\n }\n\n /**\n * synchronous {@link Glob.walk}\n */\n walkSync(): Results\n walkSync(): (string | Path)[] {\n return [\n ...new GlobWalker(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity ?\n this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n includeChildMatches: this.includeChildMatches,\n }).walkSync(),\n ]\n }\n\n /**\n * Stream results asynchronously.\n */\n stream(): Minipass, Result>\n stream(): Minipass {\n return new GlobStream(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity ?\n this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n includeChildMatches: this.includeChildMatches,\n }).stream()\n }\n\n /**\n * Stream results synchronously.\n */\n streamSync(): Minipass, Result>\n streamSync(): Minipass {\n return new GlobStream(this.patterns, this.scurry.cwd, {\n ...this.opts,\n maxDepth:\n this.maxDepth !== Infinity ?\n this.maxDepth + this.scurry.cwd.depth()\n : Infinity,\n platform: this.platform,\n nocase: this.nocase,\n includeChildMatches: this.includeChildMatches,\n }).streamSync()\n }\n\n /**\n * Default sync iteration function. Returns a Generator that\n * iterates over the results.\n */\n iterateSync(): Generator, void, void> {\n return this.streamSync()[Symbol.iterator]()\n }\n [Symbol.iterator]() {\n return this.iterateSync()\n }\n\n /**\n * Default async iteration function. 
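[Editorial aside] A sketch of the iteration and cache-reuse behavior described in the constructor docs above (pattern and cwd invented); a `Glob` instance is both sync- and async-iterable via the methods shown:

```ts
import { Glob } from 'glob'

const g = new Glob('**/*.ts', { cwd: '/tmp/project' })
for await (const file of g) {
  console.log(file) // strings, since withFileTypes was not set
}

// Passing a previous Glob as the options re-uses its resolved settings
// and PathScurry directory cache with a new pattern.
const g2 = new Glob('**/*.md', g)
console.log(g2.walkSync())
```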
Returns an AsyncGenerator that\n * iterates over the results.\n */\n iterate(): AsyncGenerator, void, void> {\n return this.stream()[Symbol.asyncIterator]()\n }\n [Symbol.asyncIterator]() {\n return this.iterate()\n }\n}\n"]} \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/has-magic.d.ts b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/has-magic.d.ts new file mode 100644 index 0000000000000000000000000000000000000000..8aec3bd9725175d9c72be14476fea2a117ca8b09 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/has-magic.d.ts @@ -0,0 +1,14 @@ +import { GlobOptions } from './glob.js'; +/** + * Return true if the patterns provided contain any magic glob characters, + * given the options provided. + * + * Brace expansion is not considered "magic" unless the `magicalBraces` option + * is set, as brace expansion just turns one string into an array of strings. + * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and + * `'xby'` both do not contain any magic glob characters, and it's treated the + * same as if you had called it on `['xay', 'xby']`. When `magicalBraces:true` + * is in the options, brace expansion _is_ treated as a pattern having magic. + */ +export declare const hasMagic: (pattern: string | string[], options?: GlobOptions) => boolean; +//# sourceMappingURL=has-magic.d.ts.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/has-magic.d.ts.map b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/has-magic.d.ts.map new file mode 100644 index 0000000000000000000000000000000000000000..b24dd4ec47e0bbc37be06a58f4622cc183b710af --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/has-magic.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"has-magic.d.ts","sourceRoot":"","sources":["../../src/has-magic.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,WAAW,EAAE,MAAM,WAAW,CAAA;AAEvC;;;;;;;;;;GAUG;AACH,eAAO,MAAM,QAAQ,YACV,MAAM,GAAG,MAAM,EAAE,YACjB,WAAW,KACnB,OAQF,CAAA"} \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/has-magic.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/has-magic.js new file mode 100644 index 0000000000000000000000000000000000000000..ba2321ab868d025d56e2ce360c0c02e3e622e6b8 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/has-magic.js @@ -0,0 +1,23 @@ +import { Minimatch } from 'minimatch'; +/** + * Return true if the patterns provided contain any magic glob characters, + * given the options provided. + * + * Brace expansion is not considered "magic" unless the `magicalBraces` option + * is set, as brace expansion just turns one string into an array of strings. + * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and + * `'xby'` both do not contain any magic glob characters, and it's treated the + * same as if you had called it on `['xay', 'xby']`. When `magicalBraces:true` + * is in the options, brace expansion _is_ treated as a pattern having magic. 
+ */ +export const hasMagic = (pattern, options = {}) => { + if (!Array.isArray(pattern)) { + pattern = [pattern]; + } + for (const p of pattern) { + if (new Minimatch(p, options).hasMagic()) + return true; + } + return false; +}; +//# sourceMappingURL=has-magic.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/has-magic.js.map b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/has-magic.js.map new file mode 100644 index 0000000000000000000000000000000000000000..a20f5aa2e0fdb50071ab84db31b1993d2f220253 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/has-magic.js.map @@ -0,0 +1 @@ +{"version":3,"file":"has-magic.js","sourceRoot":"","sources":["../../src/has-magic.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAE,MAAM,WAAW,CAAA;AAGrC;;;;;;;;;;GAUG;AACH,MAAM,CAAC,MAAM,QAAQ,GAAG,CACtB,OAA0B,EAC1B,UAAuB,EAAE,EAChB,EAAE;IACX,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,OAAO,GAAG,CAAC,OAAO,CAAC,CAAA;IACrB,CAAC;IACD,KAAK,MAAM,CAAC,IAAI,OAAO,EAAE,CAAC;QACxB,IAAI,IAAI,SAAS,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,QAAQ,EAAE;YAAE,OAAO,IAAI,CAAA;IACvD,CAAC;IACD,OAAO,KAAK,CAAA;AACd,CAAC,CAAA","sourcesContent":["import { Minimatch } from 'minimatch'\nimport { GlobOptions } from './glob.js'\n\n/**\n * Return true if the patterns provided contain any magic glob characters,\n * given the options provided.\n *\n * Brace expansion is not considered \"magic\" unless the `magicalBraces` option\n * is set, as brace expansion just turns one string into an array of strings.\n * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and\n * `'xby'` both do not contain any magic glob characters, and it's treated the\n * same as if you had called it on `['xay', 'xby']`. 
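[Editorial aside] The brace-expansion caveat above in one illustrative snippet (inputs invented):

```ts
import { hasMagic } from 'glob'

hasMagic('x{a,b}y') // false: expands to the literals 'xay' and 'xby'
hasMagic('x{a,b}y', { magicalBraces: true }) // true: braces count as magic
hasMagic('x*y') // true: '*' is magic regardless of options
```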
When `magicalBraces:true`\n * is in the options, brace expansion _is_ treated as a pattern having magic.\n */\nexport const hasMagic = (\n pattern: string | string[],\n options: GlobOptions = {},\n): boolean => {\n if (!Array.isArray(pattern)) {\n pattern = [pattern]\n }\n for (const p of pattern) {\n if (new Minimatch(p, options).hasMagic()) return true\n }\n return false\n}\n"]} \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/ignore.d.ts b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/ignore.d.ts new file mode 100644 index 0000000000000000000000000000000000000000..1893b16df877c9b50c071ee7b066715f6e58c43e --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/ignore.d.ts @@ -0,0 +1,24 @@ +import { Minimatch, MinimatchOptions } from 'minimatch'; +import { Path } from 'path-scurry'; +import { GlobWalkerOpts } from './walker.js'; +export interface IgnoreLike { + ignored?: (p: Path) => boolean; + childrenIgnored?: (p: Path) => boolean; + add?: (ignore: string) => void; +} +/** + * Class used to process ignored patterns + */ +export declare class Ignore implements IgnoreLike { + relative: Minimatch[]; + relativeChildren: Minimatch[]; + absolute: Minimatch[]; + absoluteChildren: Minimatch[]; + platform: NodeJS.Platform; + mmopts: MinimatchOptions; + constructor(ignored: string[], { nobrace, nocase, noext, noglobstar, platform, }: GlobWalkerOpts); + add(ign: string): void; + ignored(p: Path): boolean; + childrenIgnored(p: Path): boolean; +} +//# sourceMappingURL=ignore.d.ts.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/ignore.d.ts.map b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/ignore.d.ts.map new file mode 100644 index 0000000000000000000000000000000000000000..57d6ab6153d770397a5acb7881ccf52a048dee89 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/ignore.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"ignore.d.ts","sourceRoot":"","sources":["../../src/ignore.ts"],"names":[],"mappings":"AAKA,OAAO,EAAE,SAAS,EAAE,gBAAgB,EAAE,MAAM,WAAW,CAAA;AACvD,OAAO,EAAE,IAAI,EAAE,MAAM,aAAa,CAAA;AAElC,OAAO,EAAE,cAAc,EAAE,MAAM,aAAa,CAAA;AAE5C,MAAM,WAAW,UAAU;IACzB,OAAO,CAAC,EAAE,CAAC,CAAC,EAAE,IAAI,KAAK,OAAO,CAAA;IAC9B,eAAe,CAAC,EAAE,CAAC,CAAC,EAAE,IAAI,KAAK,OAAO,CAAA;IACtC,GAAG,CAAC,EAAE,CAAC,MAAM,EAAE,MAAM,KAAK,IAAI,CAAA;CAC/B;AAWD;;GAEG;AACH,qBAAa,MAAO,YAAW,UAAU;IACvC,QAAQ,EAAE,SAAS,EAAE,CAAA;IACrB,gBAAgB,EAAE,SAAS,EAAE,CAAA;IAC7B,QAAQ,EAAE,SAAS,EAAE,CAAA;IACrB,gBAAgB,EAAE,SAAS,EAAE,CAAA;IAC7B,QAAQ,EAAE,MAAM,CAAC,QAAQ,CAAA;IACzB,MAAM,EAAE,gBAAgB,CAAA;gBAGtB,OAAO,EAAE,MAAM,EAAE,EACjB,EACE,OAAO,EACP,MAAM,EACN,KAAK,EACL,UAAU,EACV,QAA0B,GAC3B,EAAE,cAAc;IAqBnB,GAAG,CAAC,GAAG,EAAE,MAAM;IAyCf,OAAO,CAAC,CAAC,EAAE,IAAI,GAAG,OAAO;IAczB,eAAe,CAAC,CAAC,EAAE,IAAI,GAAG,OAAO;CAWlC"} \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/ignore.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/ignore.js new file mode 100644 index 0000000000000000000000000000000000000000..539c4a4fdebc4b036e5ca02060eae76572e5622d --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/ignore.js @@ -0,0 +1,115 @@ +// give it a pattern, and it'll be 
able to tell you if +// a given path should be ignored. +// Ignoring a path ignores its children if the pattern ends in /** +// Ignores are always parsed in dot:true mode +import { Minimatch } from 'minimatch'; +import { Pattern } from './pattern.js'; +const defaultPlatform = (typeof process === 'object' && + process && + typeof process.platform === 'string') ? + process.platform + : 'linux'; +/** + * Class used to process ignored patterns + */ +export class Ignore { + relative; + relativeChildren; + absolute; + absoluteChildren; + platform; + mmopts; + constructor(ignored, { nobrace, nocase, noext, noglobstar, platform = defaultPlatform, }) { + this.relative = []; + this.absolute = []; + this.relativeChildren = []; + this.absoluteChildren = []; + this.platform = platform; + this.mmopts = { + dot: true, + nobrace, + nocase, + noext, + noglobstar, + optimizationLevel: 2, + platform, + nocomment: true, + nonegate: true, + }; + for (const ign of ignored) + this.add(ign); + } + add(ign) { + // this is a little weird, but it gives us a clean set of optimized + // minimatch matchers, without getting tripped up if one of them + // ends in /** inside a brace section, and it's only inefficient at + // the start of the walk, not along it. + // It'd be nice if the Pattern class just had a .test() method, but + // handling globstars is a bit of a pita, and that code already lives + // in minimatch anyway. + // Another way would be if maybe Minimatch could take its set/globParts + // as an option, and then we could at least just use Pattern to test + // for absolute-ness. + // Yet another way, Minimatch could take an array of glob strings, and + // a cwd option, and do the right thing. + const mm = new Minimatch(ign, this.mmopts); + for (let i = 0; i < mm.set.length; i++) { + const parsed = mm.set[i]; + const globParts = mm.globParts[i]; + /* c8 ignore start */ + if (!parsed || !globParts) { + throw new Error('invalid pattern object'); + } + // strip off leading ./ portions + // https://github.com/isaacs/node-glob/issues/570 + while (parsed[0] === '.' 
&& globParts[0] === '.') { + parsed.shift(); + globParts.shift(); + } + /* c8 ignore stop */ + const p = new Pattern(parsed, globParts, 0, this.platform); + const m = new Minimatch(p.globString(), this.mmopts); + const children = globParts[globParts.length - 1] === '**'; + const absolute = p.isAbsolute(); + if (absolute) + this.absolute.push(m); + else + this.relative.push(m); + if (children) { + if (absolute) + this.absoluteChildren.push(m); + else + this.relativeChildren.push(m); + } + } + } + ignored(p) { + const fullpath = p.fullpath(); + const fullpaths = `${fullpath}/`; + const relative = p.relative() || '.'; + const relatives = `${relative}/`; + for (const m of this.relative) { + if (m.match(relative) || m.match(relatives)) + return true; + } + for (const m of this.absolute) { + if (m.match(fullpath) || m.match(fullpaths)) + return true; + } + return false; + } + childrenIgnored(p) { + const fullpath = p.fullpath() + '/'; + const relative = (p.relative() || '.') + '/'; + for (const m of this.relativeChildren) { + if (m.match(relative)) + return true; + } + for (const m of this.absoluteChildren) { + if (m.match(fullpath)) + return true; + } + return false; + } +} +//# sourceMappingURL=ignore.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/ignore.js.map b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/ignore.js.map new file mode 100644 index 0000000000000000000000000000000000000000..2cddba2ecfe9f605f05b45ad1c23663de46945b8 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/ignore.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ignore.js","sourceRoot":"","sources":["../../src/ignore.ts"],"names":[],"mappings":"AAAA,sDAAsD;AACtD,kCAAkC;AAClC,kEAAkE;AAClE,6CAA6C;AAE7C,OAAO,EAAE,SAAS,EAAoB,MAAM,WAAW,CAAA;AAEvD,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAA;AAStC,MAAM,eAAe,GACnB,CACE,OAAO,OAAO,KAAK,QAAQ;IAC3B,OAAO;IACP,OAAO,OAAO,CAAC,QAAQ,KAAK,QAAQ,CACrC,CAAC,CAAC;IACD,OAAO,CAAC,QAAQ;IAClB,CAAC,CAAC,OAAO,CAAA;AAEX;;GAEG;AACH,MAAM,OAAO,MAAM;IACjB,QAAQ,CAAa;IACrB,gBAAgB,CAAa;IAC7B,QAAQ,CAAa;IACrB,gBAAgB,CAAa;IAC7B,QAAQ,CAAiB;IACzB,MAAM,CAAkB;IAExB,YACE,OAAiB,EACjB,EACE,OAAO,EACP,MAAM,EACN,KAAK,EACL,UAAU,EACV,QAAQ,GAAG,eAAe,GACX;QAEjB,IAAI,CAAC,QAAQ,GAAG,EAAE,CAAA;QAClB,IAAI,CAAC,QAAQ,GAAG,EAAE,CAAA;QAClB,IAAI,CAAC,gBAAgB,GAAG,EAAE,CAAA;QAC1B,IAAI,CAAC,gBAAgB,GAAG,EAAE,CAAA;QAC1B,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;QACxB,IAAI,CAAC,MAAM,GAAG;YACZ,GAAG,EAAE,IAAI;YACT,OAAO;YACP,MAAM;YACN,KAAK;YACL,UAAU;YACV,iBAAiB,EAAE,CAAC;YACpB,QAAQ;YACR,SAAS,EAAE,IAAI;YACf,QAAQ,EAAE,IAAI;SACf,CAAA;QACD,KAAK,MAAM,GAAG,IAAI,OAAO;YAAE,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,CAAA;IAC1C,CAAC;IAED,GAAG,CAAC,GAAW;QACb,mEAAmE;QACnE,gEAAgE;QAChE,mEAAmE;QACnE,uCAAuC;QACvC,mEAAmE;QACnE,qEAAqE;QACrE,uBAAuB;QACvB,uEAAuE;QACvE,oEAAoE;QACpE,qBAAqB;QACrB,sEAAsE;QACtE,wCAAwC;QACxC,MAAM,EAAE,GAAG,IAAI,SAAS,CAAC,GAAG,EAAE,IAAI,CAAC,MAAM,CAAC,CAAA;QAC1C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,EAAE,CAAC,GAAG,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;YACvC,MAAM,MAAM,GAAG,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;YACxB,MAAM,SAAS,GAAG,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC,CAAA;YACjC,qBAAqB;YACrB,IAAI,CAAC,MAAM,IAAI,CAAC,SAAS,EAAE,CAAC;gBAC1B,MAAM,IAAI,KAAK,CAAC,wBAAwB,CAAC,CAAA;YAC3C,CAAC;YACD,gCAAgC;YAChC,iDAAiD;YACjD,OAAO,MAAM,CAAC,CAAC,CAAC,KAAK,GAAG,IAAI,SAAS,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE,CAAC;gBACjD,MAAM,CAAC,KAAK,EAAE,CAAA;gBACd,SAAS,CAAC,KAAK,EAAE,CAAA;YACnB,CAAC;YACD,oBAAoB;YACpB,MAAM,CAAC,GAAG,IAAI,OAAO,C
AAC,MAAM,EAAE,SAAS,EAAE,CAAC,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAA;YAC1D,MAAM,CAAC,GAAG,IAAI,SAAS,CAAC,CAAC,CAAC,UAAU,EAAE,EAAE,IAAI,CAAC,MAAM,CAAC,CAAA;YACpD,MAAM,QAAQ,GAAG,SAAS,CAAC,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC,KAAK,IAAI,CAAA;YACzD,MAAM,QAAQ,GAAG,CAAC,CAAC,UAAU,EAAE,CAAA;YAC/B,IAAI,QAAQ;gBAAE,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;;gBAC9B,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;YAC1B,IAAI,QAAQ,EAAE,CAAC;gBACb,IAAI,QAAQ;oBAAE,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;;oBACtC,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;YACpC,CAAC;QACH,CAAC;IACH,CAAC;IAED,OAAO,CAAC,CAAO;QACb,MAAM,QAAQ,GAAG,CAAC,CAAC,QAAQ,EAAE,CAAA;QAC7B,MAAM,SAAS,GAAG,GAAG,QAAQ,GAAG,CAAA;QAChC,MAAM,QAAQ,GAAG,CAAC,CAAC,QAAQ,EAAE,IAAI,GAAG,CAAA;QACpC,MAAM,SAAS,GAAG,GAAG,QAAQ,GAAG,CAAA;QAChC,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC9B,IAAI,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,SAAS,CAAC;gBAAE,OAAO,IAAI,CAAA;QAC1D,CAAC;QACD,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC9B,IAAI,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,SAAS,CAAC;gBAAE,OAAO,IAAI,CAAA;QAC1D,CAAC;QACD,OAAO,KAAK,CAAA;IACd,CAAC;IAED,eAAe,CAAC,CAAO;QACrB,MAAM,QAAQ,GAAG,CAAC,CAAC,QAAQ,EAAE,GAAG,GAAG,CAAA;QACnC,MAAM,QAAQ,GAAG,CAAC,CAAC,CAAC,QAAQ,EAAE,IAAI,GAAG,CAAC,GAAG,GAAG,CAAA;QAC5C,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,gBAAgB,EAAE,CAAC;YACtC,IAAI,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC;gBAAE,OAAO,IAAI,CAAA;QACpC,CAAC;QACD,KAAK,MAAM,CAAC,IAAI,IAAI,CAAC,gBAAgB,EAAE,CAAC;YACtC,IAAI,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC;gBAAE,OAAO,IAAI,CAAA;QACpC,CAAC;QACD,OAAO,KAAK,CAAA;IACd,CAAC;CACF","sourcesContent":["// give it a pattern, and it'll be able to tell you if\n// a given path should be ignored.\n// Ignoring a path ignores its children if the pattern ends in /**\n// Ignores are always parsed in dot:true mode\n\nimport { Minimatch, MinimatchOptions } from 'minimatch'\nimport { Path } from 'path-scurry'\nimport { Pattern } from './pattern.js'\nimport { GlobWalkerOpts } from './walker.js'\n\nexport interface IgnoreLike {\n ignored?: (p: Path) => boolean\n childrenIgnored?: (p: Path) => boolean\n add?: (ignore: string) => void\n}\n\nconst defaultPlatform: NodeJS.Platform =\n (\n typeof process === 'object' &&\n process &&\n typeof process.platform === 'string'\n ) ?\n process.platform\n : 'linux'\n\n/**\n * Class used to process ignored patterns\n */\nexport class Ignore implements IgnoreLike {\n relative: Minimatch[]\n relativeChildren: Minimatch[]\n absolute: Minimatch[]\n absoluteChildren: Minimatch[]\n platform: NodeJS.Platform\n mmopts: MinimatchOptions\n\n constructor(\n ignored: string[],\n {\n nobrace,\n nocase,\n noext,\n noglobstar,\n platform = defaultPlatform,\n }: GlobWalkerOpts,\n ) {\n this.relative = []\n this.absolute = []\n this.relativeChildren = []\n this.absoluteChildren = []\n this.platform = platform\n this.mmopts = {\n dot: true,\n nobrace,\n nocase,\n noext,\n noglobstar,\n optimizationLevel: 2,\n platform,\n nocomment: true,\n nonegate: true,\n }\n for (const ign of ignored) this.add(ign)\n }\n\n add(ign: string) {\n // this is a little weird, but it gives us a clean set of optimized\n // minimatch matchers, without getting tripped up if one of them\n // ends in /** inside a brace section, and it's only inefficient at\n // the start of the walk, not along it.\n // It'd be nice if the Pattern class just had a .test() method, but\n // handling globstars is a bit of a pita, and that code already lives\n // in minimatch anyway.\n // Another way would be if maybe Minimatch could take its set/globParts\n // as an option, and then 
we could at least just use Pattern to test\n // for absolute-ness.\n // Yet another way, Minimatch could take an array of glob strings, and\n // a cwd option, and do the right thing.\n const mm = new Minimatch(ign, this.mmopts)\n for (let i = 0; i < mm.set.length; i++) {\n const parsed = mm.set[i]\n const globParts = mm.globParts[i]\n /* c8 ignore start */\n if (!parsed || !globParts) {\n throw new Error('invalid pattern object')\n }\n // strip off leading ./ portions\n // https://github.com/isaacs/node-glob/issues/570\n while (parsed[0] === '.' && globParts[0] === '.') {\n parsed.shift()\n globParts.shift()\n }\n /* c8 ignore stop */\n const p = new Pattern(parsed, globParts, 0, this.platform)\n const m = new Minimatch(p.globString(), this.mmopts)\n const children = globParts[globParts.length - 1] === '**'\n const absolute = p.isAbsolute()\n if (absolute) this.absolute.push(m)\n else this.relative.push(m)\n if (children) {\n if (absolute) this.absoluteChildren.push(m)\n else this.relativeChildren.push(m)\n }\n }\n }\n\n ignored(p: Path): boolean {\n const fullpath = p.fullpath()\n const fullpaths = `${fullpath}/`\n const relative = p.relative() || '.'\n const relatives = `${relative}/`\n for (const m of this.relative) {\n if (m.match(relative) || m.match(relatives)) return true\n }\n for (const m of this.absolute) {\n if (m.match(fullpath) || m.match(fullpaths)) return true\n }\n return false\n }\n\n childrenIgnored(p: Path): boolean {\n const fullpath = p.fullpath() + '/'\n const relative = (p.relative() || '.') + '/'\n for (const m of this.relativeChildren) {\n if (m.match(relative)) return true\n }\n for (const m of this.absoluteChildren) {\n if (m.match(fullpath)) return true\n }\n return false\n }\n}\n"]} \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/index.d.ts b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/index.d.ts new file mode 100644 index 0000000000000000000000000000000000000000..9c326ddc895b6184475510d700a583732535a635 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/index.d.ts @@ -0,0 +1,97 @@ +import { Minipass } from 'minipass'; +import { Path } from 'path-scurry'; +import type { GlobOptions, GlobOptionsWithFileTypesFalse, GlobOptionsWithFileTypesTrue, GlobOptionsWithFileTypesUnset } from './glob.js'; +import { Glob } from './glob.js'; +export { escape, unescape } from 'minimatch'; +export type { FSOption, Path, WalkOptions, WalkOptionsWithFileTypesTrue, WalkOptionsWithFileTypesUnset, } from 'path-scurry'; +export { Glob } from './glob.js'; +export type { GlobOptions, GlobOptionsWithFileTypesFalse, GlobOptionsWithFileTypesTrue, GlobOptionsWithFileTypesUnset, } from './glob.js'; +export { hasMagic } from './has-magic.js'; +export { Ignore } from './ignore.js'; +export type { IgnoreLike } from './ignore.js'; +export type { MatchStream } from './walker.js'; +/** + * Syncronous form of {@link globStream}. Will read all the matches as fast as + * you consume them, even all in a single tick if you consume them immediately, + * but will still respond to backpressure if they're not consumed immediately. 
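[Editorial aside] A hedged usage sketch for the streaming forms whose declarations follow (patterns invented). Both return Minipass streams, so the sync one can also be consumed with a plain for...of loop:

```ts
import { globStream, globStreamSync } from 'glob'

// Async stream: results are emitted as the walk proceeds.
const s = globStream('**/*.js', { nodir: true })
s.on('data', file => console.log('found', file))
s.on('end', () => console.log('done'))

// Sync stream: matching happens as fast as it is consumed, but
// backpressure is still honored if reads are deferred.
for (const file of globStreamSync('**/*.json')) {
  console.log(file)
}
```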
+ */ +export declare function globStreamSync(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): Minipass; +export declare function globStreamSync(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): Minipass; +export declare function globStreamSync(pattern: string | string[], options: GlobOptionsWithFileTypesUnset): Minipass; +export declare function globStreamSync(pattern: string | string[], options: GlobOptions): Minipass | Minipass; +/** + * Return a stream that emits all the strings or `Path` objects and + * then emits `end` when completed. + */ +export declare function globStream(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): Minipass; +export declare function globStream(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): Minipass; +export declare function globStream(pattern: string | string[], options?: GlobOptionsWithFileTypesUnset | undefined): Minipass; +export declare function globStream(pattern: string | string[], options: GlobOptions): Minipass | Minipass; +/** + * Synchronous form of {@link glob} + */ +export declare function globSync(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): string[]; +export declare function globSync(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): Path[]; +export declare function globSync(pattern: string | string[], options?: GlobOptionsWithFileTypesUnset | undefined): string[]; +export declare function globSync(pattern: string | string[], options: GlobOptions): Path[] | string[]; +/** + * Perform an asynchronous glob search for the pattern(s) specified. Returns + * [Path](https://isaacs.github.io/path-scurry/classes/PathBase) objects if the + * {@link withFileTypes} option is set to `true`. See {@link GlobOptions} for + * full option descriptions. + */ +declare function glob_(pattern: string | string[], options?: GlobOptionsWithFileTypesUnset | undefined): Promise; +declare function glob_(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): Promise; +declare function glob_(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): Promise; +declare function glob_(pattern: string | string[], options: GlobOptions): Promise; +/** + * Return a sync iterator for walking glob pattern matches. + */ +export declare function globIterateSync(pattern: string | string[], options?: GlobOptionsWithFileTypesUnset | undefined): Generator; +export declare function globIterateSync(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): Generator; +export declare function globIterateSync(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): Generator; +export declare function globIterateSync(pattern: string | string[], options: GlobOptions): Generator | Generator; +/** + * Return an async iterator for walking glob pattern matches. 
+ */ +export declare function globIterate(pattern: string | string[], options?: GlobOptionsWithFileTypesUnset | undefined): AsyncGenerator; +export declare function globIterate(pattern: string | string[], options: GlobOptionsWithFileTypesTrue): AsyncGenerator; +export declare function globIterate(pattern: string | string[], options: GlobOptionsWithFileTypesFalse): AsyncGenerator; +export declare function globIterate(pattern: string | string[], options: GlobOptions): AsyncGenerator | AsyncGenerator; +export declare const streamSync: typeof globStreamSync; +export declare const stream: typeof globStream & { + sync: typeof globStreamSync; +}; +export declare const iterateSync: typeof globIterateSync; +export declare const iterate: typeof globIterate & { + sync: typeof globIterateSync; +}; +export declare const sync: typeof globSync & { + stream: typeof globStreamSync; + iterate: typeof globIterateSync; +}; +export declare const glob: typeof glob_ & { + glob: typeof glob_; + globSync: typeof globSync; + sync: typeof globSync & { + stream: typeof globStreamSync; + iterate: typeof globIterateSync; + }; + globStream: typeof globStream; + stream: typeof globStream & { + sync: typeof globStreamSync; + }; + globStreamSync: typeof globStreamSync; + streamSync: typeof globStreamSync; + globIterate: typeof globIterate; + iterate: typeof globIterate & { + sync: typeof globIterateSync; + }; + globIterateSync: typeof globIterateSync; + iterateSync: typeof globIterateSync; + Glob: typeof Glob; + hasMagic: (pattern: string | string[], options?: GlobOptions) => boolean; + escape: (s: string, { windowsPathsNoEscape, }?: Pick) => string; + unescape: (s: string, { windowsPathsNoEscape, }?: Pick) => string; +}; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/index.d.ts.map b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/index.d.ts.map new file mode 100644 index 0000000000000000000000000000000000000000..5fb32252b63747526a971d82bf6ab2ee61b53631 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/index.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AACnC,OAAO,EAAE,IAAI,EAAE,MAAM,aAAa,CAAA;AAClC,OAAO,KAAK,EACV,WAAW,EACX,6BAA6B,EAC7B,4BAA4B,EAC5B,6BAA6B,EAC9B,MAAM,WAAW,CAAA;AAClB,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAA;AAGhC,OAAO,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,WAAW,CAAA;AAC5C,YAAY,EACV,QAAQ,EACR,IAAI,EACJ,WAAW,EACX,4BAA4B,EAC5B,6BAA6B,GAC9B,MAAM,aAAa,CAAA;AACpB,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAA;AAChC,YAAY,EACV,WAAW,EACX,6BAA6B,EAC7B,4BAA4B,EAC5B,6BAA6B,GAC9B,MAAM,WAAW,CAAA;AAClB,OAAO,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAA;AACzC,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AACpC,YAAY,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AAC7C,YAAY,EAAE,WAAW,EAAE,MAAM,aAAa,CAAA;AAE9C;;;;GAIG;AACH,wBAAgB,cAAc,CAC5B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;AACvB,wBAAgB,cAAc,CAC5B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAC3B,wBAAgB,cAAc,CAC5B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAC3B,wBAAgB,cAAc,CAC5B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAQlD;;;GAGG;AACH,wBAAgB,UAAU,CACxB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAC3B,wBAAgB,UAAU,CACxB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;AACvB,wBAAgB,UAAU,CACxB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,GAClD,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAC3B,wBAAgB,UAAU,CACxB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;AAQlD;;GAEG;AACH,wBAAgB,QAAQ,CACtB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,MAAM,EAAE,CAAA;AACX,wBAAgB,QAAQ,CACtB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,IAAI,EAAE,CAAA;AACT,wBAAgB,QAAQ,CACtB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,GAClD,MAAM,EAAE,CAAA;AACX,wBAAgB,QAAQ,CACtB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,IAAI,EAAE,GAAG,MAAM,EAAE,CAAA;AAQpB;;;;;GAKG;AACH,iBAAe,KAAK,CAClB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,GAClD,OAAO,CAAC,MAAM,EAAE,CAAC,CAAA;AACpB,iBAAe,KAAK,CAClB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,OAAO,CAAC,IAAI,EAAE,CAAC,CAAA;AAClB,iBAAe,KAAK,CAClB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,OAAO,CAAC,MAAM,EAAE,CAAC,CAAA;AACpB,iBAAe,KAAK,CAClB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,OAAO,CAAC,IAAI,EAAE,GAAG,MAAM,EAAE,CAAC,CAAA;AAQ7B;;GAEG;AACH,wBAAgB,eAAe,CAC7B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,GAClD,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AAChC,wBAAgB,eAAe,CAC7B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,SAAS,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AAC9B,wBAAgB,eAAe,CAC7B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AAChC,wBAAgB,eAAe,CAC7B,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,SAAS,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AAQ9D;;GAEG;AACH,wBAAgB,WAAW,CACzB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,CAAC,EAAE,6BAA6B,GAAG,SAAS,GAClD,cAAc,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AACrC,wBAAgB,WAAW,CACzB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,4BAA4B,GACpC,cAAc,CAAC,IAAI,EAAE,IAAI,EAAE,I
AAI,CAAC,CAAA;AACnC,wBAAgB,WAAW,CACzB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,6BAA6B,GACrC,cAAc,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AACrC,wBAAgB,WAAW,CACzB,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAC1B,OAAO,EAAE,WAAW,GACnB,cAAc,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,GAAG,cAAc,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;AASxE,eAAO,MAAM,UAAU,uBAAiB,CAAA;AACxC,eAAO,MAAM,MAAM;;CAAsD,CAAA;AACzE,eAAO,MAAM,WAAW,wBAAkB,CAAA;AAC1C,eAAO,MAAM,OAAO;;CAElB,CAAA;AACF,eAAO,MAAM,IAAI;;;CAGf,CAAA;AAEF,eAAO,MAAM,IAAI;;;;;;;;;;;;;;;;;;;;;;;CAgBf,CAAA"} \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/index.js new file mode 100644 index 0000000000000000000000000000000000000000..e15c1f9c4cb03257181652847417840688521cb2 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/index.js @@ -0,0 +1,55 @@ +import { escape, unescape } from 'minimatch'; +import { Glob } from './glob.js'; +import { hasMagic } from './has-magic.js'; +export { escape, unescape } from 'minimatch'; +export { Glob } from './glob.js'; +export { hasMagic } from './has-magic.js'; +export { Ignore } from './ignore.js'; +export function globStreamSync(pattern, options = {}) { + return new Glob(pattern, options).streamSync(); +} +export function globStream(pattern, options = {}) { + return new Glob(pattern, options).stream(); +} +export function globSync(pattern, options = {}) { + return new Glob(pattern, options).walkSync(); +} +async function glob_(pattern, options = {}) { + return new Glob(pattern, options).walk(); +} +export function globIterateSync(pattern, options = {}) { + return new Glob(pattern, options).iterateSync(); +} +export function globIterate(pattern, options = {}) { + return new Glob(pattern, options).iterate(); +} +// aliases: glob.sync.stream() glob.stream.sync() glob.sync() etc +export const streamSync = globStreamSync; +export const stream = Object.assign(globStream, { sync: globStreamSync }); +export const iterateSync = globIterateSync; +export const iterate = Object.assign(globIterate, { + sync: globIterateSync, +}); +export const sync = Object.assign(globSync, { + stream: globStreamSync, + iterate: globIterateSync, +}); +export const glob = Object.assign(glob_, { + glob: glob_, + globSync, + sync, + globStream, + stream, + globStreamSync, + streamSync, + globIterate, + iterate, + globIterateSync, + iterateSync, + Glob, + hasMagic, + escape, + unescape, +}); +glob.glob = glob; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/index.js.map b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/index.js.map new file mode 100644 index 0000000000000000000000000000000000000000..a4f93dd0c1d87d292b2ce821897926fafeb582d4 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/index.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,WAAW,CAAA;AAS5C,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAA;AAChC,OAAO,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAA;AAEzC,OAAO,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,WAAW,CAAA;AAQ5C,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAA;AAOhC,OAAO,EAAE,QAAQ,EAAE,MAAM,gBAAgB,CAAA;AACzC,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AAyBpC,MAAM,UAAU,cAAc,CAC5B,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,UAAU,EAAE,CAAA;AAChD,CAAC;AAsBD,MAAM,UAAU,UAAU,CACxB,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,MAAM,EAAE,CAAA;AAC5C,CAAC;AAqBD,MAAM,UAAU,QAAQ,CACtB,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,QAAQ,EAAE,CAAA;AAC9C,CAAC;AAwBD,KAAK,UAAU,KAAK,CAClB,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,IAAI,EAAE,CAAA;AAC1C,CAAC;AAqBD,MAAM,UAAU,eAAe,CAC7B,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,WAAW,EAAE,CAAA;AACjD,CAAC;AAqBD,MAAM,UAAU,WAAW,CACzB,OAA0B,EAC1B,UAAuB,EAAE;IAEzB,OAAO,IAAI,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,OAAO,EAAE,CAAA;AAC7C,CAAC;AAED,iEAAiE;AACjE,MAAM,CAAC,MAAM,UAAU,GAAG,cAAc,CAAA;AACxC,MAAM,CAAC,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,IAAI,EAAE,cAAc,EAAE,CAAC,CAAA;AACzE,MAAM,CAAC,MAAM,WAAW,GAAG,eAAe,CAAA;AAC1C,MAAM,CAAC,MAAM,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,WAAW,EAAE;IAChD,IAAI,EAAE,eAAe;CACtB,CAAC,CAAA;AACF,MAAM,CAAC,MAAM,IAAI,GAAG,MAAM,CAAC,MAAM,CAAC,QAAQ,EAAE;IAC1C,MAAM,EAAE,cAAc;IACtB,OAAO,EAAE,eAAe;CACzB,CAAC,CAAA;AAEF,MAAM,CAAC,MAAM,IAAI,GAAG,MAAM,CAAC,MAAM,CAAC,KAAK,EAAE;IACvC,IAAI,EAAE,KAAK;IACX,QAAQ;IACR,IAAI;IACJ,UAAU;IACV,MAAM;IACN,cAAc;IACd,UAAU;IACV,WAAW;IACX,OAAO;IACP,eAAe;IACf,WAAW;IACX,IAAI;IACJ,QAAQ;IACR,MAAM;IACN,QAAQ;CACT,CAAC,CAAA;AACF,IAAI,CAAC,IAAI,GAAG,IAAI,CAAA","sourcesContent":["import { escape, unescape } from 'minimatch'\nimport { Minipass } from 'minipass'\nimport { Path } from 'path-scurry'\nimport type {\n GlobOptions,\n GlobOptionsWithFileTypesFalse,\n GlobOptionsWithFileTypesTrue,\n GlobOptionsWithFileTypesUnset,\n} from './glob.js'\nimport { Glob } from './glob.js'\nimport { hasMagic } from './has-magic.js'\n\nexport { escape, unescape } from 'minimatch'\nexport type {\n FSOption,\n Path,\n WalkOptions,\n WalkOptionsWithFileTypesTrue,\n WalkOptionsWithFileTypesUnset,\n} from 'path-scurry'\nexport { Glob } from './glob.js'\nexport type {\n GlobOptions,\n GlobOptionsWithFileTypesFalse,\n GlobOptionsWithFileTypesTrue,\n GlobOptionsWithFileTypesUnset,\n} from './glob.js'\nexport { hasMagic } from './has-magic.js'\nexport { Ignore } from './ignore.js'\nexport type { IgnoreLike } from './ignore.js'\nexport type { MatchStream } from './walker.js'\n\n/**\n * Syncronous form of {@link globStream}. 
Will read all the matches as fast as\n * you consume them, even all in a single tick if you consume them immediately,\n * but will still respond to backpressure if they're not consumed immediately.\n */\nexport function globStreamSync(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesTrue,\n): Minipass\nexport function globStreamSync(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesFalse,\n): Minipass\nexport function globStreamSync(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesUnset,\n): Minipass\nexport function globStreamSync(\n pattern: string | string[],\n options: GlobOptions,\n): Minipass | Minipass\nexport function globStreamSync(\n pattern: string | string[],\n options: GlobOptions = {},\n) {\n return new Glob(pattern, options).streamSync()\n}\n\n/**\n * Return a stream that emits all the strings or `Path` objects and\n * then emits `end` when completed.\n */\nexport function globStream(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesFalse,\n): Minipass\nexport function globStream(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesTrue,\n): Minipass\nexport function globStream(\n pattern: string | string[],\n options?: GlobOptionsWithFileTypesUnset | undefined,\n): Minipass\nexport function globStream(\n pattern: string | string[],\n options: GlobOptions,\n): Minipass | Minipass\nexport function globStream(\n pattern: string | string[],\n options: GlobOptions = {},\n) {\n return new Glob(pattern, options).stream()\n}\n\n/**\n * Synchronous form of {@link glob}\n */\nexport function globSync(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesFalse,\n): string[]\nexport function globSync(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesTrue,\n): Path[]\nexport function globSync(\n pattern: string | string[],\n options?: GlobOptionsWithFileTypesUnset | undefined,\n): string[]\nexport function globSync(\n pattern: string | string[],\n options: GlobOptions,\n): Path[] | string[]\nexport function globSync(\n pattern: string | string[],\n options: GlobOptions = {},\n) {\n return new Glob(pattern, options).walkSync()\n}\n\n/**\n * Perform an asynchronous glob search for the pattern(s) specified. Returns\n * [Path](https://isaacs.github.io/path-scurry/classes/PathBase) objects if the\n * {@link withFileTypes} option is set to `true`. 
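[Editorial aside] The overload sets above exist so the result type tracks `withFileTypes` at the call site; a small illustrative check (patterns invented):

```ts
import { glob } from 'glob'
import type { Path } from 'path-scurry'

const asStrings: string[] = await glob('*') // options unset -> string[]
const asPaths: Path[] = await glob('*', { withFileTypes: true }) // -> Path[]
```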
See {@link GlobOptions} for\n * full option descriptions.\n */\nasync function glob_(\n pattern: string | string[],\n options?: GlobOptionsWithFileTypesUnset | undefined,\n): Promise\nasync function glob_(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesTrue,\n): Promise\nasync function glob_(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesFalse,\n): Promise\nasync function glob_(\n pattern: string | string[],\n options: GlobOptions,\n): Promise\nasync function glob_(\n pattern: string | string[],\n options: GlobOptions = {},\n) {\n return new Glob(pattern, options).walk()\n}\n\n/**\n * Return a sync iterator for walking glob pattern matches.\n */\nexport function globIterateSync(\n pattern: string | string[],\n options?: GlobOptionsWithFileTypesUnset | undefined,\n): Generator\nexport function globIterateSync(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesTrue,\n): Generator\nexport function globIterateSync(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesFalse,\n): Generator\nexport function globIterateSync(\n pattern: string | string[],\n options: GlobOptions,\n): Generator | Generator\nexport function globIterateSync(\n pattern: string | string[],\n options: GlobOptions = {},\n) {\n return new Glob(pattern, options).iterateSync()\n}\n\n/**\n * Return an async iterator for walking glob pattern matches.\n */\nexport function globIterate(\n pattern: string | string[],\n options?: GlobOptionsWithFileTypesUnset | undefined,\n): AsyncGenerator\nexport function globIterate(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesTrue,\n): AsyncGenerator\nexport function globIterate(\n pattern: string | string[],\n options: GlobOptionsWithFileTypesFalse,\n): AsyncGenerator\nexport function globIterate(\n pattern: string | string[],\n options: GlobOptions,\n): AsyncGenerator | AsyncGenerator\nexport function globIterate(\n pattern: string | string[],\n options: GlobOptions = {},\n) {\n return new Glob(pattern, options).iterate()\n}\n\n// aliases: glob.sync.stream() glob.stream.sync() glob.sync() etc\nexport const streamSync = globStreamSync\nexport const stream = Object.assign(globStream, { sync: globStreamSync })\nexport const iterateSync = globIterateSync\nexport const iterate = Object.assign(globIterate, {\n sync: globIterateSync,\n})\nexport const sync = Object.assign(globSync, {\n stream: globStreamSync,\n iterate: globIterateSync,\n})\n\nexport const glob = Object.assign(glob_, {\n glob: glob_,\n globSync,\n sync,\n globStream,\n stream,\n globStreamSync,\n streamSync,\n globIterate,\n iterate,\n globIterateSync,\n iterateSync,\n Glob,\n hasMagic,\n escape,\n unescape,\n})\nglob.glob = glob\n"]} \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/package.json new file mode 100644 index 0000000000000000000000000000000000000000..3dbc1ca591c0557e35b6004aeba250e6a70b56e3 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/pattern.d.ts b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/pattern.d.ts new file mode 100644 index 0000000000000000000000000000000000000000..9636df3b54df2912790cdb6d4551c3ebe6e4d42e 
--- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/pattern.d.ts @@ -0,0 +1,76 @@ +import { GLOBSTAR } from 'minimatch'; +export type MMPattern = string | RegExp | typeof GLOBSTAR; +export type PatternList = [p: MMPattern, ...rest: MMPattern[]]; +export type UNCPatternList = [ + p0: '', + p1: '', + p2: string, + p3: string, + ...rest: MMPattern[] +]; +export type DrivePatternList = [p0: string, ...rest: MMPattern[]]; +export type AbsolutePatternList = [p0: '', ...rest: MMPattern[]]; +export type GlobList = [p: string, ...rest: string[]]; +/** + * An immutable-ish view on an array of glob parts and their parsed + * results + */ +export declare class Pattern { + #private; + readonly length: number; + constructor(patternList: MMPattern[], globList: string[], index: number, platform: NodeJS.Platform); + /** + * The first entry in the parsed list of patterns + */ + pattern(): MMPattern; + /** + * true of if pattern() returns a string + */ + isString(): boolean; + /** + * true of if pattern() returns GLOBSTAR + */ + isGlobstar(): boolean; + /** + * true if pattern() returns a regexp + */ + isRegExp(): boolean; + /** + * The /-joined set of glob parts that make up this pattern + */ + globString(): string; + /** + * true if there are more pattern parts after this one + */ + hasMore(): boolean; + /** + * The rest of the pattern after this part, or null if this is the end + */ + rest(): Pattern | null; + /** + * true if the pattern represents a //unc/path/ on windows + */ + isUNC(): boolean; + /** + * True if the pattern starts with a drive letter on Windows + */ + isDrive(): boolean; + /** + * True if the pattern is rooted on an absolute path + */ + isAbsolute(): boolean; + /** + * consume the root of the pattern, and return it + */ + root(): string; + /** + * Check to see if the current globstar pattern is allowed to follow + * a symbolic link. 
+ */ + checkFollowGlobstar(): boolean; + /** + * Mark that the current globstar pattern is following a symbolic link + */ + markFollowGlobstar(): boolean; +} +//# sourceMappingURL=pattern.d.ts.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/pattern.d.ts.map b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/pattern.d.ts.map new file mode 100644 index 0000000000000000000000000000000000000000..cdf322346317d8a12efa4c8fa613b693d2bf8bbe --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/pattern.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"pattern.d.ts","sourceRoot":"","sources":["../../src/pattern.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,QAAQ,EAAE,MAAM,WAAW,CAAA;AACpC,MAAM,MAAM,SAAS,GAAG,MAAM,GAAG,MAAM,GAAG,OAAO,QAAQ,CAAA;AAGzD,MAAM,MAAM,WAAW,GAAG,CAAC,CAAC,EAAE,SAAS,EAAE,GAAG,IAAI,EAAE,SAAS,EAAE,CAAC,CAAA;AAC9D,MAAM,MAAM,cAAc,GAAG;IAC3B,EAAE,EAAE,EAAE;IACN,EAAE,EAAE,EAAE;IACN,EAAE,EAAE,MAAM;IACV,EAAE,EAAE,MAAM;IACV,GAAG,IAAI,EAAE,SAAS,EAAE;CACrB,CAAA;AACD,MAAM,MAAM,gBAAgB,GAAG,CAAC,EAAE,EAAE,MAAM,EAAE,GAAG,IAAI,EAAE,SAAS,EAAE,CAAC,CAAA;AACjE,MAAM,MAAM,mBAAmB,GAAG,CAAC,EAAE,EAAE,EAAE,EAAE,GAAG,IAAI,EAAE,SAAS,EAAE,CAAC,CAAA;AAChE,MAAM,MAAM,QAAQ,GAAG,CAAC,CAAC,EAAE,MAAM,EAAE,GAAG,IAAI,EAAE,MAAM,EAAE,CAAC,CAAA;AAMrD;;;GAGG;AACH,qBAAa,OAAO;;IAIlB,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAA;gBAUrB,WAAW,EAAE,SAAS,EAAE,EACxB,QAAQ,EAAE,MAAM,EAAE,EAClB,KAAK,EAAE,MAAM,EACb,QAAQ,EAAE,MAAM,CAAC,QAAQ;IA6D3B;;OAEG;IACH,OAAO,IAAI,SAAS;IAIpB;;OAEG;IACH,QAAQ,IAAI,OAAO;IAGnB;;OAEG;IACH,UAAU,IAAI,OAAO;IAGrB;;OAEG;IACH,QAAQ,IAAI,OAAO;IAInB;;OAEG;IACH,UAAU,IAAI,MAAM;IAUpB;;OAEG;IACH,OAAO,IAAI,OAAO;IAIlB;;OAEG;IACH,IAAI,IAAI,OAAO,GAAG,IAAI;IAetB;;OAEG;IACH,KAAK,IAAI,OAAO;IAoBhB;;OAEG;IACH,OAAO,IAAI,OAAO;IAelB;;OAEG;IACH,UAAU,IAAI,OAAO;IAUrB;;OAEG;IACH,IAAI,IAAI,MAAM;IASd;;;OAGG;IACH,mBAAmB,IAAI,OAAO;IAQ9B;;OAEG;IACH,kBAAkB,IAAI,OAAO;CAM9B"} \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/pattern.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/pattern.js new file mode 100644 index 0000000000000000000000000000000000000000..b41defa10c6a3acd347a2b8464ef28af970dd741 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/pattern.js @@ -0,0 +1,215 @@ +// this is just a very light wrapper around 2 arrays with an offset index +import { GLOBSTAR } from 'minimatch'; +const isPatternList = (pl) => pl.length >= 1; +const isGlobList = (gl) => gl.length >= 1; +/** + * An immutable-ish view on an array of glob parts and their parsed + * results + */ +export class Pattern { + #patternList; + #globList; + #index; + length; + #platform; + #rest; + #globString; + #isDrive; + #isUNC; + #isAbsolute; + #followGlobstar = true; + constructor(patternList, globList, index, platform) { + if (!isPatternList(patternList)) { + throw new TypeError('empty pattern list'); + } + if (!isGlobList(globList)) { + throw new TypeError('empty glob list'); + } + if (globList.length !== patternList.length) { + throw new TypeError('mismatched pattern list and glob list lengths'); + } + this.length = patternList.length; + if (index < 0 || index >= this.length) { + throw new TypeError('index out of range'); + } + this.#patternList = patternList; + this.#globList = globList; + this.#index = index; + this.#platform = platform; + // 
normalize root entries of absolute patterns on initial creation. + if (this.#index === 0) { + // c: => ['c:/'] + // C:/ => ['C:/'] + // C:/x => ['C:/', 'x'] + // //host/share => ['//host/share/'] + // //host/share/ => ['//host/share/'] + // //host/share/x => ['//host/share/', 'x'] + // /etc => ['/', 'etc'] + // / => ['/'] + if (this.isUNC()) { + // '' / '' / 'host' / 'share' + const [p0, p1, p2, p3, ...prest] = this.#patternList; + const [g0, g1, g2, g3, ...grest] = this.#globList; + if (prest[0] === '') { + // ends in / + prest.shift(); + grest.shift(); + } + const p = [p0, p1, p2, p3, ''].join('/'); + const g = [g0, g1, g2, g3, ''].join('/'); + this.#patternList = [p, ...prest]; + this.#globList = [g, ...grest]; + this.length = this.#patternList.length; + } + else if (this.isDrive() || this.isAbsolute()) { + const [p1, ...prest] = this.#patternList; + const [g1, ...grest] = this.#globList; + if (prest[0] === '') { + // ends in / + prest.shift(); + grest.shift(); + } + const p = p1 + '/'; + const g = g1 + '/'; + this.#patternList = [p, ...prest]; + this.#globList = [g, ...grest]; + this.length = this.#patternList.length; + } + } + } + /** + * The first entry in the parsed list of patterns + */ + pattern() { + return this.#patternList[this.#index]; + } + /** + * true if pattern() returns a string + */ + isString() { + return typeof this.#patternList[this.#index] === 'string'; + } + /** + * true if pattern() returns GLOBSTAR + */ + isGlobstar() { + return this.#patternList[this.#index] === GLOBSTAR; + } + /** + * true if pattern() returns a regexp + */ + isRegExp() { + return this.#patternList[this.#index] instanceof RegExp; + } + /** + * The /-joined set of glob parts that make up this pattern + */ + globString() { + return (this.#globString = + this.#globString || + (this.#index === 0 ? + this.isAbsolute() ? + this.#globList[0] + this.#globList.slice(1).join('/') + : this.#globList.join('/') + : this.#globList.slice(this.#index).join('/'))); + } + /** + * true if there are more pattern parts after this one + */ + hasMore() { + return this.length > this.#index + 1; + } + /** + * The rest of the pattern after this part, or null if this is the end + */ + rest() { + if (this.#rest !== undefined) + return this.#rest; + if (!this.hasMore()) + return (this.#rest = null); + this.#rest = new Pattern(this.#patternList, this.#globList, this.#index + 1, this.#platform); + this.#rest.#isAbsolute = this.#isAbsolute; + this.#rest.#isUNC = this.#isUNC; + this.#rest.#isDrive = this.#isDrive; + return this.#rest; + } + /** + * true if the pattern represents a //unc/path/ on windows + */ + isUNC() { + const pl = this.#patternList; + return this.#isUNC !== undefined ? + this.#isUNC + : (this.#isUNC = + this.#platform === 'win32' && + this.#index === 0 && + pl[0] === '' && + pl[1] === '' && + typeof pl[2] === 'string' && + !!pl[2] && + typeof pl[3] === 'string' && + !!pl[3]); + } + // pattern like C:/... + // split = ['C:', ...] + // XXX: would be nice to handle patterns like `c:*` to test the cwd + // in c: for *, but I don't know of a way to even figure out what that + // cwd is without actually chdir'ing into it? + /** + * True if the pattern starts with a drive letter on Windows + */ + isDrive() { + const pl = this.#patternList; + return this.#isDrive !== undefined ? + this.#isDrive + : (this.#isDrive = + this.#platform === 'win32' && + this.#index === 0 && + this.length > 1 && + typeof pl[0] === 'string' && + /^[a-z]:$/i.test(pl[0])); + } + // pattern = '/' or '/...' or '/x/...'
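// An illustrative sketch (editorial, hedged, not part of the upstream file)
// tracing the constructor's root-normalization table above with concrete
// Pattern instances:
//
//   new Pattern(['', 'etc'], ['', 'etc'], 0, 'linux').root()
//   //  => '/'              parts normalized to ['/', 'etc']
//   new Pattern(['C:', 'x'], ['C:', 'x'], 0, 'win32').root()
//   //  => 'C:/'            isDrive() is true, so 'C:' gains a trailing '/'
//   new Pattern(
//     ['', '', 'host', 'share', 'x'],
//     ['', '', 'host', 'share', 'x'],
//     0, 'win32'
//   ).root()
//   //  => '//host/share/'  isUNC() collapses the four leading parts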
+ // split = ['', ''] or ['', ...] or ['', 'x', ...] + // Drive and UNC both considered absolute on windows + /** + * True if the pattern is rooted on an absolute path + */ + isAbsolute() { + const pl = this.#patternList; + return this.#isAbsolute !== undefined ? + this.#isAbsolute + : (this.#isAbsolute = + (pl[0] === '' && pl.length > 1) || + this.isDrive() || + this.isUNC()); + } + /** + * consume the root of the pattern, and return it + */ + root() { + const p = this.#patternList[0]; + return (typeof p === 'string' && this.isAbsolute() && this.#index === 0) ? + p + : ''; + } + /** + * Check to see if the current globstar pattern is allowed to follow + * a symbolic link. + */ + checkFollowGlobstar() { + return !(this.#index === 0 || + !this.isGlobstar() || + !this.#followGlobstar); + } + /** + * Mark that the current globstar pattern is following a symbolic link + */ + markFollowGlobstar() { + if (this.#index === 0 || !this.isGlobstar() || !this.#followGlobstar) + return false; + this.#followGlobstar = false; + return true; + } +} +//# sourceMappingURL=pattern.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/pattern.js.map b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/pattern.js.map new file mode 100644 index 0000000000000000000000000000000000000000..566a306ad1bf400413ab69cf023c067ffc0bbe08 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/pattern.js.map @@ -0,0 +1 @@ +{"version":3,"file":"pattern.js","sourceRoot":"","sources":["../../src/pattern.ts"],"names":[],"mappings":"AAAA,yEAAyE;AAEzE,OAAO,EAAE,QAAQ,EAAE,MAAM,WAAW,CAAA;AAgBpC,MAAM,aAAa,GAAG,CAAC,EAAe,EAAqB,EAAE,CAC3D,EAAE,CAAC,MAAM,IAAI,CAAC,CAAA;AAChB,MAAM,UAAU,GAAG,CAAC,EAAY,EAAkB,EAAE,CAAC,EAAE,CAAC,MAAM,IAAI,CAAC,CAAA;AAEnE;;;GAGG;AACH,MAAM,OAAO,OAAO;IACT,YAAY,CAAa;IACzB,SAAS,CAAU;IACnB,MAAM,CAAQ;IACd,MAAM,CAAQ;IACd,SAAS,CAAiB;IACnC,KAAK,CAAiB;IACtB,WAAW,CAAS;IACpB,QAAQ,CAAU;IAClB,MAAM,CAAU;IAChB,WAAW,CAAU;IACrB,eAAe,GAAY,IAAI,CAAA;IAE/B,YACE,WAAwB,EACxB,QAAkB,EAClB,KAAa,EACb,QAAyB;QAEzB,IAAI,CAAC,aAAa,CAAC,WAAW,CAAC,EAAE,CAAC;YAChC,MAAM,IAAI,SAAS,CAAC,oBAAoB,CAAC,CAAA;QAC3C,CAAC;QACD,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE,CAAC;YAC1B,MAAM,IAAI,SAAS,CAAC,iBAAiB,CAAC,CAAA;QACxC,CAAC;QACD,IAAI,QAAQ,CAAC,MAAM,KAAK,WAAW,CAAC,MAAM,EAAE,CAAC;YAC3C,MAAM,IAAI,SAAS,CAAC,+CAA+C,CAAC,CAAA;QACtE,CAAC;QACD,IAAI,CAAC,MAAM,GAAG,WAAW,CAAC,MAAM,CAAA;QAChC,IAAI,KAAK,GAAG,CAAC,IAAI,KAAK,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC;YACtC,MAAM,IAAI,SAAS,CAAC,oBAAoB,CAAC,CAAA;QAC3C,CAAC;QACD,IAAI,CAAC,YAAY,GAAG,WAAW,CAAA;QAC/B,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAA;QACzB,IAAI,CAAC,MAAM,GAAG,KAAK,CAAA;QACnB,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAA;QAEzB,mEAAmE;QACnE,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YACtB,gBAAgB;YAChB,iBAAiB;YACjB,uBAAuB;YACvB,oCAAoC;YACpC,qCAAqC;YACrC,2CAA2C;YAC3C,uBAAuB;YACvB,aAAa;YACb,IAAI,IAAI,CAAC,KAAK,EAAE,EAAE,CAAC;gBACjB,6BAA6B;gBAC7B,MAAM,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,GAAG,KAAK,CAAC,GAAG,IAAI,CAAC,YAAY,CAAA;gBACpD,MAAM,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,GAAG,KAAK,CAAC,GAAG,IAAI,CAAC,SAAS,CAAA;gBACjD,IAAI,KAAK,CAAC,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC;oBACpB,YAAY;oBACZ,KAAK,CAAC,KAAK,EAAE,CAAA;oBACb,KAAK,CAAC,KAAK,EAAE,CAAA;gBACf,CAAC;gBACD,MAAM,CAAC,GAAG,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;gBACxC,MAAM,CAAC,GAAG,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;gBACxC,IAAI,CAAC,YAAY
,GAAG,CAAC,CAAC,EAAE,GAAG,KAAK,CAAC,CAAA;gBACjC,IAAI,CAAC,SAAS,GAAG,CAAC,CAAC,EAAE,GAAG,KAAK,CAAC,CAAA;gBAC9B,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,YAAY,CAAC,MAAM,CAAA;YACxC,CAAC;iBAAM,IAAI,IAAI,CAAC,OAAO,EAAE,IAAI,IAAI,CAAC,UAAU,EAAE,EAAE,CAAC;gBAC/C,MAAM,CAAC,EAAE,EAAE,GAAG,KAAK,CAAC,GAAG,IAAI,CAAC,YAAY,CAAA;gBACxC,MAAM,CAAC,EAAE,EAAE,GAAG,KAAK,CAAC,GAAG,IAAI,CAAC,SAAS,CAAA;gBACrC,IAAI,KAAK,CAAC,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC;oBACpB,YAAY;oBACZ,KAAK,CAAC,KAAK,EAAE,CAAA;oBACb,KAAK,CAAC,KAAK,EAAE,CAAA;gBACf,CAAC;gBACD,MAAM,CAAC,GAAI,EAAa,GAAG,GAAG,CAAA;gBAC9B,MAAM,CAAC,GAAG,EAAE,GAAG,GAAG,CAAA;gBAClB,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC,EAAE,GAAG,KAAK,CAAC,CAAA;gBACjC,IAAI,CAAC,SAAS,GAAG,CAAC,CAAC,EAAE,GAAG,KAAK,CAAC,CAAA;gBAC9B,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,YAAY,CAAC,MAAM,CAAA;YACxC,CAAC;QACH,CAAC;IACH,CAAC;IAED;;OAEG;IACH,OAAO;QACL,OAAO,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,MAAM,CAAc,CAAA;IACpD,CAAC;IAED;;OAEG;IACH,QAAQ;QACN,OAAO,OAAO,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,QAAQ,CAAA;IAC3D,CAAC;IACD;;OAEG;IACH,UAAU;QACR,OAAO,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,QAAQ,CAAA;IACpD,CAAC;IACD;;OAEG;IACH,QAAQ;QACN,OAAO,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,MAAM,CAAC,YAAY,MAAM,CAAA;IACzD,CAAC;IAED;;OAEG;IACH,UAAU;QACR,OAAO,CAAC,IAAI,CAAC,WAAW;YACtB,IAAI,CAAC,WAAW;gBAChB,CAAC,IAAI,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC;oBAClB,IAAI,CAAC,UAAU,EAAE,CAAC,CAAC;wBACjB,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC;wBACvD,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC;oBAC5B,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;IACnD,CAAC;IAED;;OAEG;IACH,OAAO;QACL,OAAO,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,GAAG,CAAC,CAAA;IACtC,CAAC;IAED;;OAEG;IACH,IAAI;QACF,IAAI,IAAI,CAAC,KAAK,KAAK,SAAS;YAAE,OAAO,IAAI,CAAC,KAAK,CAAA;QAC/C,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE;YAAE,OAAO,CAAC,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,CAAA;QAC/C,IAAI,CAAC,KAAK,GAAG,IAAI,OAAO,CACtB,IAAI,CAAC,YAAY,EACjB,IAAI,CAAC,SAAS,EACd,IAAI,CAAC,MAAM,GAAG,CAAC,EACf,IAAI,CAAC,SAAS,CACf,CAAA;QACD,IAAI,CAAC,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,CAAA;QACzC,IAAI,CAAC,KAAK,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QAC/B,IAAI,CAAC,KAAK,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAA;QACnC,OAAO,IAAI,CAAC,KAAK,CAAA;IACnB,CAAC;IAED;;OAEG;IACH,KAAK;QACH,MAAM,EAAE,GAAG,IAAI,CAAC,YAAY,CAAA;QAC5B,OAAO,IAAI,CAAC,MAAM,KAAK,SAAS,CAAC,CAAC;YAC9B,IAAI,CAAC,MAAM;YACb,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM;gBACV,IAAI,CAAC,SAAS,KAAK,OAAO;oBAC1B,IAAI,CAAC,MAAM,KAAK,CAAC;oBACjB,EAAE,CAAC,CAAC,CAAC,KAAK,EAAE;oBACZ,EAAE,CAAC,CAAC,CAAC,KAAK,EAAE;oBACZ,OAAO,EAAE,CAAC,CAAC,CAAC,KAAK,QAAQ;oBACzB,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;oBACP,OAAO,EAAE,CAAC,CAAC,CAAC,KAAK,QAAQ;oBACzB,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAA;IAChB,CAAC;IAED,sBAAsB;IACtB,sBAAsB;IACtB,mEAAmE;IACnE,sEAAsE;IACtE,6CAA6C;IAC7C;;OAEG;IACH,OAAO;QACL,MAAM,EAAE,GAAG,IAAI,CAAC,YAAY,CAAA;QAC5B,OAAO,IAAI,CAAC,QAAQ,KAAK,SAAS,CAAC,CAAC;YAChC,IAAI,CAAC,QAAQ;YACf,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ;gBACZ,IAAI,CAAC,SAAS,KAAK,OAAO;oBAC1B,IAAI,CAAC,MAAM,KAAK,CAAC;oBACjB,IAAI,CAAC,MAAM,GAAG,CAAC;oBACf,OAAO,EAAE,CAAC,CAAC,CAAC,KAAK,QAAQ;oBACzB,WAAW,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;IAChC,CAAC;IAED,sCAAsC;IACtC,kDAAkD;IAClD,oDAAoD;IACpD;;OAEG;IACH,UAAU;QACR,MAAM,EAAE,GAAG,IAAI,CAAC,YAAY,CAAA;QAC5B,OAAO,IAAI,CAAC,WAAW,KAAK,SAAS,CAAC,CAAC;YACnC,IAAI,CAAC,WAAW;YAClB,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW;gBACf,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,EAAE,IAAI,EAAE,CAAC,MAAM,GAAG,CAAC,CAAC;oBAC/B,IAAI,CAAC,OAAO,EAAE;oBACd,IAAI,CAAC,KAAK,EAAE,CAAC,CAAA;IACrB,CAAC;IAED;;OAEG;IACH,IAAI;QACF,MAAM,CAAC,GAAG,IAAI,CAAC,YAA
Y,CAAC,CAAC,CAAC,CAAA;QAC9B,OAAO,CACH,OAAO,CAAC,KAAK,QAAQ,IAAI,IAAI,CAAC,UAAU,EAAE,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,CAChE,CAAC,CAAC;YACD,CAAC;YACH,CAAC,CAAC,EAAE,CAAA;IACR,CAAC;IAED;;;OAGG;IACH,mBAAmB;QACjB,OAAO,CAAC,CACN,IAAI,CAAC,MAAM,KAAK,CAAC;YACjB,CAAC,IAAI,CAAC,UAAU,EAAE;YAClB,CAAC,IAAI,CAAC,eAAe,CACtB,CAAA;IACH,CAAC;IAED;;OAEG;IACH,kBAAkB;QAChB,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,IAAI,CAAC,eAAe;YAClE,OAAO,KAAK,CAAA;QACd,IAAI,CAAC,eAAe,GAAG,KAAK,CAAA;QAC5B,OAAO,IAAI,CAAA;IACb,CAAC;CACF","sourcesContent":["// this is just a very light wrapper around 2 arrays with an offset index\n\nimport { GLOBSTAR } from 'minimatch'\nexport type MMPattern = string | RegExp | typeof GLOBSTAR\n\n// an array of length >= 1\nexport type PatternList = [p: MMPattern, ...rest: MMPattern[]]\nexport type UNCPatternList = [\n p0: '',\n p1: '',\n p2: string,\n p3: string,\n ...rest: MMPattern[],\n]\nexport type DrivePatternList = [p0: string, ...rest: MMPattern[]]\nexport type AbsolutePatternList = [p0: '', ...rest: MMPattern[]]\nexport type GlobList = [p: string, ...rest: string[]]\n\nconst isPatternList = (pl: MMPattern[]): pl is PatternList =>\n pl.length >= 1\nconst isGlobList = (gl: string[]): gl is GlobList => gl.length >= 1\n\n/**\n * An immutable-ish view on an array of glob parts and their parsed\n * results\n */\nexport class Pattern {\n readonly #patternList: PatternList\n readonly #globList: GlobList\n readonly #index: number\n readonly length: number\n readonly #platform: NodeJS.Platform\n #rest?: Pattern | null\n #globString?: string\n #isDrive?: boolean\n #isUNC?: boolean\n #isAbsolute?: boolean\n #followGlobstar: boolean = true\n\n constructor(\n patternList: MMPattern[],\n globList: string[],\n index: number,\n platform: NodeJS.Platform,\n ) {\n if (!isPatternList(patternList)) {\n throw new TypeError('empty pattern list')\n }\n if (!isGlobList(globList)) {\n throw new TypeError('empty glob list')\n }\n if (globList.length !== patternList.length) {\n throw new TypeError('mismatched pattern list and glob list lengths')\n }\n this.length = patternList.length\n if (index < 0 || index >= this.length) {\n throw new TypeError('index out of range')\n }\n this.#patternList = patternList\n this.#globList = globList\n this.#index = index\n this.#platform = platform\n\n // normalize root entries of absolute patterns on initial creation.\n if (this.#index === 0) {\n // c: => ['c:/']\n // C:/ => ['C:/']\n // C:/x => ['C:/', 'x']\n // //host/share => ['//host/share/']\n // //host/share/ => ['//host/share/']\n // //host/share/x => ['//host/share/', 'x']\n // /etc => ['/', 'etc']\n // / => ['/']\n if (this.isUNC()) {\n // '' / '' / 'host' / 'share'\n const [p0, p1, p2, p3, ...prest] = this.#patternList\n const [g0, g1, g2, g3, ...grest] = this.#globList\n if (prest[0] === '') {\n // ends in /\n prest.shift()\n grest.shift()\n }\n const p = [p0, p1, p2, p3, ''].join('/')\n const g = [g0, g1, g2, g3, ''].join('/')\n this.#patternList = [p, ...prest]\n this.#globList = [g, ...grest]\n this.length = this.#patternList.length\n } else if (this.isDrive() || this.isAbsolute()) {\n const [p1, ...prest] = this.#patternList\n const [g1, ...grest] = this.#globList\n if (prest[0] === '') {\n // ends in /\n prest.shift()\n grest.shift()\n }\n const p = (p1 as string) + '/'\n const g = g1 + '/'\n this.#patternList = [p, ...prest]\n this.#globList = [g, ...grest]\n this.length = this.#patternList.length\n }\n }\n }\n\n /**\n * The first entry in the parsed list of patterns\n */\n pattern(): 
MMPattern {\n return this.#patternList[this.#index] as MMPattern\n }\n\n /**\n * true of if pattern() returns a string\n */\n isString(): boolean {\n return typeof this.#patternList[this.#index] === 'string'\n }\n /**\n * true of if pattern() returns GLOBSTAR\n */\n isGlobstar(): boolean {\n return this.#patternList[this.#index] === GLOBSTAR\n }\n /**\n * true if pattern() returns a regexp\n */\n isRegExp(): boolean {\n return this.#patternList[this.#index] instanceof RegExp\n }\n\n /**\n * The /-joined set of glob parts that make up this pattern\n */\n globString(): string {\n return (this.#globString =\n this.#globString ||\n (this.#index === 0 ?\n this.isAbsolute() ?\n this.#globList[0] + this.#globList.slice(1).join('/')\n : this.#globList.join('/')\n : this.#globList.slice(this.#index).join('/')))\n }\n\n /**\n * true if there are more pattern parts after this one\n */\n hasMore(): boolean {\n return this.length > this.#index + 1\n }\n\n /**\n * The rest of the pattern after this part, or null if this is the end\n */\n rest(): Pattern | null {\n if (this.#rest !== undefined) return this.#rest\n if (!this.hasMore()) return (this.#rest = null)\n this.#rest = new Pattern(\n this.#patternList,\n this.#globList,\n this.#index + 1,\n this.#platform,\n )\n this.#rest.#isAbsolute = this.#isAbsolute\n this.#rest.#isUNC = this.#isUNC\n this.#rest.#isDrive = this.#isDrive\n return this.#rest\n }\n\n /**\n * true if the pattern represents a //unc/path/ on windows\n */\n isUNC(): boolean {\n const pl = this.#patternList\n return this.#isUNC !== undefined ?\n this.#isUNC\n : (this.#isUNC =\n this.#platform === 'win32' &&\n this.#index === 0 &&\n pl[0] === '' &&\n pl[1] === '' &&\n typeof pl[2] === 'string' &&\n !!pl[2] &&\n typeof pl[3] === 'string' &&\n !!pl[3])\n }\n\n // pattern like C:/...\n // split = ['C:', ...]\n // XXX: would be nice to handle patterns like `c:*` to test the cwd\n // in c: for *, but I don't know of a way to even figure out what that\n // cwd is without actually chdir'ing into it?\n /**\n * True if the pattern starts with a drive letter on Windows\n */\n isDrive(): boolean {\n const pl = this.#patternList\n return this.#isDrive !== undefined ?\n this.#isDrive\n : (this.#isDrive =\n this.#platform === 'win32' &&\n this.#index === 0 &&\n this.length > 1 &&\n typeof pl[0] === 'string' &&\n /^[a-z]:$/i.test(pl[0]))\n }\n\n // pattern = '/' or '/...' or '/x/...'\n // split = ['', ''] or ['', ...] 
or ['', 'x', ...]\n // Drive and UNC both considered absolute on windows\n /**\n * True if the pattern is rooted on an absolute path\n */\n isAbsolute(): boolean {\n const pl = this.#patternList\n return this.#isAbsolute !== undefined ?\n this.#isAbsolute\n : (this.#isAbsolute =\n (pl[0] === '' && pl.length > 1) ||\n this.isDrive() ||\n this.isUNC())\n }\n\n /**\n * consume the root of the pattern, and return it\n */\n root(): string {\n const p = this.#patternList[0]\n return (\n typeof p === 'string' && this.isAbsolute() && this.#index === 0\n ) ?\n p\n : ''\n }\n\n /**\n * Check to see if the current globstar pattern is allowed to follow\n * a symbolic link.\n */\n checkFollowGlobstar(): boolean {\n return !(\n this.#index === 0 ||\n !this.isGlobstar() ||\n !this.#followGlobstar\n )\n }\n\n /**\n * Mark that the current globstar pattern is following a symbolic link\n */\n markFollowGlobstar(): boolean {\n if (this.#index === 0 || !this.isGlobstar() || !this.#followGlobstar)\n return false\n this.#followGlobstar = false\n return true\n }\n}\n"]} \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/processor.d.ts b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/processor.d.ts new file mode 100644 index 0000000000000000000000000000000000000000..ccedfbf2820f7d51167574666a80785ca1b91b07 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/processor.d.ts @@ -0,0 +1,59 @@ +import { MMRegExp } from 'minimatch'; +import { Path } from 'path-scurry'; +import { Pattern } from './pattern.js'; +import { GlobWalkerOpts } from './walker.js'; +/** + * A cache of which patterns have been processed for a given Path + */ +export declare class HasWalkedCache { + store: Map>; + constructor(store?: Map>); + copy(): HasWalkedCache; + hasWalked(target: Path, pattern: Pattern): boolean | undefined; + storeWalked(target: Path, pattern: Pattern): void; +} +/** + * A record of which paths have been matched in a given walk step, + * and whether they only are considered a match if they are a directory, + * and whether their absolute or relative path should be returned. + */ +export declare class MatchRecord { + store: Map; + add(target: Path, absolute: boolean, ifDir: boolean): void; + entries(): [Path, boolean, boolean][]; +} +/** + * A collection of patterns that must be processed in a subsequent step + * for a given path. + */ +export declare class SubWalks { + store: Map; + add(target: Path, pattern: Pattern): void; + get(target: Path): Pattern[]; + entries(): [Path, Pattern[]][]; + keys(): Path[]; +} +/** + * The class that processes patterns for a given path. + * + * Handles child entry filtering, and determining whether a path's + * directory contents must be read. 
+ */ +export declare class Processor { + hasWalkedCache: HasWalkedCache; + matches: MatchRecord; + subwalks: SubWalks; + patterns?: Pattern[]; + follow: boolean; + dot: boolean; + opts: GlobWalkerOpts; + constructor(opts: GlobWalkerOpts, hasWalkedCache?: HasWalkedCache); + processPatterns(target: Path, patterns: Pattern[]): this; + subwalkTargets(): Path[]; + child(): Processor; + filterEntries(parent: Path, entries: Path[]): Processor; + testGlobstar(e: Path, pattern: Pattern, rest: Pattern | null, absolute: boolean): void; + testRegExp(e: Path, p: MMRegExp, rest: Pattern | null, absolute: boolean): void; + testString(e: Path, p: string, rest: Pattern | null, absolute: boolean): void; +} +//# sourceMappingURL=processor.d.ts.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/processor.d.ts.map b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/processor.d.ts.map new file mode 100644 index 0000000000000000000000000000000000000000..aa266fee4a0544a0a8f7fddceb347d7d059643e0 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/processor.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"processor.d.ts","sourceRoot":"","sources":["../../src/processor.ts"],"names":[],"mappings":"AAEA,OAAO,EAAY,QAAQ,EAAE,MAAM,WAAW,CAAA;AAC9C,OAAO,EAAE,IAAI,EAAE,MAAM,aAAa,CAAA;AAClC,OAAO,EAAa,OAAO,EAAE,MAAM,cAAc,CAAA;AACjD,OAAO,EAAE,cAAc,EAAE,MAAM,aAAa,CAAA;AAE5C;;GAEG;AACH,qBAAa,cAAc;IACzB,KAAK,EAAE,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,MAAM,CAAC,CAAC,CAAA;gBACnB,KAAK,GAAE,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,MAAM,CAAC,CAAa;IAGvD,IAAI;IAGJ,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO;IAGxC,WAAW,CAAC,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO;CAM3C;AAED;;;;GAIG;AACH,qBAAa,WAAW;IACtB,KAAK,EAAE,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,CAAY;IACpC,GAAG,CAAC,MAAM,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,KAAK,EAAE,OAAO;IAMnD,OAAO,IAAI,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,EAAE;CAOtC;AAED;;;GAGG;AACH,qBAAa,QAAQ;IACnB,KAAK,EAAE,GAAG,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC,CAAY;IACvC,GAAG,CAAC,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO;IAWlC,GAAG,CAAC,MAAM,EAAE,IAAI,GAAG,OAAO,EAAE;IAS5B,OAAO,IAAI,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC,EAAE;IAG9B,IAAI,IAAI,IAAI,EAAE;CAGf;AAED;;;;;GAKG;AACH,qBAAa,SAAS;IACpB,cAAc,EAAE,cAAc,CAAA;IAC9B,OAAO,cAAoB;IAC3B,QAAQ,WAAiB;IACzB,QAAQ,CAAC,EAAE,OAAO,EAAE,CAAA;IACpB,MAAM,EAAE,OAAO,CAAA;IACf,GAAG,EAAE,OAAO,CAAA;IACZ,IAAI,EAAE,cAAc,CAAA;gBAER,IAAI,EAAE,cAAc,EAAE,cAAc,CAAC,EAAE,cAAc;IAQjE,eAAe,CAAC,MAAM,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE;IAmGjD,cAAc,IAAI,IAAI,EAAE;IAIxB,KAAK;IAQL,aAAa,CAAC,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,IAAI,EAAE,GAAG,SAAS;IAqBvD,YAAY,CACV,CAAC,EAAE,IAAI,EACP,OAAO,EAAE,OAAO,EAChB,IAAI,EAAE,OAAO,GAAG,IAAI,EACpB,QAAQ,EAAE,OAAO;IA8CnB,UAAU,CACR,CAAC,EAAE,IAAI,EACP,CAAC,EAAE,QAAQ,EACX,IAAI,EAAE,OAAO,GAAG,IAAI,EACpB,QAAQ,EAAE,OAAO;IAUnB,UAAU,CAAC,CAAC,EAAE,IAAI,EAAE,CAAC,EAAE,MAAM,EAAE,IAAI,EAAE,OAAO,GAAG,IAAI,EAAE,QAAQ,EAAE,OAAO;CASvE"} \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/processor.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/processor.js new file mode 100644 index 0000000000000000000000000000000000000000..f874892ffed0c4affb2e0c2c17895e9ebe2a9afd --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/processor.js @@ -0,0 +1,294 @@ +// synchronous utility for filtering entries and calculating subwalks +import { 
GLOBSTAR } from 'minimatch'; +/** + * A cache of which patterns have been processed for a given Path + */ +export class HasWalkedCache { + store; + constructor(store = new Map()) { + this.store = store; + } + copy() { + return new HasWalkedCache(new Map(this.store)); + } + hasWalked(target, pattern) { + return this.store.get(target.fullpath())?.has(pattern.globString()); + } + storeWalked(target, pattern) { + const fullpath = target.fullpath(); + const cached = this.store.get(fullpath); + if (cached) + cached.add(pattern.globString()); + else + this.store.set(fullpath, new Set([pattern.globString()])); + } +} +/** + * A record of which paths have been matched in a given walk step, + * and whether they only are considered a match if they are a directory, + * and whether their absolute or relative path should be returned. + */ +export class MatchRecord { + store = new Map(); + add(target, absolute, ifDir) { + const n = (absolute ? 2 : 0) | (ifDir ? 1 : 0); + const current = this.store.get(target); + this.store.set(target, current === undefined ? n : n & current); + } + // match, absolute, ifdir + entries() { + return [...this.store.entries()].map(([path, n]) => [ + path, + !!(n & 2), + !!(n & 1), + ]); + } +} +/** + * A collection of patterns that must be processed in a subsequent step + * for a given path. + */ +export class SubWalks { + store = new Map(); + add(target, pattern) { + if (!target.canReaddir()) { + return; + } + const subs = this.store.get(target); + if (subs) { + if (!subs.find(p => p.globString() === pattern.globString())) { + subs.push(pattern); + } + } + else + this.store.set(target, [pattern]); + } + get(target) { + const subs = this.store.get(target); + /* c8 ignore start */ + if (!subs) { + throw new Error('attempting to walk unknown path'); + } + /* c8 ignore stop */ + return subs; + } + entries() { + return this.keys().map(k => [k, this.store.get(k)]); + } + keys() { + return [...this.store.keys()].filter(t => t.canReaddir()); + } +} +/** + * The class that processes patterns for a given path. + * + * Handles child entry filtering, and determining whether a path's + * directory contents must be read. + */ +export class Processor { + hasWalkedCache; + matches = new MatchRecord(); + subwalks = new SubWalks(); + patterns; + follow; + dot; + opts; + constructor(opts, hasWalkedCache) { + this.opts = opts; + this.follow = !!opts.follow; + this.dot = !!opts.dot; + this.hasWalkedCache = + hasWalkedCache ? hasWalkedCache.copy() : new HasWalkedCache(); + } + processPatterns(target, patterns) { + this.patterns = patterns; + const processingSet = patterns.map(p => [target, p]); + // map of paths to the magic-starting subwalks they need to walk + // first item in patterns is the filter + for (let [t, pattern] of processingSet) { + this.hasWalkedCache.storeWalked(t, pattern); + const root = pattern.root(); + const absolute = pattern.isAbsolute() && this.opts.absolute !== false; + // start absolute patterns at root + if (root) { + t = t.resolve(root === '/' && this.opts.root !== undefined ? 
+ this.opts.root + : root); + const rest = pattern.rest(); + if (!rest) { + this.matches.add(t, true, false); + continue; + } + else { + pattern = rest; + } + } + if (t.isENOENT()) + continue; + let p; + let rest; + let changed = false; + while (typeof (p = pattern.pattern()) === 'string' && + (rest = pattern.rest())) { + const c = t.resolve(p); + t = c; + pattern = rest; + changed = true; + } + p = pattern.pattern(); + rest = pattern.rest(); + if (changed) { + if (this.hasWalkedCache.hasWalked(t, pattern)) + continue; + this.hasWalkedCache.storeWalked(t, pattern); + } + // now we have either a final string for a known entry, + // more strings for an unknown entry, + // or a pattern starting with magic, mounted on t. + if (typeof p === 'string') { + // must not be final entry, otherwise we would have + // concatenated it earlier. + const ifDir = p === '..' || p === '' || p === '.'; + this.matches.add(t.resolve(p), absolute, ifDir); + continue; + } + else if (p === GLOBSTAR) { + // if no rest, match and subwalk pattern + // if rest, process rest and subwalk pattern + // if it's a symlink, but we didn't get here by way of a + // globstar match (meaning it's the first time THIS globstar + // has traversed a symlink), then we follow it. Otherwise, stop. + if (!t.isSymbolicLink() || + this.follow || + pattern.checkFollowGlobstar()) { + this.subwalks.add(t, pattern); + } + const rp = rest?.pattern(); + const rrest = rest?.rest(); + if (!rest || ((rp === '' || rp === '.') && !rrest)) { + // only HAS to be a dir if it ends in **/ or **/. + // but ending in ** will match files as well. + this.matches.add(t, absolute, rp === '' || rp === '.'); + } + else { + if (rp === '..') { + // this would mean you're matching **/.. at the fs root, + // and no thanks, I'm not gonna test that specific case. + /* c8 ignore start */ + const tp = t.parent || t; + /* c8 ignore stop */ + if (!rrest) + this.matches.add(tp, absolute, true); + else if (!this.hasWalkedCache.hasWalked(tp, rrest)) { + this.subwalks.add(tp, rrest); + } + } + } + } + else if (p instanceof RegExp) { + this.subwalks.add(t, pattern); + } + } + return this; + } + subwalkTargets() { + return this.subwalks.keys(); + } + child() { + return new Processor(this.opts, this.hasWalkedCache); + } + // return a new Processor containing the subwalks for each + // child entry, and a set of matches, and + // a hasWalkedCache that's a copy of this one + // then we're going to call + filterEntries(parent, entries) { + const patterns = this.subwalks.get(parent); + // put matches and entry walks into the results processor + const results = this.child(); + for (const e of entries) { + for (const pattern of patterns) { + const absolute = pattern.isAbsolute(); + const p = pattern.pattern(); + const rest = pattern.rest(); + if (p === GLOBSTAR) { + results.testGlobstar(e, pattern, rest, absolute); + } + else if (p instanceof RegExp) { + results.testRegExp(e, p, rest, absolute); + } + else { + results.testString(e, p, rest, absolute); + } + } + } + return results; + } + testGlobstar(e, pattern, rest, absolute) { + if (this.dot || !e.name.startsWith('.')) { + if (!pattern.hasMore()) { + this.matches.add(e, absolute, false); + } + if (e.canReaddir()) { + // if we're in follow mode or it's not a symlink, just keep + // testing the same pattern. If there's more after the globstar, + // then this symlink consumes the globstar. If not, then we can + // follow at most ONE symlink along the way, so we mark it, which + // also checks to ensure that it wasn't already marked. 
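// A hedged illustration (editorial, not part of the upstream file) of the
// symlink-marking logic below, seen through this package's public glob()
// API rather than the internal walker:
//
//   import { glob } from 'glob'
//   // By default a `**` follows at most one symbolic link along any walk,
//   // and none at all when the `**` is the first pattern item, since
//   // checkFollowGlobstar() returns false at index 0:
//   await glob('src/**/*.js')
//   // With follow: true the globstar keeps traversing symlinked dirs:
//   await glob('src/**/*.js', { follow: true })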
+ if (this.follow || !e.isSymbolicLink()) { + this.subwalks.add(e, pattern); + } + else if (e.isSymbolicLink()) { + if (rest && pattern.checkFollowGlobstar()) { + this.subwalks.add(e, rest); + } + else if (pattern.markFollowGlobstar()) { + this.subwalks.add(e, pattern); + } + } + } + } + // if the NEXT thing matches this entry, then also add + // the rest. + if (rest) { + const rp = rest.pattern(); + if (typeof rp === 'string' && + // dots and empty were handled already + rp !== '..' && + rp !== '' && + rp !== '.') { + this.testString(e, rp, rest.rest(), absolute); + } + else if (rp === '..') { + /* c8 ignore start */ + const ep = e.parent || e; + /* c8 ignore stop */ + this.subwalks.add(ep, rest); + } + else if (rp instanceof RegExp) { + this.testRegExp(e, rp, rest.rest(), absolute); + } + } + } + testRegExp(e, p, rest, absolute) { + if (!p.test(e.name)) + return; + if (!rest) { + this.matches.add(e, absolute, false); + } + else { + this.subwalks.add(e, rest); + } + } + testString(e, p, rest, absolute) { + // should never happen? + if (!e.isNamed(p)) + return; + if (!rest) { + this.matches.add(e, absolute, false); + } + else { + this.subwalks.add(e, rest); + } + } +} +//# sourceMappingURL=processor.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/processor.js.map b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/processor.js.map new file mode 100644 index 0000000000000000000000000000000000000000..05a832420b8b2fa97eef4e7ab05b6612b5ec17e2 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/processor.js.map @@ -0,0 +1 @@ +{"version":3,"file":"processor.js","sourceRoot":"","sources":["../../src/processor.ts"],"names":[],"mappings":"AAAA,qEAAqE;AAErE,OAAO,EAAE,QAAQ,EAAY,MAAM,WAAW,CAAA;AAK9C;;GAEG;AACH,MAAM,OAAO,cAAc;IACzB,KAAK,CAA0B;IAC/B,YAAY,QAAkC,IAAI,GAAG,EAAE;QACrD,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;IACD,IAAI;QACF,OAAO,IAAI,cAAc,CAAC,IAAI,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAA;IAChD,CAAC;IACD,SAAS,CAAC,MAAY,EAAE,OAAgB;QACtC,OAAO,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC,EAAE,GAAG,CAAC,OAAO,CAAC,UAAU,EAAE,CAAC,CAAA;IACrE,CAAC;IACD,WAAW,CAAC,MAAY,EAAE,OAAgB;QACxC,MAAM,QAAQ,GAAG,MAAM,CAAC,QAAQ,EAAE,CAAA;QAClC,MAAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAA;QACvC,IAAI,MAAM;YAAE,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,UAAU,EAAE,CAAC,CAAA;;YACvC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,QAAQ,EAAE,IAAI,GAAG,CAAC,CAAC,OAAO,CAAC,UAAU,EAAE,CAAC,CAAC,CAAC,CAAA;IAChE,CAAC;CACF;AAED;;;;GAIG;AACH,MAAM,OAAO,WAAW;IACtB,KAAK,GAAsB,IAAI,GAAG,EAAE,CAAA;IACpC,GAAG,CAAC,MAAY,EAAE,QAAiB,EAAE,KAAc;QACjD,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;QAC9C,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,CAAA;QACtC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,EAAE,OAAO,KAAK,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,CAAA;IACjE,CAAC;IACD,yBAAyB;IACzB,OAAO;QACL,OAAO,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC;YAClD,IAAI;YACJ,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;YACT,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;SACV,CAAC,CAAA;IACJ,CAAC;CACF;AAED;;;GAGG;AACH,MAAM,OAAO,QAAQ;IACnB,KAAK,GAAyB,IAAI,GAAG,EAAE,CAAA;IACvC,GAAG,CAAC,MAAY,EAAE,OAAgB;QAChC,IAAI,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,CAAC;YACzB,OAAM;QACR,CAAC;QACD,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,CAAA;QACnC,IAAI,IAAI,EAA
E,CAAC;YACT,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,UAAU,EAAE,KAAK,OAAO,CAAC,UAAU,EAAE,CAAC,EAAE,CAAC;gBAC7D,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;YACpB,CAAC;QACH,CAAC;;YAAM,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,EAAE,CAAC,OAAO,CAAC,CAAC,CAAA;IAC1C,CAAC;IACD,GAAG,CAAC,MAAY;QACd,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,MAAM,CAAC,CAAA;QACnC,qBAAqB;QACrB,IAAI,CAAC,IAAI,EAAE,CAAC;YACV,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAA;QACpD,CAAC;QACD,oBAAoB;QACpB,OAAO,IAAI,CAAA;IACb,CAAC;IACD,OAAO;QACL,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAc,CAAC,CAAC,CAAA;IAClE,CAAC;IACD,IAAI;QACF,OAAO,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,UAAU,EAAE,CAAC,CAAA;IAC3D,CAAC;CACF;AAED;;;;;GAKG;AACH,MAAM,OAAO,SAAS;IACpB,cAAc,CAAgB;IAC9B,OAAO,GAAG,IAAI,WAAW,EAAE,CAAA;IAC3B,QAAQ,GAAG,IAAI,QAAQ,EAAE,CAAA;IACzB,QAAQ,CAAY;IACpB,MAAM,CAAS;IACf,GAAG,CAAS;IACZ,IAAI,CAAgB;IAEpB,YAAY,IAAoB,EAAE,cAA+B;QAC/D,IAAI,CAAC,IAAI,GAAG,IAAI,CAAA;QAChB,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,IAAI,CAAC,MAAM,CAAA;QAC3B,IAAI,CAAC,GAAG,GAAG,CAAC,CAAC,IAAI,CAAC,GAAG,CAAA;QACrB,IAAI,CAAC,cAAc;YACjB,cAAc,CAAC,CAAC,CAAC,cAAc,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,IAAI,cAAc,EAAE,CAAA;IACjE,CAAC;IAED,eAAe,CAAC,MAAY,EAAE,QAAmB;QAC/C,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;QACxB,MAAM,aAAa,GAAsB,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,CAAA;QAEvE,gEAAgE;QAChE,uCAAuC;QAEvC,KAAK,IAAI,CAAC,CAAC,EAAE,OAAO,CAAC,IAAI,aAAa,EAAE,CAAC;YACvC,IAAI,CAAC,cAAc,CAAC,WAAW,CAAC,CAAC,EAAE,OAAO,CAAC,CAAA;YAE3C,MAAM,IAAI,GAAG,OAAO,CAAC,IAAI,EAAE,CAAA;YAC3B,MAAM,QAAQ,GAAG,OAAO,CAAC,UAAU,EAAE,IAAI,IAAI,CAAC,IAAI,CAAC,QAAQ,KAAK,KAAK,CAAA;YAErE,kCAAkC;YAClC,IAAI,IAAI,EAAE,CAAC;gBACT,CAAC,GAAG,CAAC,CAAC,OAAO,CACX,IAAI,KAAK,GAAG,IAAI,IAAI,CAAC,IAAI,CAAC,IAAI,KAAK,SAAS,CAAC,CAAC;oBAC5C,IAAI,CAAC,IAAI,CAAC,IAAI;oBAChB,CAAC,CAAC,IAAI,CACP,CAAA;gBACD,MAAM,IAAI,GAAG,OAAO,CAAC,IAAI,EAAE,CAAA;gBAC3B,IAAI,CAAC,IAAI,EAAE,CAAC;oBACV,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,EAAE,KAAK,CAAC,CAAA;oBAChC,SAAQ;gBACV,CAAC;qBAAM,CAAC;oBACN,OAAO,GAAG,IAAI,CAAA;gBAChB,CAAC;YACH,CAAC;YAED,IAAI,CAAC,CAAC,QAAQ,EAAE;gBAAE,SAAQ;YAE1B,IAAI,CAAY,CAAA;YAChB,IAAI,IAAoB,CAAA;YACxB,IAAI,OAAO,GAAG,KAAK,CAAA;YACnB,OACE,OAAO,CAAC,CAAC,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC,KAAK,QAAQ;gBAC3C,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,EAAE,CAAC,EACvB,CAAC;gBACD,MAAM,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAA;gBACtB,CAAC,GAAG,CAAC,CAAA;gBACL,OAAO,GAAG,IAAI,CAAA;gBACd,OAAO,GAAG,IAAI,CAAA;YAChB,CAAC;YACD,CAAC,GAAG,OAAO,CAAC,OAAO,EAAE,CAAA;YACrB,IAAI,GAAG,OAAO,CAAC,IAAI,EAAE,CAAA;YACrB,IAAI,OAAO,EAAE,CAAC;gBACZ,IAAI,IAAI,CAAC,cAAc,CAAC,SAAS,CAAC,CAAC,EAAE,OAAO,CAAC;oBAAE,SAAQ;gBACvD,IAAI,CAAC,cAAc,CAAC,WAAW,CAAC,CAAC,EAAE,OAAO,CAAC,CAAA;YAC7C,CAAC;YAED,uDAAuD;YACvD,qCAAqC;YACrC,kDAAkD;YAClD,IAAI,OAAO,CAAC,KAAK,QAAQ,EAAE,CAAC;gBAC1B,mDAAmD;gBACnD,2BAA2B;gBAC3B,MAAM,KAAK,GAAG,CAAC,KAAK,IAAI,IAAI,CAAC,KAAK,EAAE,IAAI,CAAC,KAAK,GAAG,CAAA;gBACjD,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAA;gBAC/C,SAAQ;YACV,CAAC;iBAAM,IAAI,CAAC,KAAK,QAAQ,EAAE,CAAC;gBAC1B,wCAAwC;gBACxC,4CAA4C;gBAC5C,wDAAwD;gBACxD,4DAA4D;gBAC5D,gEAAgE;gBAChE,IACE,CAAC,CAAC,CAAC,cAAc,EAAE;oBACnB,IAAI,CAAC,MAAM;oBACX,OAAO,CAAC,mBAAmB,EAAE,EAC7B,CAAC;oBACD,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,CAAC,CAAA;gBAC/B,CAAC;gBACD,MAAM,EAAE,GAAG,IAAI,EAAE,OAAO,EAAE,CAAA;gBAC1B,MAAM,KAAK,GAAG,IAAI,EAAE,IAAI,EAAE,CAAA;gBAC1B,IAAI,CAAC,IAAI,IAAI,CAAC,CAAC,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,GAAG,CAAC,IAAI,CAAC,KAAK,
CAAC,EAAE,CAAC;oBACnD,iDAAiD;oBACjD,6CAA6C;oBAC7C,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,EAAE,QAAQ,EAAE,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,GAAG,CAAC,CAAA;gBACxD,CAAC;qBAAM,CAAC;oBACN,IAAI,EAAE,KAAK,IAAI,EAAE,CAAC;wBAChB,wDAAwD;wBACxD,wDAAwD;wBACxD,qBAAqB;wBACrB,MAAM,EAAE,GAAG,CAAC,CAAC,MAAM,IAAI,CAAC,CAAA;wBACxB,oBAAoB;wBACpB,IAAI,CAAC,KAAK;4BAAE,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE,EAAE,QAAQ,EAAE,IAAI,CAAC,CAAA;6BAC3C,IAAI,CAAC,IAAI,CAAC,cAAc,CAAC,SAAS,CAAC,EAAE,EAAE,KAAK,CAAC,EAAE,CAAC;4BACnD,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE,EAAE,KAAK,CAAC,CAAA;wBAC9B,CAAC;oBACH,CAAC;gBACH,CAAC;YACH,CAAC;iBAAM,IAAI,CAAC,YAAY,MAAM,EAAE,CAAC;gBAC/B,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,CAAC,CAAA;YAC/B,CAAC;QACH,CAAC;QAED,OAAO,IAAI,CAAA;IACb,CAAC;IAED,cAAc;QACZ,OAAO,IAAI,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAA;IAC7B,CAAC;IAED,KAAK;QACH,OAAO,IAAI,SAAS,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,cAAc,CAAC,CAAA;IACtD,CAAC;IAED,0DAA0D;IAC1D,yCAAyC;IACzC,6CAA6C;IAC7C,2BAA2B;IAC3B,aAAa,CAAC,MAAY,EAAE,OAAe;QACzC,MAAM,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,MAAM,CAAC,CAAA;QAC1C,yDAAyD;QACzD,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,EAAE,CAAA;QAC5B,KAAK,MAAM,CAAC,IAAI,OAAO,EAAE,CAAC;YACxB,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE,CAAC;gBAC/B,MAAM,QAAQ,GAAG,OAAO,CAAC,UAAU,EAAE,CAAA;gBACrC,MAAM,CAAC,GAAG,OAAO,CAAC,OAAO,EAAE,CAAA;gBAC3B,MAAM,IAAI,GAAG,OAAO,CAAC,IAAI,EAAE,CAAA;gBAC3B,IAAI,CAAC,KAAK,QAAQ,EAAE,CAAC;oBACnB,OAAO,CAAC,YAAY,CAAC,CAAC,EAAE,OAAO,EAAE,IAAI,EAAE,QAAQ,CAAC,CAAA;gBAClD,CAAC;qBAAM,IAAI,CAAC,YAAY,MAAM,EAAE,CAAC;oBAC/B,OAAO,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,EAAE,QAAQ,CAAC,CAAA;gBAC1C,CAAC;qBAAM,CAAC;oBACN,OAAO,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,EAAE,QAAQ,CAAC,CAAA;gBAC1C,CAAC;YACH,CAAC;QACH,CAAC;QACD,OAAO,OAAO,CAAA;IAChB,CAAC;IAED,YAAY,CACV,CAAO,EACP,OAAgB,EAChB,IAAoB,EACpB,QAAiB;QAEjB,IAAI,IAAI,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE,CAAC;YACxC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE,EAAE,CAAC;gBACvB,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAA;YACtC,CAAC;YACD,IAAI,CAAC,CAAC,UAAU,EAAE,EAAE,CAAC;gBACnB,2DAA2D;gBAC3D,gEAAgE;gBAChE,+DAA+D;gBAC/D,iEAAiE;gBACjE,uDAAuD;gBACvD,IAAI,IAAI,CAAC,MAAM,IAAI,CAAC,CAAC,CAAC,cAAc,EAAE,EAAE,CAAC;oBACvC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,CAAC,CAAA;gBAC/B,CAAC;qBAAM,IAAI,CAAC,CAAC,cAAc,EAAE,EAAE,CAAC;oBAC9B,IAAI,IAAI,IAAI,OAAO,CAAC,mBAAmB,EAAE,EAAE,CAAC;wBAC1C,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,CAAA;oBAC5B,CAAC;yBAAM,IAAI,OAAO,CAAC,kBAAkB,EAAE,EAAE,CAAC;wBACxC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,CAAC,CAAA;oBAC/B,CAAC;gBACH,CAAC;YACH,CAAC;QACH,CAAC;QACD,sDAAsD;QACtD,YAAY;QACZ,IAAI,IAAI,EAAE,CAAC;YACT,MAAM,EAAE,GAAG,IAAI,CAAC,OAAO,EAAE,CAAA;YACzB,IACE,OAAO,EAAE,KAAK,QAAQ;gBACtB,sCAAsC;gBACtC,EAAE,KAAK,IAAI;gBACX,EAAE,KAAK,EAAE;gBACT,EAAE,KAAK,GAAG,EACV,CAAC;gBACD,IAAI,CAAC,UAAU,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,IAAI,EAAE,EAAE,QAAQ,CAAC,CAAA;YAC/C,CAAC;iBAAM,IAAI,EAAE,KAAK,IAAI,EAAE,CAAC;gBACvB,qBAAqB;gBACrB,MAAM,EAAE,GAAG,CAAC,CAAC,MAAM,IAAI,CAAC,CAAA;gBACxB,oBAAoB;gBACpB,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE,EAAE,IAAI,CAAC,CAAA;YAC7B,CAAC;iBAAM,IAAI,EAAE,YAAY,MAAM,EAAE,CAAC;gBAChC,IAAI,CAAC,UAAU,CAAC,CAAC,EAAE,EAAE,EAAE,IAAI,CAAC,IAAI,EAAE,EAAE,QAAQ,CAAC,CAAA;YAC/C,CAAC;QACH,CAAC;IACH,CAAC;IAED,UAAU,CACR,CAAO,EACP,CAAW,EACX,IAAoB,EACpB,QAAiB;QAEjB,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC;YAAE,OAAM;QAC3B,IAAI,CAAC,IAAI,EAAE,CAAC;YACV,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAA;QACtC,CAAC;aAAM,CAAC;YACN,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,CAAA;QAC5B,CAAC;IACH,CAAC;IAED,UAAU,CAAC,CAAO,EAAE,CAAS,EAA
E,IAAoB,EAAE,QAAiB;QACpE,uBAAuB;QACvB,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC;YAAE,OAAM;QACzB,IAAI,CAAC,IAAI,EAAE,CAAC;YACV,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAA;QACtC,CAAC;aAAM,CAAC;YACN,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,CAAA;QAC5B,CAAC;IACH,CAAC;CACF","sourcesContent":["// synchronous utility for filtering entries and calculating subwalks\n\nimport { GLOBSTAR, MMRegExp } from 'minimatch'\nimport { Path } from 'path-scurry'\nimport { MMPattern, Pattern } from './pattern.js'\nimport { GlobWalkerOpts } from './walker.js'\n\n/**\n * A cache of which patterns have been processed for a given Path\n */\nexport class HasWalkedCache {\n store: Map>\n constructor(store: Map> = new Map()) {\n this.store = store\n }\n copy() {\n return new HasWalkedCache(new Map(this.store))\n }\n hasWalked(target: Path, pattern: Pattern) {\n return this.store.get(target.fullpath())?.has(pattern.globString())\n }\n storeWalked(target: Path, pattern: Pattern) {\n const fullpath = target.fullpath()\n const cached = this.store.get(fullpath)\n if (cached) cached.add(pattern.globString())\n else this.store.set(fullpath, new Set([pattern.globString()]))\n }\n}\n\n/**\n * A record of which paths have been matched in a given walk step,\n * and whether they only are considered a match if they are a directory,\n * and whether their absolute or relative path should be returned.\n */\nexport class MatchRecord {\n store: Map = new Map()\n add(target: Path, absolute: boolean, ifDir: boolean) {\n const n = (absolute ? 2 : 0) | (ifDir ? 1 : 0)\n const current = this.store.get(target)\n this.store.set(target, current === undefined ? n : n & current)\n }\n // match, absolute, ifdir\n entries(): [Path, boolean, boolean][] {\n return [...this.store.entries()].map(([path, n]) => [\n path,\n !!(n & 2),\n !!(n & 1),\n ])\n }\n}\n\n/**\n * A collection of patterns that must be processed in a subsequent step\n * for a given path.\n */\nexport class SubWalks {\n store: Map = new Map()\n add(target: Path, pattern: Pattern) {\n if (!target.canReaddir()) {\n return\n }\n const subs = this.store.get(target)\n if (subs) {\n if (!subs.find(p => p.globString() === pattern.globString())) {\n subs.push(pattern)\n }\n } else this.store.set(target, [pattern])\n }\n get(target: Path): Pattern[] {\n const subs = this.store.get(target)\n /* c8 ignore start */\n if (!subs) {\n throw new Error('attempting to walk unknown path')\n }\n /* c8 ignore stop */\n return subs\n }\n entries(): [Path, Pattern[]][] {\n return this.keys().map(k => [k, this.store.get(k) as Pattern[]])\n }\n keys(): Path[] {\n return [...this.store.keys()].filter(t => t.canReaddir())\n }\n}\n\n/**\n * The class that processes patterns for a given path.\n *\n * Handles child entry filtering, and determining whether a path's\n * directory contents must be read.\n */\nexport class Processor {\n hasWalkedCache: HasWalkedCache\n matches = new MatchRecord()\n subwalks = new SubWalks()\n patterns?: Pattern[]\n follow: boolean\n dot: boolean\n opts: GlobWalkerOpts\n\n constructor(opts: GlobWalkerOpts, hasWalkedCache?: HasWalkedCache) {\n this.opts = opts\n this.follow = !!opts.follow\n this.dot = !!opts.dot\n this.hasWalkedCache =\n hasWalkedCache ? 
hasWalkedCache.copy() : new HasWalkedCache()\n }\n\n processPatterns(target: Path, patterns: Pattern[]) {\n this.patterns = patterns\n const processingSet: [Path, Pattern][] = patterns.map(p => [target, p])\n\n // map of paths to the magic-starting subwalks they need to walk\n // first item in patterns is the filter\n\n for (let [t, pattern] of processingSet) {\n this.hasWalkedCache.storeWalked(t, pattern)\n\n const root = pattern.root()\n const absolute = pattern.isAbsolute() && this.opts.absolute !== false\n\n // start absolute patterns at root\n if (root) {\n t = t.resolve(\n root === '/' && this.opts.root !== undefined ?\n this.opts.root\n : root,\n )\n const rest = pattern.rest()\n if (!rest) {\n this.matches.add(t, true, false)\n continue\n } else {\n pattern = rest\n }\n }\n\n if (t.isENOENT()) continue\n\n let p: MMPattern\n let rest: Pattern | null\n let changed = false\n while (\n typeof (p = pattern.pattern()) === 'string' &&\n (rest = pattern.rest())\n ) {\n const c = t.resolve(p)\n t = c\n pattern = rest\n changed = true\n }\n p = pattern.pattern()\n rest = pattern.rest()\n if (changed) {\n if (this.hasWalkedCache.hasWalked(t, pattern)) continue\n this.hasWalkedCache.storeWalked(t, pattern)\n }\n\n // now we have either a final string for a known entry,\n // more strings for an unknown entry,\n // or a pattern starting with magic, mounted on t.\n if (typeof p === 'string') {\n // must not be final entry, otherwise we would have\n // concatenated it earlier.\n const ifDir = p === '..' || p === '' || p === '.'\n this.matches.add(t.resolve(p), absolute, ifDir)\n continue\n } else if (p === GLOBSTAR) {\n // if no rest, match and subwalk pattern\n // if rest, process rest and subwalk pattern\n // if it's a symlink, but we didn't get here by way of a\n // globstar match (meaning it's the first time THIS globstar\n // has traversed a symlink), then we follow it. Otherwise, stop.\n if (\n !t.isSymbolicLink() ||\n this.follow ||\n pattern.checkFollowGlobstar()\n ) {\n this.subwalks.add(t, pattern)\n }\n const rp = rest?.pattern()\n const rrest = rest?.rest()\n if (!rest || ((rp === '' || rp === '.') && !rrest)) {\n // only HAS to be a dir if it ends in **/ or **/.\n // but ending in ** will match files as well.\n this.matches.add(t, absolute, rp === '' || rp === '.')\n } else {\n if (rp === '..') {\n // this would mean you're matching **/.. 
at the fs root,\n // and no thanks, I'm not gonna test that specific case.\n /* c8 ignore start */\n const tp = t.parent || t\n /* c8 ignore stop */\n if (!rrest) this.matches.add(tp, absolute, true)\n else if (!this.hasWalkedCache.hasWalked(tp, rrest)) {\n this.subwalks.add(tp, rrest)\n }\n }\n }\n } else if (p instanceof RegExp) {\n this.subwalks.add(t, pattern)\n }\n }\n\n return this\n }\n\n subwalkTargets(): Path[] {\n return this.subwalks.keys()\n }\n\n child() {\n return new Processor(this.opts, this.hasWalkedCache)\n }\n\n // return a new Processor containing the subwalks for each\n // child entry, and a set of matches, and\n // a hasWalkedCache that's a copy of this one\n // then we're going to call\n filterEntries(parent: Path, entries: Path[]): Processor {\n const patterns = this.subwalks.get(parent)\n // put matches and entry walks into the results processor\n const results = this.child()\n for (const e of entries) {\n for (const pattern of patterns) {\n const absolute = pattern.isAbsolute()\n const p = pattern.pattern()\n const rest = pattern.rest()\n if (p === GLOBSTAR) {\n results.testGlobstar(e, pattern, rest, absolute)\n } else if (p instanceof RegExp) {\n results.testRegExp(e, p, rest, absolute)\n } else {\n results.testString(e, p, rest, absolute)\n }\n }\n }\n return results\n }\n\n testGlobstar(\n e: Path,\n pattern: Pattern,\n rest: Pattern | null,\n absolute: boolean,\n ) {\n if (this.dot || !e.name.startsWith('.')) {\n if (!pattern.hasMore()) {\n this.matches.add(e, absolute, false)\n }\n if (e.canReaddir()) {\n // if we're in follow mode or it's not a symlink, just keep\n // testing the same pattern. If there's more after the globstar,\n // then this symlink consumes the globstar. If not, then we can\n // follow at most ONE symlink along the way, so we mark it, which\n // also checks to ensure that it wasn't already marked.\n if (this.follow || !e.isSymbolicLink()) {\n this.subwalks.add(e, pattern)\n } else if (e.isSymbolicLink()) {\n if (rest && pattern.checkFollowGlobstar()) {\n this.subwalks.add(e, rest)\n } else if (pattern.markFollowGlobstar()) {\n this.subwalks.add(e, pattern)\n }\n }\n }\n }\n // if the NEXT thing matches this entry, then also add\n // the rest.\n if (rest) {\n const rp = rest.pattern()\n if (\n typeof rp === 'string' &&\n // dots and empty were handled already\n rp !== '..' 
&&\n rp !== '' &&\n rp !== '.'\n ) {\n this.testString(e, rp, rest.rest(), absolute)\n } else if (rp === '..') {\n /* c8 ignore start */\n const ep = e.parent || e\n /* c8 ignore stop */\n this.subwalks.add(ep, rest)\n } else if (rp instanceof RegExp) {\n this.testRegExp(e, rp, rest.rest(), absolute)\n }\n }\n }\n\n testRegExp(\n e: Path,\n p: MMRegExp,\n rest: Pattern | null,\n absolute: boolean,\n ) {\n if (!p.test(e.name)) return\n if (!rest) {\n this.matches.add(e, absolute, false)\n } else {\n this.subwalks.add(e, rest)\n }\n }\n\n testString(e: Path, p: string, rest: Pattern | null, absolute: boolean) {\n // should never happen?\n if (!e.isNamed(p)) return\n if (!rest) {\n this.matches.add(e, absolute, false)\n } else {\n this.subwalks.add(e, rest)\n }\n }\n}\n"]} \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/walker.d.ts b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/walker.d.ts new file mode 100644 index 0000000000000000000000000000000000000000..499c8f4933857a8720e77b7b96d9842bbd3248ae --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/walker.d.ts @@ -0,0 +1,97 @@ +/** + * Single-use utility classes to provide functionality to the {@link Glob} + * methods. + * + * @module + */ +import { Minipass } from 'minipass'; +import { Path } from 'path-scurry'; +import { IgnoreLike } from './ignore.js'; +import { Pattern } from './pattern.js'; +import { Processor } from './processor.js'; +export interface GlobWalkerOpts { + absolute?: boolean; + allowWindowsEscape?: boolean; + cwd?: string | URL; + dot?: boolean; + dotRelative?: boolean; + follow?: boolean; + ignore?: string | string[] | IgnoreLike; + mark?: boolean; + matchBase?: boolean; + maxDepth?: number; + nobrace?: boolean; + nocase?: boolean; + nodir?: boolean; + noext?: boolean; + noglobstar?: boolean; + platform?: NodeJS.Platform; + posix?: boolean; + realpath?: boolean; + root?: string; + stat?: boolean; + signal?: AbortSignal; + windowsPathsNoEscape?: boolean; + withFileTypes?: boolean; + includeChildMatches?: boolean; +} +export type GWOFileTypesTrue = GlobWalkerOpts & { + withFileTypes: true; +}; +export type GWOFileTypesFalse = GlobWalkerOpts & { + withFileTypes: false; +}; +export type GWOFileTypesUnset = GlobWalkerOpts & { + withFileTypes?: undefined; +}; +export type Result = O extends GWOFileTypesTrue ? Path : O extends GWOFileTypesFalse ? string : O extends GWOFileTypesUnset ? string : Path | string; +export type Matches = O extends GWOFileTypesTrue ? Set : O extends GWOFileTypesFalse ? Set : O extends GWOFileTypesUnset ? 
Set : Set; +export type MatchStream = Minipass, Result>; +/** + * basic walking utilities that all the glob walker types use + */ +export declare abstract class GlobUtil { + #private; + path: Path; + patterns: Pattern[]; + opts: O; + seen: Set; + paused: boolean; + aborted: boolean; + signal?: AbortSignal; + maxDepth: number; + includeChildMatches: boolean; + constructor(patterns: Pattern[], path: Path, opts: O); + pause(): void; + resume(): void; + onResume(fn: () => any): void; + matchCheck(e: Path, ifDir: boolean): Promise; + matchCheckTest(e: Path | undefined, ifDir: boolean): Path | undefined; + matchCheckSync(e: Path, ifDir: boolean): Path | undefined; + abstract matchEmit(p: Result): void; + abstract matchEmit(p: string | Path): void; + matchFinish(e: Path, absolute: boolean): void; + match(e: Path, absolute: boolean, ifDir: boolean): Promise; + matchSync(e: Path, absolute: boolean, ifDir: boolean): void; + walkCB(target: Path, patterns: Pattern[], cb: () => any): void; + walkCB2(target: Path, patterns: Pattern[], processor: Processor, cb: () => any): any; + walkCB3(target: Path, entries: Path[], processor: Processor, cb: () => any): void; + walkCBSync(target: Path, patterns: Pattern[], cb: () => any): void; + walkCB2Sync(target: Path, patterns: Pattern[], processor: Processor, cb: () => any): any; + walkCB3Sync(target: Path, entries: Path[], processor: Processor, cb: () => any): void; +} +export declare class GlobWalker extends GlobUtil { + matches: Set>; + constructor(patterns: Pattern[], path: Path, opts: O); + matchEmit(e: Result): void; + walk(): Promise>>; + walkSync(): Set>; +} +export declare class GlobStream extends GlobUtil { + results: Minipass, Result>; + constructor(patterns: Pattern[], path: Path, opts: O); + matchEmit(e: Result): void; + stream(): MatchStream; + streamSync(): MatchStream; +} +//# sourceMappingURL=walker.d.ts.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/walker.d.ts.map b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/walker.d.ts.map new file mode 100644 index 0000000000000000000000000000000000000000..769957bd59bb1ce67cdf28134be59ca86946c405 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/walker.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"walker.d.ts","sourceRoot":"","sources":["../../src/walker.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AACH,OAAO,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AACnC,OAAO,EAAE,IAAI,EAAE,MAAM,aAAa,CAAA;AAClC,OAAO,EAAU,UAAU,EAAE,MAAM,aAAa,CAAA;AAOhD,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAA;AACtC,OAAO,EAAE,SAAS,EAAE,MAAM,gBAAgB,CAAA;AAE1C,MAAM,WAAW,cAAc;IAC7B,QAAQ,CAAC,EAAE,OAAO,CAAA;IAClB,kBAAkB,CAAC,EAAE,OAAO,CAAA;IAC5B,GAAG,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;IAClB,GAAG,CAAC,EAAE,OAAO,CAAA;IACb,WAAW,CAAC,EAAE,OAAO,CAAA;IACrB,MAAM,CAAC,EAAE,OAAO,CAAA;IAChB,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,UAAU,CAAA;IACvC,IAAI,CAAC,EAAE,OAAO,CAAA;IACd,SAAS,CAAC,EAAE,OAAO,CAAA;IAGnB,QAAQ,CAAC,EAAE,MAAM,CAAA;IACjB,OAAO,CAAC,EAAE,OAAO,CAAA;IACjB,MAAM,CAAC,EAAE,OAAO,CAAA;IAChB,KAAK,CAAC,EAAE,OAAO,CAAA;IACf,KAAK,CAAC,EAAE,OAAO,CAAA;IACf,UAAU,CAAC,EAAE,OAAO,CAAA;IACpB,QAAQ,CAAC,EAAE,MAAM,CAAC,QAAQ,CAAA;IAC1B,KAAK,CAAC,EAAE,OAAO,CAAA;IACf,QAAQ,CAAC,EAAE,OAAO,CAAA;IAClB,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,IAAI,CAAC,EAAE,OAAO,CAAA;IACd,MAAM,CAAC,EAAE,WAAW,CAAA;IACpB,oBAAoB,CAAC,EAAE,OAAO,CAAA;IAC9B,aAAa,CAAC,EAAE,OAAO,CAAA;IACvB,mBAAmB,CAAC,EAAE,OAAO,CAAA;CAC9B;AAED,MAAM,MAAM,gBAAgB,GAAG,cAAc,GAAG;IAC9C,aAAa,EAAE,IAAI,CAAA;CACpB,CAAA;AACD,MAAM,MAAM,iBAAiB,GAAG,cAAc,GAAG;IAC/C,aAAa,EAAE,KAAK,CAAA;CACrB,CAAA;AACD,MAAM,MAAM,iBAAiB,GAAG,cAAc,GAAG;IAC/C,aAAa,CAAC,EAAE,SAAS,CAAA;CAC1B,CAAA;AAED,MAAM,MAAM,MAAM,CAAC,CAAC,SAAS,cAAc,IACzC,CAAC,SAAS,gBAAgB,GAAG,IAAI,GAC/B,CAAC,SAAS,iBAAiB,GAAG,MAAM,GACpC,CAAC,SAAS,iBAAiB,GAAG,MAAM,GACpC,IAAI,GAAG,MAAM,CAAA;AAEjB,MAAM,MAAM,OAAO,CAAC,CAAC,SAAS,cAAc,IAC1C,CAAC,SAAS,gBAAgB,GAAG,GAAG,CAAC,IAAI,CAAC,GACpC,CAAC,SAAS,iBAAiB,GAAG,GAAG,CAAC,MAAM,CAAC,GACzC,CAAC,SAAS,iBAAiB,GAAG,GAAG,CAAC,MAAM,CAAC,GACzC,GAAG,CAAC,IAAI,GAAG,MAAM,CAAC,CAAA;AAEtB,MAAM,MAAM,WAAW,CAAC,CAAC,SAAS,cAAc,IAAI,QAAQ,CAC1D,MAAM,CAAC,CAAC,CAAC,EACT,MAAM,CAAC,CAAC,CAAC,CACV,CAAA;AAUD;;GAEG;AACH,8BAAsB,QAAQ,CAAC,CAAC,SAAS,cAAc,GAAG,cAAc;;IACtE,IAAI,EAAE,IAAI,CAAA;IACV,QAAQ,EAAE,OAAO,EAAE,CAAA;IACnB,IAAI,EAAE,CAAC,CAAA;IACP,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,CAAkB;IACjC,MAAM,EAAE,OAAO,CAAQ;IACvB,OAAO,EAAE,OAAO,CAAQ;IAIxB,MAAM,CAAC,EAAE,WAAW,CAAA;IACpB,QAAQ,EAAE,MAAM,CAAA;IAChB,mBAAmB,EAAE,OAAO,CAAA;gBAEhB,QAAQ,EAAE,OAAO,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;IAsCpD,KAAK;IAGL,MAAM;IAUN,QAAQ,CAAC,EAAE,EAAE,MAAM,GAAG;IAahB,UAAU,CAAC,CAAC,EAAE,IAAI,EAAE,KAAK,EAAE,OAAO,GAAG,OAAO,CAAC,IAAI,GAAG,SAAS,CAAC;IAqBpE,cAAc,CAAC,CAAC,EAAE,IAAI,GAAG,SAAS,EAAE,KAAK,EAAE,OAAO,GAAG,IAAI,GAAG,SAAS;IAgBrE,cAAc,CAAC,CAAC,EAAE,IAAI,EAAE,KAAK,EAAE,OAAO,GAAG,IAAI,GAAG,SAAS;IAmBzD,QAAQ,CAAC,SAAS,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,GAAG,IAAI;IACtC,QAAQ,CAAC,SAAS,CAAC,CAAC,EAAE,MAAM,GAAG,IAAI,GAAG,IAAI;IAE1C,WAAW,CAAC,CAAC,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO;IA2BhC,KAAK,CAAC,CAAC,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,KAAK,EAAE,OAAO,GAAG,OAAO,CAAC,IAAI,CAAC;IAKtE,SAAS,CAAC,CAAC,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,KAAK,EAAE,OAAO,GAAG,IAAI;IAK3D,MAAM,CAAC,MAAM,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,EAAE,EAAE,EAAE,MAAM,GAAG;IAOvD,OAAO,CACL,MAAM,EAAE,IAAI,EACZ,QAAQ,EAAE,OAAO,EAAE,EACnB,SAAS,EAAE,SAAS,EACpB,EAAE,EAAE,MAAM,GAAG;IA2Cf,OAAO,CACL,MAAM,EAAE,IAAI,EACZ,OAAO,EAAE,IAAI,EAAE,EACf,SAAS,EAAE,SAAS,EACpB,EAAE,EAAE,MAAM,GAAG;IAsBf,UAAU,CAAC,MAAM,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,EAAE,EAAE,EAAE,MAAM,GAAG;IAO3D,WAAW,CACT,MAAM,EAAE,IAAI,EACZ,QAAQ,EAAE,OAAO,EAAE,EACnB,SAAS,EAAE,SAAS,EACpB,EAAE,EAAE,MAAM,GAAG;IAqCf,WAAW,CACT,MAAM,EAAE,IAAI,EACZ,OAAO,EAAE,IAAI,EAAE,EACf,SAAS,EAAE,SAAS,EACpB,EAAE,EAAE,MAAM,GAAG;CAoBhB;AAED,qBAAa,UAAU,CACrB,CAAC,SAAS,cAAc,GAAG,cAAc,CACzC,SAAQ,QAAQ,
CAAC,CAAC,CAAC;IACnB,OAAO,iBAAuB;gBAElB,QAAQ,EAAE,OAAO,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;IAIpD,SAAS,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,GAAG,IAAI;IAIvB,IAAI,IAAI,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;IAiBrC,QAAQ,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;CAW3B;AAED,qBAAa,UAAU,CACrB,CAAC,SAAS,cAAc,GAAG,cAAc,CACzC,SAAQ,QAAQ,CAAC,CAAC,CAAC;IACnB,OAAO,EAAE,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,CAAA;gBAE3B,QAAQ,EAAE,OAAO,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;IAUpD,SAAS,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,GAAG,IAAI;IAK7B,MAAM,IAAI,WAAW,CAAC,CAAC,CAAC;IAYxB,UAAU,IAAI,WAAW,CAAC,CAAC,CAAC;CAO7B"} \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/walker.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/walker.js new file mode 100644 index 0000000000000000000000000000000000000000..3d68196c4f175f6b314b444e409f9b8a77164963 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/walker.js @@ -0,0 +1,381 @@ +/** + * Single-use utility classes to provide functionality to the {@link Glob} + * methods. + * + * @module + */ +import { Minipass } from 'minipass'; +import { Ignore } from './ignore.js'; +import { Processor } from './processor.js'; +const makeIgnore = (ignore, opts) => typeof ignore === 'string' ? new Ignore([ignore], opts) + : Array.isArray(ignore) ? new Ignore(ignore, opts) + : ignore; +/** + * basic walking utilities that all the glob walker types use + */ +export class GlobUtil { + path; + patterns; + opts; + seen = new Set(); + paused = false; + aborted = false; + #onResume = []; + #ignore; + #sep; + signal; + maxDepth; + includeChildMatches; + constructor(patterns, path, opts) { + this.patterns = patterns; + this.path = path; + this.opts = opts; + this.#sep = !opts.posix && opts.platform === 'win32' ? '\\' : '/'; + this.includeChildMatches = opts.includeChildMatches !== false; + if (opts.ignore || !this.includeChildMatches) { + this.#ignore = makeIgnore(opts.ignore ?? [], opts); + if (!this.includeChildMatches && + typeof this.#ignore.add !== 'function') { + const m = 'cannot ignore child matches, ignore lacks add() method.'; + throw new Error(m); + } + } + // ignore, always set with maxDepth, but it's optional on the + // GlobOptions type + /* c8 ignore start */ + this.maxDepth = opts.maxDepth || Infinity; + /* c8 ignore stop */ + if (opts.signal) { + this.signal = opts.signal; + this.signal.addEventListener('abort', () => { + this.#onResume.length = 0; + }); + } + } + #ignored(path) { + return this.seen.has(path) || !!this.#ignore?.ignored?.(path); + } + #childrenIgnored(path) { + return !!this.#ignore?.childrenIgnored?.(path); + } + // backpressure mechanism + pause() { + this.paused = true; + } + resume() { + /* c8 ignore start */ + if (this.signal?.aborted) + return; + /* c8 ignore stop */ + this.paused = false; + let fn = undefined; + while (!this.paused && (fn = this.#onResume.shift())) { + fn(); + } + } + onResume(fn) { + if (this.signal?.aborted) + return; + /* c8 ignore start */ + if (!this.paused) { + fn(); + } + else { + /* c8 ignore stop */ + this.#onResume.push(fn); + } + } + // do the requisite realpath/stat checking, and return the path + // to add or undefined to filter it out. 
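+    // Roughly: resolve the realpath first if opts.realpath is set, lstat
+    // when the entry type is still unknown (or stats were requested), and
+    // for follow+nodir also stat symlink targets so that symlinks which
+    // resolve to directories can be rejected by matchCheckTest() below.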
+ async matchCheck(e, ifDir) { + if (ifDir && this.opts.nodir) + return undefined; + let rpc; + if (this.opts.realpath) { + rpc = e.realpathCached() || (await e.realpath()); + if (!rpc) + return undefined; + e = rpc; + } + const needStat = e.isUnknown() || this.opts.stat; + const s = needStat ? await e.lstat() : e; + if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) { + const target = await s.realpath(); + /* c8 ignore start */ + if (target && (target.isUnknown() || this.opts.stat)) { + await target.lstat(); + } + /* c8 ignore stop */ + } + return this.matchCheckTest(s, ifDir); + } + matchCheckTest(e, ifDir) { + return (e && + (this.maxDepth === Infinity || e.depth() <= this.maxDepth) && + (!ifDir || e.canReaddir()) && + (!this.opts.nodir || !e.isDirectory()) && + (!this.opts.nodir || + !this.opts.follow || + !e.isSymbolicLink() || + !e.realpathCached()?.isDirectory()) && + !this.#ignored(e)) ? + e + : undefined; + } + matchCheckSync(e, ifDir) { + if (ifDir && this.opts.nodir) + return undefined; + let rpc; + if (this.opts.realpath) { + rpc = e.realpathCached() || e.realpathSync(); + if (!rpc) + return undefined; + e = rpc; + } + const needStat = e.isUnknown() || this.opts.stat; + const s = needStat ? e.lstatSync() : e; + if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) { + const target = s.realpathSync(); + if (target && (target?.isUnknown() || this.opts.stat)) { + target.lstatSync(); + } + } + return this.matchCheckTest(s, ifDir); + } + matchFinish(e, absolute) { + if (this.#ignored(e)) + return; + // we know we have an ignore if this is false, but TS doesn't + if (!this.includeChildMatches && this.#ignore?.add) { + const ign = `${e.relativePosix()}/**`; + this.#ignore.add(ign); + } + const abs = this.opts.absolute === undefined ? absolute : this.opts.absolute; + this.seen.add(e); + const mark = this.opts.mark && e.isDirectory() ? this.#sep : ''; + // ok, we have what we need! + if (this.opts.withFileTypes) { + this.matchEmit(e); + } + else if (abs) { + const abs = this.opts.posix ? e.fullpathPosix() : e.fullpath(); + this.matchEmit(abs + mark); + } + else { + const rel = this.opts.posix ? e.relativePosix() : e.relative(); + const pre = this.opts.dotRelative && !rel.startsWith('..' + this.#sep) ? + '.' + this.#sep + : ''; + this.matchEmit(!rel ? '.' + mark : pre + rel + mark); + } + } + async match(e, absolute, ifDir) { + const p = await this.matchCheck(e, ifDir); + if (p) + this.matchFinish(p, absolute); + } + matchSync(e, absolute, ifDir) { + const p = this.matchCheckSync(e, ifDir); + if (p) + this.matchFinish(p, absolute); + } + walkCB(target, patterns, cb) { + /* c8 ignore start */ + if (this.signal?.aborted) + cb(); + /* c8 ignore stop */ + this.walkCB2(target, patterns, new Processor(this.opts), cb); + } + walkCB2(target, patterns, processor, cb) { + if (this.#childrenIgnored(target)) + return cb(); + if (this.signal?.aborted) + cb(); + if (this.paused) { + this.onResume(() => this.walkCB2(target, patterns, processor, cb)); + return; + } + processor.processPatterns(target, patterns); + // done processing. all of the above is sync, can be abstracted out. + // subwalks is a map of paths to the entry filters they need + // matches is a map of paths to [absolute, ifDir] tuples. 
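+        // tasks starts at 1 so the trailing next() call below acts as a
+        // release: cb() fires only after every match and subwalk scheduled
+        // here has incremented and then decremented the counter.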
+ let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + tasks++; + this.match(m, absolute, ifDir).then(() => next()); + } + for (const t of processor.subwalkTargets()) { + if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) { + continue; + } + tasks++; + const childrenCached = t.readdirCached(); + if (t.calledReaddir()) + this.walkCB3(t, childrenCached, processor, next); + else { + t.readdirCB((_, entries) => this.walkCB3(t, entries, processor, next), true); + } + } + next(); + } + walkCB3(target, entries, processor, cb) { + processor = processor.filterEntries(target, entries); + let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + tasks++; + this.match(m, absolute, ifDir).then(() => next()); + } + for (const [target, patterns] of processor.subwalks.entries()) { + tasks++; + this.walkCB2(target, patterns, processor.child(), next); + } + next(); + } + walkCBSync(target, patterns, cb) { + /* c8 ignore start */ + if (this.signal?.aborted) + cb(); + /* c8 ignore stop */ + this.walkCB2Sync(target, patterns, new Processor(this.opts), cb); + } + walkCB2Sync(target, patterns, processor, cb) { + if (this.#childrenIgnored(target)) + return cb(); + if (this.signal?.aborted) + cb(); + if (this.paused) { + this.onResume(() => this.walkCB2Sync(target, patterns, processor, cb)); + return; + } + processor.processPatterns(target, patterns); + // done processing. all of the above is sync, can be abstracted out. + // subwalks is a map of paths to the entry filters they need + // matches is a map of paths to [absolute, ifDir] tuples. 
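+        // same counter pattern as walkCB2 above, except that matches are
+        // emitted synchronously here, so only the subwalk recursion needs
+        // to bump the task count.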
+ let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + this.matchSync(m, absolute, ifDir); + } + for (const t of processor.subwalkTargets()) { + if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) { + continue; + } + tasks++; + const children = t.readdirSync(); + this.walkCB3Sync(t, children, processor, next); + } + next(); + } + walkCB3Sync(target, entries, processor, cb) { + processor = processor.filterEntries(target, entries); + let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + this.matchSync(m, absolute, ifDir); + } + for (const [target, patterns] of processor.subwalks.entries()) { + tasks++; + this.walkCB2Sync(target, patterns, processor.child(), next); + } + next(); + } +} +export class GlobWalker extends GlobUtil { + matches = new Set(); + constructor(patterns, path, opts) { + super(patterns, path, opts); + } + matchEmit(e) { + this.matches.add(e); + } + async walk() { + if (this.signal?.aborted) + throw this.signal.reason; + if (this.path.isUnknown()) { + await this.path.lstat(); + } + await new Promise((res, rej) => { + this.walkCB(this.path, this.patterns, () => { + if (this.signal?.aborted) { + rej(this.signal.reason); + } + else { + res(this.matches); + } + }); + }); + return this.matches; + } + walkSync() { + if (this.signal?.aborted) + throw this.signal.reason; + if (this.path.isUnknown()) { + this.path.lstatSync(); + } + // nothing for the callback to do, because this never pauses + this.walkCBSync(this.path, this.patterns, () => { + if (this.signal?.aborted) + throw this.signal.reason; + }); + return this.matches; + } +} +export class GlobStream extends GlobUtil { + results; + constructor(patterns, path, opts) { + super(patterns, path, opts); + this.results = new Minipass({ + signal: this.signal, + objectMode: true, + }); + this.results.on('drain', () => this.resume()); + this.results.on('resume', () => this.resume()); + } + matchEmit(e) { + this.results.write(e); + if (!this.results.flowing) + this.pause(); + } + stream() { + const target = this.path; + if (target.isUnknown()) { + target.lstat().then(() => { + this.walkCB(target, this.patterns, () => this.results.end()); + }); + } + else { + this.walkCB(target, this.patterns, () => this.results.end()); + } + return this.results; + } + streamSync() { + if (this.path.isUnknown()) { + this.path.lstatSync(); + } + this.walkCBSync(this.path, this.patterns, () => this.results.end()); + return this.results; + } +} +//# sourceMappingURL=walker.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/walker.js.map b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/walker.js.map new file mode 100644 index 0000000000000000000000000000000000000000..daeeda6752713f72f18fae72f9d0113bb6f1b954 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/glob/dist/esm/walker.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"walker.js","sourceRoot":"","sources":["../../src/walker.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AACH,OAAO,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AAEnC,OAAO,EAAE,MAAM,EAAc,MAAM,aAAa,CAAA;AAQhD,OAAO,EAAE,SAAS,EAAE,MAAM,gBAAgB,CAAA;AA0D1C,MAAM,UAAU,GAAG,CACjB,MAAsC,EACtC,IAAoB,EACR,EAAE,CACd,OAAO,MAAM,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC,CAAC,MAAM,CAAC,EAAE,IAAI,CAAC;IACvD,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC,MAAM,EAAE,IAAI,CAAC;QAClD,CAAC,CAAC,MAAM,CAAA;AAEV;;GAEG;AACH,MAAM,OAAgB,QAAQ;IAC5B,IAAI,CAAM;IACV,QAAQ,CAAW;IACnB,IAAI,CAAG;IACP,IAAI,GAAc,IAAI,GAAG,EAAQ,CAAA;IACjC,MAAM,GAAY,KAAK,CAAA;IACvB,OAAO,GAAY,KAAK,CAAA;IACxB,SAAS,GAAkB,EAAE,CAAA;IAC7B,OAAO,CAAa;IACpB,IAAI,CAAY;IAChB,MAAM,CAAc;IACpB,QAAQ,CAAQ;IAChB,mBAAmB,CAAS;IAG5B,YAAY,QAAmB,EAAE,IAAU,EAAE,IAAO;QAClD,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAA;QACxB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAA;QAChB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAA;QAChB,IAAI,CAAC,IAAI,GAAG,CAAC,IAAI,CAAC,KAAK,IAAI,IAAI,CAAC,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAA;QACjE,IAAI,CAAC,mBAAmB,GAAG,IAAI,CAAC,mBAAmB,KAAK,KAAK,CAAA;QAC7D,IAAI,IAAI,CAAC,MAAM,IAAI,CAAC,IAAI,CAAC,mBAAmB,EAAE,CAAC;YAC7C,IAAI,CAAC,OAAO,GAAG,UAAU,CAAC,IAAI,CAAC,MAAM,IAAI,EAAE,EAAE,IAAI,CAAC,CAAA;YAClD,IACE,CAAC,IAAI,CAAC,mBAAmB;gBACzB,OAAO,IAAI,CAAC,OAAO,CAAC,GAAG,KAAK,UAAU,EACtC,CAAC;gBACD,MAAM,CAAC,GAAG,yDAAyD,CAAA;gBACnE,MAAM,IAAI,KAAK,CAAC,CAAC,CAAC,CAAA;YACpB,CAAC;QACH,CAAC;QACD,6DAA6D;QAC7D,mBAAmB;QACnB,qBAAqB;QACrB,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,QAAQ,CAAA;QACzC,oBAAoB;QACpB,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC;YAChB,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;YACzB,IAAI,CAAC,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE;gBACzC,IAAI,CAAC,SAAS,CAAC,MAAM,GAAG,CAAC,CAAA;YAC3B,CAAC,CAAC,CAAA;QACJ,CAAC;IACH,CAAC;IAED,QAAQ,CAAC,IAAU;QACjB,OAAO,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,EAAE,CAAC,IAAI,CAAC,CAAA;IAC/D,CAAC;IACD,gBAAgB,CAAC,IAAU;QACzB,OAAO,CAAC,CAAC,IAAI,CAAC,OAAO,EAAE,eAAe,EAAE,CAAC,IAAI,CAAC,CAAA;IAChD,CAAC;IAED,yBAAyB;IACzB,KAAK;QACH,IAAI,CAAC,MAAM,GAAG,IAAI,CAAA;IACpB,CAAC;IACD,MAAM;QACJ,qBAAqB;QACrB,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO;YAAE,OAAM;QAChC,oBAAoB;QACpB,IAAI,CAAC,MAAM,GAAG,KAAK,CAAA;QACnB,IAAI,EAAE,GAA4B,SAAS,CAAA;QAC3C,OAAO,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,EAAE,GAAG,IAAI,CAAC,SAAS,CAAC,KAAK,EAAE,CAAC,EAAE,CAAC;YACrD,EAAE,EAAE,CAAA;QACN,CAAC;IACH,CAAC;IACD,QAAQ,CAAC,EAAa;QACpB,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO;YAAE,OAAM;QAChC,qBAAqB;QACrB,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC;YACjB,EAAE,EAAE,CAAA;QACN,CAAC;aAAM,CAAC;YACN,oBAAoB;YACpB,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE,CAAC,CAAA;QACzB,CAAC;IACH,CAAC;IAED,+DAA+D;IAC/D,wCAAwC;IACxC,KAAK,CAAC,UAAU,CAAC,CAAO,EAAE,KAAc;QACtC,IAAI,KAAK,IAAI,IAAI,CAAC,IAAI,CAAC,KAAK;YAAE,OAAO,SAAS,CAAA;QAC9C,IAAI,GAAqB,CAAA;QACzB,IAAI,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC;YACvB,GAAG,GAAG,CAAC,CAAC,cAAc,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAA;YAChD,IAAI,CAAC,GAAG;gBAAE,OAAO,SAAS,CAAA;YAC1B,CAAC,GAAG,GAAG,CAAA;QACT,CAAC;QACD,MAAM,QAAQ,GAAG,CAAC,CAAC,SAAS,EAAE,IAAI,IAAI,CAAC,IAAI,CAAC,IAAI,CAAA;QAChD,MAAM,CAAC,GAAG,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,CAAC,CAAA;QACxC,IAAI,IAAI,CAAC,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,IAAI,CAAC,KAAK,IAAI,CAAC,EAAE,cAAc,EAAE,EAAE,CAAC;YAC/D,MAAM,MAAM,GAAG,MAAM,CAAC,CAAC,QAAQ,EAAE,CAAA;YACjC,qBAAqB;YACrB,IAAI,MAAM,IAAI,CAAC,MAAM,CAAC,SAAS,EAAE,IAAI,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;gBACrD,MAAM,MAAM,CAAC,KAAK,EAAE,CAAA;YACtB,CAAC;YACD,oBAAoB;QACtB,CAAC;QACD,OAAO,IAAI,CAAC,cAAc,CAAC,CAAC,EAAE,KAAK,CAAC,CAAA;IACtC,CAAC;IAED,cAAc,CA
AC,CAAmB,EAAE,KAAc;QAChD,OAAO,CACH,CAAC;YACC,CAAC,IAAI,CAAC,QAAQ,KAAK,QAAQ,IAAI,CAAC,CAAC,KAAK,EAAE,IAAI,IAAI,CAAC,QAAQ,CAAC;YAC1D,CAAC,CAAC,KAAK,IAAI,CAAC,CAAC,UAAU,EAAE,CAAC;YAC1B,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,IAAI,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC;YACtC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK;gBACf,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM;gBACjB,CAAC,CAAC,CAAC,cAAc,EAAE;gBACnB,CAAC,CAAC,CAAC,cAAc,EAAE,EAAE,WAAW,EAAE,CAAC;YACrC,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CACpB,CAAC,CAAC;YACD,CAAC;YACH,CAAC,CAAC,SAAS,CAAA;IACf,CAAC;IAED,cAAc,CAAC,CAAO,EAAE,KAAc;QACpC,IAAI,KAAK,IAAI,IAAI,CAAC,IAAI,CAAC,KAAK;YAAE,OAAO,SAAS,CAAA;QAC9C,IAAI,GAAqB,CAAA;QACzB,IAAI,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC;YACvB,GAAG,GAAG,CAAC,CAAC,cAAc,EAAE,IAAI,CAAC,CAAC,YAAY,EAAE,CAAA;YAC5C,IAAI,CAAC,GAAG;gBAAE,OAAO,SAAS,CAAA;YAC1B,CAAC,GAAG,GAAG,CAAA;QACT,CAAC;QACD,MAAM,QAAQ,GAAG,CAAC,CAAC,SAAS,EAAE,IAAI,IAAI,CAAC,IAAI,CAAC,IAAI,CAAA;QAChD,MAAM,CAAC,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,CAAC,CAAA;QACtC,IAAI,IAAI,CAAC,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,IAAI,CAAC,KAAK,IAAI,CAAC,EAAE,cAAc,EAAE,EAAE,CAAC;YAC/D,MAAM,MAAM,GAAG,CAAC,CAAC,YAAY,EAAE,CAAA;YAC/B,IAAI,MAAM,IAAI,CAAC,MAAM,EAAE,SAAS,EAAE,IAAI,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;gBACtD,MAAM,CAAC,SAAS,EAAE,CAAA;YACpB,CAAC;QACH,CAAC;QACD,OAAO,IAAI,CAAC,cAAc,CAAC,CAAC,EAAE,KAAK,CAAC,CAAA;IACtC,CAAC;IAKD,WAAW,CAAC,CAAO,EAAE,QAAiB;QACpC,IAAI,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC;YAAE,OAAM;QAC5B,6DAA6D;QAC7D,IAAI,CAAC,IAAI,CAAC,mBAAmB,IAAI,IAAI,CAAC,OAAO,EAAE,GAAG,EAAE,CAAC;YACnD,MAAM,GAAG,GAAG,GAAG,CAAC,CAAC,aAAa,EAAE,KAAK,CAAA;YACrC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,CAAA;QACvB,CAAC;QACD,MAAM,GAAG,GACP,IAAI,CAAC,IAAI,CAAC,QAAQ,KAAK,SAAS,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAA;QAClE,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;QAChB,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAA;QAC/D,4BAA4B;QAC5B,IAAI,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,CAAC;YAC5B,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAA;QACnB,CAAC;aAAM,IAAI,GAAG,EAAE,CAAC;YACf,MAAM,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,aAAa,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAA;YAC9D,IAAI,CAAC,SAAS,CAAC,GAAG,GAAG,IAAI,CAAC,CAAA;QAC5B,CAAC;aAAM,CAAC;YACN,MAAM,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,aAAa,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAA;YAC9D,MAAM,GAAG,GACP,IAAI,CAAC,IAAI,CAAC,WAAW,IAAI,CAAC,GAAG,CAAC,UAAU,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;gBAC1D,GAAG,GAAG,IAAI,CAAC,IAAI;gBACjB,CAAC,CAAC,EAAE,CAAA;YACN,IAAI,CAAC,SAAS,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,GAAG,IAAI,CAAC,CAAC,CAAC,GAAG,GAAG,GAAG,GAAG,IAAI,CAAC,CAAA;QACtD,CAAC;IACH,CAAC;IAED,KAAK,CAAC,KAAK,CAAC,CAAO,EAAE,QAAiB,EAAE,KAAc;QACpD,MAAM,CAAC,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,CAAC,EAAE,KAAK,CAAC,CAAA;QACzC,IAAI,CAAC;YAAE,IAAI,CAAC,WAAW,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAA;IACtC,CAAC;IAED,SAAS,CAAC,CAAO,EAAE,QAAiB,EAAE,KAAc;QAClD,MAAM,CAAC,GAAG,IAAI,CAAC,cAAc,CAAC,CAAC,EAAE,KAAK,CAAC,CAAA;QACvC,IAAI,CAAC;YAAE,IAAI,CAAC,WAAW,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAA;IACtC,CAAC;IAED,MAAM,CAAC,MAAY,EAAE,QAAmB,EAAE,EAAa;QACrD,qBAAqB;QACrB,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO;YAAE,EAAE,EAAE,CAAA;QAC9B,oBAAoB;QACpB,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,QAAQ,EAAE,IAAI,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,EAAE,CAAC,CAAA;IAC9D,CAAC;IAED,OAAO,CACL,MAAY,EACZ,QAAmB,EACnB,SAAoB,EACpB,EAAa;QAEb,IAAI,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC;YAAE,OAAO,EAAE,EAAE,CAAA;QAC9C,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO;YAAE,EAAE,EAAE,CAAA;QAC9B,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC;YAChB,IAAI,CAAC,QAAQ,CAAC,GAAG,EAAE,CAAC,
IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,QAAQ,EAAE,SAAS,EAAE,EAAE,CAAC,CAAC,CAAA;YAClE,OAAM;QACR,CAAC;QACD,SAAS,CAAC,eAAe,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QAE3C,qEAAqE;QACrE,4DAA4D;QAC5D,yDAAyD;QACzD,IAAI,KAAK,GAAG,CAAC,CAAA;QACb,MAAM,IAAI,GAAG,GAAG,EAAE;YAChB,IAAI,EAAE,KAAK,KAAK,CAAC;gBAAE,EAAE,EAAE,CAAA;QACzB,CAAC,CAAA;QAED,KAAK,MAAM,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,IAAI,SAAS,CAAC,OAAO,CAAC,OAAO,EAAE,EAAE,CAAC;YAC/D,IAAI,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC;gBAAE,SAAQ;YAC9B,KAAK,EAAE,CAAA;YACP,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,IAAI,EAAE,CAAC,CAAA;QACnD,CAAC;QAED,KAAK,MAAM,CAAC,IAAI,SAAS,CAAC,cAAc,EAAE,EAAE,CAAC;YAC3C,IAAI,IAAI,CAAC,QAAQ,KAAK,QAAQ,IAAI,CAAC,CAAC,KAAK,EAAE,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;gBAC7D,SAAQ;YACV,CAAC;YACD,KAAK,EAAE,CAAA;YACP,MAAM,cAAc,GAAG,CAAC,CAAC,aAAa,EAAE,CAAA;YACxC,IAAI,CAAC,CAAC,aAAa,EAAE;gBACnB,IAAI,CAAC,OAAO,CAAC,CAAC,EAAE,cAAc,EAAE,SAAS,EAAE,IAAI,CAAC,CAAA;iBAC7C,CAAC;gBACJ,CAAC,CAAC,SAAS,CACT,CAAC,CAAC,EAAE,OAAO,EAAE,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,EAAE,OAAO,EAAE,SAAS,EAAE,IAAI,CAAC,EACzD,IAAI,CACL,CAAA;YACH,CAAC;QACH,CAAC;QAED,IAAI,EAAE,CAAA;IACR,CAAC;IAED,OAAO,CACL,MAAY,EACZ,OAAe,EACf,SAAoB,EACpB,EAAa;QAEb,SAAS,GAAG,SAAS,CAAC,aAAa,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA;QAEpD,IAAI,KAAK,GAAG,CAAC,CAAA;QACb,MAAM,IAAI,GAAG,GAAG,EAAE;YAChB,IAAI,EAAE,KAAK,KAAK,CAAC;gBAAE,EAAE,EAAE,CAAA;QACzB,CAAC,CAAA;QAED,KAAK,MAAM,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,IAAI,SAAS,CAAC,OAAO,CAAC,OAAO,EAAE,EAAE,CAAC;YAC/D,IAAI,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC;gBAAE,SAAQ;YAC9B,KAAK,EAAE,CAAA;YACP,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,IAAI,EAAE,CAAC,CAAA;QACnD,CAAC;QACD,KAAK,MAAM,CAAC,MAAM,EAAE,QAAQ,CAAC,IAAI,SAAS,CAAC,QAAQ,CAAC,OAAO,EAAE,EAAE,CAAC;YAC9D,KAAK,EAAE,CAAA;YACP,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,QAAQ,EAAE,SAAS,CAAC,KAAK,EAAE,EAAE,IAAI,CAAC,CAAA;QACzD,CAAC;QAED,IAAI,EAAE,CAAA;IACR,CAAC;IAED,UAAU,CAAC,MAAY,EAAE,QAAmB,EAAE,EAAa;QACzD,qBAAqB;QACrB,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO;YAAE,EAAE,EAAE,CAAA;QAC9B,oBAAoB;QACpB,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,QAAQ,EAAE,IAAI,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,EAAE,CAAC,CAAA;IAClE,CAAC;IAED,WAAW,CACT,MAAY,EACZ,QAAmB,EACnB,SAAoB,EACpB,EAAa;QAEb,IAAI,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC;YAAE,OAAO,EAAE,EAAE,CAAA;QAC9C,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO;YAAE,EAAE,EAAE,CAAA;QAC9B,IAAI,IAAI,CAAC,MAAM,EAAE,CAAC;YAChB,IAAI,CAAC,QAAQ,CAAC,GAAG,EAAE,CACjB,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,QAAQ,EAAE,SAAS,EAAE,EAAE,CAAC,CAClD,CAAA;YACD,OAAM;QACR,CAAC;QACD,SAAS,CAAC,eAAe,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QAE3C,qEAAqE;QACrE,4DAA4D;QAC5D,yDAAyD;QACzD,IAAI,KAAK,GAAG,CAAC,CAAA;QACb,MAAM,IAAI,GAAG,GAAG,EAAE;YAChB,IAAI,EAAE,KAAK,KAAK,CAAC;gBAAE,EAAE,EAAE,CAAA;QACzB,CAAC,CAAA;QAED,KAAK,MAAM,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,IAAI,SAAS,CAAC,OAAO,CAAC,OAAO,EAAE,EAAE,CAAC;YAC/D,IAAI,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC;gBAAE,SAAQ;YAC9B,IAAI,CAAC,SAAS,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAA;QACpC,CAAC;QAED,KAAK,MAAM,CAAC,IAAI,SAAS,CAAC,cAAc,EAAE,EAAE,CAAC;YAC3C,IAAI,IAAI,CAAC,QAAQ,KAAK,QAAQ,IAAI,CAAC,CAAC,KAAK,EAAE,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;gBAC7D,SAAQ;YACV,CAAC;YACD,KAAK,EAAE,CAAA;YACP,MAAM,QAAQ,GAAG,CAAC,CAAC,WAAW,EAAE,CAAA;YAChC,IAAI,CAAC,WAAW,CAAC,CAAC,EAAE,QAAQ,EAAE,SAAS,EAAE,IAAI,CAAC,CAAA;QAChD,CAAC;QAED,IAAI,EAAE,CAAA;IACR,CAAC;IAED,WAAW,CACT,MAAY,EACZ,OAAe,EACf,SAAoB,EACpB,EAAa;QAEb,SAAS,GAAG,SAAS,CAAC,aAAa,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA;QAEpD,IAAI,KAAK,GAAG,CAAC,CAAA;QACb,MAAM,IAAI,GAAG,GAAG,EAAE;YAChB,IAAI,EAAE,KAAK,KAAK,CAAC;gBAAE,EAAE,EAAE,CAAA;QACzB,CAAC,CAAA;QAED,KAAK,MAAM,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,IAAI,SAAS,C
AAC,OAAO,CAAC,OAAO,EAAE,EAAE,CAAC;YAC/D,IAAI,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC;gBAAE,SAAQ;YAC9B,IAAI,CAAC,SAAS,CAAC,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAA;QACpC,CAAC;QACD,KAAK,MAAM,CAAC,MAAM,EAAE,QAAQ,CAAC,IAAI,SAAS,CAAC,QAAQ,CAAC,OAAO,EAAE,EAAE,CAAC;YAC9D,KAAK,EAAE,CAAA;YACP,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,QAAQ,EAAE,SAAS,CAAC,KAAK,EAAE,EAAE,IAAI,CAAC,CAAA;QAC7D,CAAC;QAED,IAAI,EAAE,CAAA;IACR,CAAC;CACF;AAED,MAAM,OAAO,UAEX,SAAQ,QAAW;IACnB,OAAO,GAAG,IAAI,GAAG,EAAa,CAAA;IAE9B,YAAY,QAAmB,EAAE,IAAU,EAAE,IAAO;QAClD,KAAK,CAAC,QAAQ,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;IAC7B,CAAC;IAED,SAAS,CAAC,CAAY;QACpB,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;IACrB,CAAC;IAED,KAAK,CAAC,IAAI;QACR,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO;YAAE,MAAM,IAAI,CAAC,MAAM,CAAC,MAAM,CAAA;QAClD,IAAI,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE,CAAC;YAC1B,MAAM,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,CAAA;QACzB,CAAC;QACD,MAAM,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE;YAC7B,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,QAAQ,EAAE,GAAG,EAAE;gBACzC,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO,EAAE,CAAC;oBACzB,GAAG,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,CAAA;gBACzB,CAAC;qBAAM,CAAC;oBACN,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;gBACnB,CAAC;YACH,CAAC,CAAC,CAAA;QACJ,CAAC,CAAC,CAAA;QACF,OAAO,IAAI,CAAC,OAAO,CAAA;IACrB,CAAC;IAED,QAAQ;QACN,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO;YAAE,MAAM,IAAI,CAAC,MAAM,CAAC,MAAM,CAAA;QAClD,IAAI,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE,CAAC;YAC1B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,CAAA;QACvB,CAAC;QACD,4DAA4D;QAC5D,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,QAAQ,EAAE,GAAG,EAAE;YAC7C,IAAI,IAAI,CAAC,MAAM,EAAE,OAAO;gBAAE,MAAM,IAAI,CAAC,MAAM,CAAC,MAAM,CAAA;QACpD,CAAC,CAAC,CAAA;QACF,OAAO,IAAI,CAAC,OAAO,CAAA;IACrB,CAAC;CACF;AAED,MAAM,OAAO,UAEX,SAAQ,QAAW;IACnB,OAAO,CAAgC;IAEvC,YAAY,QAAmB,EAAE,IAAU,EAAE,IAAO;QAClD,KAAK,CAAC,QAAQ,EAAE,IAAI,EAAE,IAAI,CAAC,CAAA;QAC3B,IAAI,CAAC,OAAO,GAAG,IAAI,QAAQ,CAAuB;YAChD,MAAM,EAAE,IAAI,CAAC,MAAM;YACnB,UAAU,EAAE,IAAI;SACjB,CAAC,CAAA;QACF,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,OAAO,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAA;QAC7C,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,QAAQ,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAA;IAChD,CAAC;IAED,SAAS,CAAC,CAAY;QACpB,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,CAAA;QACrB,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO;YAAE,IAAI,CAAC,KAAK,EAAE,CAAA;IACzC,CAAC;IAED,MAAM;QACJ,MAAM,MAAM,GAAG,IAAI,CAAC,IAAI,CAAA;QACxB,IAAI,MAAM,CAAC,SAAS,EAAE,EAAE,CAAC;YACvB,MAAM,CAAC,KAAK,EAAE,CAAC,IAAI,CAAC,GAAG,EAAE;gBACvB,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,IAAI,CAAC,QAAQ,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,CAAA;YAC9D,CAAC,CAAC,CAAA;QACJ,CAAC;aAAM,CAAC;YACN,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,IAAI,CAAC,QAAQ,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,CAAA;QAC9D,CAAC;QACD,OAAO,IAAI,CAAC,OAAO,CAAA;IACrB,CAAC;IAED,UAAU;QACR,IAAI,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE,CAAC;YAC1B,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,CAAA;QACvB,CAAC;QACD,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,QAAQ,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,CAAA;QACnE,OAAO,IAAI,CAAC,OAAO,CAAA;IACrB,CAAC;CACF","sourcesContent":["/**\n * Single-use utility classes to provide functionality to the {@link Glob}\n * methods.\n *\n * @module\n */\nimport { Minipass } from 'minipass'\nimport { Path } from 'path-scurry'\nimport { Ignore, IgnoreLike } from './ignore.js'\n\n// XXX can we somehow make it so that it NEVER processes a given path more than\n// once, enough that the match set tracking is no longer needed? that'd speed\n// things up a lot. 
Or maybe bring back nounique, and skip it in that case?\n\n// a single minimatch set entry with 1 or more parts\nimport { Pattern } from './pattern.js'\nimport { Processor } from './processor.js'\n\nexport interface GlobWalkerOpts {\n absolute?: boolean\n allowWindowsEscape?: boolean\n cwd?: string | URL\n dot?: boolean\n dotRelative?: boolean\n follow?: boolean\n ignore?: string | string[] | IgnoreLike\n mark?: boolean\n matchBase?: boolean\n // Note: maxDepth here means \"maximum actual Path.depth()\",\n // not \"maximum depth beyond cwd\"\n maxDepth?: number\n nobrace?: boolean\n nocase?: boolean\n nodir?: boolean\n noext?: boolean\n noglobstar?: boolean\n platform?: NodeJS.Platform\n posix?: boolean\n realpath?: boolean\n root?: string\n stat?: boolean\n signal?: AbortSignal\n windowsPathsNoEscape?: boolean\n withFileTypes?: boolean\n includeChildMatches?: boolean\n}\n\nexport type GWOFileTypesTrue = GlobWalkerOpts & {\n withFileTypes: true\n}\nexport type GWOFileTypesFalse = GlobWalkerOpts & {\n withFileTypes: false\n}\nexport type GWOFileTypesUnset = GlobWalkerOpts & {\n withFileTypes?: undefined\n}\n\nexport type Result =\n O extends GWOFileTypesTrue ? Path\n : O extends GWOFileTypesFalse ? string\n : O extends GWOFileTypesUnset ? string\n : Path | string\n\nexport type Matches =\n O extends GWOFileTypesTrue ? Set\n : O extends GWOFileTypesFalse ? Set\n : O extends GWOFileTypesUnset ? Set\n : Set\n\nexport type MatchStream = Minipass<\n Result,\n Result\n>\n\nconst makeIgnore = (\n ignore: string | string[] | IgnoreLike,\n opts: GlobWalkerOpts,\n): IgnoreLike =>\n typeof ignore === 'string' ? new Ignore([ignore], opts)\n : Array.isArray(ignore) ? new Ignore(ignore, opts)\n : ignore\n\n/**\n * basic walking utilities that all the glob walker types use\n */\nexport abstract class GlobUtil {\n path: Path\n patterns: Pattern[]\n opts: O\n seen: Set = new Set()\n paused: boolean = false\n aborted: boolean = false\n #onResume: (() => any)[] = []\n #ignore?: IgnoreLike\n #sep: '\\\\' | '/'\n signal?: AbortSignal\n maxDepth: number\n includeChildMatches: boolean\n\n constructor(patterns: Pattern[], path: Path, opts: O)\n constructor(patterns: Pattern[], path: Path, opts: O) {\n this.patterns = patterns\n this.path = path\n this.opts = opts\n this.#sep = !opts.posix && opts.platform === 'win32' ? '\\\\' : '/'\n this.includeChildMatches = opts.includeChildMatches !== false\n if (opts.ignore || !this.includeChildMatches) {\n this.#ignore = makeIgnore(opts.ignore ?? 
[], opts)\n if (\n !this.includeChildMatches &&\n typeof this.#ignore.add !== 'function'\n ) {\n const m = 'cannot ignore child matches, ignore lacks add() method.'\n throw new Error(m)\n }\n }\n // ignore, always set with maxDepth, but it's optional on the\n // GlobOptions type\n /* c8 ignore start */\n this.maxDepth = opts.maxDepth || Infinity\n /* c8 ignore stop */\n if (opts.signal) {\n this.signal = opts.signal\n this.signal.addEventListener('abort', () => {\n this.#onResume.length = 0\n })\n }\n }\n\n #ignored(path: Path): boolean {\n return this.seen.has(path) || !!this.#ignore?.ignored?.(path)\n }\n #childrenIgnored(path: Path): boolean {\n return !!this.#ignore?.childrenIgnored?.(path)\n }\n\n // backpressure mechanism\n pause() {\n this.paused = true\n }\n resume() {\n /* c8 ignore start */\n if (this.signal?.aborted) return\n /* c8 ignore stop */\n this.paused = false\n let fn: (() => any) | undefined = undefined\n while (!this.paused && (fn = this.#onResume.shift())) {\n fn()\n }\n }\n onResume(fn: () => any) {\n if (this.signal?.aborted) return\n /* c8 ignore start */\n if (!this.paused) {\n fn()\n } else {\n /* c8 ignore stop */\n this.#onResume.push(fn)\n }\n }\n\n // do the requisite realpath/stat checking, and return the path\n // to add or undefined to filter it out.\n async matchCheck(e: Path, ifDir: boolean): Promise {\n if (ifDir && this.opts.nodir) return undefined\n let rpc: Path | undefined\n if (this.opts.realpath) {\n rpc = e.realpathCached() || (await e.realpath())\n if (!rpc) return undefined\n e = rpc\n }\n const needStat = e.isUnknown() || this.opts.stat\n const s = needStat ? await e.lstat() : e\n if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) {\n const target = await s.realpath()\n /* c8 ignore start */\n if (target && (target.isUnknown() || this.opts.stat)) {\n await target.lstat()\n }\n /* c8 ignore stop */\n }\n return this.matchCheckTest(s, ifDir)\n }\n\n matchCheckTest(e: Path | undefined, ifDir: boolean): Path | undefined {\n return (\n e &&\n (this.maxDepth === Infinity || e.depth() <= this.maxDepth) &&\n (!ifDir || e.canReaddir()) &&\n (!this.opts.nodir || !e.isDirectory()) &&\n (!this.opts.nodir ||\n !this.opts.follow ||\n !e.isSymbolicLink() ||\n !e.realpathCached()?.isDirectory()) &&\n !this.#ignored(e)\n ) ?\n e\n : undefined\n }\n\n matchCheckSync(e: Path, ifDir: boolean): Path | undefined {\n if (ifDir && this.opts.nodir) return undefined\n let rpc: Path | undefined\n if (this.opts.realpath) {\n rpc = e.realpathCached() || e.realpathSync()\n if (!rpc) return undefined\n e = rpc\n }\n const needStat = e.isUnknown() || this.opts.stat\n const s = needStat ? e.lstatSync() : e\n if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) {\n const target = s.realpathSync()\n if (target && (target?.isUnknown() || this.opts.stat)) {\n target.lstatSync()\n }\n }\n return this.matchCheckTest(s, ifDir)\n }\n\n abstract matchEmit(p: Result): void\n abstract matchEmit(p: string | Path): void\n\n matchFinish(e: Path, absolute: boolean) {\n if (this.#ignored(e)) return\n // we know we have an ignore if this is false, but TS doesn't\n if (!this.includeChildMatches && this.#ignore?.add) {\n const ign = `${e.relativePosix()}/**`\n this.#ignore.add(ign)\n }\n const abs =\n this.opts.absolute === undefined ? absolute : this.opts.absolute\n this.seen.add(e)\n const mark = this.opts.mark && e.isDirectory() ? 
this.#sep : ''\n // ok, we have what we need!\n if (this.opts.withFileTypes) {\n this.matchEmit(e)\n } else if (abs) {\n const abs = this.opts.posix ? e.fullpathPosix() : e.fullpath()\n this.matchEmit(abs + mark)\n } else {\n const rel = this.opts.posix ? e.relativePosix() : e.relative()\n const pre =\n this.opts.dotRelative && !rel.startsWith('..' + this.#sep) ?\n '.' + this.#sep\n : ''\n this.matchEmit(!rel ? '.' + mark : pre + rel + mark)\n }\n }\n\n async match(e: Path, absolute: boolean, ifDir: boolean): Promise {\n const p = await this.matchCheck(e, ifDir)\n if (p) this.matchFinish(p, absolute)\n }\n\n matchSync(e: Path, absolute: boolean, ifDir: boolean): void {\n const p = this.matchCheckSync(e, ifDir)\n if (p) this.matchFinish(p, absolute)\n }\n\n walkCB(target: Path, patterns: Pattern[], cb: () => any) {\n /* c8 ignore start */\n if (this.signal?.aborted) cb()\n /* c8 ignore stop */\n this.walkCB2(target, patterns, new Processor(this.opts), cb)\n }\n\n walkCB2(\n target: Path,\n patterns: Pattern[],\n processor: Processor,\n cb: () => any,\n ) {\n if (this.#childrenIgnored(target)) return cb()\n if (this.signal?.aborted) cb()\n if (this.paused) {\n this.onResume(() => this.walkCB2(target, patterns, processor, cb))\n return\n }\n processor.processPatterns(target, patterns)\n\n // done processing. all of the above is sync, can be abstracted out.\n // subwalks is a map of paths to the entry filters they need\n // matches is a map of paths to [absolute, ifDir] tuples.\n let tasks = 1\n const next = () => {\n if (--tasks === 0) cb()\n }\n\n for (const [m, absolute, ifDir] of processor.matches.entries()) {\n if (this.#ignored(m)) continue\n tasks++\n this.match(m, absolute, ifDir).then(() => next())\n }\n\n for (const t of processor.subwalkTargets()) {\n if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) {\n continue\n }\n tasks++\n const childrenCached = t.readdirCached()\n if (t.calledReaddir())\n this.walkCB3(t, childrenCached, processor, next)\n else {\n t.readdirCB(\n (_, entries) => this.walkCB3(t, entries, processor, next),\n true,\n )\n }\n }\n\n next()\n }\n\n walkCB3(\n target: Path,\n entries: Path[],\n processor: Processor,\n cb: () => any,\n ) {\n processor = processor.filterEntries(target, entries)\n\n let tasks = 1\n const next = () => {\n if (--tasks === 0) cb()\n }\n\n for (const [m, absolute, ifDir] of processor.matches.entries()) {\n if (this.#ignored(m)) continue\n tasks++\n this.match(m, absolute, ifDir).then(() => next())\n }\n for (const [target, patterns] of processor.subwalks.entries()) {\n tasks++\n this.walkCB2(target, patterns, processor.child(), next)\n }\n\n next()\n }\n\n walkCBSync(target: Path, patterns: Pattern[], cb: () => any) {\n /* c8 ignore start */\n if (this.signal?.aborted) cb()\n /* c8 ignore stop */\n this.walkCB2Sync(target, patterns, new Processor(this.opts), cb)\n }\n\n walkCB2Sync(\n target: Path,\n patterns: Pattern[],\n processor: Processor,\n cb: () => any,\n ) {\n if (this.#childrenIgnored(target)) return cb()\n if (this.signal?.aborted) cb()\n if (this.paused) {\n this.onResume(() =>\n this.walkCB2Sync(target, patterns, processor, cb),\n )\n return\n }\n processor.processPatterns(target, patterns)\n\n // done processing. 
all of the above is sync, can be abstracted out.\n // subwalks is a map of paths to the entry filters they need\n // matches is a map of paths to [absolute, ifDir] tuples.\n let tasks = 1\n const next = () => {\n if (--tasks === 0) cb()\n }\n\n for (const [m, absolute, ifDir] of processor.matches.entries()) {\n if (this.#ignored(m)) continue\n this.matchSync(m, absolute, ifDir)\n }\n\n for (const t of processor.subwalkTargets()) {\n if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) {\n continue\n }\n tasks++\n const children = t.readdirSync()\n this.walkCB3Sync(t, children, processor, next)\n }\n\n next()\n }\n\n walkCB3Sync(\n target: Path,\n entries: Path[],\n processor: Processor,\n cb: () => any,\n ) {\n processor = processor.filterEntries(target, entries)\n\n let tasks = 1\n const next = () => {\n if (--tasks === 0) cb()\n }\n\n for (const [m, absolute, ifDir] of processor.matches.entries()) {\n if (this.#ignored(m)) continue\n this.matchSync(m, absolute, ifDir)\n }\n for (const [target, patterns] of processor.subwalks.entries()) {\n tasks++\n this.walkCB2Sync(target, patterns, processor.child(), next)\n }\n\n next()\n }\n}\n\nexport class GlobWalker<\n O extends GlobWalkerOpts = GlobWalkerOpts,\n> extends GlobUtil {\n matches = new Set>()\n\n constructor(patterns: Pattern[], path: Path, opts: O) {\n super(patterns, path, opts)\n }\n\n matchEmit(e: Result): void {\n this.matches.add(e)\n }\n\n async walk(): Promise>> {\n if (this.signal?.aborted) throw this.signal.reason\n if (this.path.isUnknown()) {\n await this.path.lstat()\n }\n await new Promise((res, rej) => {\n this.walkCB(this.path, this.patterns, () => {\n if (this.signal?.aborted) {\n rej(this.signal.reason)\n } else {\n res(this.matches)\n }\n })\n })\n return this.matches\n }\n\n walkSync(): Set> {\n if (this.signal?.aborted) throw this.signal.reason\n if (this.path.isUnknown()) {\n this.path.lstatSync()\n }\n // nothing for the callback to do, because this never pauses\n this.walkCBSync(this.path, this.patterns, () => {\n if (this.signal?.aborted) throw this.signal.reason\n })\n return this.matches\n }\n}\n\nexport class GlobStream<\n O extends GlobWalkerOpts = GlobWalkerOpts,\n> extends GlobUtil {\n results: Minipass, Result>\n\n constructor(patterns: Pattern[], path: Path, opts: O) {\n super(patterns, path, opts)\n this.results = new Minipass, Result>({\n signal: this.signal,\n objectMode: true,\n })\n this.results.on('drain', () => this.resume())\n this.results.on('resume', () => this.resume())\n }\n\n matchEmit(e: Result): void {\n this.results.write(e)\n if (!this.results.flowing) this.pause()\n }\n\n stream(): MatchStream {\n const target = this.path\n if (target.isUnknown()) {\n target.lstat().then(() => {\n this.walkCB(target, this.patterns, () => this.results.end())\n })\n } else {\n this.walkCB(target, this.patterns, () => this.results.end())\n }\n return this.results\n }\n\n streamSync(): MatchStream {\n if (this.path.isUnknown()) {\n this.path.lstatSync()\n }\n this.walkCBSync(this.path, this.patterns, () => this.results.end())\n return this.results\n }\n}\n"]} \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/graceful-fs/polyfills.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/graceful-fs/polyfills.js new file mode 100644 index 0000000000000000000000000000000000000000..453f1a9e702d1aaf9a34cb8460f30073dc83bcf5 --- /dev/null +++ 
b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/graceful-fs/polyfills.js @@ -0,0 +1,355 @@ +var constants = require('constants') + +var origCwd = process.cwd +var cwd = null + +var platform = process.env.GRACEFUL_FS_PLATFORM || process.platform + +process.cwd = function() { + if (!cwd) + cwd = origCwd.call(process) + return cwd +} +try { + process.cwd() +} catch (er) {} + +// This check is needed until node.js 12 is required +if (typeof process.chdir === 'function') { + var chdir = process.chdir + process.chdir = function (d) { + cwd = null + chdir.call(process, d) + } + if (Object.setPrototypeOf) Object.setPrototypeOf(process.chdir, chdir) +} + +module.exports = patch + +function patch (fs) { + // (re-)implement some things that are known busted or missing. + + // lchmod, broken prior to 0.6.2 + // back-port the fix here. + if (constants.hasOwnProperty('O_SYMLINK') && + process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) { + patchLchmod(fs) + } + + // lutimes implementation, or no-op + if (!fs.lutimes) { + patchLutimes(fs) + } + + // https://github.com/isaacs/node-graceful-fs/issues/4 + // Chown should not fail on einval or eperm if non-root. + // It should not fail on enosys ever, as this just indicates + // that a fs doesn't support the intended operation. + + fs.chown = chownFix(fs.chown) + fs.fchown = chownFix(fs.fchown) + fs.lchown = chownFix(fs.lchown) + + fs.chmod = chmodFix(fs.chmod) + fs.fchmod = chmodFix(fs.fchmod) + fs.lchmod = chmodFix(fs.lchmod) + + fs.chownSync = chownFixSync(fs.chownSync) + fs.fchownSync = chownFixSync(fs.fchownSync) + fs.lchownSync = chownFixSync(fs.lchownSync) + + fs.chmodSync = chmodFixSync(fs.chmodSync) + fs.fchmodSync = chmodFixSync(fs.fchmodSync) + fs.lchmodSync = chmodFixSync(fs.lchmodSync) + + fs.stat = statFix(fs.stat) + fs.fstat = statFix(fs.fstat) + fs.lstat = statFix(fs.lstat) + + fs.statSync = statFixSync(fs.statSync) + fs.fstatSync = statFixSync(fs.fstatSync) + fs.lstatSync = statFixSync(fs.lstatSync) + + // if lchmod/lchown do not exist, then make them no-ops + if (fs.chmod && !fs.lchmod) { + fs.lchmod = function (path, mode, cb) { + if (cb) process.nextTick(cb) + } + fs.lchmodSync = function () {} + } + if (fs.chown && !fs.lchown) { + fs.lchown = function (path, uid, gid, cb) { + if (cb) process.nextTick(cb) + } + fs.lchownSync = function () {} + } + + // on Windows, A/V software can lock the directory, causing this + // to fail with an EACCES or EPERM if the directory contains newly + // created files. Try again on failure, for up to 60 seconds. + + // Set the timeout this long because some Windows Anti-Virus, such as Parity + // bit9, may lock files for up to a minute, causing npm package install + // failures. Also, take care to yield the scheduler. Windows scheduling gives + // CPU to a busy looping process, which can cause the program causing the lock + // contention to be starved of CPU by node, so the contention doesn't resolve. + if (platform === "win32") { + fs.rename = typeof fs.rename !== 'function' ? 
fs.rename + : (function (fs$rename) { + function rename (from, to, cb) { + var start = Date.now() + var backoff = 0; + fs$rename(from, to, function CB (er) { + if (er + && (er.code === "EACCES" || er.code === "EPERM" || er.code === "EBUSY") + && Date.now() - start < 60000) { + setTimeout(function() { + fs.stat(to, function (stater, st) { + if (stater && stater.code === "ENOENT") + fs$rename(from, to, CB); + else + cb(er) + }) + }, backoff) + if (backoff < 100) + backoff += 10; + return; + } + if (cb) cb(er) + }) + } + if (Object.setPrototypeOf) Object.setPrototypeOf(rename, fs$rename) + return rename + })(fs.rename) + } + + // if read() returns EAGAIN, then just try it again. + fs.read = typeof fs.read !== 'function' ? fs.read + : (function (fs$read) { + function read (fd, buffer, offset, length, position, callback_) { + var callback + if (callback_ && typeof callback_ === 'function') { + var eagCounter = 0 + callback = function (er, _, __) { + if (er && er.code === 'EAGAIN' && eagCounter < 10) { + eagCounter ++ + return fs$read.call(fs, fd, buffer, offset, length, position, callback) + } + callback_.apply(this, arguments) + } + } + return fs$read.call(fs, fd, buffer, offset, length, position, callback) + } + + // This ensures `util.promisify` works as it does for native `fs.read`. + if (Object.setPrototypeOf) Object.setPrototypeOf(read, fs$read) + return read + })(fs.read) + + fs.readSync = typeof fs.readSync !== 'function' ? fs.readSync + : (function (fs$readSync) { return function (fd, buffer, offset, length, position) { + var eagCounter = 0 + while (true) { + try { + return fs$readSync.call(fs, fd, buffer, offset, length, position) + } catch (er) { + if (er.code === 'EAGAIN' && eagCounter < 10) { + eagCounter ++ + continue + } + throw er + } + } + }})(fs.readSync) + + function patchLchmod (fs) { + fs.lchmod = function (path, mode, callback) { + fs.open( path + , constants.O_WRONLY | constants.O_SYMLINK + , mode + , function (err, fd) { + if (err) { + if (callback) callback(err) + return + } + // prefer to return the chmod error, if one occurs, + // but still try to close, and report closing errors if they occur. + fs.fchmod(fd, mode, function (err) { + fs.close(fd, function(err2) { + if (callback) callback(err || err2) + }) + }) + }) + } + + fs.lchmodSync = function (path, mode) { + var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode) + + // prefer to return the chmod error, if one occurs, + // but still try to close, and report closing errors if they occur. 
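+      // the threw flag implements that: if fchmodSync throws, any closeSync
+      // error is swallowed so the original error propagates; on success,
+      // closeSync errors are allowed to surface.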
+ var threw = true + var ret + try { + ret = fs.fchmodSync(fd, mode) + threw = false + } finally { + if (threw) { + try { + fs.closeSync(fd) + } catch (er) {} + } else { + fs.closeSync(fd) + } + } + return ret + } + } + + function patchLutimes (fs) { + if (constants.hasOwnProperty("O_SYMLINK") && fs.futimes) { + fs.lutimes = function (path, at, mt, cb) { + fs.open(path, constants.O_SYMLINK, function (er, fd) { + if (er) { + if (cb) cb(er) + return + } + fs.futimes(fd, at, mt, function (er) { + fs.close(fd, function (er2) { + if (cb) cb(er || er2) + }) + }) + }) + } + + fs.lutimesSync = function (path, at, mt) { + var fd = fs.openSync(path, constants.O_SYMLINK) + var ret + var threw = true + try { + ret = fs.futimesSync(fd, at, mt) + threw = false + } finally { + if (threw) { + try { + fs.closeSync(fd) + } catch (er) {} + } else { + fs.closeSync(fd) + } + } + return ret + } + + } else if (fs.futimes) { + fs.lutimes = function (_a, _b, _c, cb) { if (cb) process.nextTick(cb) } + fs.lutimesSync = function () {} + } + } + + function chmodFix (orig) { + if (!orig) return orig + return function (target, mode, cb) { + return orig.call(fs, target, mode, function (er) { + if (chownErOk(er)) er = null + if (cb) cb.apply(this, arguments) + }) + } + } + + function chmodFixSync (orig) { + if (!orig) return orig + return function (target, mode) { + try { + return orig.call(fs, target, mode) + } catch (er) { + if (!chownErOk(er)) throw er + } + } + } + + + function chownFix (orig) { + if (!orig) return orig + return function (target, uid, gid, cb) { + return orig.call(fs, target, uid, gid, function (er) { + if (chownErOk(er)) er = null + if (cb) cb.apply(this, arguments) + }) + } + } + + function chownFixSync (orig) { + if (!orig) return orig + return function (target, uid, gid) { + try { + return orig.call(fs, target, uid, gid) + } catch (er) { + if (!chownErOk(er)) throw er + } + } + } + + function statFix (orig) { + if (!orig) return orig + // Older versions of Node erroneously returned signed integers for + // uid + gid. + return function (target, options, cb) { + if (typeof options === 'function') { + cb = options + options = null + } + function callback (er, stats) { + if (stats) { + if (stats.uid < 0) stats.uid += 0x100000000 + if (stats.gid < 0) stats.gid += 0x100000000 + } + if (cb) cb.apply(this, arguments) + } + return options ? orig.call(fs, target, options, callback) + : orig.call(fs, target, callback) + } + } + + function statFixSync (orig) { + if (!orig) return orig + // Older versions of Node erroneously returned signed integers for + // uid + gid. + return function (target, options) { + var stats = options ? orig.call(fs, target, options) + : orig.call(fs, target) + if (stats) { + if (stats.uid < 0) stats.uid += 0x100000000 + if (stats.gid < 0) stats.gid += 0x100000000 + } + return stats; + } + } + + // ENOSYS means that the fs doesn't support the op. Just ignore + // that, because it doesn't matter. + // + // if there's no getuid, or if getuid() is something other + // than 0, and the error is EINVAL or EPERM, then just ignore + // it. + // + // This specific case is a silent failure in cp, install, tar, + // and most other unix tools that manage permissions. + // + // When running as root, or if other types of errors are + // encountered, then it's strict. 
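+  // e.g. a non-root user calling fs.chown('/some/file', 0, 0, cb) gets
+  // EPERM from the kernel; the chownFix wrapper above treats that as
+  // success and invokes cb(null), which is what cp/install/tar also do.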
+ function chownErOk (er) { + if (!er) + return true + + if (er.code === "ENOSYS") + return true + + var nonroot = !process.getuid || process.getuid() !== 0 + if (nonroot) { + if (er.code === "EINVAL" || er.code === "EPERM") + return true + } + + return false + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/hosted-git-info/lib/from-url.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/hosted-git-info/lib/from-url.js new file mode 100644 index 0000000000000000000000000000000000000000..efc1247d59d126a6009c23b7dc590944c59a927c --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/hosted-git-info/lib/from-url.js @@ -0,0 +1,122 @@ +'use strict' + +const parseUrl = require('./parse-url') + +// look for github shorthand inputs, such as npm/cli +const isGitHubShorthand = (arg) => { + // it cannot contain whitespace before the first # + // it cannot start with a / because that's probably an absolute file path + // but it must include a slash since repos are username/repository + // it cannot start with a . because that's probably a relative file path + // it cannot start with an @ because that's a scoped package if it passes the other tests + // it cannot contain a : before a # because that tells us that there's a protocol + // a second / may not exist before a # + const firstHash = arg.indexOf('#') + const firstSlash = arg.indexOf('/') + const secondSlash = arg.indexOf('/', firstSlash + 1) + const firstColon = arg.indexOf(':') + const firstSpace = /\s/.exec(arg) + const firstAt = arg.indexOf('@') + + const spaceOnlyAfterHash = !firstSpace || (firstHash > -1 && firstSpace.index > firstHash) + const atOnlyAfterHash = firstAt === -1 || (firstHash > -1 && firstAt > firstHash) + const colonOnlyAfterHash = firstColon === -1 || (firstHash > -1 && firstColon > firstHash) + const secondSlashOnlyAfterHash = secondSlash === -1 || (firstHash > -1 && secondSlash > firstHash) + const hasSlash = firstSlash > 0 + // if a # is found, what we really want to know is that the character + // immediately before # is not a / + const doesNotEndWithSlash = firstHash > -1 ? arg[firstHash - 1] !== '/' : !arg.endsWith('/') + const doesNotStartWithDot = !arg.startsWith('.') + + return spaceOnlyAfterHash && hasSlash && doesNotEndWithSlash && + doesNotStartWithDot && atOnlyAfterHash && colonOnlyAfterHash && + secondSlashOnlyAfterHash +} + +module.exports = (giturl, opts, { gitHosts, protocols }) => { + if (!giturl) { + return + } + + const correctedUrl = isGitHubShorthand(giturl) ? `github:${giturl}` : giturl + const parsed = parseUrl(correctedUrl, protocols) + if (!parsed) { + return + } + + const gitHostShortcut = gitHosts.byShortcut[parsed.protocol] + const gitHostDomain = gitHosts.byDomain[parsed.hostname.startsWith('www.') + ? parsed.hostname.slice(4) + : parsed.hostname] + const gitHostName = gitHostShortcut || gitHostDomain + if (!gitHostName) { + return + } + + const gitHostInfo = gitHosts[gitHostShortcut || gitHostDomain] + let auth = null + if (protocols[parsed.protocol]?.auth && (parsed.username || parsed.password)) { + auth = `${parsed.username}${parsed.password ? ':' + parsed.password : ''}` + } + + let committish = null + let user = null + let project = null + let defaultRepresentation = null + + try { + if (gitHostShortcut) { + let pathname = parsed.pathname.startsWith('/') ? 
parsed.pathname.slice(1) : parsed.pathname + const firstAt = pathname.indexOf('@') + // we ignore auth for shortcuts, so just trim it out + if (firstAt > -1) { + pathname = pathname.slice(firstAt + 1) + } + + const lastSlash = pathname.lastIndexOf('/') + if (lastSlash > -1) { + user = decodeURIComponent(pathname.slice(0, lastSlash)) + // we want nulls only, never empty strings + if (!user) { + user = null + } + project = decodeURIComponent(pathname.slice(lastSlash + 1)) + } else { + project = decodeURIComponent(pathname) + } + + if (project.endsWith('.git')) { + project = project.slice(0, -4) + } + + if (parsed.hash) { + committish = decodeURIComponent(parsed.hash.slice(1)) + } + + defaultRepresentation = 'shortcut' + } else { + if (!gitHostInfo.protocols.includes(parsed.protocol)) { + return + } + + const segments = gitHostInfo.extract(parsed) + if (!segments) { + return + } + + user = segments.user && decodeURIComponent(segments.user) + project = decodeURIComponent(segments.project) + committish = decodeURIComponent(segments.committish) + defaultRepresentation = protocols[parsed.protocol]?.name || parsed.protocol.slice(0, -1) + } + } catch (err) { + /* istanbul ignore else */ + if (err instanceof URIError) { + return + } else { + throw err + } + } + + return [gitHostName, user, auth, project, committish, defaultRepresentation, opts] +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/hosted-git-info/lib/hosts.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/hosted-git-info/lib/hosts.js new file mode 100644 index 0000000000000000000000000000000000000000..6e7c123dbff8b45e46e58afb1d0e30690e2647cf --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/hosted-git-info/lib/hosts.js @@ -0,0 +1,229 @@ +/* eslint-disable max-len */ + +'use strict' + +const maybeJoin = (...args) => args.every(arg => arg) ? args.join('') : '' +const maybeEncode = (arg) => arg ? encodeURIComponent(arg) : '' +const formatHashFragment = (f) => f.toLowerCase() + .replace(/^\W+/g, '') // strip leading non-characters + .replace(/(? 
<!\W)\W+$/g, '') // strip trailing non-characters
+
+const defaults = {
+  sshtemplate: ({ domain, user, project, committish }) =>
+ `git@${domain}:${user}/${project}.git${maybeJoin('#', committish)}`, + sshurltemplate: ({ domain, user, project, committish }) => + `git+ssh://git@${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + edittemplate: ({ domain, user, project, committish, editpath, path }) => + `https://${domain}/${user}/${project}${maybeJoin('/', editpath, '/', maybeEncode(committish || 'HEAD'), '/', path)}`, + browsetemplate: ({ domain, user, project, committish, treepath }) => + `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}`, + browsetreetemplate: ({ domain, user, project, committish, treepath, path, fragment, hashformat }) => + `https://${domain}/${user}/${project}/${treepath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`, + browseblobtemplate: ({ domain, user, project, committish, blobpath, path, fragment, hashformat }) => + `https://${domain}/${user}/${project}/${blobpath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`, + docstemplate: ({ domain, user, project, treepath, committish }) => + `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}#readme`, + httpstemplate: ({ auth, domain, user, project, committish }) => + `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + filetemplate: ({ domain, user, project, committish, path }) => + `https://${domain}/${user}/${project}/raw/${maybeEncode(committish || 'HEAD')}/${path}`, + shortcuttemplate: ({ type, user, project, committish }) => + `${type}:${user}/${project}${maybeJoin('#', committish)}`, + pathtemplate: ({ user, project, committish }) => + `${user}/${project}${maybeJoin('#', committish)}`, + bugstemplate: ({ domain, user, project }) => + `https://${domain}/${user}/${project}/issues`, + hashformat: formatHashFragment, +} + +const hosts = {} +hosts.github = { + // First two are insecure and generally shouldn't be used any more, but + // they are still supported. 
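+  // (that is, git: and http:, both of which send data in cleartext)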
+ protocols: ['git:', 'http:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'], + domain: 'github.com', + treepath: 'tree', + blobpath: 'blob', + editpath: 'edit', + filetemplate: ({ auth, user, project, committish, path }) => + `https://${maybeJoin(auth, '@')}raw.githubusercontent.com/${user}/${project}/${maybeEncode(committish || 'HEAD')}/${path}`, + gittemplate: ({ auth, domain, user, project, committish }) => + `git://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + tarballtemplate: ({ domain, user, project, committish }) => + `https://codeload.${domain}/${user}/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`, + extract: (url) => { + let [, user, project, type, committish] = url.pathname.split('/', 5) + if (type && type !== 'tree') { + return + } + + if (!type) { + committish = url.hash.slice(1) + } + + if (project && project.endsWith('.git')) { + project = project.slice(0, -4) + } + + if (!user || !project) { + return + } + + return { user, project, committish } + }, +} + +hosts.bitbucket = { + protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'], + domain: 'bitbucket.org', + treepath: 'src', + blobpath: 'src', + editpath: '?mode=edit', + edittemplate: ({ domain, user, project, committish, treepath, path, editpath }) => + `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish || 'HEAD'), '/', path, editpath)}`, + tarballtemplate: ({ domain, user, project, committish }) => + `https://${domain}/${user}/${project}/get/${maybeEncode(committish || 'HEAD')}.tar.gz`, + extract: (url) => { + let [, user, project, aux] = url.pathname.split('/', 4) + if (['get'].includes(aux)) { + return + } + + if (project && project.endsWith('.git')) { + project = project.slice(0, -4) + } + + if (!user || !project) { + return + } + + return { user, project, committish: url.hash.slice(1) } + }, +} + +hosts.gitlab = { + protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'], + domain: 'gitlab.com', + treepath: 'tree', + blobpath: 'tree', + editpath: '-/edit', + tarballtemplate: ({ domain, user, project, committish }) => + `https://${domain}/${user}/${project}/repository/archive.tar.gz?ref=${maybeEncode(committish || 'HEAD')}`, + extract: (url) => { + const path = url.pathname.slice(1) + if (path.includes('/-/') || path.includes('/archive.tar.gz')) { + return + } + + const segments = path.split('/') + let project = segments.pop() + if (project.endsWith('.git')) { + project = project.slice(0, -4) + } + + const user = segments.join('/') + if (!user || !project) { + return + } + + return { user, project, committish: url.hash.slice(1) } + }, +} + +hosts.gist = { + protocols: ['git:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'], + domain: 'gist.github.com', + editpath: 'edit', + sshtemplate: ({ domain, project, committish }) => + `git@${domain}:${project}.git${maybeJoin('#', committish)}`, + sshurltemplate: ({ domain, project, committish }) => + `git+ssh://git@${domain}/${project}.git${maybeJoin('#', committish)}`, + edittemplate: ({ domain, user, project, committish, editpath }) => + `https://${domain}/${user}/${project}${maybeJoin('/', maybeEncode(committish))}/${editpath}`, + browsetemplate: ({ domain, project, committish }) => + `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`, + browsetreetemplate: ({ domain, project, committish, path, hashformat }) => + `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`, + browseblobtemplate: ({ domain, project, 
committish, path, hashformat }) => + `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`, + docstemplate: ({ domain, project, committish }) => + `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`, + httpstemplate: ({ domain, project, committish }) => + `git+https://${domain}/${project}.git${maybeJoin('#', committish)}`, + filetemplate: ({ user, project, committish, path }) => + `https://gist.githubusercontent.com/${user}/${project}/raw${maybeJoin('/', maybeEncode(committish))}/${path}`, + shortcuttemplate: ({ type, project, committish }) => + `${type}:${project}${maybeJoin('#', committish)}`, + pathtemplate: ({ project, committish }) => + `${project}${maybeJoin('#', committish)}`, + bugstemplate: ({ domain, project }) => + `https://${domain}/${project}`, + gittemplate: ({ domain, project, committish }) => + `git://${domain}/${project}.git${maybeJoin('#', committish)}`, + tarballtemplate: ({ project, committish }) => + `https://codeload.github.com/gist/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`, + extract: (url) => { + let [, user, project, aux] = url.pathname.split('/', 4) + if (aux === 'raw') { + return + } + + if (!project) { + if (!user) { + return + } + + project = user + user = null + } + + if (project.endsWith('.git')) { + project = project.slice(0, -4) + } + + return { user, project, committish: url.hash.slice(1) } + }, + hashformat: function (fragment) { + return fragment && 'file-' + formatHashFragment(fragment) + }, +} + +hosts.sourcehut = { + protocols: ['git+ssh:', 'https:'], + domain: 'git.sr.ht', + treepath: 'tree', + blobpath: 'tree', + filetemplate: ({ domain, user, project, committish, path }) => + `https://${domain}/${user}/${project}/blob/${maybeEncode(committish) || 'HEAD'}/${path}`, + httpstemplate: ({ domain, user, project, committish }) => + `https://${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + tarballtemplate: ({ domain, user, project, committish }) => + `https://${domain}/${user}/${project}/archive/${maybeEncode(committish) || 'HEAD'}.tar.gz`, + bugstemplate: () => null, + extract: (url) => { + let [, user, project, aux] = url.pathname.split('/', 4) + + // tarball url + if (['archive'].includes(aux)) { + return + } + + if (project && project.endsWith('.git')) { + project = project.slice(0, -4) + } + + if (!user || !project) { + return + } + + return { user, project, committish: url.hash.slice(1) } + }, +} + +for (const [name, host] of Object.entries(hosts)) { + hosts[name] = Object.assign({}, defaults, host) +} + +module.exports = hosts diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/hosted-git-info/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/hosted-git-info/lib/index.js new file mode 100644 index 0000000000000000000000000000000000000000..2a7100dcee6e78b0cb50178af8dabb03520df8c3 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/hosted-git-info/lib/index.js @@ -0,0 +1,227 @@ +'use strict' + +const { LRUCache } = require('lru-cache') +const hosts = require('./hosts.js') +const fromUrl = require('./from-url.js') +const parseUrl = require('./parse-url.js') + +const cache = new LRUCache({ max: 1000 }) + +function unknownHostedUrl (url) { + try { + const { + protocol, + hostname, + pathname, + } = new URL(url) + + if (!hostname) { + return null + } + + const proto = /(?:git\+)http:$/.test(protocol) ? 
'http:' : 'https:' + const path = pathname.replace(/\.git$/, '') + return `${proto}//${hostname}${path}` + } catch { + return null + } +} + +class GitHost { + constructor (type, user, auth, project, committish, defaultRepresentation, opts = {}) { + Object.assign(this, GitHost.#gitHosts[type], { + type, + user, + auth, + project, + committish, + default: defaultRepresentation, + opts, + }) + } + + static #gitHosts = { byShortcut: {}, byDomain: {} } + static #protocols = { + 'git+ssh:': { name: 'sshurl' }, + 'ssh:': { name: 'sshurl' }, + 'git+https:': { name: 'https', auth: true }, + 'git:': { auth: true }, + 'http:': { auth: true }, + 'https:': { auth: true }, + 'git+http:': { auth: true }, + } + + static addHost (name, host) { + GitHost.#gitHosts[name] = host + GitHost.#gitHosts.byDomain[host.domain] = name + GitHost.#gitHosts.byShortcut[`${name}:`] = name + GitHost.#protocols[`${name}:`] = { name } + } + + static fromUrl (giturl, opts) { + if (typeof giturl !== 'string') { + return + } + + const key = giturl + JSON.stringify(opts || {}) + + if (!cache.has(key)) { + const hostArgs = fromUrl(giturl, opts, { + gitHosts: GitHost.#gitHosts, + protocols: GitHost.#protocols, + }) + cache.set(key, hostArgs ? new GitHost(...hostArgs) : undefined) + } + + return cache.get(key) + } + + static fromManifest (manifest, opts = {}) { + if (!manifest || typeof manifest !== 'object') { + return + } + + const r = manifest.repository + // TODO: look into also checking the `bugs`/`homepage` URLs + + const rurl = r && ( + typeof r === 'string' + ? r + : typeof r === 'object' && typeof r.url === 'string' + ? r.url + : null + ) + + if (!rurl) { + throw new Error('no repository') + } + + const info = (rurl && GitHost.fromUrl(rurl.replace(/^git\+/, ''), opts)) || null + if (info) { + return info + } + const unk = unknownHostedUrl(rurl) + return GitHost.fromUrl(unk, opts) || unk + } + + static parseUrl (url) { + return parseUrl(url) + } + + #fill (template, opts) { + if (typeof template !== 'function') { + return null + } + + const options = { ...this, ...this.opts, ...opts } + + // the path should always be set so we don't end up with 'undefined' in urls + if (!options.path) { + options.path = '' + } + + // template functions will insert the leading slash themselves + if (options.path.startsWith('/')) { + options.path = options.path.slice(1) + } + + if (options.noCommittish) { + options.committish = null + } + + const result = template(options) + return options.noGitPlus && result.startsWith('git+') ? result.slice(4) : result + } + + hash () { + return this.committish ? `#${this.committish}` : '' + } + + ssh (opts) { + return this.#fill(this.sshtemplate, opts) + } + + sshurl (opts) { + return this.#fill(this.sshurltemplate, opts) + } + + browse (path, ...args) { + // not a string, treat path as opts + if (typeof path !== 'string') { + return this.#fill(this.browsetemplate, path) + } + + if (typeof args[0] !== 'string') { + return this.#fill(this.browsetreetemplate, { ...args[0], path }) + } + + return this.#fill(this.browsetreetemplate, { ...args[1], fragment: args[0], path }) + } + + // If the path is known to be a file, then browseFile should be used. For some hosts + // the url is the same as browse, but for others like GitHub a file can use both `/tree/` + // and `/blob/` in the path. When using a default committish of `HEAD` then the `/tree/` + // path will redirect to a specific commit. Using the `/blob/` path avoids this and + // does not redirect to a different commit. 
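+ //
+ // Illustrative example (ours, not part of the upstream file), assuming a
+ // parsed GitHub repo `const info = GitHost.fromUrl('github:npm/cli')`:
+ //   info.browse('docs')      // => 'https://github.com/npm/cli/tree/HEAD/docs'
+ //   info.browseFile('docs')  // => 'https://github.com/npm/cli/blob/HEAD/docs'
+ // The only difference is `treepath` ('tree') vs `blobpath` ('blob') from the
+ // github entry in hosts.js.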
+ browseFile (path, ...args) {
+ if (typeof args[0] !== 'string') {
+ return this.#fill(this.browseblobtemplate, { ...args[0], path })
+ }
+
+ return this.#fill(this.browseblobtemplate, { ...args[1], fragment: args[0], path })
+ }
+
+ docs (opts) {
+ return this.#fill(this.docstemplate, opts)
+ }
+
+ bugs (opts) {
+ return this.#fill(this.bugstemplate, opts)
+ }
+
+ https (opts) {
+ return this.#fill(this.httpstemplate, opts)
+ }
+
+ git (opts) {
+ return this.#fill(this.gittemplate, opts)
+ }
+
+ shortcut (opts) {
+ return this.#fill(this.shortcuttemplate, opts)
+ }
+
+ path (opts) {
+ return this.#fill(this.pathtemplate, opts)
+ }
+
+ tarball (opts) {
+ return this.#fill(this.tarballtemplate, { ...opts, noCommittish: false })
+ }
+
+ file (path, opts) {
+ return this.#fill(this.filetemplate, { ...opts, path })
+ }
+
+ edit (path, opts) {
+ return this.#fill(this.edittemplate, { ...opts, path })
+ }
+
+ getDefaultRepresentation () {
+ return this.default
+ }
+
+ toString (opts) {
+ if (this.default && typeof this[this.default] === 'function') {
+ return this[this.default](opts)
+ }
+
+ return this.sshurl(opts)
+ }
+}
+
+for (const [name, host] of Object.entries(hosts)) {
+ GitHost.addHost(name, host)
+}
+
+module.exports = GitHost
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/hosted-git-info/lib/parse-url.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/hosted-git-info/lib/parse-url.js
new file mode 100644
index 0000000000000000000000000000000000000000..bfd54b9140c1160f2660e43c88a113702e07da2f
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/hosted-git-info/lib/parse-url.js
@@ -0,0 +1,81 @@
+const url = require('url')
+
+const lastIndexOfBefore = (str, char, beforeChar) => {
+ const startPosition = str.indexOf(beforeChar)
+ return str.lastIndexOf(char, startPosition > -1 ? startPosition : Infinity)
+}
+
+const safeUrl = (u) => {
+ try {
+ return new url.URL(u)
+ } catch {
+ // this fn should never throw
+ }
+}
+
+// accepts input like git:github.com:user/repo and inserts the // after the first :
+const correctProtocol = (arg, protocols) => {
+ const firstColon = arg.indexOf(':')
+ const proto = arg.slice(0, firstColon + 1)
+ if (Object.prototype.hasOwnProperty.call(protocols, proto)) {
+ return arg
+ }
+
+ if (arg.substr(firstColon, 3) === '://') {
+ // If arg is given as `proto://rest`, then it is already a valid URL.
+ return arg
+ }
+
+ const firstAt = arg.indexOf('@')
+ if (firstAt > -1) {
+ if (firstAt > firstColon) {
+ // URL has the form `proto:auth@host`. Assume this is a git+ssh URL.
+ return `git+ssh://${arg}`
+ } else {
+ // URL has the form 'git@github.com:npm/hosted-git-info.git'.
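+ // Worked examples for the two branches above (illustrative only):
+ //   correctProtocol('git@github.com:npm/cli.git', protocols)
+ //     // @ before the first : -> returned unchanged (scp-style input)
+ //   correctProtocol('user:pass@host.com/repo.git', protocols)
+ //     // : before the @ -> 'git+ssh://user:pass@host.com/repo.git'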
+ return arg
+ }
+ }
+
+ // Correct : to ://
+ return `${arg.slice(0, firstColon + 1)}//${arg.slice(firstColon + 1)}`
+}
+
+// attempt to correct an scp-style url so that it will parse with `new URL()`
+const correctUrl = (giturl) => {
+ // ignore any @ that comes after the first hash, since that denotes the start
+ // of a committish, which can contain @ characters
+ const firstAt = lastIndexOfBefore(giturl, '@', '#')
+ // ignore colons that come after the hash since that could include colons such as:
+ // git@github.com:user/package-2#semver:^1.0.0
+ const lastColonBeforeHash = lastIndexOfBefore(giturl, ':', '#')
+
+ if (lastColonBeforeHash > firstAt) {
+ // the last : comes after the first @ (or there is no @)
+ // like it would in:
+ // proto://hostname.com:user/repo
+ // username@hostname.com:user/repo
+ // :password@hostname.com:user/repo
+ // username:password@hostname.com:user/repo
+ // proto://username@hostname.com:user/repo
+ // proto://:password@hostname.com:user/repo
+ // proto://username:password@hostname.com:user/repo
+ // then we replace the last : with a / to create a valid path
+ giturl = giturl.slice(0, lastColonBeforeHash) + '/' + giturl.slice(lastColonBeforeHash + 1)
+ }
+
+ if (lastIndexOfBefore(giturl, ':', '#') === -1 && giturl.indexOf('//') === -1) {
+ // we have no : at all
+ // as it would be in:
+ // username@hostname.com/user/repo
+ // then we prepend a protocol
+ giturl = `git+ssh://${giturl}`
+ }
+
+ return giturl
+}
+
+module.exports = (giturl, protocols) => {
+ const withProtocol = protocols ? correctProtocol(giturl, protocols) : giturl
+ return safeUrl(withProtocol) || safeUrl(correctUrl(withProtocol))
+}
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/http-proxy-agent/dist/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/http-proxy-agent/dist/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..fb2751c2264314ee5c3f05f27bc6bfd063ee5b07
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/http-proxy-agent/dist/index.js
@@ -0,0 +1,148 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.HttpProxyAgent = void 0; +const net = __importStar(require("net")); +const tls = __importStar(require("tls")); +const debug_1 = __importDefault(require("debug")); +const events_1 = require("events"); +const agent_base_1 = require("agent-base"); +const url_1 = require("url"); +const debug = (0, debug_1.default)('http-proxy-agent'); +/** + * The `HttpProxyAgent` implements an HTTP Agent subclass that connects + * to the specified "HTTP proxy server" in order to proxy HTTP requests. + */ +class HttpProxyAgent extends agent_base_1.Agent { + constructor(proxy, opts) { + super(opts); + this.proxy = typeof proxy === 'string' ? new url_1.URL(proxy) : proxy; + this.proxyHeaders = opts?.headers ?? {}; + debug('Creating new HttpProxyAgent instance: %o', this.proxy.href); + // Trim off the brackets from IPv6 addresses + const host = (this.proxy.hostname || this.proxy.host).replace(/^\[|\]$/g, ''); + const port = this.proxy.port + ? parseInt(this.proxy.port, 10) + : this.proxy.protocol === 'https:' + ? 443 + : 80; + this.connectOpts = { + ...(opts ? omit(opts, 'headers') : null), + host, + port, + }; + } + addRequest(req, opts) { + req._header = null; + this.setRequestProps(req, opts); + // @ts-expect-error `addRequest()` isn't defined in `@types/node` + super.addRequest(req, opts); + } + setRequestProps(req, opts) { + const { proxy } = this; + const protocol = opts.secureEndpoint ? 'https:' : 'http:'; + const hostname = req.getHeader('host') || 'localhost'; + const base = `${protocol}//${hostname}`; + const url = new url_1.URL(req.path, base); + if (opts.port !== 80) { + url.port = String(opts.port); + } + // Change the `http.ClientRequest` instance's "path" field + // to the absolute path of the URL that will be requested. + req.path = String(url); + // Inject the `Proxy-Authorization` header if necessary. + const headers = typeof this.proxyHeaders === 'function' + ? this.proxyHeaders() + : { ...this.proxyHeaders }; + if (proxy.username || proxy.password) { + const auth = `${decodeURIComponent(proxy.username)}:${decodeURIComponent(proxy.password)}`; + headers['Proxy-Authorization'] = `Basic ${Buffer.from(auth).toString('base64')}`; + } + if (!headers['Proxy-Connection']) { + headers['Proxy-Connection'] = this.keepAlive + ? 'Keep-Alive' + : 'close'; + } + for (const name of Object.keys(headers)) { + const value = headers[name]; + if (value) { + req.setHeader(name, value); + } + } + } + async connect(req, opts) { + req._header = null; + if (!req.path.includes('://')) { + this.setRequestProps(req, opts); + } + // At this point, the http ClientRequest's internal `_header` field + // might have already been set. If this is the case then we'll need + // to re-generate the string since we just changed the `req.path`. + let first; + let endOfHeaders; + debug('Regenerating stored HTTP header string for request'); + req._implicitHeader(); + if (req.outputData && req.outputData.length > 0) { + debug('Patching connection write() output buffer with updated header'); + first = req.outputData[0].data; + endOfHeaders = first.indexOf('\r\n\r\n') + 4; + req.outputData[0].data = + req._header + first.substring(endOfHeaders); + debug('Output buffer: %o', req.outputData[0].data); + } + // Create a socket connection to the proxy server. 
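+ // Note (ours, not from upstream): the socket type chosen below depends on
+ // the protocol of the proxy itself, not of the target URL. A minimal usage
+ // sketch for this agent, assuming a local proxy listening on port 8080:
+ //   const { HttpProxyAgent } = require('http-proxy-agent');
+ //   const http = require('http');
+ //   const agent = new HttpProxyAgent('http://127.0.0.1:8080');
+ //   http.get('http://example.com/', { agent }, (res) => { /* ... */ });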
+ let socket;
+ if (this.proxy.protocol === 'https:') {
+ debug('Creating `tls.Socket`: %o', this.connectOpts);
+ socket = tls.connect(this.connectOpts);
+ }
+ else {
+ debug('Creating `net.Socket`: %o', this.connectOpts);
+ socket = net.connect(this.connectOpts);
+ }
+ // Wait for the socket's `connect` event, so that this `callback()`
+ // function throws instead of the `http` request machinery. This is
+ // important for e.g. `PacProxyAgent`, which determines a failed proxy
+ // connection via the `callback()` function throwing.
+ await (0, events_1.once)(socket, 'connect');
+ return socket;
+ }
+}
+HttpProxyAgent.protocols = ['http', 'https'];
+exports.HttpProxyAgent = HttpProxyAgent;
+function omit(obj, ...keys) {
+ const ret = {};
+ let key;
+ for (key in obj) {
+ if (!keys.includes(key)) {
+ ret[key] = obj[key];
+ }
+ }
+ return ret;
+}
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/https-proxy-agent/dist/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/https-proxy-agent/dist/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..1857f464724e20a12760b06f194022a4fec26ad0
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/https-proxy-agent/dist/index.js
@@ -0,0 +1,180 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.HttpsProxyAgent = void 0;
+const net = __importStar(require("net"));
+const tls = __importStar(require("tls"));
+const assert_1 = __importDefault(require("assert"));
+const debug_1 = __importDefault(require("debug"));
+const agent_base_1 = require("agent-base");
+const url_1 = require("url");
+const parse_proxy_response_1 = require("./parse-proxy-response");
+const debug = (0, debug_1.default)('https-proxy-agent');
+const setServernameFromNonIpHost = (options) => {
+ if (options.servername === undefined &&
+ options.host &&
+ !net.isIP(options.host)) {
+ return {
+ ...options,
+ servername: options.host,
+ };
+ }
+ return options;
+};
+/**
+ * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to
+ * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests. 
+ * + * Outgoing HTTP requests are first tunneled through the proxy server using the + * `CONNECT` HTTP request method to establish a connection to the proxy server, + * and then the proxy server connects to the destination target and issues the + * HTTP request from the proxy server. + * + * `https:` requests have their socket connection upgraded to TLS once + * the connection to the proxy server has been established. + */ +class HttpsProxyAgent extends agent_base_1.Agent { + constructor(proxy, opts) { + super(opts); + this.options = { path: undefined }; + this.proxy = typeof proxy === 'string' ? new url_1.URL(proxy) : proxy; + this.proxyHeaders = opts?.headers ?? {}; + debug('Creating new HttpsProxyAgent instance: %o', this.proxy.href); + // Trim off the brackets from IPv6 addresses + const host = (this.proxy.hostname || this.proxy.host).replace(/^\[|\]$/g, ''); + const port = this.proxy.port + ? parseInt(this.proxy.port, 10) + : this.proxy.protocol === 'https:' + ? 443 + : 80; + this.connectOpts = { + // Attempt to negotiate http/1.1 for proxy servers that support http/2 + ALPNProtocols: ['http/1.1'], + ...(opts ? omit(opts, 'headers') : null), + host, + port, + }; + } + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. + */ + async connect(req, opts) { + const { proxy } = this; + if (!opts.host) { + throw new TypeError('No "host" provided'); + } + // Create a socket connection to the proxy server. + let socket; + if (proxy.protocol === 'https:') { + debug('Creating `tls.Socket`: %o', this.connectOpts); + socket = tls.connect(setServernameFromNonIpHost(this.connectOpts)); + } + else { + debug('Creating `net.Socket`: %o', this.connectOpts); + socket = net.connect(this.connectOpts); + } + const headers = typeof this.proxyHeaders === 'function' + ? this.proxyHeaders() + : { ...this.proxyHeaders }; + const host = net.isIPv6(opts.host) ? `[${opts.host}]` : opts.host; + let payload = `CONNECT ${host}:${opts.port} HTTP/1.1\r\n`; + // Inject the `Proxy-Authorization` header if necessary. + if (proxy.username || proxy.password) { + const auth = `${decodeURIComponent(proxy.username)}:${decodeURIComponent(proxy.password)}`; + headers['Proxy-Authorization'] = `Basic ${Buffer.from(auth).toString('base64')}`; + } + headers.Host = `${host}:${opts.port}`; + if (!headers['Proxy-Connection']) { + headers['Proxy-Connection'] = this.keepAlive + ? 'Keep-Alive' + : 'close'; + } + for (const name of Object.keys(headers)) { + payload += `${name}: ${headers[name]}\r\n`; + } + const proxyResponsePromise = (0, parse_proxy_response_1.parseProxyResponse)(socket); + socket.write(`${payload}\r\n`); + const { connect, buffered } = await proxyResponsePromise; + req.emit('proxyConnect', connect); + this.emit('proxyConnect', connect, req); + if (connect.statusCode === 200) { + req.once('socket', resume); + if (opts.secureEndpoint) { + // The proxy is connecting to a TLS server, so upgrade + // this socket connection to a TLS connection. + debug('Upgrading socket connection to TLS'); + return tls.connect({ + ...omit(setServernameFromNonIpHost(opts), 'host', 'path', 'port'), + socket, + }); + } + return socket; + } + // Some other status code that's not 200... need to re-play the HTTP + // header "data" events onto the socket once the HTTP machinery is + // attached so that the node core `http` can parse and handle the + // error status code. 
+ // Close the original socket, and a new "fake" socket is returned
+ // instead, so that the proxy doesn't get the HTTP request
+ // written to it (which may contain `Authorization` headers or other
+ // sensitive data).
+ //
+ // See: https://hackerone.com/reports/541502
+ socket.destroy();
+ const fakeSocket = new net.Socket({ writable: false });
+ fakeSocket.readable = true;
+ // Need to wait for the "socket" event to re-play the "data" events.
+ req.once('socket', (s) => {
+ debug('Replaying proxy buffer for failed request');
+ (0, assert_1.default)(s.listenerCount('data') > 0);
+ // Replay the "buffered" Buffer onto the fake `socket`, since at
+ // this point the HTTP module machinery has been hooked up for
+ // the user.
+ s.push(buffered);
+ s.push(null);
+ });
+ return fakeSocket;
+ }
+}
+HttpsProxyAgent.protocols = ['http', 'https'];
+exports.HttpsProxyAgent = HttpsProxyAgent;
+function resume(socket) {
+ socket.resume();
+}
+function omit(obj, ...keys) {
+ const ret = {};
+ let key;
+ for (key in obj) {
+ if (!keys.includes(key)) {
+ ret[key] = obj[key];
+ }
+ }
+ return ret;
+}
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/https-proxy-agent/dist/parse-proxy-response.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/https-proxy-agent/dist/parse-proxy-response.js
new file mode 100644
index 0000000000000000000000000000000000000000..d3f506f94130667ea0f9371e284a0bfe0935448f
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/https-proxy-agent/dist/parse-proxy-response.js
@@ -0,0 +1,101 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.parseProxyResponse = void 0;
+const debug_1 = __importDefault(require("debug"));
+const debug = (0, debug_1.default)('https-proxy-agent:parse-proxy-response');
+function parseProxyResponse(socket) {
+ return new Promise((resolve, reject) => {
+ // we need to buffer any HTTP traffic that happens with the proxy before we get
+ // the CONNECT response, so that if the response is anything other than a "200"
+ // response code, then we can re-play the "data" events on the socket once the
+ // HTTP parser is hooked up... 
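+ // Illustrative example (ours): a successful exchange buffers bytes like
+ //   'HTTP/1.1 200 Connection Established\r\nConnection: close\r\n\r\n'
+ // and resolves with connect = { statusCode: 200, statusText: 'Connection
+ // Established', headers: { connection: 'close' } }, plus the raw `buffered`
+ // bytes so a non-200 response can be replayed onto the request socket.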
+ let buffersLength = 0; + const buffers = []; + function read() { + const b = socket.read(); + if (b) + ondata(b); + else + socket.once('readable', read); + } + function cleanup() { + socket.removeListener('end', onend); + socket.removeListener('error', onerror); + socket.removeListener('readable', read); + } + function onend() { + cleanup(); + debug('onend'); + reject(new Error('Proxy connection ended before receiving CONNECT response')); + } + function onerror(err) { + cleanup(); + debug('onerror %o', err); + reject(err); + } + function ondata(b) { + buffers.push(b); + buffersLength += b.length; + const buffered = Buffer.concat(buffers, buffersLength); + const endOfHeaders = buffered.indexOf('\r\n\r\n'); + if (endOfHeaders === -1) { + // keep buffering + debug('have not received end of HTTP headers yet...'); + read(); + return; + } + const headerParts = buffered + .slice(0, endOfHeaders) + .toString('ascii') + .split('\r\n'); + const firstLine = headerParts.shift(); + if (!firstLine) { + socket.destroy(); + return reject(new Error('No header received from proxy CONNECT response')); + } + const firstLineParts = firstLine.split(' '); + const statusCode = +firstLineParts[1]; + const statusText = firstLineParts.slice(2).join(' '); + const headers = {}; + for (const header of headerParts) { + if (!header) + continue; + const firstColon = header.indexOf(':'); + if (firstColon === -1) { + socket.destroy(); + return reject(new Error(`Invalid header from proxy CONNECT response: "${header}"`)); + } + const key = header.slice(0, firstColon).toLowerCase(); + const value = header.slice(firstColon + 1).trimStart(); + const current = headers[key]; + if (typeof current === 'string') { + headers[key] = [current, value]; + } + else if (Array.isArray(current)) { + current.push(value); + } + else { + headers[key] = value; + } + } + debug('got proxy server response: %o %o', firstLine, headers); + cleanup(); + resolve({ + connect: { + statusCode, + statusText, + headers, + }, + buffered, + }); + } + socket.on('error', onerror); + socket.on('end', onend); + read(); + }); +} +exports.parseProxyResponse = parseProxyResponse; +//# sourceMappingURL=parse-proxy-response.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/dbcs-codec.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/dbcs-codec.js new file mode 100644 index 0000000000000000000000000000000000000000..fa839170367b271072dc097d29b2c05f085e7681 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/dbcs-codec.js @@ -0,0 +1,597 @@ +"use strict"; +var Buffer = require("safer-buffer").Buffer; + +// Multibyte codec. In this scheme, a character is represented by 1 or more bytes. +// Our codec supports UTF-16 surrogates, extensions for GB18030 and unicode sequences. +// To save memory and loading time, we read table files only when requested. + +exports._dbcs = DBCSCodec; + +var UNASSIGNED = -1, + GB18030_CODE = -2, + SEQ_START = -10, + NODE_START = -1000, + UNASSIGNED_NODE = new Array(0x100), + DEF_CHAR = -1; + +for (var i = 0; i < 0x100; i++) + UNASSIGNED_NODE[i] = UNASSIGNED; + + +// Class DBCSCodec reads and initializes mapping tables. 
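+// A small worked example of the trie encoding described below (ours, for
+// clarity): if decodeTables[0][0x81] === NODE_START - 5, then 0x81 is a lead
+// byte and decoding continues in decodeTables[5]; if that node holds 0x4E00
+// at the trail byte, the two-byte sequence decodes to U+4E00. Values
+// <= SEQ_START index into decodeTableSeq as (SEQ_START - value).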
+function DBCSCodec(codecOptions, iconv) {
+ if (!codecOptions)
+ throw new Error("DBCS codec is called without the data.")
+ this.encodingName = codecOptions.encodingName;
+ if (!codecOptions.table)
+ throw new Error("Encoding '" + this.encodingName + "' has no data.");
+
+ // Load tables.
+ var mappingTable = codecOptions.table();
+
+
+ // Decode tables: MBCS -> Unicode.
+
+ // decodeTables is a trie, encoded as an array of arrays of integers. Internal arrays are trie nodes and all have len = 256.
+ // Trie root is decodeTables[0].
+ // Values: >= 0 -> unicode character code (can be > 0xFFFF).
+ // == UNASSIGNED -> unknown/unassigned sequence.
+ // == GB18030_CODE -> this is the end of a GB18030 4-byte sequence.
+ // <= NODE_START -> index of the next node in our trie to process next byte.
+ // <= SEQ_START -> index of the start of a character code sequence, in decodeTableSeq.
+ this.decodeTables = [];
+ this.decodeTables[0] = UNASSIGNED_NODE.slice(0); // Create root node.
+
+ // Sometimes an MBCS char corresponds to a sequence of unicode chars. We store them as arrays of integers here.
+ this.decodeTableSeq = [];
+
+ // Actual mapping tables consist of chunks. Use them to fill up decode tables.
+ for (var i = 0; i < mappingTable.length; i++)
+ this._addDecodeChunk(mappingTable[i]);
+
+ // Load & create GB18030 tables when needed.
+ if (typeof codecOptions.gb18030 === 'function') {
+ this.gb18030 = codecOptions.gb18030(); // Load GB18030 ranges.
+
+ // Add GB18030 common decode nodes.
+ var commonThirdByteNodeIdx = this.decodeTables.length;
+ this.decodeTables.push(UNASSIGNED_NODE.slice(0));
+
+ var commonFourthByteNodeIdx = this.decodeTables.length;
+ this.decodeTables.push(UNASSIGNED_NODE.slice(0));
+
+ // Fill out the tree
+ var firstByteNode = this.decodeTables[0];
+ for (var i = 0x81; i <= 0xFE; i++) {
+ var secondByteNode = this.decodeTables[NODE_START - firstByteNode[i]];
+ for (var j = 0x30; j <= 0x39; j++) {
+ if (secondByteNode[j] === UNASSIGNED) {
+ secondByteNode[j] = NODE_START - commonThirdByteNodeIdx;
+ } else if (secondByteNode[j] > NODE_START) {
+ throw new Error("gb18030 decode tables conflict at byte 2");
+ }
+
+ var thirdByteNode = this.decodeTables[NODE_START - secondByteNode[j]];
+ for (var k = 0x81; k <= 0xFE; k++) {
+ if (thirdByteNode[k] === UNASSIGNED) {
+ thirdByteNode[k] = NODE_START - commonFourthByteNodeIdx;
+ } else if (thirdByteNode[k] === NODE_START - commonFourthByteNodeIdx) {
+ continue;
+ } else if (thirdByteNode[k] > NODE_START) {
+ throw new Error("gb18030 decode tables conflict at byte 3");
+ }
+
+ var fourthByteNode = this.decodeTables[NODE_START - thirdByteNode[k]];
+ for (var l = 0x30; l <= 0x39; l++) {
+ if (fourthByteNode[l] === UNASSIGNED)
+ fourthByteNode[l] = GB18030_CODE;
+ }
+ }
+ }
+ }
+ }
+
+ this.defaultCharUnicode = iconv.defaultCharUnicode;
+
+
+ // Encode tables: Unicode -> DBCS.
+
+ // `encodeTable` is an array mapping from unicode char to encoded char. All its values are integers for performance.
+ // Because it can be sparse, it is represented as an array of buckets of 256 chars each. Bucket can be null.
+ // Values: >= 0 -> it is a normal char. Write the value (if < 0x100 then 1 byte, if < 0x10000 then 2 bytes, etc.).
+ // == UNASSIGNED -> no conversion found. Output a default char.
+ // <= SEQ_START -> it's an index in encodeTableSeq, see below. The character starts a sequence.
+ this.encodeTable = [];
+
+ // `encodeTableSeq` is used when a sequence of unicode characters is encoded as a single code. 
We use a tree of + // objects where keys correspond to characters in sequence and leafs are the encoded dbcs values. A special DEF_CHAR key + // means end of sequence (needed when one sequence is a strict subsequence of another). + // Objects are kept separately from encodeTable to increase performance. + this.encodeTableSeq = []; + + // Some chars can be decoded, but need not be encoded. + var skipEncodeChars = {}; + if (codecOptions.encodeSkipVals) + for (var i = 0; i < codecOptions.encodeSkipVals.length; i++) { + var val = codecOptions.encodeSkipVals[i]; + if (typeof val === 'number') + skipEncodeChars[val] = true; + else + for (var j = val.from; j <= val.to; j++) + skipEncodeChars[j] = true; + } + + // Use decode trie to recursively fill out encode tables. + this._fillEncodeTable(0, 0, skipEncodeChars); + + // Add more encoding pairs when needed. + if (codecOptions.encodeAdd) { + for (var uChar in codecOptions.encodeAdd) + if (Object.prototype.hasOwnProperty.call(codecOptions.encodeAdd, uChar)) + this._setEncodeChar(uChar.charCodeAt(0), codecOptions.encodeAdd[uChar]); + } + + this.defCharSB = this.encodeTable[0][iconv.defaultCharSingleByte.charCodeAt(0)]; + if (this.defCharSB === UNASSIGNED) this.defCharSB = this.encodeTable[0]['?']; + if (this.defCharSB === UNASSIGNED) this.defCharSB = "?".charCodeAt(0); +} + +DBCSCodec.prototype.encoder = DBCSEncoder; +DBCSCodec.prototype.decoder = DBCSDecoder; + +// Decoder helpers +DBCSCodec.prototype._getDecodeTrieNode = function(addr) { + var bytes = []; + for (; addr > 0; addr >>>= 8) + bytes.push(addr & 0xFF); + if (bytes.length == 0) + bytes.push(0); + + var node = this.decodeTables[0]; + for (var i = bytes.length-1; i > 0; i--) { // Traverse nodes deeper into the trie. + var val = node[bytes[i]]; + + if (val == UNASSIGNED) { // Create new node. + node[bytes[i]] = NODE_START - this.decodeTables.length; + this.decodeTables.push(node = UNASSIGNED_NODE.slice(0)); + } + else if (val <= NODE_START) { // Existing node. + node = this.decodeTables[NODE_START - val]; + } + else + throw new Error("Overwrite byte in " + this.encodingName + ", addr: " + addr.toString(16)); + } + return node; +} + + +DBCSCodec.prototype._addDecodeChunk = function(chunk) { + // First element of chunk is the hex mbcs code where we start. + var curAddr = parseInt(chunk[0], 16); + + // Choose the decoding node where we'll write our chars. + var writeTable = this._getDecodeTrieNode(curAddr); + curAddr = curAddr & 0xFF; + + // Write all other elements of the chunk to the table. + for (var k = 1; k < chunk.length; k++) { + var part = chunk[k]; + if (typeof part === "string") { // String, write as-is. + for (var l = 0; l < part.length;) { + var code = part.charCodeAt(l++); + if (0xD800 <= code && code < 0xDC00) { // Decode surrogate + var codeTrail = part.charCodeAt(l++); + if (0xDC00 <= codeTrail && codeTrail < 0xE000) + writeTable[curAddr++] = 0x10000 + (code - 0xD800) * 0x400 + (codeTrail - 0xDC00); + else + throw new Error("Incorrect surrogate pair in " + this.encodingName + " at chunk " + chunk[0]); + } + else if (0x0FF0 < code && code <= 0x0FFF) { // Character sequence (our own encoding used) + var len = 0xFFF - code + 2; + var seq = []; + for (var m = 0; m < len; m++) + seq.push(part.charCodeAt(l++)); // Simple variation: don't support surrogates or subsequences in seq. 
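+ // e.g. (illustrative): code 0x0FFE gives len = 0xFFF - 0xFFE + 2 = 3, so
+ // the next 3 UTF-16 units of the chunk form one multi-char mapping.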
+
+ writeTable[curAddr++] = SEQ_START - this.decodeTableSeq.length;
+ this.decodeTableSeq.push(seq);
+ }
+ else
+ writeTable[curAddr++] = code; // Basic char
+ }
+ }
+ else if (typeof part === "number") { // Integer, meaning increasing sequence starting with prev character.
+ var charCode = writeTable[curAddr - 1] + 1;
+ for (var l = 0; l < part; l++)
+ writeTable[curAddr++] = charCode++;
+ }
+ else
+ throw new Error("Incorrect type '" + typeof part + "' given in " + this.encodingName + " at chunk " + chunk[0]);
+ }
+ if (curAddr > 0xFF)
+ throw new Error("Incorrect chunk in " + this.encodingName + " at addr " + chunk[0] + ": too long " + curAddr);
+}
+
+// Encoder helpers
+DBCSCodec.prototype._getEncodeBucket = function(uCode) {
+ var high = uCode >> 8; // This could be > 0xFF because of astral characters.
+ if (this.encodeTable[high] === undefined)
+ this.encodeTable[high] = UNASSIGNED_NODE.slice(0); // Create bucket on demand.
+ return this.encodeTable[high];
+}
+
+DBCSCodec.prototype._setEncodeChar = function(uCode, dbcsCode) {
+ var bucket = this._getEncodeBucket(uCode);
+ var low = uCode & 0xFF;
+ if (bucket[low] <= SEQ_START)
+ this.encodeTableSeq[SEQ_START-bucket[low]][DEF_CHAR] = dbcsCode; // There's already a sequence, set a single-char subsequence of it.
+ else if (bucket[low] == UNASSIGNED)
+ bucket[low] = dbcsCode;
+}
+
+DBCSCodec.prototype._setEncodeSequence = function(seq, dbcsCode) {
+
+ // Get the root of the character tree according to the first character of the sequence.
+ var uCode = seq[0];
+ var bucket = this._getEncodeBucket(uCode);
+ var low = uCode & 0xFF;
+
+ var node;
+ if (bucket[low] <= SEQ_START) {
+ // There's already a sequence starting with this char - use it.
+ node = this.encodeTableSeq[SEQ_START-bucket[low]];
+ }
+ else {
+ // There was no sequence object - allocate a new one.
+ node = {};
+ if (bucket[low] !== UNASSIGNED) node[DEF_CHAR] = bucket[low]; // If a char was set before - make it a single-char subsequence.
+ bucket[low] = SEQ_START - this.encodeTableSeq.length;
+ this.encodeTableSeq.push(node);
+ }
+
+ // Traverse the character tree, allocating new nodes as needed.
+ for (var j = 1; j < seq.length-1; j++) {
+ uCode = seq[j];
+ var oldVal = node[uCode];
+ if (typeof oldVal === 'object')
+ node = oldVal;
+ else {
+ node = node[uCode] = {};
+ if (oldVal !== undefined)
+ node[DEF_CHAR] = oldVal;
+ }
+ }
+
+ // Set the leaf to given dbcsCode.
+ uCode = seq[seq.length-1];
+ node[uCode] = dbcsCode;
+}
+
+DBCSCodec.prototype._fillEncodeTable = function(nodeIdx, prefix, skipEncodeChars) {
+ var node = this.decodeTables[nodeIdx];
+ var hasValues = false;
+ var subNodeEmpty = {};
+ for (var i = 0; i < 0x100; i++) {
+ var uCode = node[i];
+ var mbCode = prefix + i;
+ if (skipEncodeChars[mbCode])
+ continue;
+
+ if (uCode >= 0) {
+ this._setEncodeChar(uCode, mbCode);
+ hasValues = true;
+ } else if (uCode <= NODE_START) {
+ var subNodeIdx = NODE_START - uCode;
+ if (!subNodeEmpty[subNodeIdx]) { // Skip empty subtrees (they are too large in gb18030).
+ var newPrefix = (mbCode << 8) >>> 0; // NOTE: '>>> 0' keeps 32-bit num positive. 
+ if (this._fillEncodeTable(subNodeIdx, newPrefix, skipEncodeChars)) + hasValues = true; + else + subNodeEmpty[subNodeIdx] = true; + } + } else if (uCode <= SEQ_START) { + this._setEncodeSequence(this.decodeTableSeq[SEQ_START - uCode], mbCode); + hasValues = true; + } + } + return hasValues; +} + + + +// == Encoder ================================================================== + +function DBCSEncoder(options, codec) { + // Encoder state + this.leadSurrogate = -1; + this.seqObj = undefined; + + // Static data + this.encodeTable = codec.encodeTable; + this.encodeTableSeq = codec.encodeTableSeq; + this.defaultCharSingleByte = codec.defCharSB; + this.gb18030 = codec.gb18030; +} + +DBCSEncoder.prototype.write = function(str) { + var newBuf = Buffer.alloc(str.length * (this.gb18030 ? 4 : 3)), + leadSurrogate = this.leadSurrogate, + seqObj = this.seqObj, nextChar = -1, + i = 0, j = 0; + + while (true) { + // 0. Get next character. + if (nextChar === -1) { + if (i == str.length) break; + var uCode = str.charCodeAt(i++); + } + else { + var uCode = nextChar; + nextChar = -1; + } + + // 1. Handle surrogates. + if (0xD800 <= uCode && uCode < 0xE000) { // Char is one of surrogates. + if (uCode < 0xDC00) { // We've got lead surrogate. + if (leadSurrogate === -1) { + leadSurrogate = uCode; + continue; + } else { + leadSurrogate = uCode; + // Double lead surrogate found. + uCode = UNASSIGNED; + } + } else { // We've got trail surrogate. + if (leadSurrogate !== -1) { + uCode = 0x10000 + (leadSurrogate - 0xD800) * 0x400 + (uCode - 0xDC00); + leadSurrogate = -1; + } else { + // Incomplete surrogate pair - only trail surrogate found. + uCode = UNASSIGNED; + } + + } + } + else if (leadSurrogate !== -1) { + // Incomplete surrogate pair - only lead surrogate found. + nextChar = uCode; uCode = UNASSIGNED; // Write an error, then current char. + leadSurrogate = -1; + } + + // 2. Convert uCode character. + var dbcsCode = UNASSIGNED; + if (seqObj !== undefined && uCode != UNASSIGNED) { // We are in the middle of the sequence + var resCode = seqObj[uCode]; + if (typeof resCode === 'object') { // Sequence continues. + seqObj = resCode; + continue; + + } else if (typeof resCode == 'number') { // Sequence finished. Write it. + dbcsCode = resCode; + + } else if (resCode == undefined) { // Current character is not part of the sequence. + + // Try default character for this sequence + resCode = seqObj[DEF_CHAR]; + if (resCode !== undefined) { + dbcsCode = resCode; // Found. Write it. + nextChar = uCode; // Current character will be written too in the next iteration. + + } else { + // TODO: What if we have no default? (resCode == undefined) + // Then, we should write first char of the sequence as-is and try the rest recursively. + // Didn't do it for now because no encoding has this situation yet. + // Currently, just skip the sequence and write current char. + } + } + seqObj = undefined; + } + else if (uCode >= 0) { // Regular character + var subtable = this.encodeTable[uCode >> 8]; + if (subtable !== undefined) + dbcsCode = subtable[uCode & 0xFF]; + + if (dbcsCode <= SEQ_START) { // Sequence start + seqObj = this.encodeTableSeq[SEQ_START-dbcsCode]; + continue; + } + + if (dbcsCode == UNASSIGNED && this.gb18030) { + // Use GB18030 algorithm to find character(s) to write. 
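+ // The 4-byte GB18030 form is a linear offset in mixed radix 126 x 10 x 126 x 10,
+ // hence the divisors 12600 (= 10 * 126 * 10) and 1260 (= 126 * 10) below.
+ // e.g. (illustrative): a linear dbcsCode of 0 encodes as 0x81 0x30 0x81 0x30.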
+ var idx = findIdx(this.gb18030.uChars, uCode); + if (idx != -1) { + var dbcsCode = this.gb18030.gbChars[idx] + (uCode - this.gb18030.uChars[idx]); + newBuf[j++] = 0x81 + Math.floor(dbcsCode / 12600); dbcsCode = dbcsCode % 12600; + newBuf[j++] = 0x30 + Math.floor(dbcsCode / 1260); dbcsCode = dbcsCode % 1260; + newBuf[j++] = 0x81 + Math.floor(dbcsCode / 10); dbcsCode = dbcsCode % 10; + newBuf[j++] = 0x30 + dbcsCode; + continue; + } + } + } + + // 3. Write dbcsCode character. + if (dbcsCode === UNASSIGNED) + dbcsCode = this.defaultCharSingleByte; + + if (dbcsCode < 0x100) { + newBuf[j++] = dbcsCode; + } + else if (dbcsCode < 0x10000) { + newBuf[j++] = dbcsCode >> 8; // high byte + newBuf[j++] = dbcsCode & 0xFF; // low byte + } + else if (dbcsCode < 0x1000000) { + newBuf[j++] = dbcsCode >> 16; + newBuf[j++] = (dbcsCode >> 8) & 0xFF; + newBuf[j++] = dbcsCode & 0xFF; + } else { + newBuf[j++] = dbcsCode >>> 24; + newBuf[j++] = (dbcsCode >>> 16) & 0xFF; + newBuf[j++] = (dbcsCode >>> 8) & 0xFF; + newBuf[j++] = dbcsCode & 0xFF; + } + } + + this.seqObj = seqObj; + this.leadSurrogate = leadSurrogate; + return newBuf.slice(0, j); +} + +DBCSEncoder.prototype.end = function() { + if (this.leadSurrogate === -1 && this.seqObj === undefined) + return; // All clean. Most often case. + + var newBuf = Buffer.alloc(10), j = 0; + + if (this.seqObj) { // We're in the sequence. + var dbcsCode = this.seqObj[DEF_CHAR]; + if (dbcsCode !== undefined) { // Write beginning of the sequence. + if (dbcsCode < 0x100) { + newBuf[j++] = dbcsCode; + } + else { + newBuf[j++] = dbcsCode >> 8; // high byte + newBuf[j++] = dbcsCode & 0xFF; // low byte + } + } else { + // See todo above. + } + this.seqObj = undefined; + } + + if (this.leadSurrogate !== -1) { + // Incomplete surrogate pair - only lead surrogate found. + newBuf[j++] = this.defaultCharSingleByte; + this.leadSurrogate = -1; + } + + return newBuf.slice(0, j); +} + +// Export for testing +DBCSEncoder.prototype.findIdx = findIdx; + + +// == Decoder ================================================================== + +function DBCSDecoder(options, codec) { + // Decoder state + this.nodeIdx = 0; + this.prevBytes = []; + + // Static data + this.decodeTables = codec.decodeTables; + this.decodeTableSeq = codec.decodeTableSeq; + this.defaultCharUnicode = codec.defaultCharUnicode; + this.gb18030 = codec.gb18030; +} + +DBCSDecoder.prototype.write = function(buf) { + var newBuf = Buffer.alloc(buf.length*2), + nodeIdx = this.nodeIdx, + prevBytes = this.prevBytes, prevOffset = this.prevBytes.length, + seqStart = -this.prevBytes.length, // idx of the start of current parsed sequence. + uCode; + + for (var i = 0, j = 0; i < buf.length; i++) { + var curByte = (i >= 0) ? buf[i] : prevBytes[i + prevOffset]; + + // Lookup in current trie node. + var uCode = this.decodeTables[nodeIdx][curByte]; + + if (uCode >= 0) { + // Normal character, just use it. + } + else if (uCode === UNASSIGNED) { // Unknown char. + // TODO: Callback with seq. + uCode = this.defaultCharUnicode.charCodeAt(0); + i = seqStart; // Skip one byte ('i' will be incremented by the for loop) and try to parse again. + } + else if (uCode === GB18030_CODE) { + if (i >= 3) { + var ptr = (buf[i-3]-0x81)*12600 + (buf[i-2]-0x30)*1260 + (buf[i-1]-0x81)*10 + (curByte-0x30); + } else { + var ptr = (prevBytes[i-3+prevOffset]-0x81)*12600 + + (((i-2 >= 0) ? buf[i-2] : prevBytes[i-2+prevOffset])-0x30)*1260 + + (((i-1 >= 0) ? 
buf[i-1] : prevBytes[i-1+prevOffset])-0x81)*10 + + (curByte-0x30); + } + var idx = findIdx(this.gb18030.gbChars, ptr); + uCode = this.gb18030.uChars[idx] + ptr - this.gb18030.gbChars[idx]; + } + else if (uCode <= NODE_START) { // Go to next trie node. + nodeIdx = NODE_START - uCode; + continue; + } + else if (uCode <= SEQ_START) { // Output a sequence of chars. + var seq = this.decodeTableSeq[SEQ_START - uCode]; + for (var k = 0; k < seq.length - 1; k++) { + uCode = seq[k]; + newBuf[j++] = uCode & 0xFF; + newBuf[j++] = uCode >> 8; + } + uCode = seq[seq.length-1]; + } + else + throw new Error("iconv-lite internal error: invalid decoding table value " + uCode + " at " + nodeIdx + "/" + curByte); + + // Write the character to buffer, handling higher planes using surrogate pair. + if (uCode >= 0x10000) { + uCode -= 0x10000; + var uCodeLead = 0xD800 | (uCode >> 10); + newBuf[j++] = uCodeLead & 0xFF; + newBuf[j++] = uCodeLead >> 8; + + uCode = 0xDC00 | (uCode & 0x3FF); + } + newBuf[j++] = uCode & 0xFF; + newBuf[j++] = uCode >> 8; + + // Reset trie node. + nodeIdx = 0; seqStart = i+1; + } + + this.nodeIdx = nodeIdx; + this.prevBytes = (seqStart >= 0) + ? Array.prototype.slice.call(buf, seqStart) + : prevBytes.slice(seqStart + prevOffset).concat(Array.prototype.slice.call(buf)); + + return newBuf.slice(0, j).toString('ucs2'); +} + +DBCSDecoder.prototype.end = function() { + var ret = ''; + + // Try to parse all remaining chars. + while (this.prevBytes.length > 0) { + // Skip 1 character in the buffer. + ret += this.defaultCharUnicode; + var bytesArr = this.prevBytes.slice(1); + + // Parse remaining as usual. + this.prevBytes = []; + this.nodeIdx = 0; + if (bytesArr.length > 0) + ret += this.write(bytesArr); + } + + this.prevBytes = []; + this.nodeIdx = 0; + return ret; +} + +// Binary search for GB18030. Returns largest i such that table[i] <= val. +function findIdx(table, val) { + if (table[0] > val) + return -1; + + var l = 0, r = table.length; + while (l < r-1) { // always table[l] <= val < table[r] + var mid = l + ((r-l+1) >> 1); + if (table[mid] <= val) + l = mid; + else + r = mid; + } + return l; +} + diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/dbcs-data.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/dbcs-data.js new file mode 100644 index 0000000000000000000000000000000000000000..0d17e5821b3df97bb961c4e71142331139d03d15 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/dbcs-data.js @@ -0,0 +1,188 @@ +"use strict"; + +// Description of supported double byte encodings and aliases. +// Tables are not require()-d until they are needed to speed up library load. +// require()-s are direct to support Browserify. + +module.exports = { + + // == Japanese/ShiftJIS ==================================================== + // All japanese encodings are based on JIS X set of standards: + // JIS X 0201 - Single-byte encoding of ASCII + ¥ + Kana chars at 0xA1-0xDF. + // JIS X 0208 - Main set of 6879 characters, placed in 94x94 plane, to be encoded by 2 bytes. + // Has several variations in 1978, 1983, 1990 and 1997. + // JIS X 0212 - Supplementary plane of 6067 chars in 94x94 plane. 1990. Effectively dead. + // JIS X 0213 - Extension and modern replacement of 0208 and 0212. Total chars: 11233. + // 2 planes, first is superset of 0208, second - revised 0212. + // Introduced in 2000, revised 2004. 
Some characters are in Unicode Plane 2 (0x2xxxx) + + // Byte encodings are: + // * Shift_JIS: Compatible with 0201, uses not defined chars in top half as lead bytes for double-byte + // encoding of 0208. Lead byte ranges: 0x81-0x9F, 0xE0-0xEF; Trail byte ranges: 0x40-0x7E, 0x80-0x9E, 0x9F-0xFC. + // Windows CP932 is a superset of Shift_JIS. Some companies added more chars, notably KDDI. + // * EUC-JP: Up to 3 bytes per character. Used mostly on *nixes. + // 0x00-0x7F - lower part of 0201 + // 0x8E, 0xA1-0xDF - upper part of 0201 + // (0xA1-0xFE)x2 - 0208 plane (94x94). + // 0x8F, (0xA1-0xFE)x2 - 0212 plane (94x94). + // * JIS X 208: 7-bit, direct encoding of 0208. Byte ranges: 0x21-0x7E (94 values). Uncommon. + // Used as-is in ISO2022 family. + // * ISO2022-JP: Stateful encoding, with escape sequences to switch between ASCII, + // 0201-1976 Roman, 0208-1978, 0208-1983. + // * ISO2022-JP-1: Adds esc seq for 0212-1990. + // * ISO2022-JP-2: Adds esc seq for GB2313-1980, KSX1001-1992, ISO8859-1, ISO8859-7. + // * ISO2022-JP-3: Adds esc seq for 0201-1976 Kana set, 0213-2000 Planes 1, 2. + // * ISO2022-JP-2004: Adds 0213-2004 Plane 1. + // + // After JIS X 0213 appeared, Shift_JIS-2004, EUC-JISX0213 and ISO2022-JP-2004 followed, with just changing the planes. + // + // Overall, it seems that it's a mess :( http://www8.plala.or.jp/tkubota1/unicode-symbols-map2.html + + 'shiftjis': { + type: '_dbcs', + table: function() { return require('./tables/shiftjis.json') }, + encodeAdd: {'\u00a5': 0x5C, '\u203E': 0x7E}, + encodeSkipVals: [{from: 0xED40, to: 0xF940}], + }, + 'csshiftjis': 'shiftjis', + 'mskanji': 'shiftjis', + 'sjis': 'shiftjis', + 'windows31j': 'shiftjis', + 'ms31j': 'shiftjis', + 'xsjis': 'shiftjis', + 'windows932': 'shiftjis', + 'ms932': 'shiftjis', + '932': 'shiftjis', + 'cp932': 'shiftjis', + + 'eucjp': { + type: '_dbcs', + table: function() { return require('./tables/eucjp.json') }, + encodeAdd: {'\u00a5': 0x5C, '\u203E': 0x7E}, + }, + + // TODO: KDDI extension to Shift_JIS + // TODO: IBM CCSID 942 = CP932, but F0-F9 custom chars and other char changes. + // TODO: IBM CCSID 943 = Shift_JIS = CP932 with original Shift_JIS lower 128 chars. + + + // == Chinese/GBK ========================================================== + // http://en.wikipedia.org/wiki/GBK + // We mostly implement W3C recommendation: https://www.w3.org/TR/encoding/#gbk-encoder + + // Oldest GB2312 (1981, ~7600 chars) is a subset of CP936 + 'gb2312': 'cp936', + 'gb231280': 'cp936', + 'gb23121980': 'cp936', + 'csgb2312': 'cp936', + 'csiso58gb231280': 'cp936', + 'euccn': 'cp936', + + // Microsoft's CP936 is a subset and approximation of GBK. + 'windows936': 'cp936', + 'ms936': 'cp936', + '936': 'cp936', + 'cp936': { + type: '_dbcs', + table: function() { return require('./tables/cp936.json') }, + }, + + // GBK (~22000 chars) is an extension of CP936 that added user-mapped chars and some other. + 'gbk': { + type: '_dbcs', + table: function() { return require('./tables/cp936.json').concat(require('./tables/gbk-added.json')) }, + }, + 'xgbk': 'gbk', + 'isoir58': 'gbk', + + // GB18030 is an algorithmic extension of GBK. 
+ // Main source: https://www.w3.org/TR/encoding/#gbk-encoder + // http://icu-project.org/docs/papers/gb18030.html + // http://source.icu-project.org/repos/icu/data/trunk/charset/data/xml/gb-18030-2000.xml + // http://www.khngai.com/chinese/charmap/tblgbk.php?page=0 + 'gb18030': { + type: '_dbcs', + table: function() { return require('./tables/cp936.json').concat(require('./tables/gbk-added.json')) }, + gb18030: function() { return require('./tables/gb18030-ranges.json') }, + encodeSkipVals: [0x80], + encodeAdd: {'€': 0xA2E3}, + }, + + 'chinese': 'gb18030', + + + // == Korean =============================================================== + // EUC-KR, KS_C_5601 and KS X 1001 are exactly the same. + 'windows949': 'cp949', + 'ms949': 'cp949', + '949': 'cp949', + 'cp949': { + type: '_dbcs', + table: function() { return require('./tables/cp949.json') }, + }, + + 'cseuckr': 'cp949', + 'csksc56011987': 'cp949', + 'euckr': 'cp949', + 'isoir149': 'cp949', + 'korean': 'cp949', + 'ksc56011987': 'cp949', + 'ksc56011989': 'cp949', + 'ksc5601': 'cp949', + + + // == Big5/Taiwan/Hong Kong ================================================ + // There are lots of tables for Big5 and cp950. Please see the following links for history: + // http://moztw.org/docs/big5/ http://www.haible.de/bruno/charsets/conversion-tables/Big5.html + // Variations, in roughly number of defined chars: + // * Windows CP 950: Microsoft variant of Big5. Canonical: http://www.unicode.org/Public/MAPPINGS/VENDORS/MICSFT/WINDOWS/CP950.TXT + // * Windows CP 951: Microsoft variant of Big5-HKSCS-2001. Seems to be never public. http://me.abelcheung.org/articles/research/what-is-cp951/ + // * Big5-2003 (Taiwan standard) almost superset of cp950. + // * Unicode-at-on (UAO) / Mozilla 1.8. Falling out of use on the Web. Not supported by other browsers. + // * Big5-HKSCS (-2001, -2004, -2008). Hong Kong standard. + // many unicode code points moved from PUA to Supplementary plane (U+2XXXX) over the years. + // Plus, it has 4 combining sequences. + // Seems that Mozilla refused to support it for 10 yrs. https://bugzilla.mozilla.org/show_bug.cgi?id=162431 https://bugzilla.mozilla.org/show_bug.cgi?id=310299 + // because big5-hkscs is the only encoding to include astral characters in non-algorithmic way. + // Implementations are not consistent within browsers; sometimes labeled as just big5. + // MS Internet Explorer switches from big5 to big5-hkscs when a patch applied. + // Great discussion & recap of what's going on https://bugzilla.mozilla.org/show_bug.cgi?id=912470#c31 + // In the encoder, it might make sense to support encoding old PUA mappings to Big5 bytes seq-s. + // Official spec: http://www.ogcio.gov.hk/en/business/tech_promotion/ccli/terms/doc/2003cmp_2008.txt + // http://www.ogcio.gov.hk/tc/business/tech_promotion/ccli/terms/doc/hkscs-2008-big5-iso.txt + // + // Current understanding of how to deal with Big5(-HKSCS) is in the Encoding Standard, http://encoding.spec.whatwg.org/#big5-encoder + // Unicode mapping (http://www.unicode.org/Public/MAPPINGS/OBSOLETE/EASTASIA/OTHER/BIG5.TXT) is said to be wrong. + + 'windows950': 'cp950', + 'ms950': 'cp950', + '950': 'cp950', + 'cp950': { + type: '_dbcs', + table: function() { return require('./tables/cp950.json') }, + }, + + // Big5 has many variations and is an extension of cp950. We use Encoding Standard's as a consensus. 
+ 'big5': 'big5hkscs', + 'big5hkscs': { + type: '_dbcs', + table: function() { return require('./tables/cp950.json').concat(require('./tables/big5-added.json')) }, + encodeSkipVals: [ + // Although Encoding Standard says we should avoid encoding to HKSCS area (See Step 1 of + // https://encoding.spec.whatwg.org/#index-big5-pointer), we still do it to increase compatibility with ICU. + // But if a single unicode point can be encoded both as HKSCS and regular Big5, we prefer the latter. + 0x8e69, 0x8e6f, 0x8e7e, 0x8eab, 0x8eb4, 0x8ecd, 0x8ed0, 0x8f57, 0x8f69, 0x8f6e, 0x8fcb, 0x8ffe, + 0x906d, 0x907a, 0x90c4, 0x90dc, 0x90f1, 0x91bf, 0x92af, 0x92b0, 0x92b1, 0x92b2, 0x92d1, 0x9447, 0x94ca, + 0x95d9, 0x96fc, 0x9975, 0x9b76, 0x9b78, 0x9b7b, 0x9bc6, 0x9bde, 0x9bec, 0x9bf6, 0x9c42, 0x9c53, 0x9c62, + 0x9c68, 0x9c6b, 0x9c77, 0x9cbc, 0x9cbd, 0x9cd0, 0x9d57, 0x9d5a, 0x9dc4, 0x9def, 0x9dfb, 0x9ea9, 0x9eef, + 0x9efd, 0x9f60, 0x9fcb, 0xa077, 0xa0dc, 0xa0df, 0x8fcc, 0x92c8, 0x9644, 0x96ed, + + // Step 2 of https://encoding.spec.whatwg.org/#index-big5-pointer: Use last pointer for U+2550, U+255E, U+2561, U+256A, U+5341, or U+5345 + 0xa2a4, 0xa2a5, 0xa2a7, 0xa2a6, 0xa2cc, 0xa2ce, + ], + }, + + 'cnbig5': 'big5hkscs', + 'csbig5': 'big5hkscs', + 'xxbig5': 'big5hkscs', +}; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/index.js new file mode 100644 index 0000000000000000000000000000000000000000..d95c2441151a93320a45e2042d30974230b4902b --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/index.js @@ -0,0 +1,23 @@ +"use strict"; + +// Update this array if you add/rename/remove files in this directory. +// We support Browserify by skipping automatic module discovery and requiring modules directly. +var modules = [ + require("./internal"), + require("./utf32"), + require("./utf16"), + require("./utf7"), + require("./sbcs-codec"), + require("./sbcs-data"), + require("./sbcs-data-generated"), + require("./dbcs-codec"), + require("./dbcs-data"), +]; + +// Put all encoding/alias/codec definitions to single object and export it. +for (var i = 0; i < modules.length; i++) { + var module = modules[i]; + for (var enc in module) + if (Object.prototype.hasOwnProperty.call(module, enc)) + exports[enc] = module[enc]; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/internal.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/internal.js new file mode 100644 index 0000000000000000000000000000000000000000..dc1074f04f11a31c0e962846f5d162eab9556d38 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/internal.js @@ -0,0 +1,198 @@ +"use strict"; +var Buffer = require("safer-buffer").Buffer; + +// Export Node.js internal encodings. + +module.exports = { + // Encodings + utf8: { type: "_internal", bomAware: true}, + cesu8: { type: "_internal", bomAware: true}, + unicode11utf8: "utf8", + + ucs2: { type: "_internal", bomAware: true}, + utf16le: "ucs2", + + binary: { type: "_internal" }, + base64: { type: "_internal" }, + hex: { type: "_internal" }, + + // Codec. 
+ _internal: InternalCodec, +}; + +//------------------------------------------------------------------------------ + +function InternalCodec(codecOptions, iconv) { + this.enc = codecOptions.encodingName; + this.bomAware = codecOptions.bomAware; + + if (this.enc === "base64") + this.encoder = InternalEncoderBase64; + else if (this.enc === "cesu8") { + this.enc = "utf8"; // Use utf8 for decoding. + this.encoder = InternalEncoderCesu8; + + // Add decoder for versions of Node not supporting CESU-8 + if (Buffer.from('eda0bdedb2a9', 'hex').toString() !== '💩') { + this.decoder = InternalDecoderCesu8; + this.defaultCharUnicode = iconv.defaultCharUnicode; + } + } +} + +InternalCodec.prototype.encoder = InternalEncoder; +InternalCodec.prototype.decoder = InternalDecoder; + +//------------------------------------------------------------------------------ + +// We use node.js internal decoder. Its signature is the same as ours. +var StringDecoder = require('string_decoder').StringDecoder; + +if (!StringDecoder.prototype.end) // Node v0.8 doesn't have this method. + StringDecoder.prototype.end = function() {}; + + +function InternalDecoder(options, codec) { + this.decoder = new StringDecoder(codec.enc); +} + +InternalDecoder.prototype.write = function(buf) { + if (!Buffer.isBuffer(buf)) { + buf = Buffer.from(buf); + } + + return this.decoder.write(buf); +} + +InternalDecoder.prototype.end = function() { + return this.decoder.end(); +} + + +//------------------------------------------------------------------------------ +// Encoder is mostly trivial + +function InternalEncoder(options, codec) { + this.enc = codec.enc; +} + +InternalEncoder.prototype.write = function(str) { + return Buffer.from(str, this.enc); +} + +InternalEncoder.prototype.end = function() { +} + + +//------------------------------------------------------------------------------ +// Except base64 encoder, which must keep its state. + +function InternalEncoderBase64(options, codec) { + this.prevStr = ''; +} + +InternalEncoderBase64.prototype.write = function(str) { + str = this.prevStr + str; + var completeQuads = str.length - (str.length % 4); + this.prevStr = str.slice(completeQuads); + str = str.slice(0, completeQuads); + + return Buffer.from(str, "base64"); +} + +InternalEncoderBase64.prototype.end = function() { + return Buffer.from(this.prevStr, "base64"); +} + + +//------------------------------------------------------------------------------ +// CESU-8 encoder is also special. + +function InternalEncoderCesu8(options, codec) { +} + +InternalEncoderCesu8.prototype.write = function(str) { + var buf = Buffer.alloc(str.length * 3), bufIdx = 0; + for (var i = 0; i < str.length; i++) { + var charCode = str.charCodeAt(i); + // Naive implementation, but it works because CESU-8 is especially easy + // to convert from UTF-16 (which all JS strings are encoded in). + if (charCode < 0x80) + buf[bufIdx++] = charCode; + else if (charCode < 0x800) { + buf[bufIdx++] = 0xC0 + (charCode >>> 6); + buf[bufIdx++] = 0x80 + (charCode & 0x3f); + } + else { // charCode will always be < 0x10000 in javascript. 
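+            // Worked example, tying this branch to the probe in InternalCodec above:
+            // '💩' (U+1F4A9) is stored in JS as the surrogate pair 0xD83D 0xDCA9, and
+            // each half is emitted here as its own 3-byte sequence:
+            // 0xD83D -> ED A0 BD, 0xDCA9 -> ED B2 A9, i.e. the bytes eda0bdedb2a9.
+            // Encoding the surrogate halves separately is exactly what makes this
+            // CESU-8 rather than UTF-8.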
+            buf[bufIdx++] = 0xE0 + (charCode >>> 12);
+            buf[bufIdx++] = 0x80 + ((charCode >>> 6) & 0x3f);
+            buf[bufIdx++] = 0x80 + (charCode & 0x3f);
+        }
+    }
+    return buf.slice(0, bufIdx);
+}
+
+InternalEncoderCesu8.prototype.end = function() {
+}
+
+//------------------------------------------------------------------------------
+// CESU-8 decoder is not implemented in Node v4.0+
+
+function InternalDecoderCesu8(options, codec) {
+    this.acc = 0;
+    this.contBytes = 0;
+    this.accBytes = 0;
+    this.defaultCharUnicode = codec.defaultCharUnicode;
+}
+
+InternalDecoderCesu8.prototype.write = function(buf) {
+    var acc = this.acc, contBytes = this.contBytes, accBytes = this.accBytes,
+        res = '';
+    for (var i = 0; i < buf.length; i++) {
+        var curByte = buf[i];
+        if ((curByte & 0xC0) !== 0x80) { // Leading byte
+            if (contBytes > 0) { // Previous code is invalid
+                res += this.defaultCharUnicode;
+                contBytes = 0;
+            }
+
+            if (curByte < 0x80) { // Single-byte code
+                res += String.fromCharCode(curByte);
+            } else if (curByte < 0xE0) { // Two-byte code
+                acc = curByte & 0x1F;
+                contBytes = 1; accBytes = 1;
+            } else if (curByte < 0xF0) { // Three-byte code
+                acc = curByte & 0x0F;
+                contBytes = 2; accBytes = 1;
+            } else { // Four-byte codes and longer are not supported in CESU-8.
+                res += this.defaultCharUnicode;
+            }
+        } else { // Continuation byte
+            if (contBytes > 0) { // We're waiting for it.
+                acc = (acc << 6) | (curByte & 0x3f);
+                contBytes--; accBytes++;
+                if (contBytes === 0) {
+                    // Check for overlong encoding, but support Modified UTF-8 (encoding NULL as C0 80)
+                    if (accBytes === 2 && acc < 0x80 && acc > 0)
+                        res += this.defaultCharUnicode;
+                    else if (accBytes === 3 && acc < 0x800)
+                        res += this.defaultCharUnicode;
+                    else
+                        // Actually add character.
+                        res += String.fromCharCode(acc);
+                }
+            } else { // Unexpected continuation byte
+                res += this.defaultCharUnicode;
+            }
+        }
+    }
+    this.acc = acc; this.contBytes = contBytes; this.accBytes = accBytes;
+    return res;
+}
+
+InternalDecoderCesu8.prototype.end = function() {
+    var res = '';
+    if (this.contBytes > 0)
+        res += this.defaultCharUnicode;
+    return res;
+}
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/sbcs-codec.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/sbcs-codec.js
new file mode 100644
index 0000000000000000000000000000000000000000..abac5ffaac97da29fa5c5d8aedf5b47763fc7c58
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/sbcs-codec.js
@@ -0,0 +1,72 @@
+"use strict";
+var Buffer = require("safer-buffer").Buffer;
+
+// Single-byte codec. Needs a 'chars' string parameter that contains 256 or 128 chars that
+// correspond to encoded bytes (if 128, the lower half is ASCII).
+
+exports._sbcs = SBCSCodec;
+function SBCSCodec(codecOptions, iconv) {
+    if (!codecOptions)
+        throw new Error("SBCS codec is called without the data.");
+
+    // Prepare char buffer for decoding.
+    if (!codecOptions.chars || (codecOptions.chars.length !== 128 && codecOptions.chars.length !== 256))
+        throw new Error("Encoding '"+codecOptions.type+"' has incorrect 'chars' (must be of len 128 or 256)");
+
+    if (codecOptions.chars.length === 128) {
+        var asciiString = "";
+        for (var i = 0; i < 128; i++)
+            asciiString += String.fromCharCode(i);
+        codecOptions.chars = asciiString + codecOptions.chars;
+    }
+
+    this.decodeBuf = Buffer.from(codecOptions.chars, 'ucs2');
+
+    // Encoding buffer.
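+    // (The inverse of decodeBuf above: one byte per possible UTF-16 code unit,
+    // prefilled with the default replacement byte; each table char then maps its
+    // char code back to its table index, i.e. the encoded byte. This makes
+    // SBCSEncoder.write below a single array lookup per character.)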
+ var encodeBuf = Buffer.alloc(65536, iconv.defaultCharSingleByte.charCodeAt(0)); + + for (var i = 0; i < codecOptions.chars.length; i++) + encodeBuf[codecOptions.chars.charCodeAt(i)] = i; + + this.encodeBuf = encodeBuf; +} + +SBCSCodec.prototype.encoder = SBCSEncoder; +SBCSCodec.prototype.decoder = SBCSDecoder; + + +function SBCSEncoder(options, codec) { + this.encodeBuf = codec.encodeBuf; +} + +SBCSEncoder.prototype.write = function(str) { + var buf = Buffer.alloc(str.length); + for (var i = 0; i < str.length; i++) + buf[i] = this.encodeBuf[str.charCodeAt(i)]; + + return buf; +} + +SBCSEncoder.prototype.end = function() { +} + + +function SBCSDecoder(options, codec) { + this.decodeBuf = codec.decodeBuf; +} + +SBCSDecoder.prototype.write = function(buf) { + // Strings are immutable in JS -> we use ucs2 buffer to speed up computations. + var decodeBuf = this.decodeBuf; + var newBuf = Buffer.alloc(buf.length*2); + var idx1 = 0, idx2 = 0; + for (var i = 0; i < buf.length; i++) { + idx1 = buf[i]*2; idx2 = i*2; + newBuf[idx2] = decodeBuf[idx1]; + newBuf[idx2+1] = decodeBuf[idx1+1]; + } + return newBuf.toString('ucs2'); +} + +SBCSDecoder.prototype.end = function() { +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/sbcs-data-generated.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/sbcs-data-generated.js new file mode 100644 index 0000000000000000000000000000000000000000..9b4823607b6071c67dd7b553767bfee98a49de1c --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/sbcs-data-generated.js @@ -0,0 +1,451 @@ +"use strict"; + +// Generated data for sbcs codec. Don't edit manually. Regenerate using generation/gen-sbcs.js script. 
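+// Entry format: bare-number keys like "437" are codepage aliases pointing at the
+// canonical name; codec entries are { type: "_sbcs", chars } where 'chars' lists
+// the characters for bytes 0x00-0xFF in order (or for 0x80-0xFF when it is 128
+// chars long, ASCII implied), with '�' marking unmapped bytes. A rough usage
+// sketch, assuming the usual iconv-lite top-level API:
+//
+//     var iconv = require('iconv-lite');
+//     iconv.decode(Buffer.from([0xc0]), 'iso88591');  // 'À' (index 0x40 of the 128-char string)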
+module.exports = { + "437": "cp437", + "737": "cp737", + "775": "cp775", + "850": "cp850", + "852": "cp852", + "855": "cp855", + "856": "cp856", + "857": "cp857", + "858": "cp858", + "860": "cp860", + "861": "cp861", + "862": "cp862", + "863": "cp863", + "864": "cp864", + "865": "cp865", + "866": "cp866", + "869": "cp869", + "874": "windows874", + "922": "cp922", + "1046": "cp1046", + "1124": "cp1124", + "1125": "cp1125", + "1129": "cp1129", + "1133": "cp1133", + "1161": "cp1161", + "1162": "cp1162", + "1163": "cp1163", + "1250": "windows1250", + "1251": "windows1251", + "1252": "windows1252", + "1253": "windows1253", + "1254": "windows1254", + "1255": "windows1255", + "1256": "windows1256", + "1257": "windows1257", + "1258": "windows1258", + "28591": "iso88591", + "28592": "iso88592", + "28593": "iso88593", + "28594": "iso88594", + "28595": "iso88595", + "28596": "iso88596", + "28597": "iso88597", + "28598": "iso88598", + "28599": "iso88599", + "28600": "iso885910", + "28601": "iso885911", + "28603": "iso885913", + "28604": "iso885914", + "28605": "iso885915", + "28606": "iso885916", + "windows874": { + "type": "_sbcs", + "chars": "€����…�����������‘’“”•–—�������� กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู����฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛����" + }, + "win874": "windows874", + "cp874": "windows874", + "windows1250": { + "type": "_sbcs", + "chars": "€�‚�„…†‡�‰Š‹ŚŤŽŹ�‘’“”•–—�™š›śťžź ˇ˘Ł¤Ą¦§¨©Ş«¬­®Ż°±˛ł´µ¶·¸ąş»Ľ˝ľżŔÁÂĂÄĹĆÇČÉĘËĚÍÎĎĐŃŇÓÔŐÖ×ŘŮÚŰÜÝŢßŕáâăäĺćçčéęëěíîďđńňóôőö÷řůúűüýţ˙" + }, + "win1250": "windows1250", + "cp1250": "windows1250", + "windows1251": { + "type": "_sbcs", + "chars": "ЂЃ‚ѓ„…†‡€‰Љ‹ЊЌЋЏђ‘’“”•–—�™љ›њќћџ ЎўЈ¤Ґ¦§Ё©Є«¬­®Ї°±Ііґµ¶·ё№є»јЅѕїАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя" + }, + "win1251": "windows1251", + "cp1251": "windows1251", + "windows1252": { + "type": "_sbcs", + "chars": "€�‚ƒ„…†‡ˆ‰Š‹Œ�Ž��‘’“”•–—˜™š›œ�žŸ ¡¢£¤¥¦§¨©ª«¬­®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖרÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ" + }, + "win1252": "windows1252", + "cp1252": "windows1252", + "windows1253": { + "type": "_sbcs", + "chars": "€�‚ƒ„…†‡�‰�‹�����‘’“”•–—�™�›���� ΅Ά£¤¥¦§¨©�«¬­®―°±²³΄µ¶·ΈΉΊ»Ό½ΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡ�ΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύώ�" + }, + "win1253": "windows1253", + "cp1253": "windows1253", + "windows1254": { + "type": "_sbcs", + "chars": "€�‚ƒ„…†‡ˆ‰Š‹Œ����‘’“”•–—˜™š›œ��Ÿ ¡¢£¤¥¦§¨©ª«¬­®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏĞÑÒÓÔÕÖרÙÚÛÜİŞßàáâãäåæçèéêëìíîïğñòóôõö÷øùúûüışÿ" + }, + "win1254": "windows1254", + "cp1254": "windows1254", + "windows1255": { + "type": "_sbcs", + "chars": "€�‚ƒ„…†‡ˆ‰�‹�����‘’“”•–—˜™�›���� ¡¢£₪¥¦§¨©×«¬­®¯°±²³´µ¶·¸¹÷»¼½¾¿ְֱֲֳִֵֶַָֹֺֻּֽ־ֿ׀ׁׂ׃װױײ׳״�������אבגדהוזחטיךכלםמןנסעףפץצקרשת��‎‏�" + }, + "win1255": "windows1255", + "cp1255": "windows1255", + "windows1256": { + "type": "_sbcs", + "chars": "€پ‚ƒ„…†‡ˆ‰ٹ‹Œچژڈگ‘’“”•–—ک™ڑ›œ‌‍ں ،¢£¤¥¦§¨©ھ«¬­®¯°±²³´µ¶·¸¹؛»¼½¾؟ہءآأؤإئابةتثجحخدذرزسشصض×طظعغـفقكàلâمنهوçèéêëىيîïًٌٍَôُِ÷ّùْûü‎‏ے" + }, + "win1256": "windows1256", + "cp1256": "windows1256", + "windows1257": { + "type": "_sbcs", + "chars": "€�‚�„…†‡�‰�‹�¨ˇ¸�‘’“”•–—�™�›�¯˛� �¢£¤�¦§Ø©Ŗ«¬­®Æ°±²³´µ¶·ø¹ŗ»¼½¾æĄĮĀĆÄÅĘĒČÉŹĖĢĶĪĻŠŃŅÓŌÕÖ×ŲŁŚŪÜŻŽßąįāćäåęēčéźėģķīļšńņóōõö÷ųłśūüżž˙" + }, + "win1257": "windows1257", + "cp1257": "windows1257", + "windows1258": { + "type": "_sbcs", + "chars": "€�‚ƒ„…†‡ˆ‰�‹Œ����‘’“”•–—˜™�›œ��Ÿ ¡¢£¤¥¦§¨©ª«¬­®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂĂÄÅÆÇÈÉÊË̀ÍÎÏĐÑ̉ÓÔƠÖרÙÚÛÜỮßàáâăäåæçèéêë́íîïđṇ̃óôơö÷øùúûüư₫ÿ" + }, + "win1258": "windows1258", + "cp1258": "windows1258", + "iso88591": { + "type": "_sbcs", + "chars": 
"€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ¡¢£¤¥¦§¨©ª«¬­®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖרÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ" + }, + "cp28591": "iso88591", + "iso88592": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ Ą˘Ł¤ĽŚ§¨ŠŞŤŹ­ŽŻ°ą˛ł´ľśˇ¸šşťź˝žżŔÁÂĂÄĹĆÇČÉĘËĚÍÎĎĐŃŇÓÔŐÖ×ŘŮÚŰÜÝŢßŕáâăäĺćçčéęëěíîďđńňóôőö÷řůúűüýţ˙" + }, + "cp28592": "iso88592", + "iso88593": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ Ħ˘£¤�Ĥ§¨İŞĞĴ­�ݰħ²³´µĥ·¸ışğĵ½�żÀÁÂ�ÄĊĈÇÈÉÊËÌÍÎÏ�ÑÒÓÔĠÖ×ĜÙÚÛÜŬŜßàáâ�äċĉçèéêëìíîï�ñòóôġö÷ĝùúûüŭŝ˙" + }, + "cp28593": "iso88593", + "iso88594": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ĄĸŖ¤Ĩϧ¨ŠĒĢŦ­Ž¯°ą˛ŗ´ĩšēģŧŊžŋĀÁÂÃÄÅÆĮČÉĘËĖÍÎĪĐŅŌĶÔÕÖרŲÚÛÜŨŪßāáâãäåæįčéęëėíîīđņōķôõö÷øųúûüũū˙" + }, + "cp28594": "iso88594", + "iso88595": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ЁЂЃЄЅІЇЈЉЊЋЌ­ЎЏАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя№ёђѓєѕіїјљњћќ§ўџ" + }, + "cp28595": "iso88595", + "iso88596": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ���¤�������،­�������������؛���؟�ءآأؤإئابةتثجحخدذرزسشصضطظعغ�����ـفقكلمنهوىيًٌٍَُِّْ�������������" + }, + "cp28596": "iso88596", + "iso88597": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ‘’£€₯¦§¨©ͺ«¬­�―°±²³΄΅Ά·ΈΉΊ»Ό½ΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡ�ΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύώ�" + }, + "cp28597": "iso88597", + "iso88598": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ �¢£¤¥¦§¨©×«¬­®¯°±²³´µ¶·¸¹÷»¼½¾��������������������������������‗אבגדהוזחטיךכלםמןנסעףפץצקרשת��‎‏�" + }, + "cp28598": "iso88598", + "iso88599": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ¡¢£¤¥¦§¨©ª«¬­®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏĞÑÒÓÔÕÖרÙÚÛÜİŞßàáâãäåæçèéêëìíîïğñòóôõö÷øùúûüışÿ" + }, + "cp28599": "iso88599", + "iso885910": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ĄĒĢĪĨͧĻĐŠŦŽ­ŪŊ°ąēģīĩķ·ļđšŧž―ūŋĀÁÂÃÄÅÆĮČÉĘËĖÍÎÏÐŅŌÓÔÕÖŨØŲÚÛÜÝÞßāáâãäåæįčéęëėíîïðņōóôõöũøųúûüýþĸ" + }, + "cp28600": "iso885910", + "iso885911": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู����฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛����" + }, + "cp28601": "iso885911", + "iso885913": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ”¢£¤„¦§Ø©Ŗ«¬­®Æ°±²³“µ¶·ø¹ŗ»¼½¾æĄĮĀĆÄÅĘĒČÉŹĖĢĶĪĻŠŃŅÓŌÕÖ×ŲŁŚŪÜŻŽßąįāćäåęēčéźėģķīļšńņóōõö÷ųłśūüżž’" + }, + "cp28603": "iso885913", + "iso885914": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ Ḃḃ£ĊċḊ§Ẁ©ẂḋỲ­®ŸḞḟĠġṀṁ¶ṖẁṗẃṠỳẄẅṡÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏŴÑÒÓÔÕÖṪØÙÚÛÜÝŶßàáâãäåæçèéêëìíîïŵñòóôõöṫøùúûüýŷÿ" + }, + "cp28604": "iso885914", + "iso885915": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ¡¢£€¥Š§š©ª«¬­®¯°±²³Žµ¶·ž¹º»ŒœŸ¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖרÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ" + }, + "cp28605": "iso885915", + "iso885916": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ĄąŁ€„Чš©Ș«Ź­źŻ°±ČłŽ”¶·žčș»ŒœŸżÀÁÂĂÄĆÆÇÈÉÊËÌÍÎÏĐŃÒÓÔŐÖŚŰÙÚÛÜĘȚßàáâăäćæçèéêëìíîïđńòóôőöśűùúûüęțÿ" + }, + "cp28606": "iso885916", + "cp437": { + "type": "_sbcs", + "chars": "ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜ¢£¥₧ƒáíóúñѪº¿⌐¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ " + }, + "ibm437": "cp437", + "csibm437": "cp437", + "cp737": { + "type": "_sbcs", + "chars": "ΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩαβγδεζηθικλμνξοπρσςτυφχψ░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀ωάέήϊίόύϋώΆΈΉΊΌΎΏ±≥≤ΪΫ÷≈°∙·√ⁿ²■ " + }, + "ibm737": "cp737", + "csibm737": "cp737", + "cp775": { + "type": "_sbcs", + "chars": 
"ĆüéāäģåćłēŖŗīŹÄÅÉæÆōöĢ¢ŚśÖÜø£Ø×¤ĀĪóŻżź”¦©®¬½¼Ł«»░▒▓│┤ĄČĘĖ╣║╗╝ĮŠ┐└┴┬├─┼ŲŪ╚╔╩╦╠═╬Žąčęėįšųūž┘┌█▄▌▐▀ÓßŌŃõÕµńĶķĻļņĒŅ’­±“¾¶§÷„°∙·¹³²■ " + }, + "ibm775": "cp775", + "csibm775": "cp775", + "cp850": { + "type": "_sbcs", + "chars": "ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜø£Ø×ƒáíóúñѪº¿®¬½¼¡«»░▒▓│┤ÁÂÀ©╣║╗╝¢¥┐└┴┬├─┼ãÃ╚╔╩╦╠═╬¤ðÐÊËÈıÍÎÏ┘┌█▄¦Ì▀ÓßÔÒõÕµþÞÚÛÙýݯ´­±‗¾¶§÷¸°¨·¹³²■ " + }, + "ibm850": "cp850", + "csibm850": "cp850", + "cp852": { + "type": "_sbcs", + "chars": "ÇüéâäůćçłëŐőîŹÄĆÉĹĺôöĽľŚśÖÜŤťŁ×čáíóúĄąŽžĘ꬟Ⱥ«»░▒▓│┤ÁÂĚŞ╣║╗╝Żż┐└┴┬├─┼Ăă╚╔╩╦╠═╬¤đĐĎËďŇÍÎě┘┌█▄ŢŮ▀ÓßÔŃńňŠšŔÚŕŰýÝţ´­˝˛ˇ˘§÷¸°¨˙űŘř■ " + }, + "ibm852": "cp852", + "csibm852": "cp852", + "cp855": { + "type": "_sbcs", + "chars": "ђЂѓЃёЁєЄѕЅіІїЇјЈљЉњЊћЋќЌўЎџЏюЮъЪаАбБцЦдДеЕфФгГ«»░▒▓│┤хХиИ╣║╗╝йЙ┐└┴┬├─┼кК╚╔╩╦╠═╬¤лЛмМнНоОп┘┌█▄Пя▀ЯрРсСтТуУжЖвВьЬ№­ыЫзЗшШэЭщЩчЧ§■ " + }, + "ibm855": "cp855", + "csibm855": "cp855", + "cp856": { + "type": "_sbcs", + "chars": "אבגדהוזחטיךכלםמןנסעףפץצקרשת�£�×����������®¬½¼�«»░▒▓│┤���©╣║╗╝¢¥┐└┴┬├─┼��╚╔╩╦╠═╬¤���������┘┌█▄¦�▀������µ�������¯´­±‗¾¶§÷¸°¨·¹³²■ " + }, + "ibm856": "cp856", + "csibm856": "cp856", + "cp857": { + "type": "_sbcs", + "chars": "ÇüéâäàåçêëèïîıÄÅÉæÆôöòûùİÖÜø£ØŞşáíóúñÑĞ𿮬½¼¡«»░▒▓│┤ÁÂÀ©╣║╗╝¢¥┐└┴┬├─┼ãÃ╚╔╩╦╠═╬¤ºªÊËÈ�ÍÎÏ┘┌█▄¦Ì▀ÓßÔÒõÕµ�×ÚÛÙìÿ¯´­±�¾¶§÷¸°¨·¹³²■ " + }, + "ibm857": "cp857", + "csibm857": "cp857", + "cp858": { + "type": "_sbcs", + "chars": "ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜø£Ø×ƒáíóúñѪº¿®¬½¼¡«»░▒▓│┤ÁÂÀ©╣║╗╝¢¥┐└┴┬├─┼ãÃ╚╔╩╦╠═╬¤ðÐÊËÈ€ÍÎÏ┘┌█▄¦Ì▀ÓßÔÒõÕµþÞÚÛÙýݯ´­±‗¾¶§÷¸°¨·¹³²■ " + }, + "ibm858": "cp858", + "csibm858": "cp858", + "cp860": { + "type": "_sbcs", + "chars": "ÇüéâãàÁçêÊèÍÔìÃÂÉÀÈôõòÚùÌÕÜ¢£Ù₧ÓáíóúñѪº¿Ò¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ " + }, + "ibm860": "cp860", + "csibm860": "cp860", + "cp861": { + "type": "_sbcs", + "chars": "ÇüéâäàåçêëèÐðÞÄÅÉæÆôöþûÝýÖÜø£Ø₧ƒáíóúÁÍÓÚ¿⌐¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ " + }, + "ibm861": "cp861", + "csibm861": "cp861", + "cp862": { + "type": "_sbcs", + "chars": "אבגדהוזחטיךכלםמןנסעףפץצקרשת¢£¥₧ƒáíóúñѪº¿⌐¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ " + }, + "ibm862": "cp862", + "csibm862": "cp862", + "cp863": { + "type": "_sbcs", + "chars": "ÇüéâÂà¶çêëèïî‗À§ÉÈÊôËÏûù¤ÔÜ¢£ÙÛƒ¦´óú¨¸³¯Î⌐¬½¼¾«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ " + }, + "ibm863": "cp863", + "csibm863": "cp863", + "cp864": { + "type": "_sbcs", + "chars": "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f !\"#$٪&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~°·∙√▒─│┼┤┬├┴┐┌└┘β∞φ±½¼≈«»ﻷﻸ��ﻻﻼ� ­ﺂ£¤ﺄ��ﺎﺏﺕﺙ،ﺝﺡﺥ٠١٢٣٤٥٦٧٨٩ﻑ؛ﺱﺵﺹ؟¢ﺀﺁﺃﺅﻊﺋﺍﺑﺓﺗﺛﺟﺣﺧﺩﺫﺭﺯﺳﺷﺻﺿﻁﻅﻋﻏ¦¬÷×ﻉـﻓﻗﻛﻟﻣﻧﻫﻭﻯﻳﺽﻌﻎﻍﻡﹽّﻥﻩﻬﻰﻲﻐﻕﻵﻶﻝﻙﻱ■�" + }, + "ibm864": "cp864", + "csibm864": "cp864", + "cp865": { + "type": "_sbcs", + "chars": "ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜø£Ø₧ƒáíóúñѪº¿⌐¬½¼¡«¤░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ " + }, + "ibm865": "cp865", + "csibm865": "cp865", + "cp866": { + "type": "_sbcs", + "chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмноп░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀рстуфхцчшщъыьэюяЁёЄєЇїЎў°∙·√№¤■ " + }, + "ibm866": "cp866", + "csibm866": "cp866", + "cp869": { + "type": "_sbcs", + "chars": "������Ά�·¬¦‘’Έ―ΉΊΪΌ��ΎΫ©Ώ²³ά£έήίϊΐόύΑΒΓΔΕΖΗ½ΘΙ«»░▒▓│┤ΚΛΜΝ╣║╗╝ΞΟ┐└┴┬├─┼ΠΡ╚╔╩╦╠═╬ΣΤΥΦΧΨΩαβγ┘┌█▄δε▀ζηθικλμνξοπρσςτ΄­±υφχ§ψ΅°¨ωϋΰώ■ " + }, + "ibm869": "cp869", + "csibm869": 
"cp869", + "cp922": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ¡¢£¤¥¦§¨©ª«¬­®‾°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏŠÑÒÓÔÕÖרÙÚÛÜÝŽßàáâãäåæçèéêëìíîïšñòóôõö÷øùúûüýžÿ" + }, + "ibm922": "cp922", + "csibm922": "cp922", + "cp1046": { + "type": "_sbcs", + "chars": "ﺈ×÷ﹱˆ■│─┐┌└┘ﹹﹻﹽﹿﹷﺊﻰﻳﻲﻎﻏﻐﻶﻸﻺﻼ ¤ﺋﺑﺗﺛﺟﺣ،­ﺧﺳ٠١٢٣٤٥٦٧٨٩ﺷ؛ﺻﺿﻊ؟ﻋءآأؤإئابةتثجحخدذرزسشصضطﻇعغﻌﺂﺄﺎﻓـفقكلمنهوىيًٌٍَُِّْﻗﻛﻟﻵﻷﻹﻻﻣﻧﻬﻩ�" + }, + "ibm1046": "cp1046", + "csibm1046": "cp1046", + "cp1124": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ЁЂҐЄЅІЇЈЉЊЋЌ­ЎЏАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя№ёђґєѕіїјљњћќ§ўџ" + }, + "ibm1124": "cp1124", + "csibm1124": "cp1124", + "cp1125": { + "type": "_sbcs", + "chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмноп░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀рстуфхцчшщъыьэюяЁёҐґЄєІіЇї·√№¤■ " + }, + "ibm1125": "cp1125", + "csibm1125": "cp1125", + "cp1129": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ¡¢£¤¥¦§œ©ª«¬­®¯°±²³Ÿµ¶·Œ¹º»¼½¾¿ÀÁÂĂÄÅÆÇÈÉÊË̀ÍÎÏĐÑ̉ÓÔƠÖרÙÚÛÜỮßàáâăäåæçèéêë́íîïđṇ̃óôơö÷øùúûüư₫ÿ" + }, + "ibm1129": "cp1129", + "csibm1129": "cp1129", + "cp1133": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ກຂຄງຈສຊຍດຕຖທນບປຜຝພຟມຢຣລວຫອຮ���ຯະາຳິີຶືຸູຼັົຽ���ເແໂໃໄ່້໊໋໌ໍໆ�ໜໝ₭����������������໐໑໒໓໔໕໖໗໘໙��¢¬¦�" + }, + "ibm1133": "cp1133", + "csibm1133": "cp1133", + "cp1161": { + "type": "_sbcs", + "chars": "��������������������������������่กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู้๊๋€฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛¢¬¦ " + }, + "ibm1161": "cp1161", + "csibm1161": "cp1161", + "cp1162": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู����฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛����" + }, + "ibm1162": "cp1162", + "csibm1162": "cp1162", + "cp1163": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ¡¢£€¥¦§œ©ª«¬­®¯°±²³Ÿµ¶·Œ¹º»¼½¾¿ÀÁÂĂÄÅÆÇÈÉÊË̀ÍÎÏĐÑ̉ÓÔƠÖרÙÚÛÜỮßàáâăäåæçèéêë́íîïđṇ̃óôơö÷øùúûüư₫ÿ" + }, + "ibm1163": "cp1163", + "csibm1163": "cp1163", + "maccroatian": { + "type": "_sbcs", + "chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®Š™´¨≠ŽØ∞±≤≥∆µ∂∑∏š∫ªºΩžø¿¡¬√ƒ≈ƫȅ ÀÃÕŒœĐ—“”‘’÷◊�©⁄¤‹›Æ»–·‚„‰ÂćÁčÈÍÎÏÌÓÔđÒÚÛÙıˆ˜¯πË˚¸Êæˇ" + }, + "maccyrillic": { + "type": "_sbcs", + "chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯ†°¢£§•¶І®©™Ђђ≠Ѓѓ∞±≤≥іµ∂ЈЄєЇїЉљЊњјЅ¬√ƒ≈∆«»… ЋћЌќѕ–—“”‘’÷„ЎўЏџ№Ёёяабвгдежзийклмнопрстуфхцчшщъыьэю¤" + }, + "macgreek": { + "type": "_sbcs", + "chars": "Ĺ²É³ÖÜ΅àâä΄¨çéèê룙î‰ôö¦­ùûü†ΓΔΘΛΞΠß®©ΣΪ§≠°·Α±≤≥¥ΒΕΖΗΙΚΜΦΫΨΩάΝ¬ΟΡ≈Τ«»… ΥΧΆΈœ–―“”‘’÷ΉΊΌΎέήίόΏύαβψδεφγηιξκλμνοπώρστθωςχυζϊϋΐΰ�" + }, + "maciceland": { + "type": "_sbcs", + "chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûüݰ¢£§•¶ß®©™´¨≠ÆØ∞±≤≥¥µ∂∑∏π∫ªºΩæø¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸ⁄¤ÐðÞþý·‚„‰ÂÊÁËÈÍÎÏÌÓÔ�ÒÚÛÙıˆ˜¯˘˙˚¸˝˛ˇ" + }, + "macroman": { + "type": "_sbcs", + "chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®©™´¨≠ÆØ∞±≤≥¥µ∂∑∏π∫ªºΩæø¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸ⁄¤‹›fifl‡·‚„‰ÂÊÁËÈÍÎÏÌÓÔ�ÒÚÛÙıˆ˜¯˘˙˚¸˝˛ˇ" + }, + "macromania": { + "type": "_sbcs", + "chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®©™´¨≠ĂŞ∞±≤≥¥µ∂∑∏π∫ªºΩăş¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸ⁄¤‹›Ţţ‡·‚„‰ÂÊÁËÈÍÎÏÌÓÔ�ÒÚÛÙıˆ˜¯˘˙˚¸˝˛ˇ" + }, + "macthai": { + "type": "_sbcs", + "chars": "«»…“”�•‘’� กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู​–—฿เแโใไๅๆ็่้๊๋์ํ™๏๐๑๒๓๔๕๖๗๘๙®©����" + }, + "macturkish": { + "type": "_sbcs", + "chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®©™´¨≠ÆØ∞±≤≥¥µ∂∑∏π∫ªºΩæø¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸĞğİıŞş‡·‚„‰ÂÊÁËÈÍÎÏÌÓÔ�ÒÚÛÙ�ˆ˜¯˘˙˚¸˝˛ˇ" + }, + "macukraine": { + "type": "_sbcs", + 
"chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯ†°Ґ£§•¶І®©™Ђђ≠Ѓѓ∞±≤≥іµґЈЄєЇїЉљЊњјЅ¬√ƒ≈∆«»… ЋћЌќѕ–—“”‘’÷„ЎўЏџ№Ёёяабвгдежзийклмнопрстуфхцчшщъыьэю¤" + }, + "koi8r": { + "type": "_sbcs", + "chars": "─│┌┐└┘├┤┬┴┼▀▄█▌▐░▒▓⌠■∙√≈≤≥ ⌡°²·÷═║╒ё╓╔╕╖╗╘╙╚╛╜╝╞╟╠╡Ё╢╣╤╥╦╧╨╩╪╫╬©юабцдефгхийклмнопярстужвьызшэщчъЮАБЦДЕФГХИЙКЛМНОПЯРСТУЖВЬЫЗШЭЩЧЪ" + }, + "koi8u": { + "type": "_sbcs", + "chars": "─│┌┐└┘├┤┬┴┼▀▄█▌▐░▒▓⌠■∙√≈≤≥ ⌡°²·÷═║╒ёє╔ії╗╘╙╚╛ґ╝╞╟╠╡ЁЄ╣ІЇ╦╧╨╩╪Ґ╬©юабцдефгхийклмнопярстужвьызшэщчъЮАБЦДЕФГХИЙКЛМНОПЯРСТУЖВЬЫЗШЭЩЧЪ" + }, + "koi8ru": { + "type": "_sbcs", + "chars": "─│┌┐└┘├┤┬┴┼▀▄█▌▐░▒▓⌠■∙√≈≤≥ ⌡°²·÷═║╒ёє╔ії╗╘╙╚╛ґў╞╟╠╡ЁЄ╣ІЇ╦╧╨╩╪ҐЎ©юабцдефгхийклмнопярстужвьызшэщчъЮАБЦДЕФГХИЙКЛМНОПЯРСТУЖВЬЫЗШЭЩЧЪ" + }, + "koi8t": { + "type": "_sbcs", + "chars": "қғ‚Ғ„…†‡�‰ҳ‹ҲҷҶ�Қ‘’“”•–—�™�›�����ӯӮё¤ӣ¦§���«¬­®�°±²Ё�Ӣ¶·�№�»���©юабцдефгхийклмнопярстужвьызшэщчъЮАБЦДЕФГХИЙКЛМНОПЯРСТУЖВЬЫЗШЭЩЧЪ" + }, + "armscii8": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ �և։)(»«—.՝,-֊…՜՛՞ԱաԲբԳգԴդԵեԶզԷէԸըԹթԺժԻիԼլԽխԾծԿկՀհՁձՂղՃճՄմՅյՆնՇշՈոՉչՊպՋջՌռՍսՎվՏտՐրՑցՒւՓփՔքՕօՖֆ՚�" + }, + "rk1048": { + "type": "_sbcs", + "chars": "ЂЃ‚ѓ„…†‡€‰Љ‹ЊҚҺЏђ‘’“”•–—�™љ›њқһџ ҰұӘ¤Ө¦§Ё©Ғ«¬­®Ү°±Ііөµ¶·ё№ғ»әҢңүАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя" + }, + "tcvn": { + "type": "_sbcs", + "chars": "\u0000ÚỤ\u0003ỪỬỮ\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010ỨỰỲỶỸÝỴ\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~ÀẢÃÁẠẶẬÈẺẼÉẸỆÌỈĨÍỊÒỎÕÓỌỘỜỞỠỚỢÙỦŨ ĂÂÊÔƠƯĐăâêôơưđẶ̀̀̉̃́àảãáạẲằẳẵắẴẮẦẨẪẤỀặầẩẫấậèỂẻẽéẹềểễếệìỉỄẾỒĩíịòỔỏõóọồổỗốộờởỡớợùỖủũúụừửữứựỳỷỹýỵỐ" + }, + "georgianacademy": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ¡¢£¤¥¦§¨©ª«¬­®¯°±²³´µ¶·¸¹º»¼½¾¿აბგდევზთიკლმნოპჟრსტუფქღყშჩცძწჭხჯჰჱჲჳჴჵჶçèéêëìíîïðñòóôõö÷øùúûüýþÿ" + }, + "georgianps": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ¡¢£¤¥¦§¨©ª«¬­®¯°±²³´µ¶·¸¹º»¼½¾¿აბგდევზჱთიკლმნჲოპჟრსტჳუფქღყშჩცძწჭხჴჯჰჵæçèéêëìíîïðñòóôõö÷øùúûüýþÿ" + }, + "pt154": { + "type": "_sbcs", + "chars": "ҖҒӮғ„…ҶҮҲүҠӢҢҚҺҸҗ‘’“”•–—ҳҷҡӣңқһҹ ЎўЈӨҘҰ§Ё©Ә«¬ӯ®Ҝ°ұІіҙө¶·ё№ә»јҪҫҝАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя" + }, + "viscii": { + "type": "_sbcs", + "chars": "\u0000\u0001Ẳ\u0003\u0004ẴẪ\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013Ỷ\u0015\u0016\u0017\u0018Ỹ\u001a\u001b\u001c\u001dỴ\u001f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~ẠẮẰẶẤẦẨẬẼẸẾỀỂỄỆỐỒỔỖỘỢỚỜỞỊỎỌỈỦŨỤỲÕắằặấầẩậẽẹếềểễệốồổỗỠƠộờởịỰỨỪỬơớƯÀÁÂÃẢĂẳẵÈÉÊẺÌÍĨỳĐứÒÓÔạỷừửÙÚỹỵÝỡưàáâãảăữẫèéêẻìíĩỉđựòóôõỏọụùúũủýợỮ" + }, + "iso646cn": { + "type": "_sbcs", + "chars": "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f !\"#¥%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}‾��������������������������������������������������������������������������������������������������������������������������������" + }, + "iso646jp": { + "type": "_sbcs", + "chars": "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[¥]^_`abcdefghijklmnopqrstuvwxyz{|}‾��������������������������������������������������������������������������������������������������������������������������������" + }, + "hproman8": { + "type": "_sbcs", + "chars": 
"€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ÀÂÈÊËÎÏ´ˋˆ¨˜ÙÛ₤¯Ýý°ÇçÑñ¡¿¤£¥§ƒ¢âêôûáéóúàèòùäëöüÅîØÆåíøæÄìÖÜÉïßÔÁÃãÐðÍÌÓÒÕõŠšÚŸÿÞþ·µ¶¾—¼½ªº«■»±�" + }, + "macintosh": { + "type": "_sbcs", + "chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®©™´¨≠ÆØ∞±≤≥¥µ∂∑∏π∫ªºΩæø¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸ⁄¤‹›fifl‡·‚„‰ÂÊÁËÈÍÎÏÌÓÔ�ÒÚÛÙıˆ˜¯˘˙˚¸˝˛ˇ" + }, + "ascii": { + "type": "_sbcs", + "chars": "��������������������������������������������������������������������������������������������������������������������������������" + }, + "tis620": { + "type": "_sbcs", + "chars": "���������������������������������กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู����฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛����" + } +} \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/sbcs-data.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/sbcs-data.js new file mode 100644 index 0000000000000000000000000000000000000000..066f904e5f1d3e713e573b1e1cd7dae41f463ed3 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/sbcs-data.js @@ -0,0 +1,179 @@ +"use strict"; + +// Manually added data to be used by sbcs codec in addition to generated one. + +module.exports = { + // Not supported by iconv, not sure why. + "10029": "maccenteuro", + "maccenteuro": { + "type": "_sbcs", + "chars": "ÄĀāÉĄÖÜáąČäčĆć鏟ĎíďĒēĖóėôöõúĚěü†°Ę£§•¶ß®©™ę¨≠ģĮįĪ≤≥īĶ∂∑łĻļĽľĹĺŅņѬ√ńŇ∆«»… ňŐÕőŌ–—“”‘’÷◊ōŔŕŘ‹›řŖŗŠ‚„šŚśÁŤťÍŽžŪÓÔūŮÚůŰűŲųÝýķŻŁżĢˇ" + }, + + "808": "cp808", + "ibm808": "cp808", + "cp808": { + "type": "_sbcs", + "chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмноп░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀рстуфхцчшщъыьэюяЁёЄєЇїЎў°∙·√№€■ " + }, + + "mik": { + "type": "_sbcs", + "chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя└┴┬├─┼╣║╚╔╩╦╠═╬┐░▒▓│┤№§╗╝┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ " + }, + + "cp720": { + "type": "_sbcs", + "chars": "\x80\x81éâ\x84à\x86çêëèïî\x8d\x8e\x8f\x90\u0651\u0652ô¤ـûùءآأؤ£إئابةتثجحخدذرزسشص«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀ضطظعغفµقكلمنهوىي≡\u064b\u064c\u064d\u064e\u064f\u0650≈°∙·√ⁿ²■\u00a0" + }, + + // Aliases of generated encodings. 
+ "ascii8bit": "ascii", + "usascii": "ascii", + "ansix34": "ascii", + "ansix341968": "ascii", + "ansix341986": "ascii", + "csascii": "ascii", + "cp367": "ascii", + "ibm367": "ascii", + "isoir6": "ascii", + "iso646us": "ascii", + "iso646irv": "ascii", + "us": "ascii", + + "latin1": "iso88591", + "latin2": "iso88592", + "latin3": "iso88593", + "latin4": "iso88594", + "latin5": "iso88599", + "latin6": "iso885910", + "latin7": "iso885913", + "latin8": "iso885914", + "latin9": "iso885915", + "latin10": "iso885916", + + "csisolatin1": "iso88591", + "csisolatin2": "iso88592", + "csisolatin3": "iso88593", + "csisolatin4": "iso88594", + "csisolatincyrillic": "iso88595", + "csisolatinarabic": "iso88596", + "csisolatingreek" : "iso88597", + "csisolatinhebrew": "iso88598", + "csisolatin5": "iso88599", + "csisolatin6": "iso885910", + + "l1": "iso88591", + "l2": "iso88592", + "l3": "iso88593", + "l4": "iso88594", + "l5": "iso88599", + "l6": "iso885910", + "l7": "iso885913", + "l8": "iso885914", + "l9": "iso885915", + "l10": "iso885916", + + "isoir14": "iso646jp", + "isoir57": "iso646cn", + "isoir100": "iso88591", + "isoir101": "iso88592", + "isoir109": "iso88593", + "isoir110": "iso88594", + "isoir144": "iso88595", + "isoir127": "iso88596", + "isoir126": "iso88597", + "isoir138": "iso88598", + "isoir148": "iso88599", + "isoir157": "iso885910", + "isoir166": "tis620", + "isoir179": "iso885913", + "isoir199": "iso885914", + "isoir203": "iso885915", + "isoir226": "iso885916", + + "cp819": "iso88591", + "ibm819": "iso88591", + + "cyrillic": "iso88595", + + "arabic": "iso88596", + "arabic8": "iso88596", + "ecma114": "iso88596", + "asmo708": "iso88596", + + "greek" : "iso88597", + "greek8" : "iso88597", + "ecma118" : "iso88597", + "elot928" : "iso88597", + + "hebrew": "iso88598", + "hebrew8": "iso88598", + + "turkish": "iso88599", + "turkish8": "iso88599", + + "thai": "iso885911", + "thai8": "iso885911", + + "celtic": "iso885914", + "celtic8": "iso885914", + "isoceltic": "iso885914", + + "tis6200": "tis620", + "tis62025291": "tis620", + "tis62025330": "tis620", + + "10000": "macroman", + "10006": "macgreek", + "10007": "maccyrillic", + "10079": "maciceland", + "10081": "macturkish", + + "cspc8codepage437": "cp437", + "cspc775baltic": "cp775", + "cspc850multilingual": "cp850", + "cspcp852": "cp852", + "cspc862latinhebrew": "cp862", + "cpgr": "cp869", + + "msee": "cp1250", + "mscyrl": "cp1251", + "msansi": "cp1252", + "msgreek": "cp1253", + "msturk": "cp1254", + "mshebr": "cp1255", + "msarab": "cp1256", + "winbaltrim": "cp1257", + + "cp20866": "koi8r", + "20866": "koi8r", + "ibm878": "koi8r", + "cskoi8r": "koi8r", + + "cp21866": "koi8u", + "21866": "koi8u", + "ibm1168": "koi8u", + + "strk10482002": "rk1048", + + "tcvn5712": "tcvn", + "tcvn57121": "tcvn", + + "gb198880": "iso646cn", + "cn": "iso646cn", + + "csiso14jisc6220ro": "iso646jp", + "jisc62201969ro": "iso646jp", + "jp": "iso646jp", + + "cshproman8": "hproman8", + "r8": "hproman8", + "roman8": "hproman8", + "xroman8": "hproman8", + "ibm1051": "hproman8", + + "mac": "macintosh", + "csmacintosh": "macintosh", +}; + diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/tables/big5-added.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/tables/big5-added.json new file mode 100644 index 0000000000000000000000000000000000000000..3c3d3c2f7b14c6a570e58184f68ef0894a5f812d --- /dev/null +++ 
b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/tables/big5-added.json @@ -0,0 +1,122 @@ +[ +["8740","䏰䰲䘃䖦䕸𧉧䵷䖳𧲱䳢𧳅㮕䜶䝄䱇䱀𤊿𣘗𧍒𦺋𧃒䱗𪍑䝏䗚䲅𧱬䴇䪤䚡𦬣爥𥩔𡩣𣸆𣽡晍囻"], +["8767","綕夝𨮹㷴霴𧯯寛𡵞媤㘥𩺰嫑宷峼杮薓𩥅瑡璝㡵𡵓𣚞𦀡㻬"], +["87a1","𥣞㫵竼龗𤅡𨤍𣇪𠪊𣉞䌊蒄龖鐯䤰蘓墖靊鈘秐稲晠権袝瑌篅枂稬剏遆㓦珄𥶹瓆鿇垳䤯呌䄱𣚎堘穲𧭥讏䚮𦺈䆁𥶙箮𢒼鿈𢓁𢓉𢓌鿉蔄𣖻䂴鿊䓡𪷿拁灮鿋"], +["8840","㇀",4,"𠄌㇅𠃑𠃍㇆㇇𠃋𡿨㇈𠃊㇉㇊㇋㇌𠄎㇍㇎ĀÁǍÀĒÉĚÈŌÓǑÒ࿿Ê̄Ế࿿Ê̌ỀÊāáǎàɑēéěèīíǐìōóǒòūúǔùǖǘǚ"], +["88a1","ǜü࿿ê̄ế࿿ê̌ềêɡ⏚⏛"], +["8940","𪎩𡅅"], +["8943","攊"], +["8946","丽滝鵎釟"], +["894c","𧜵撑会伨侨兖兴农凤务动医华发变团声处备夲头学实実岚庆总斉柾栄桥济炼电纤纬纺织经统缆缷艺苏药视设询车轧轮"], +["89a1","琑糼緍楆竉刧"], +["89ab","醌碸酞肼"], +["89b0","贋胶𠧧"], +["89b5","肟黇䳍鷉鸌䰾𩷶𧀎鸊𪄳㗁"], +["89c1","溚舾甙"], +["89c5","䤑马骏龙禇𨑬𡷊𠗐𢫦两亁亀亇亿仫伷㑌侽㹈倃傈㑽㒓㒥円夅凛凼刅争剹劐匧㗇厩㕑厰㕓参吣㕭㕲㚁咓咣咴咹哐哯唘唣唨㖘唿㖥㖿嗗㗅"], +["8a40","𧶄唥"], +["8a43","𠱂𠴕𥄫喐𢳆㧬𠍁蹆𤶸𩓥䁓𨂾睺𢰸㨴䟕𨅝𦧲𤷪擝𠵼𠾴𠳕𡃴撍蹾𠺖𠰋𠽤𢲩𨉖𤓓"], +["8a64","𠵆𩩍𨃩䟴𤺧𢳂骲㩧𩗴㿭㔆𥋇𩟔𧣈𢵄鵮頕"], +["8a76","䏙𦂥撴哣𢵌𢯊𡁷㧻𡁯"], +["8aa1","𦛚𦜖𧦠擪𥁒𠱃蹨𢆡𨭌𠜱"], +["8aac","䠋𠆩㿺塳𢶍"], +["8ab2","𤗈𠓼𦂗𠽌𠶖啹䂻䎺"], +["8abb","䪴𢩦𡂝膪飵𠶜捹㧾𢝵跀嚡摼㹃"], +["8ac9","𪘁𠸉𢫏𢳉"], +["8ace","𡃈𣧂㦒㨆𨊛㕸𥹉𢃇噒𠼱𢲲𩜠㒼氽𤸻"], +["8adf","𧕴𢺋𢈈𪙛𨳍𠹺𠰴𦠜羓𡃏𢠃𢤹㗻𥇣𠺌𠾍𠺪㾓𠼰𠵇𡅏𠹌"], +["8af6","𠺫𠮩𠵈𡃀𡄽㿹𢚖搲𠾭"], +["8b40","𣏴𧘹𢯎𠵾𠵿𢱑𢱕㨘𠺘𡃇𠼮𪘲𦭐𨳒𨶙𨳊閪哌苄喹"], +["8b55","𩻃鰦骶𧝞𢷮煀腭胬尜𦕲脴㞗卟𨂽醶𠻺𠸏𠹷𠻻㗝𤷫㘉𠳖嚯𢞵𡃉𠸐𠹸𡁸𡅈𨈇𡑕𠹹𤹐𢶤婔𡀝𡀞𡃵𡃶垜𠸑"], +["8ba1","𧚔𨋍𠾵𠹻𥅾㜃𠾶𡆀𥋘𪊽𤧚𡠺𤅷𨉼墙剨㘚𥜽箲孨䠀䬬鼧䧧鰟鮍𥭴𣄽嗻㗲嚉丨夂𡯁屮靑𠂆乛亻㔾尣彑忄㣺扌攵歺氵氺灬爫丬犭𤣩罒礻糹罓𦉪㓁"], +["8bde","𦍋耂肀𦘒𦥑卝衤见𧢲讠贝钅镸长门𨸏韦页风飞饣𩠐鱼鸟黄歯龜丷𠂇阝户钢"], +["8c40","倻淾𩱳龦㷉袏𤅎灷峵䬠𥇍㕙𥴰愢𨨲辧釶熑朙玺𣊁𪄇㲋𡦀䬐磤琂冮𨜏䀉橣𪊺䈣蘏𠩯稪𩥇𨫪靕灍匤𢁾鏴盙𨧣龧矝亣俰傼丯众龨吴綋墒壐𡶶庒庙忂𢜒斋"], +["8ca1","𣏹椙橃𣱣泿"], +["8ca7","爀𤔅玌㻛𤨓嬕璹讃𥲤𥚕窓篬糃繬苸薗龩袐龪躹龫迏蕟駠鈡龬𨶹𡐿䁱䊢娚"], +["8cc9","顨杫䉶圽"], +["8cce","藖𤥻芿𧄍䲁𦵴嵻𦬕𦾾龭龮宖龯曧繛湗秊㶈䓃𣉖𢞖䎚䔶"], +["8ce6","峕𣬚諹屸㴒𣕑嵸龲煗䕘𤃬𡸣䱷㥸㑊𠆤𦱁諌侴𠈹妿腬顖𩣺弻"], +["8d40","𠮟"], +["8d42","𢇁𨥭䄂䚻𩁹㼇龳𪆵䃸㟖䛷𦱆䅼𨚲𧏿䕭㣔𥒚䕡䔛䶉䱻䵶䗪㿈𤬏㙡䓞䒽䇭崾嵈嵖㷼㠏嶤嶹㠠㠸幂庽弥徃㤈㤔㤿㥍惗愽峥㦉憷憹懏㦸戬抐拥挘㧸嚱"], +["8da1","㨃揢揻搇摚㩋擀崕嘡龟㪗斆㪽旿晓㫲暒㬢朖㭂枤栀㭘桊梄㭲㭱㭻椉楃牜楤榟榅㮼槖㯝橥橴橱檂㯬檙㯲檫檵櫔櫶殁毁毪汵沪㳋洂洆洦涁㳯涤涱渕渘温溆𨧀溻滢滚齿滨滩漤漴㵆𣽁澁澾㵪㵵熷岙㶊瀬㶑灐灔灯灿炉𠌥䏁㗱𠻘"], +["8e40","𣻗垾𦻓焾𥟠㙎榢𨯩孴穉𥣡𩓙穥穽𥦬窻窰竂竃燑𦒍䇊竚竝竪䇯咲𥰁笋筕笩𥌎𥳾箢筯莜𥮴𦱿篐萡箒箸𥴠㶭𥱥蒒篺簆簵𥳁籄粃𤢂粦晽𤕸糉糇糦籴糳糵糎"], +["8ea1","繧䔝𦹄絝𦻖璍綉綫焵綳緒𤁗𦀩緤㴓緵𡟹緥𨍭縝𦄡𦅚繮纒䌫鑬縧罀罁罇礶𦋐駡羗𦍑羣𡙡𠁨䕜𣝦䔃𨌺翺𦒉者耈耝耨耯𪂇𦳃耻耼聡𢜔䦉𦘦𣷣𦛨朥肧𨩈脇脚墰𢛶汿𦒘𤾸擧𡒊舘𡡞橓𤩥𤪕䑺舩𠬍𦩒𣵾俹𡓽蓢荢𦬊𤦧𣔰𡝳𣷸芪椛芳䇛"], +["8f40","蕋苐茚𠸖𡞴㛁𣅽𣕚艻苢茘𣺋𦶣𦬅𦮗𣗎㶿茝嗬莅䔋𦶥莬菁菓㑾𦻔橗蕚㒖𦹂𢻯葘𥯤葱㷓䓤檧葊𣲵祘蒨𦮖𦹷𦹃蓞萏莑䒠蒓蓤𥲑䉀𥳀䕃蔴嫲𦺙䔧蕳䔖枿蘖"], +["8fa1","𨘥𨘻藁𧂈蘂𡖂𧃍䕫䕪蘨㙈𡢢号𧎚虾蝱𪃸蟮𢰧螱蟚蠏噡虬桖䘏衅衆𧗠𣶹𧗤衞袜䙛袴袵揁装睷𧜏覇覊覦覩覧覼𨨥觧𧤤𧪽誜瞓釾誐𧩙竩𧬺𣾏䜓𧬸煼謌謟𥐰𥕥謿譌譍誩𤩺讐讛誯𡛟䘕衏貛𧵔𧶏貫㜥𧵓賖𧶘𧶽贒贃𡤐賛灜贑𤳉㻐起"], +["9040","趩𨀂𡀔𤦊㭼𨆼𧄌竧躭躶軃鋔輙輭𨍥𨐒辥錃𪊟𠩐辳䤪𨧞𨔽𣶻廸𣉢迹𪀔𨚼𨔁𢌥㦀𦻗逷𨔼𧪾遡𨕬𨘋邨𨜓郄𨛦邮都酧㫰醩釄粬𨤳𡺉鈎沟鉁鉢𥖹銹𨫆𣲛𨬌𥗛"], +["90a1","𠴱錬鍫𨫡𨯫炏嫃𨫢𨫥䥥鉄𨯬𨰹𨯿鍳鑛躼閅閦鐦閠濶䊹𢙺𨛘𡉼𣸮䧟氜陻隖䅬隣𦻕懚隶磵𨫠隽双䦡𦲸𠉴𦐐𩂯𩃥𤫑𡤕𣌊霱虂霶䨏䔽䖅𤫩灵孁霛靜𩇕靗孊𩇫靟鐥僐𣂷𣂼鞉鞟鞱鞾韀韒韠𥑬韮琜𩐳響韵𩐝𧥺䫑頴頳顋顦㬎𧅵㵑𠘰𤅜"], +["9140","𥜆飊颷飈飇䫿𦴧𡛓喰飡飦飬鍸餹𤨩䭲𩡗𩤅駵騌騻騐驘𥜥㛄𩂱𩯕髠髢𩬅髴䰎鬔鬭𨘀倴鬴𦦨㣃𣁽魐魀𩴾婅𡡣鮎𤉋鰂鯿鰌𩹨鷔𩾷𪆒𪆫𪃡𪄣𪇟鵾鶃𪄴鸎梈"], +["91a1","鷄𢅛𪆓𪈠𡤻𪈳鴹𪂹𪊴麐麕麞麢䴴麪麯𤍤黁㭠㧥㴝伲㞾𨰫鼂鼈䮖鐤𦶢鼗鼖鼹嚟嚊齅馸𩂋韲葿齢齩竜龎爖䮾𤥵𤦻煷𤧸𤍈𤩑玞𨯚𡣺禟𨥾𨸶鍩鏳𨩄鋬鎁鏋𨥬𤒹爗㻫睲穃烐𤑳𤏸煾𡟯炣𡢾𣖙㻇𡢅𥐯𡟸㜢𡛻𡠹㛡𡝴𡣑𥽋㜣𡛀坛𤨥𡏾𡊨"], +["9240","𡏆𡒶蔃𣚦蔃葕𤦔𧅥𣸱𥕜𣻻𧁒䓴𣛮𩦝𦼦柹㜳㰕㷧塬𡤢栐䁗𣜿𤃡𤂋𤄏𦰡哋嚞𦚱嚒𠿟𠮨𠸍鏆𨬓鎜仸儫㠙𤐶亼𠑥𠍿佋侊𥙑婨𠆫𠏋㦙𠌊𠐔㐵伩𠋀𨺳𠉵諚𠈌亘"], +["92a1","働儍侢伃𤨎𣺊佂倮偬傁俌俥偘僼兙兛兝兞湶𣖕𣸹𣺿浲𡢄𣺉冨凃𠗠䓝𠒣𠒒𠒑赺𨪜𠜎剙劤𠡳勡鍮䙺熌𤎌𠰠𤦬𡃤槑𠸝瑹㻞璙琔瑖玘䮎𤪼𤂍叐㖄爏𤃉喴𠍅响𠯆圝鉝雴鍦埝垍坿㘾壋媙𨩆𡛺𡝯𡜐娬妸銏婾嫏娒𥥆𡧳𡡡𤊕㛵洅瑃娡𥺃"], +["9340","媁𨯗𠐓鏠璌𡌃焅䥲鐈𨧻鎽㞠尞岞幞幈𡦖𡥼𣫮廍孏𡤃𡤄㜁𡢠㛝𡛾㛓脪𨩇𡶺𣑲𨦨弌弎𡤧𡞫婫𡜻孄蘔𧗽衠恾𢡠𢘫忛㺸𢖯𢖾𩂈𦽳懀𠀾𠁆𢘛憙憘恵𢲛𢴇𤛔𩅍"], +["93a1","摱𤙥𢭪㨩𢬢𣑐𩣪𢹸挷𪑛撶挱揑𤧣𢵧护𢲡搻敫楲㯴𣂎𣊭𤦉𣊫唍𣋠𡣙𩐿曎𣊉𣆳㫠䆐𥖄𨬢𥖏𡛼𥕛𥐥磮𣄃𡠪𣈴㑤𣈏𣆂𤋉暎𦴤晫䮓昰𧡰𡷫晣𣋒𣋡昞𥡲㣑𣠺𣞼㮙𣞢𣏾瓐㮖枏𤘪梶栞㯄檾㡣𣟕𤒇樳橒櫉欅𡤒攑梘橌㯗橺歗𣿀𣲚鎠鋲𨯪𨫋"], +["9440","銉𨀞𨧜鑧涥漋𤧬浧𣽿㶏渄𤀼娽渊塇洤硂焻𤌚𤉶烱牐犇犔𤞏𤜥兹𤪤𠗫瑺𣻸𣙟𤩊𤤗𥿡㼆㺱𤫟𨰣𣼵悧㻳瓌琼鎇琷䒟𦷪䕑疃㽣𤳙𤴆㽘畕癳𪗆㬙瑨𨫌𤦫𤦎㫻"], +["94a1","㷍𤩎㻿𤧅𤣳釺圲鍂𨫣𡡤僟𥈡𥇧睸𣈲眎眏睻𤚗𣞁㩞𤣰琸璛㺿𤪺𤫇䃈𤪖𦆮錇𥖁砞碍碈磒珐祙𧝁𥛣䄎禛蒖禥樭𣻺稺秴䅮𡛦䄲鈵秱𠵌𤦌𠊙𣶺𡝮㖗啫㕰㚪𠇔𠰍竢婙𢛵𥪯𥪜娍𠉛磰娪𥯆竾䇹籝籭䈑𥮳𥺼𥺦糍𤧹𡞰粎籼粮檲緜縇緓罎𦉡"], +["9540","𦅜𧭈綗𥺂䉪𦭵𠤖柖𠁎𣗏埄𦐒𦏸𤥢翝笧𠠬𥫩𥵃笌𥸎駦虅驣樜𣐿㧢𤧷𦖭騟𦖠蒀𧄧𦳑䓪脷䐂胆脉腂𦞴飃𦩂艢艥𦩑葓𦶧蘐𧈛媆䅿𡡀嬫𡢡嫤𡣘蚠蜨𣶏蠭𧐢娂"], +["95a1","衮佅袇袿裦襥襍𥚃襔𧞅𧞄𨯵𨯙𨮜𨧹㺭蒣䛵䛏㟲訽訜𩑈彍鈫𤊄旔焩烄𡡅鵭貟賩𧷜妚矃姰䍮㛔踪躧𤰉輰轊䋴汘澻𢌡䢛潹溋𡟚鯩㚵𤤯邻邗啱䤆醻鐄𨩋䁢𨫼鐧𨰝𨰻蓥訫閙閧閗閖𨴴瑅㻂𤣿𤩂𤏪㻧𣈥随𨻧𨹦𨹥㻌𤧭𤩸𣿮琒瑫㻼靁𩂰"], +["9640","桇䨝𩂓𥟟靝鍨𨦉𨰦𨬯𦎾銺嬑譩䤼珹𤈛鞛靱餸𠼦巁𨯅𤪲頟𩓚鋶𩗗釥䓀𨭐𤩧𨭤飜𨩅㼀鈪䤥萔餻饍𧬆㷽馛䭯馪驜𨭥𥣈檏騡嫾騯𩣱䮐𩥈馼䮽䮗鍽塲𡌂堢𤦸"], +["96a1","𡓨硄𢜟𣶸棅㵽鑘㤧慐𢞁𢥫愇鱏鱓鱻鰵鰐魿鯏𩸭鮟𪇵𪃾鴡䲮𤄄鸘䲰鴌𪆴𪃭𪃳𩤯鶥蒽𦸒𦿟𦮂藼䔳𦶤𦺄𦷰萠藮𦸀𣟗𦁤秢𣖜𣙀䤭𤧞㵢鏛銾鍈𠊿碹鉷鑍俤㑀遤𥕝砽硔碶硋𡝗𣇉𤥁㚚佲濚濙瀞瀞吔𤆵垻壳垊鴖埗焴㒯𤆬燫𦱀𤾗嬨𡞵𨩉"], +["9740","愌嫎娋䊼𤒈㜬䭻𨧼鎻鎸𡣖𠼝葲𦳀𡐓𤋺𢰦𤏁妔𣶷𦝁綨𦅛𦂤𤦹𤦋𨧺鋥珢㻩璴𨭣𡢟㻡𤪳櫘珳珻㻖𤨾𤪔𡟙𤩦𠎧𡐤𤧥瑈𤤖炥𤥶銄珦鍟𠓾錱𨫎𨨖鎆𨯧𥗕䤵𨪂煫"], +["97a1","𤥃𠳿嚤𠘚𠯫𠲸唂秄𡟺緾𡛂𤩐𡡒䔮鐁㜊𨫀𤦭妰𡢿𡢃𧒄媡㛢𣵛㚰鉟婹𨪁𡡢鍴㳍𠪴䪖㦊僴㵩㵌𡎜煵䋻𨈘渏𩃤䓫浗𧹏灧沯㳖𣿭𣸭渂漌㵯𠏵畑㚼㓈䚀㻚䡱姄鉮䤾轁𨰜𦯀堒埈㛖𡑒烾𤍢𤩱𢿣𡊰𢎽梹楧𡎘𣓥𧯴𣛟𨪃𣟖𣏺𤲟樚𣚭𦲷萾䓟䓎"], 
+["9840","𦴦𦵑𦲂𦿞漗𧄉茽𡜺菭𦲀𧁓𡟛妉媂𡞳婡婱𡤅𤇼㜭姯𡜼㛇熎鎐暚𤊥婮娫𤊓樫𣻹𧜶𤑛𤋊焝𤉙𨧡侰𦴨峂𤓎𧹍𤎽樌𤉖𡌄炦焳𤏩㶥泟勇𤩏繥姫崯㷳彜𤩝𡟟綤萦"], +["98a1","咅𣫺𣌀𠈔坾𠣕𠘙㿥𡾞𪊶瀃𩅛嵰玏糓𨩙𩐠俈翧狍猐𧫴猸猹𥛶獁獈㺩𧬘遬燵𤣲珡臶㻊県㻑沢国琙琞琟㻢㻰㻴㻺瓓㼎㽓畂畭畲疍㽼痈痜㿀癍㿗癴㿜発𤽜熈嘣覀塩䀝睃䀹条䁅㗛瞘䁪䁯属瞾矋売砘点砜䂨砹硇硑硦葈𥔵礳栃礲䄃"], +["9940","䄉禑禙辻稆込䅧窑䆲窼艹䇄竏竛䇏両筢筬筻簒簛䉠䉺类粜䊌粸䊔糭输烀𠳏総緔緐緽羮羴犟䎗耠耥笹耮耱联㷌垴炠肷胩䏭脌猪脎脒畠脔䐁㬹腖腙腚"], +["99a1","䐓堺腼膄䐥膓䐭膥埯臁臤艔䒏芦艶苊苘苿䒰荗险榊萅烵葤惣蒈䔄蒾蓡蓸蔐蔸蕒䔻蕯蕰藠䕷虲蚒蚲蛯际螋䘆䘗袮裿褤襇覑𧥧訩訸誔誴豑賔賲贜䞘塟跃䟭仮踺嗘坔蹱嗵躰䠷軎転軤軭軲辷迁迊迌逳駄䢭飠鈓䤞鈨鉘鉫銱銮銿"], +["9a40","鋣鋫鋳鋴鋽鍃鎄鎭䥅䥑麿鐗匁鐝鐭鐾䥪鑔鑹锭関䦧间阳䧥枠䨤靀䨵鞲韂噔䫤惨颹䬙飱塄餎餙冴餜餷饂饝饢䭰駅䮝騼鬏窃魩鮁鯝鯱鯴䱭鰠㝯𡯂鵉鰺"], +["9aa1","黾噐鶓鶽鷀鷼银辶鹻麬麱麽黆铜黢黱黸竈齄𠂔𠊷𠎠椚铃妬𠓗塀铁㞹𠗕𠘕𠙶𡚺块煳𠫂𠫍𠮿呪吆𠯋咞𠯻𠰻𠱓𠱥𠱼惧𠲍噺𠲵𠳝𠳭𠵯𠶲𠷈楕鰯螥𠸄𠸎𠻗𠾐𠼭𠹳尠𠾼帋𡁜𡁏𡁶朞𡁻𡂈𡂖㙇𡂿𡃓𡄯𡄻卤蒭𡋣𡍵𡌶讁𡕷𡘙𡟃𡟇乸炻𡠭𡥪"], +["9b40","𡨭𡩅𡰪𡱰𡲬𡻈拃𡻕𡼕熘桕𢁅槩㛈𢉼𢏗𢏺𢜪𢡱𢥏苽𢥧𢦓𢫕覥𢫨辠𢬎鞸𢬿顇骽𢱌"], +["9b62","𢲈𢲷𥯨𢴈𢴒𢶷𢶕𢹂𢽴𢿌𣀳𣁦𣌟𣏞徱晈暿𧩹𣕧𣗳爁𤦺矗𣘚𣜖纇𠍆墵朎"], +["9ba1","椘𣪧𧙗𥿢𣸑𣺹𧗾𢂚䣐䪸𤄙𨪚𤋮𤌍𤀻𤌴𤎖𤩅𠗊凒𠘑妟𡺨㮾𣳿𤐄𤓖垈𤙴㦛𤜯𨗨𩧉㝢𢇃譞𨭎駖𤠒𤣻𤨕爉𤫀𠱸奥𤺥𤾆𠝹軚𥀬劏圿煱𥊙𥐙𣽊𤪧喼𥑆𥑮𦭒釔㑳𥔿𧘲𥕞䜘𥕢𥕦𥟇𤤿𥡝偦㓻𣏌惞𥤃䝼𨥈𥪮𥮉𥰆𡶐垡煑澶𦄂𧰒遖𦆲𤾚譢𦐂𦑊"], +["9c40","嵛𦯷輶𦒄𡤜諪𤧶𦒈𣿯𦔒䯀𦖿𦚵𢜛鑥𥟡憕娧晉侻嚹𤔡𦛼乪𤤴陖涏𦲽㘘襷𦞙𦡮𦐑𦡞營𦣇筂𩃀𠨑𦤦鄄𦤹穅鷰𦧺騦𦨭㙟𦑩𠀡禃𦨴𦭛崬𣔙菏𦮝䛐𦲤画补𦶮墶"], +["9ca1","㜜𢖍𧁋𧇍㱔𧊀𧊅銁𢅺𧊋錰𧋦𤧐氹钟𧑐𠻸蠧裵𢤦𨑳𡞱溸𤨪𡠠㦤㚹尐秣䔿暶𩲭𩢤襃𧟌𧡘囖䃟𡘊㦡𣜯𨃨𡏅熭荦𧧝𩆨婧䲷𧂯𨦫𧧽𧨊𧬋𧵦𤅺筃祾𨀉澵𪋟樃𨌘厢𦸇鎿栶靝𨅯𨀣𦦵𡏭𣈯𨁈嶅𨰰𨂃圕頣𨥉嶫𤦈斾槕叒𤪥𣾁㰑朶𨂐𨃴𨄮𡾡𨅏"], +["9d40","𨆉𨆯𨈚𨌆𨌯𨎊㗊𨑨𨚪䣺揦𨥖砈鉕𨦸䏲𨧧䏟𨧨𨭆𨯔姸𨰉輋𨿅𩃬筑𩄐𩄼㷷𩅞𤫊运犏嚋𩓧𩗩𩖰𩖸𩜲𩣑𩥉𩥪𩧃𩨨𩬎𩵚𩶛纟𩻸𩼣䲤镇𪊓熢𪋿䶑递𪗋䶜𠲜达嗁"], +["9da1","辺𢒰边𤪓䔉繿潖檱仪㓤𨬬𧢝㜺躀𡟵𨀤𨭬𨮙𧨾𦚯㷫𧙕𣲷𥘵𥥖亚𥺁𦉘嚿𠹭踎孭𣺈𤲞揞拐𡟶𡡻攰嘭𥱊吚𥌑㷆𩶘䱽嘢嘞罉𥻘奵𣵀蝰东𠿪𠵉𣚺脗鵞贘瘻鱅癎瞹鍅吲腈苷嘥脲萘肽嗪祢噃吖𠺝㗎嘅嗱曱𨋢㘭甴嗰喺咗啲𠱁𠲖廐𥅈𠹶𢱢"], +["9e40","𠺢麫絚嗞𡁵抝靭咔賍燶酶揼掹揾啩𢭃鱲𢺳冚㓟𠶧冧呍唞唓癦踭𦢊疱肶蠄螆裇膶萜𡃁䓬猄𤜆宐茋𦢓噻𢛴𧴯𤆣𧵳𦻐𧊶酰𡇙鈈𣳼𪚩𠺬𠻹牦𡲢䝎𤿂𧿹𠿫䃺"], +["9ea1","鱝攟𢶠䣳𤟠𩵼𠿬𠸊恢𧖣𠿭"], +["9ead","𦁈𡆇熣纎鵐业丄㕷嬍沲卧㚬㧜卽㚥𤘘墚𤭮舭呋垪𥪕𠥹"], +["9ec5","㩒𢑥獴𩺬䴉鯭𣳾𩼰䱛𤾩𩖞𩿞葜𣶶𧊲𦞳𣜠挮紥𣻷𣸬㨪逈勌㹴㙺䗩𠒎癀嫰𠺶硺𧼮墧䂿噼鮋嵴癔𪐴麅䳡痹㟻愙𣃚𤏲"], +["9ef5","噝𡊩垧𤥣𩸆刴𧂮㖭汊鵼"], +["9f40","籖鬹埞𡝬屓擓𩓐𦌵𧅤蚭𠴨𦴢𤫢𠵱"], +["9f4f","凾𡼏嶎霃𡷑麁遌笟鬂峑箣扨挵髿篏鬪籾鬮籂粆鰕篼鬉鼗鰛𤤾齚啳寃俽麘俲剠㸆勑坧偖妷帒韈鶫轜呩鞴饀鞺匬愰"], +["9fa1","椬叚鰊鴂䰻陁榀傦畆𡝭駚剳"], +["9fae","酙隁酜"], +["9fb2","酑𨺗捿𦴣櫊嘑醎畺抅𠏼獏籰𥰡𣳽"], +["9fc1","𤤙盖鮝个𠳔莾衂"], +["9fc9","届槀僭坺刟巵从氱𠇲伹咜哚劚趂㗾弌㗳"], +["9fdb","歒酼龥鮗頮颴骺麨麄煺笔"], +["9fe7","毺蠘罸"], +["9feb","嘠𪙊蹷齓"], +["9ff0","跔蹏鸜踁抂𨍽踨蹵竓𤩷稾磘泪詧瘇"], +["a040","𨩚鼦泎蟖痃𪊲硓咢贌狢獱謭猂瓱賫𤪻蘯徺袠䒷"], +["a055","𡠻𦸅"], +["a058","詾𢔛"], +["a05b","惽癧髗鵄鍮鮏蟵"], +["a063","蠏賷猬霡鮰㗖犲䰇籑饊𦅙慙䰄麖慽"], +["a073","坟慯抦戹拎㩜懢厪𣏵捤栂㗒"], +["a0a1","嵗𨯂迚𨸹"], +["a0a6","僙𡵆礆匲阸𠼻䁥"], +["a0ae","矾"], +["a0b0","糂𥼚糚稭聦聣絍甅瓲覔舚朌聢𧒆聛瓰脃眤覉𦟌畓𦻑螩蟎臈螌詉貭譃眫瓸蓚㘵榲趦"], +["a0d4","覩瑨涹蟁𤀑瓧㷛煶悤憜㳑煢恷"], +["a0e2","罱𨬭牐惩䭾删㰘𣳇𥻗𧙖𥔱𡥄𡋾𩤃𦷜𧂭峁𦆭𨨏𣙷𠃮𦡆𤼎䕢嬟𦍌齐麦𦉫"], +["a3c0","␀",31,"␡"], +["c6a1","①",9,"⑴",9,"ⅰ",9,"丶丿亅亠冂冖冫勹匸卩厶夊宀巛⼳广廴彐彡攴无疒癶辵隶¨ˆヽヾゝゞ〃仝々〆〇ー[]✽ぁ",23], +["c740","す",58,"ァアィイ"], +["c7a1","ゥ",81,"А",5,"ЁЖ",4], +["c840","Л",26,"ёж",25,"⇧↸↹㇏𠃌乚𠂊刂䒑"], +["c8a1","龰冈龱𧘇"], +["c8cd","¬¦'"㈱№℡゛゜⺀⺄⺆⺇⺈⺊⺌⺍⺕⺜⺝⺥⺧⺪⺬⺮⺶⺼⺾⻆⻊⻌⻍⻏⻖⻗⻞⻣"], +["c8f5","ʃɐɛɔɵœøŋʊɪ"], +["f9fe","■"], +["fa40","𠕇鋛𠗟𣿅蕌䊵珯况㙉𤥂𨧤鍄𡧛苮𣳈砼杄拟𤤳𨦪𠊠𦮳𡌅侫𢓭倈𦴩𧪄𣘀𤪱𢔓倩𠍾徤𠎀𠍇滛𠐟偽儁㑺儎顬㝃萖𤦤𠒇兠𣎴兪𠯿𢃼𠋥𢔰𠖎𣈳𡦃宂蝽𠖳𣲙冲冸"], +["faa1","鴴凉减凑㳜凓𤪦决凢卂凭菍椾𣜭彻刋刦刼劵剗劔効勅簕蕂勠蘍𦬓包𨫞啉滙𣾀𠥔𣿬匳卄𠯢泋𡜦栛珕恊㺪㣌𡛨燝䒢卭却𨚫卾卿𡖖𡘓矦厓𨪛厠厫厮玧𥝲㽙玜叁叅汉义埾叙㪫𠮏叠𣿫𢶣叶𠱷吓灹唫晗浛呭𦭓𠵴啝咏咤䞦𡜍𠻝㶴𠵍"], +["fb40","𨦼𢚘啇䳭启琗喆喩嘅𡣗𤀺䕒𤐵暳𡂴嘷曍𣊊暤暭噍噏磱囱鞇叾圀囯园𨭦㘣𡉏坆𤆥汮炋坂㚱𦱾埦𡐖堃𡑔𤍣堦𤯵塜墪㕡壠壜𡈼壻寿坃𪅐𤉸鏓㖡够梦㛃湙"], +["fba1","𡘾娤啓𡚒蔅姉𠵎𦲁𦴪𡟜姙𡟻𡞲𦶦浱𡠨𡛕姹𦹅媫婣㛦𤦩婷㜈媖瑥嫓𦾡𢕔㶅𡤑㜲𡚸広勐孶斈孼𧨎䀄䡝𠈄寕慠𡨴𥧌𠖥寳宝䴐尅𡭄尓珎尔𡲥𦬨屉䣝岅峩峯嶋𡷹𡸷崐崘嵆𡺤岺巗苼㠭𤤁𢁉𢅳芇㠶㯂帮檊幵幺𤒼𠳓厦亷廐厨𡝱帉廴𨒂"], +["fc40","廹廻㢠廼栾鐛弍𠇁弢㫞䢮𡌺强𦢈𢏐彘𢑱彣鞽𦹮彲鍀𨨶徧嶶㵟𥉐𡽪𧃸𢙨釖𠊞𨨩怱暅𡡷㥣㷇㘹垐𢞴祱㹀悞悤悳𤦂𤦏𧩓璤僡媠慤萤慂慈𦻒憁凴𠙖憇宪𣾷"], +["fca1","𢡟懓𨮝𩥝懐㤲𢦀𢣁怣慜攞掋𠄘担𡝰拕𢸍捬𤧟㨗搸揸𡎎𡟼撐澊𢸶頔𤂌𥜝擡擥鑻㩦携㩗敍漖𤨨𤨣斅敭敟𣁾斵𤥀䬷旑䃘𡠩无旣忟𣐀昘𣇷𣇸晄𣆤𣆥晋𠹵晧𥇦晳晴𡸽𣈱𨗴𣇈𥌓矅𢣷馤朂𤎜𤨡㬫槺𣟂杞杧杢𤇍𩃭柗䓩栢湐鈼栁𣏦𦶠桝"], +["fd40","𣑯槡樋𨫟楳棃𣗍椁椀㴲㨁𣘼㮀枬楡𨩊䋼椶榘㮡𠏉荣傐槹𣙙𢄪橅𣜃檝㯳枱櫈𩆜㰍欝𠤣惞欵歴𢟍溵𣫛𠎵𡥘㝀吡𣭚毡𣻼毜氷𢒋𤣱𦭑汚舦汹𣶼䓅𣶽𤆤𤤌𤤀"], +["fda1","𣳉㛥㳫𠴲鮃𣇹𢒑羏样𦴥𦶡𦷫涖浜湼漄𤥿𤂅𦹲蔳𦽴凇沜渝萮𨬡港𣸯瑓𣾂秌湏媑𣁋濸㜍澝𣸰滺𡒗𤀽䕕鏰潄潜㵎潴𩅰㴻澟𤅄濓𤂑𤅕𤀹𣿰𣾴𤄿凟𤅖𤅗𤅀𦇝灋灾炧炁烌烕烖烟䄄㷨熴熖𤉷焫煅媈煊煮岜𤍥煏鍢𤋁焬𤑚𤨧𤨢熺𨯨炽爎"], +["fe40","鑂爕夑鑃爤鍁𥘅爮牀𤥴梽牕牗㹕𣁄栍漽犂猪猫𤠣𨠫䣭𨠄猨献珏玪𠰺𦨮珉瑉𤇢𡛧𤨤昣㛅𤦷𤦍𤧻珷琕椃𤨦琹𠗃㻗瑜𢢭瑠𨺲瑇珤瑶莹瑬㜰瑴鏱樬璂䥓𤪌"], +["fea1","𤅟𤩹𨮏孆𨰃𡢞瓈𡦈甎瓩甞𨻙𡩋寗𨺬鎅畍畊畧畮𤾂㼄𤴓疎瑝疞疴瘂瘬癑癏癯癶𦏵皐臯㟸𦤑𦤎皡皥皷盌𦾟葢𥂝𥅽𡸜眞眦着撯𥈠睘𣊬瞯𨥤𨥨𡛁矴砉𡍶𤨒棊碯磇磓隥礮𥗠磗礴碱𧘌辸袄𨬫𦂃𢘜禆褀椂禀𥡗禝𧬹礼禩渪𧄦㺨秆𩄍秔"] +] diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/tables/cp936.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/tables/cp936.json new file mode 100644 index 0000000000000000000000000000000000000000..49ddb9a1d68fd76a82904ef694de6b2770c04575 --- /dev/null +++ 
b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/tables/cp936.json @@ -0,0 +1,264 @@ +[ +["0","\u0000",127,"€"], +["8140","丂丄丅丆丏丒丗丟丠両丣並丩丮丯丱丳丵丷丼乀乁乂乄乆乊乑乕乗乚乛乢乣乤乥乧乨乪",5,"乲乴",9,"乿",6,"亇亊"], +["8180","亐亖亗亙亜亝亞亣亪亯亰亱亴亶亷亸亹亼亽亾仈仌仏仐仒仚仛仜仠仢仦仧仩仭仮仯仱仴仸仹仺仼仾伀伂",6,"伋伌伒",4,"伜伝伡伣伨伩伬伭伮伱伳伵伷伹伻伾",4,"佄佅佇",5,"佒佔佖佡佢佦佨佪佫佭佮佱佲併佷佸佹佺佽侀侁侂侅來侇侊侌侎侐侒侓侕侖侘侙侚侜侞侟価侢"], +["8240","侤侫侭侰",4,"侶",8,"俀俁係俆俇俈俉俋俌俍俒",4,"俙俛俠俢俤俥俧俫俬俰俲俴俵俶俷俹俻俼俽俿",11], +["8280","個倎倐們倓倕倖倗倛倝倞倠倢倣値倧倫倯",10,"倻倽倿偀偁偂偄偅偆偉偊偋偍偐",4,"偖偗偘偙偛偝",7,"偦",5,"偭",8,"偸偹偺偼偽傁傂傃傄傆傇傉傊傋傌傎",20,"傤傦傪傫傭",4,"傳",6,"傼"], +["8340","傽",17,"僐",5,"僗僘僙僛",10,"僨僩僪僫僯僰僱僲僴僶",4,"僼",9,"儈"], +["8380","儉儊儌",5,"儓",13,"儢",28,"兂兇兊兌兎兏児兒兓兗兘兙兛兝",4,"兣兤兦內兩兪兯兲兺兾兿冃冄円冇冊冋冎冏冐冑冓冔冘冚冝冞冟冡冣冦",4,"冭冮冴冸冹冺冾冿凁凂凃凅凈凊凍凎凐凒",5], +["8440","凘凙凚凜凞凟凢凣凥",5,"凬凮凱凲凴凷凾刄刅刉刋刌刏刐刓刔刕刜刞刟刡刢刣別刦刧刪刬刯刱刲刴刵刼刾剄",5,"剋剎剏剒剓剕剗剘"], +["8480","剙剚剛剝剟剠剢剣剤剦剨剫剬剭剮剰剱剳",9,"剾劀劃",4,"劉",6,"劑劒劔",6,"劜劤劥劦劧劮劯劰労",9,"勀勁勂勄勅勆勈勊勌勍勎勏勑勓勔動勗務",5,"勠勡勢勣勥",10,"勱",7,"勻勼勽匁匂匃匄匇匉匊匋匌匎"], +["8540","匑匒匓匔匘匛匜匞匟匢匤匥匧匨匩匫匬匭匯",9,"匼匽區卂卄卆卋卌卍卐協単卙卛卝卥卨卪卬卭卲卶卹卻卼卽卾厀厁厃厇厈厊厎厏"], +["8580","厐",4,"厖厗厙厛厜厞厠厡厤厧厪厫厬厭厯",6,"厷厸厹厺厼厽厾叀參",4,"収叏叐叒叓叕叚叜叝叞叡叢叧叴叺叾叿吀吂吅吇吋吔吘吙吚吜吢吤吥吪吰吳吶吷吺吽吿呁呂呄呅呇呉呌呍呎呏呑呚呝",4,"呣呥呧呩",7,"呴呹呺呾呿咁咃咅咇咈咉咊咍咑咓咗咘咜咞咟咠咡"], +["8640","咢咥咮咰咲咵咶咷咹咺咼咾哃哅哊哋哖哘哛哠",4,"哫哬哯哰哱哴",5,"哻哾唀唂唃唄唅唈唊",4,"唒唓唕",5,"唜唝唞唟唡唥唦"], +["8680","唨唩唫唭唲唴唵唶唸唹唺唻唽啀啂啅啇啈啋",4,"啑啒啓啔啗",4,"啝啞啟啠啢啣啨啩啫啯",5,"啹啺啽啿喅喆喌喍喎喐喒喓喕喖喗喚喛喞喠",6,"喨",8,"喲喴営喸喺喼喿",4,"嗆嗇嗈嗊嗋嗎嗏嗐嗕嗗",4,"嗞嗠嗢嗧嗩嗭嗮嗰嗱嗴嗶嗸",4,"嗿嘂嘃嘄嘅"], +["8740","嘆嘇嘊嘋嘍嘐",7,"嘙嘚嘜嘝嘠嘡嘢嘥嘦嘨嘩嘪嘫嘮嘯嘰嘳嘵嘷嘸嘺嘼嘽嘾噀",11,"噏",4,"噕噖噚噛噝",4], +["8780","噣噥噦噧噭噮噯噰噲噳噴噵噷噸噹噺噽",7,"嚇",6,"嚐嚑嚒嚔",14,"嚤",10,"嚰",6,"嚸嚹嚺嚻嚽",12,"囋",8,"囕囖囘囙囜団囥",5,"囬囮囯囲図囶囷囸囻囼圀圁圂圅圇國",6], +["8840","園",9,"圝圞圠圡圢圤圥圦圧圫圱圲圴",4,"圼圽圿坁坃坄坅坆坈坉坋坒",4,"坘坙坢坣坥坧坬坮坰坱坲坴坵坸坹坺坽坾坿垀"], +["8880","垁垇垈垉垊垍",4,"垔",6,"垜垝垞垟垥垨垪垬垯垰垱垳垵垶垷垹",8,"埄",6,"埌埍埐埑埓埖埗埛埜埞埡埢埣埥",7,"埮埰埱埲埳埵埶執埻埼埾埿堁堃堄堅堈堉堊堌堎堏堐堒堓堔堖堗堘堚堛堜堝堟堢堣堥",4,"堫",4,"報堲堳場堶",7], +["8940","堾",5,"塅",6,"塎塏塐塒塓塕塖塗塙",4,"塟",5,"塦",4,"塭",16,"塿墂墄墆墇墈墊墋墌"], +["8980","墍",4,"墔",4,"墛墜墝墠",7,"墪",17,"墽墾墿壀壂壃壄壆",10,"壒壓壔壖",13,"壥",5,"壭壯壱売壴壵壷壸壺",7,"夃夅夆夈",4,"夎夐夑夒夓夗夘夛夝夞夠夡夢夣夦夨夬夰夲夳夵夶夻"], +["8a40","夽夾夿奀奃奅奆奊奌奍奐奒奓奙奛",4,"奡奣奤奦",12,"奵奷奺奻奼奾奿妀妅妉妋妌妎妏妐妑妔妕妘妚妛妜妝妟妠妡妢妦"], +["8a80","妧妬妭妰妱妳",5,"妺妼妽妿",6,"姇姈姉姌姍姎姏姕姖姙姛姞",4,"姤姦姧姩姪姫姭",11,"姺姼姽姾娀娂娊娋娍娎娏娐娒娔娕娖娗娙娚娛娝娞娡娢娤娦娧娨娪",6,"娳娵娷",4,"娽娾娿婁",4,"婇婈婋",9,"婖婗婘婙婛",5], +["8b40","婡婣婤婥婦婨婩婫",8,"婸婹婻婼婽婾媀",17,"媓",6,"媜",13,"媫媬"], +["8b80","媭",4,"媴媶媷媹",4,"媿嫀嫃",5,"嫊嫋嫍",4,"嫓嫕嫗嫙嫚嫛嫝嫞嫟嫢嫤嫥嫧嫨嫪嫬",4,"嫲",22,"嬊",11,"嬘",25,"嬳嬵嬶嬸",7,"孁",6], +["8c40","孈",7,"孒孖孞孠孡孧孨孫孭孮孯孲孴孶孷學孹孻孼孾孿宂宆宊宍宎宐宑宒宔宖実宧宨宩宬宭宮宯宱宲宷宺宻宼寀寁寃寈寉寊寋寍寎寏"], +["8c80","寑寔",8,"寠寢寣實寧審",4,"寯寱",6,"寽対尀専尃尅將專尋尌對導尐尒尓尗尙尛尞尟尠尡尣尦尨尩尪尫尭尮尯尰尲尳尵尶尷屃屄屆屇屌屍屒屓屔屖屗屘屚屛屜屝屟屢層屧",6,"屰屲",6,"屻屼屽屾岀岃",4,"岉岊岋岎岏岒岓岕岝",4,"岤",4], +["8d40","岪岮岯岰岲岴岶岹岺岻岼岾峀峂峃峅",5,"峌",5,"峓",5,"峚",6,"峢峣峧峩峫峬峮峯峱",9,"峼",4], +["8d80","崁崄崅崈",5,"崏",4,"崕崗崘崙崚崜崝崟",4,"崥崨崪崫崬崯",4,"崵",7,"崿",7,"嵈嵉嵍",10,"嵙嵚嵜嵞",10,"嵪嵭嵮嵰嵱嵲嵳嵵",12,"嶃",21,"嶚嶛嶜嶞嶟嶠"], +["8e40","嶡",21,"嶸",12,"巆",6,"巎",12,"巜巟巠巣巤巪巬巭"], +["8e80","巰巵巶巸",4,"巿帀帄帇帉帊帋帍帎帒帓帗帞",7,"帨",4,"帯帰帲",4,"帹帺帾帿幀幁幃幆",5,"幍",6,"幖",4,"幜幝幟幠幣",14,"幵幷幹幾庁庂広庅庈庉庌庍庎庒庘庛庝庡庢庣庤庨",4,"庮",4,"庴庺庻庼庽庿",6], +["8f40","廆廇廈廋",5,"廔廕廗廘廙廚廜",11,"廩廫",8,"廵廸廹廻廼廽弅弆弇弉弌弍弎弐弒弔弖弙弚弜弝弞弡弢弣弤"], +["8f80","弨弫弬弮弰弲",6,"弻弽弾弿彁",14,"彑彔彙彚彛彜彞彟彠彣彥彧彨彫彮彯彲彴彵彶彸彺彽彾彿徃徆徍徎徏徑従徔徖徚徛徝從徟徠徢",5,"復徫徬徯",5,"徶徸徹徺徻徾",4,"忇忈忊忋忎忓忔忕忚忛応忞忟忢忣忥忦忨忩忬忯忰忲忳忴忶忷忹忺忼怇"], +["9040","怈怉怋怌怐怑怓怗怘怚怞怟怢怣怤怬怭怮怰",4,"怶",4,"怽怾恀恄",6,"恌恎恏恑恓恔恖恗恘恛恜恞恟恠恡恥恦恮恱恲恴恵恷恾悀"], +["9080","悁悂悅悆悇悈悊悋悎悏悐悑悓悕悗悘悙悜悞悡悢悤悥悧悩悪悮悰悳悵悶悷悹悺悽",7,"惇惈惉惌",4,"惒惓惔惖惗惙惛惞惡",4,"惪惱惲惵惷惸惻",4,"愂愃愄愅愇愊愋愌愐",4,"愖愗愘愙愛愜愝愞愡愢愥愨愩愪愬",18,"慀",6], +["9140","慇慉態慍慏慐慒慓慔慖",6,"慞慟慠慡慣慤慥慦慩",6,"慱慲慳慴慶慸",18,"憌憍憏",4,"憕"], +["9180","憖",6,"憞",8,"憪憫憭",9,"憸",5,"憿懀懁懃",4,"應懌",4,"懓懕",16,"懧",13,"懶",8,"戀",5,"戇戉戓戔戙戜戝戞戠戣戦戧戨戩戫戭戯戰戱戲戵戶戸",4,"扂扄扅扆扊"], +["9240","扏扐払扖扗扙扚扜",6,"扤扥扨扱扲扴扵扷扸扺扻扽抁抂抃抅抆抇抈抋",5,"抔抙抜抝択抣抦抧抩抪抭抮抯抰抲抳抴抶抷抸抺抾拀拁"], 
+["9280","拃拋拏拑拕拝拞拠拡拤拪拫拰拲拵拸拹拺拻挀挃挄挅挆挊挋挌挍挏挐挒挓挔挕挗挘挙挜挦挧挩挬挭挮挰挱挳",5,"挻挼挾挿捀捁捄捇捈捊捑捒捓捔捖",7,"捠捤捥捦捨捪捫捬捯捰捲捳捴捵捸捹捼捽捾捿掁掃掄掅掆掋掍掑掓掔掕掗掙",6,"採掤掦掫掯掱掲掵掶掹掻掽掿揀"], +["9340","揁揂揃揅揇揈揊揋揌揑揓揔揕揗",6,"揟揢揤",4,"揫揬揮揯揰揱揳揵揷揹揺揻揼揾搃搄搆",4,"損搎搑搒搕",5,"搝搟搢搣搤"], +["9380","搥搧搨搩搫搮",5,"搵",4,"搻搼搾摀摂摃摉摋",6,"摓摕摖摗摙",4,"摟",7,"摨摪摫摬摮",9,"摻",6,"撃撆撈",8,"撓撔撗撘撚撛撜撝撟",4,"撥撦撧撨撪撫撯撱撲撳撴撶撹撻撽撾撿擁擃擄擆",6,"擏擑擓擔擕擖擙據"], +["9440","擛擜擝擟擠擡擣擥擧",24,"攁",7,"攊",7,"攓",4,"攙",8], +["9480","攢攣攤攦",4,"攬攭攰攱攲攳攷攺攼攽敀",4,"敆敇敊敋敍敎敐敒敓敔敗敘敚敜敟敠敡敤敥敧敨敩敪敭敮敯敱敳敵敶數",14,"斈斉斊斍斎斏斒斔斕斖斘斚斝斞斠斢斣斦斨斪斬斮斱",7,"斺斻斾斿旀旂旇旈旉旊旍旐旑旓旔旕旘",7,"旡旣旤旪旫"], +["9540","旲旳旴旵旸旹旻",4,"昁昄昅昇昈昉昋昍昐昑昒昖昗昘昚昛昜昞昡昢昣昤昦昩昪昫昬昮昰昲昳昷",4,"昽昿晀時晄",6,"晍晎晐晑晘"], +["9580","晙晛晜晝晞晠晢晣晥晧晩",4,"晱晲晳晵晸晹晻晼晽晿暀暁暃暅暆暈暉暊暋暍暎暏暐暒暓暔暕暘",4,"暞",8,"暩",4,"暯",4,"暵暶暷暸暺暻暼暽暿",25,"曚曞",7,"曧曨曪",5,"曱曵曶書曺曻曽朁朂會"], +["9640","朄朅朆朇朌朎朏朑朒朓朖朘朙朚朜朞朠",5,"朧朩朮朰朲朳朶朷朸朹朻朼朾朿杁杄杅杇杊杋杍杒杔杕杗",4,"杝杢杣杤杦杧杫杬杮東杴杶"], +["9680","杸杹杺杻杽枀枂枃枅枆枈枊枌枍枎枏枑枒枓枔枖枙枛枟枠枡枤枦枩枬枮枱枲枴枹",7,"柂柅",9,"柕柖柗柛柟柡柣柤柦柧柨柪柫柭柮柲柵",7,"柾栁栂栃栄栆栍栐栒栔栕栘",4,"栞栟栠栢",6,"栫",6,"栴栵栶栺栻栿桇桋桍桏桒桖",5], +["9740","桜桝桞桟桪桬",7,"桵桸",8,"梂梄梇",7,"梐梑梒梔梕梖梘",9,"梣梤梥梩梪梫梬梮梱梲梴梶梷梸"], +["9780","梹",6,"棁棃",5,"棊棌棎棏棐棑棓棔棖棗棙棛",4,"棡棢棤",9,"棯棲棳棴棶棷棸棻棽棾棿椀椂椃椄椆",4,"椌椏椑椓",11,"椡椢椣椥",7,"椮椯椱椲椳椵椶椷椸椺椻椼椾楀楁楃",16,"楕楖楘楙楛楜楟"], +["9840","楡楢楤楥楧楨楩楪楬業楯楰楲",4,"楺楻楽楾楿榁榃榅榊榋榌榎",5,"榖榗榙榚榝",9,"榩榪榬榮榯榰榲榳榵榶榸榹榺榼榽"], +["9880","榾榿槀槂",7,"構槍槏槑槒槓槕",5,"槜槝槞槡",11,"槮槯槰槱槳",9,"槾樀",9,"樋",11,"標",5,"樠樢",5,"権樫樬樭樮樰樲樳樴樶",6,"樿",4,"橅橆橈",7,"橑",6,"橚"], +["9940","橜",4,"橢橣橤橦",10,"橲",6,"橺橻橽橾橿檁檂檃檅",8,"檏檒",4,"檘",7,"檡",5], +["9980","檧檨檪檭",114,"欥欦欨",6], +["9a40","欯欰欱欳欴欵欶欸欻欼欽欿歀歁歂歄歅歈歊歋歍",11,"歚",7,"歨歩歫",13,"歺歽歾歿殀殅殈"], +["9a80","殌殎殏殐殑殔殕殗殘殙殜",4,"殢",7,"殫",7,"殶殸",6,"毀毃毄毆",4,"毌毎毐毑毘毚毜",4,"毢",7,"毬毭毮毰毱毲毴毶毷毸毺毻毼毾",6,"氈",4,"氎氒気氜氝氞氠氣氥氫氬氭氱氳氶氷氹氺氻氼氾氿汃汄汅汈汋",4,"汑汒汓汖汘"], +["9b40","汙汚汢汣汥汦汧汫",4,"汱汳汵汷汸決汻汼汿沀沄沇沊沋沍沎沑沒沕沖沗沘沚沜沝沞沠沢沨沬沯沰沴沵沶沷沺泀況泂泃泆泇泈泋泍泎泏泑泒泘"], +["9b80","泙泚泜泝泟泤泦泧泩泬泭泲泴泹泿洀洂洃洅洆洈洉洊洍洏洐洑洓洔洕洖洘洜洝洟",5,"洦洨洩洬洭洯洰洴洶洷洸洺洿浀浂浄浉浌浐浕浖浗浘浛浝浟浡浢浤浥浧浨浫浬浭浰浱浲浳浵浶浹浺浻浽",4,"涃涄涆涇涊涋涍涏涐涒涖",4,"涜涢涥涬涭涰涱涳涴涶涷涹",5,"淁淂淃淈淉淊"], +["9c40","淍淎淏淐淒淓淔淕淗淚淛淜淟淢淣淥淧淨淩淪淭淯淰淲淴淵淶淸淺淽",7,"渆渇済渉渋渏渒渓渕渘渙減渜渞渟渢渦渧渨渪測渮渰渱渳渵"], +["9c80","渶渷渹渻",7,"湅",7,"湏湐湑湒湕湗湙湚湜湝湞湠",10,"湬湭湯",14,"満溁溂溄溇溈溊",4,"溑",6,"溙溚溛溝溞溠溡溣溤溦溨溩溫溬溭溮溰溳溵溸溹溼溾溿滀滃滄滅滆滈滉滊滌滍滎滐滒滖滘滙滛滜滝滣滧滪",5], +["9d40","滰滱滲滳滵滶滷滸滺",7,"漃漄漅漇漈漊",4,"漐漑漒漖",9,"漡漢漣漥漦漧漨漬漮漰漲漴漵漷",6,"漿潀潁潂"], +["9d80","潃潄潅潈潉潊潌潎",9,"潙潚潛潝潟潠潡潣潤潥潧",5,"潯潰潱潳潵潶潷潹潻潽",6,"澅澆澇澊澋澏",12,"澝澞澟澠澢",4,"澨",10,"澴澵澷澸澺",5,"濁濃",5,"濊",6,"濓",10,"濟濢濣濤濥"], +["9e40","濦",7,"濰",32,"瀒",7,"瀜",6,"瀤",6], +["9e80","瀫",9,"瀶瀷瀸瀺",17,"灍灎灐",13,"灟",11,"灮灱灲灳灴灷灹灺灻災炁炂炃炄炆炇炈炋炌炍炏炐炑炓炗炘炚炛炞",12,"炰炲炴炵炶為炾炿烄烅烆烇烉烋",12,"烚"], +["9f40","烜烝烞烠烡烢烣烥烪烮烰",6,"烸烺烻烼烾",10,"焋",4,"焑焒焔焗焛",10,"焧",7,"焲焳焴"], +["9f80","焵焷",13,"煆煇煈煉煋煍煏",12,"煝煟",4,"煥煩",4,"煯煰煱煴煵煶煷煹煻煼煾",5,"熅",4,"熋熌熍熎熐熑熒熓熕熖熗熚",4,"熡",6,"熩熪熫熭",5,"熴熶熷熸熺",8,"燄",9,"燏",4], +["a040","燖",9,"燡燢燣燤燦燨",5,"燯",9,"燺",11,"爇",19], +["a080","爛爜爞",9,"爩爫爭爮爯爲爳爴爺爼爾牀",6,"牉牊牋牎牏牐牑牓牔牕牗牘牚牜牞牠牣牤牥牨牪牫牬牭牰牱牳牴牶牷牸牻牼牽犂犃犅",4,"犌犎犐犑犓",11,"犠",11,"犮犱犲犳犵犺",6,"狅狆狇狉狊狋狌狏狑狓狔狕狖狘狚狛"], +["a1a1"," 、。·ˉˇ¨〃々—~‖…‘’“”〔〕〈",7,"〖〗【】±×÷∶∧∨∑∏∪∩∈∷√⊥∥∠⌒⊙∫∮≡≌≈∽∝≠≮≯≤≥∞∵∴♂♀°′″℃$¤¢£‰§№☆★○●◎◇◆□■△▲※→←↑↓〓"], +["a2a1","ⅰ",9], +["a2b1","⒈",19,"⑴",19,"①",9], +["a2e5","㈠",9], +["a2f1","Ⅰ",11], +["a3a1","!"#¥%",88," ̄"], +["a4a1","ぁ",82], +["a5a1","ァ",85], +["a6a1","Α",16,"Σ",6], +["a6c1","α",16,"σ",6], +["a6e0","︵︶︹︺︿﹀︽︾﹁﹂﹃﹄"], +["a6ee","︻︼︷︸︱"], +["a6f4","︳︴"], +["a7a1","А",5,"ЁЖ",25], +["a7d1","а",5,"ёж",25], +["a840","ˊˋ˙–―‥‵℅℉↖↗↘↙∕∟∣≒≦≧⊿═",35,"▁",6], +["a880","█",7,"▓▔▕▼▽◢◣◤◥☉⊕〒〝〞"], +["a8a1","āáǎàēéěèīíǐìōóǒòūúǔùǖǘǚǜüêɑ"], +["a8bd","ńň"], +["a8c0","ɡ"], +["a8c5","ㄅ",36], +["a940","〡",8,"㊣㎎㎏㎜㎝㎞㎡㏄㏎㏑㏒㏕︰¬¦"], +["a959","℡㈱"], +["a95c","‐"], +["a960","ー゛゜ヽヾ〆ゝゞ﹉",9,"﹔﹕﹖﹗﹙",8], +["a980","﹢",4,"﹨﹩﹪﹫"], +["a996","〇"], +["a9a4","─",75], +["aa40","狜狝狟狢",5,"狪狫狵狶狹狽狾狿猀猂猄",5,"猋猌猍猏猐猑猒猔猘猙猚猟猠猣猤猦猧猨猭猯猰猲猳猵猶猺猻猼猽獀",8], 
+["aa80","獉獊獋獌獎獏獑獓獔獕獖獘",7,"獡",10,"獮獰獱"], +["ab40","獲",11,"獿",4,"玅玆玈玊玌玍玏玐玒玓玔玕玗玘玙玚玜玝玞玠玡玣",5,"玪玬玭玱玴玵玶玸玹玼玽玾玿珁珃",4], +["ab80","珋珌珎珒",6,"珚珛珜珝珟珡珢珣珤珦珨珪珫珬珮珯珰珱珳",4], +["ac40","珸",10,"琄琇琈琋琌琍琎琑",8,"琜",5,"琣琤琧琩琫琭琯琱琲琷",4,"琽琾琿瑀瑂",11], +["ac80","瑎",6,"瑖瑘瑝瑠",12,"瑮瑯瑱",4,"瑸瑹瑺"], +["ad40","瑻瑼瑽瑿璂璄璅璆璈璉璊璌璍璏璑",10,"璝璟",7,"璪",15,"璻",12], +["ad80","瓈",9,"瓓",8,"瓝瓟瓡瓥瓧",6,"瓰瓱瓲"], +["ae40","瓳瓵瓸",6,"甀甁甂甃甅",7,"甎甐甒甔甕甖甗甛甝甞甠",4,"甦甧甪甮甴甶甹甼甽甿畁畂畃畄畆畇畉畊畍畐畑畒畓畕畖畗畘"], +["ae80","畝",7,"畧畨畩畫",6,"畳畵當畷畺",4,"疀疁疂疄疅疇"], +["af40","疈疉疊疌疍疎疐疓疕疘疛疜疞疢疦",4,"疭疶疷疺疻疿痀痁痆痋痌痎痏痐痑痓痗痙痚痜痝痟痠痡痥痩痬痭痮痯痲痳痵痶痷痸痺痻痽痾瘂瘄瘆瘇"], +["af80","瘈瘉瘋瘍瘎瘏瘑瘒瘓瘔瘖瘚瘜瘝瘞瘡瘣瘧瘨瘬瘮瘯瘱瘲瘶瘷瘹瘺瘻瘽癁療癄"], +["b040","癅",6,"癎",5,"癕癗",4,"癝癟癠癡癢癤",6,"癬癭癮癰",7,"癹発發癿皀皁皃皅皉皊皌皍皏皐皒皔皕皗皘皚皛"], +["b080","皜",7,"皥",8,"皯皰皳皵",9,"盀盁盃啊阿埃挨哎唉哀皑癌蔼矮艾碍爱隘鞍氨安俺按暗岸胺案肮昂盎凹敖熬翱袄傲奥懊澳芭捌扒叭吧笆八疤巴拔跋靶把耙坝霸罢爸白柏百摆佰败拜稗斑班搬扳般颁板版扮拌伴瓣半办绊邦帮梆榜膀绑棒磅蚌镑傍谤苞胞包褒剥"], +["b140","盄盇盉盋盌盓盕盙盚盜盝盞盠",4,"盦",7,"盰盳盵盶盷盺盻盽盿眀眂眃眅眆眊県眎",10,"眛眜眝眞眡眣眤眥眧眪眫"], +["b180","眬眮眰",4,"眹眻眽眾眿睂睄睅睆睈",7,"睒",7,"睜薄雹保堡饱宝抱报暴豹鲍爆杯碑悲卑北辈背贝钡倍狈备惫焙被奔苯本笨崩绷甭泵蹦迸逼鼻比鄙笔彼碧蓖蔽毕毙毖币庇痹闭敝弊必辟壁臂避陛鞭边编贬扁便变卞辨辩辫遍标彪膘表鳖憋别瘪彬斌濒滨宾摈兵冰柄丙秉饼炳"], +["b240","睝睞睟睠睤睧睩睪睭",11,"睺睻睼瞁瞂瞃瞆",5,"瞏瞐瞓",11,"瞡瞣瞤瞦瞨瞫瞭瞮瞯瞱瞲瞴瞶",4], +["b280","瞼瞾矀",12,"矎",8,"矘矙矚矝",4,"矤病并玻菠播拨钵波博勃搏铂箔伯帛舶脖膊渤泊驳捕卜哺补埠不布步簿部怖擦猜裁材才财睬踩采彩菜蔡餐参蚕残惭惨灿苍舱仓沧藏操糙槽曹草厕策侧册测层蹭插叉茬茶查碴搽察岔差诧拆柴豺搀掺蝉馋谗缠铲产阐颤昌猖"], +["b340","矦矨矪矯矰矱矲矴矵矷矹矺矻矼砃",5,"砊砋砎砏砐砓砕砙砛砞砠砡砢砤砨砪砫砮砯砱砲砳砵砶砽砿硁硂硃硄硆硈硉硊硋硍硏硑硓硔硘硙硚"], +["b380","硛硜硞",11,"硯",7,"硸硹硺硻硽",6,"场尝常长偿肠厂敞畅唱倡超抄钞朝嘲潮巢吵炒车扯撤掣彻澈郴臣辰尘晨忱沉陈趁衬撑称城橙成呈乘程惩澄诚承逞骋秤吃痴持匙池迟弛驰耻齿侈尺赤翅斥炽充冲虫崇宠抽酬畴踌稠愁筹仇绸瞅丑臭初出橱厨躇锄雏滁除楚"], +["b440","碄碅碆碈碊碋碏碐碒碔碕碖碙碝碞碠碢碤碦碨",7,"碵碶碷碸確碻碼碽碿磀磂磃磄磆磇磈磌磍磎磏磑磒磓磖磗磘磚",9], +["b480","磤磥磦磧磩磪磫磭",4,"磳磵磶磸磹磻",5,"礂礃礄礆",6,"础储矗搐触处揣川穿椽传船喘串疮窗幢床闯创吹炊捶锤垂春椿醇唇淳纯蠢戳绰疵茨磁雌辞慈瓷词此刺赐次聪葱囱匆从丛凑粗醋簇促蹿篡窜摧崔催脆瘁粹淬翠村存寸磋撮搓措挫错搭达答瘩打大呆歹傣戴带殆代贷袋待逮"], +["b540","礍",5,"礔",9,"礟",4,"礥",14,"礵",4,"礽礿祂祃祄祅祇祊",8,"祔祕祘祙祡祣"], +["b580","祤祦祩祪祫祬祮祰",6,"祹祻",4,"禂禃禆禇禈禉禋禌禍禎禐禑禒怠耽担丹单郸掸胆旦氮但惮淡诞弹蛋当挡党荡档刀捣蹈倒岛祷导到稻悼道盗德得的蹬灯登等瞪凳邓堤低滴迪敌笛狄涤翟嫡抵底地蒂第帝弟递缔颠掂滇碘点典靛垫电佃甸店惦奠淀殿碉叼雕凋刁掉吊钓调跌爹碟蝶迭谍叠"], +["b640","禓",6,"禛",11,"禨",10,"禴",4,"禼禿秂秄秅秇秈秊秌秎秏秐秓秔秖秗秙",5,"秠秡秢秥秨秪"], +["b680","秬秮秱",6,"秹秺秼秾秿稁稄稅稇稈稉稊稌稏",4,"稕稖稘稙稛稜丁盯叮钉顶鼎锭定订丢东冬董懂动栋侗恫冻洞兜抖斗陡豆逗痘都督毒犊独读堵睹赌杜镀肚度渡妒端短锻段断缎堆兑队对墩吨蹲敦顿囤钝盾遁掇哆多夺垛躲朵跺舵剁惰堕蛾峨鹅俄额讹娥恶厄扼遏鄂饿恩而儿耳尔饵洱二"], +["b740","稝稟稡稢稤",14,"稴稵稶稸稺稾穀",5,"穇",9,"穒",4,"穘",16], +["b780","穩",6,"穱穲穳穵穻穼穽穾窂窅窇窉窊窋窌窎窏窐窓窔窙窚窛窞窡窢贰发罚筏伐乏阀法珐藩帆番翻樊矾钒繁凡烦反返范贩犯饭泛坊芳方肪房防妨仿访纺放菲非啡飞肥匪诽吠肺废沸费芬酚吩氛分纷坟焚汾粉奋份忿愤粪丰封枫蜂峰锋风疯烽逢冯缝讽奉凤佛否夫敷肤孵扶拂辐幅氟符伏俘服"], +["b840","窣窤窧窩窪窫窮",4,"窴",10,"竀",10,"竌",9,"竗竘竚竛竜竝竡竢竤竧",5,"竮竰竱竲竳"], +["b880","竴",4,"竻竼竾笀笁笂笅笇笉笌笍笎笐笒笓笖笗笘笚笜笝笟笡笢笣笧笩笭浮涪福袱弗甫抚辅俯釜斧脯腑府腐赴副覆赋复傅付阜父腹负富讣附妇缚咐噶嘎该改概钙盖溉干甘杆柑竿肝赶感秆敢赣冈刚钢缸肛纲岗港杠篙皋高膏羔糕搞镐稿告哥歌搁戈鸽胳疙割革葛格蛤阁隔铬个各给根跟耕更庚羹"], +["b940","笯笰笲笴笵笶笷笹笻笽笿",5,"筆筈筊筍筎筓筕筗筙筜筞筟筡筣",10,"筯筰筳筴筶筸筺筼筽筿箁箂箃箄箆",6,"箎箏"], +["b980","箑箒箓箖箘箙箚箛箞箟箠箣箤箥箮箯箰箲箳箵箶箷箹",7,"篂篃範埂耿梗工攻功恭龚供躬公宫弓巩汞拱贡共钩勾沟苟狗垢构购够辜菇咕箍估沽孤姑鼓古蛊骨谷股故顾固雇刮瓜剐寡挂褂乖拐怪棺关官冠观管馆罐惯灌贯光广逛瑰规圭硅归龟闺轨鬼诡癸桂柜跪贵刽辊滚棍锅郭国果裹过哈"], +["ba40","篅篈築篊篋篍篎篏篐篒篔",4,"篛篜篞篟篠篢篣篤篧篨篩篫篬篭篯篰篲",4,"篸篹篺篻篽篿",7,"簈簉簊簍簎簐",5,"簗簘簙"], +["ba80","簚",4,"簠",5,"簨簩簫",12,"簹",5,"籂骸孩海氦亥害骇酣憨邯韩含涵寒函喊罕翰撼捍旱憾悍焊汗汉夯杭航壕嚎豪毫郝好耗号浩呵喝荷菏核禾和何合盒貉阂河涸赫褐鹤贺嘿黑痕很狠恨哼亨横衡恒轰哄烘虹鸿洪宏弘红喉侯猴吼厚候后呼乎忽瑚壶葫胡蝴狐糊湖"], +["bb40","籃",9,"籎",36,"籵",5,"籾",9], +["bb80","粈粊",6,"粓粔粖粙粚粛粠粡粣粦粧粨粩粫粬粭粯粰粴",4,"粺粻弧虎唬护互沪户花哗华猾滑画划化话槐徊怀淮坏欢环桓还缓换患唤痪豢焕涣宦幻荒慌黄磺蝗簧皇凰惶煌晃幌恍谎灰挥辉徽恢蛔回毁悔慧卉惠晦贿秽会烩汇讳诲绘荤昏婚魂浑混豁活伙火获或惑霍货祸击圾基机畸稽积箕"], +["bc40","粿糀糂糃糄糆糉糋糎",6,"糘糚糛糝糞糡",6,"糩",5,"糰",7,"糹糺糼",13,"紋",5], +["bc80","紑",14,"紡紣紤紥紦紨紩紪紬紭紮細",6,"肌饥迹激讥鸡姬绩缉吉极棘辑籍集及急疾汲即嫉级挤几脊己蓟技冀季伎祭剂悸济寄寂计记既忌际妓继纪嘉枷夹佳家加荚颊贾甲钾假稼价架驾嫁歼监坚尖笺间煎兼肩艰奸缄茧检柬碱硷拣捡简俭剪减荐槛鉴践贱见键箭件"], +["bd40","紷",54,"絯",7], +["bd80","絸",32,"健舰剑饯渐溅涧建僵姜将浆江疆蒋桨奖讲匠酱降蕉椒礁焦胶交郊浇骄娇嚼搅铰矫侥脚狡角饺缴绞剿教酵轿较叫窖揭接皆秸街阶截劫节桔杰捷睫竭洁结解姐戒藉芥界借介疥诫届巾筋斤金今津襟紧锦仅谨进靳晋禁近烬浸"], +["be40","継",12,"綧",6,"綯",42], +["be80","線",32,"尽劲荆兢茎睛晶鲸京惊精粳经井警景颈静境敬镜径痉靖竟竞净炯窘揪究纠玖韭久灸九酒厩救旧臼舅咎就疚鞠拘狙疽居驹菊局咀矩举沮聚拒据巨具距踞锯俱句惧炬剧捐鹃娟倦眷卷绢撅攫抉掘倔爵觉决诀绝均菌钧军君峻"], +["bf40","緻",62], 
+["bf80","縺縼",4,"繂",4,"繈",21,"俊竣浚郡骏喀咖卡咯开揩楷凯慨刊堪勘坎砍看康慷糠扛抗亢炕考拷烤靠坷苛柯棵磕颗科壳咳可渴克刻客课肯啃垦恳坑吭空恐孔控抠口扣寇枯哭窟苦酷库裤夸垮挎跨胯块筷侩快宽款匡筐狂框矿眶旷况亏盔岿窥葵奎魁傀"], +["c040","繞",35,"纃",23,"纜纝纞"], +["c080","纮纴纻纼绖绤绬绹缊缐缞缷缹缻",6,"罃罆",9,"罒罓馈愧溃坤昆捆困括扩廓阔垃拉喇蜡腊辣啦莱来赖蓝婪栏拦篮阑兰澜谰揽览懒缆烂滥琅榔狼廊郎朗浪捞劳牢老佬姥酪烙涝勒乐雷镭蕾磊累儡垒擂肋类泪棱楞冷厘梨犁黎篱狸离漓理李里鲤礼莉荔吏栗丽厉励砾历利傈例俐"], +["c140","罖罙罛罜罝罞罠罣",4,"罫罬罭罯罰罳罵罶罷罸罺罻罼罽罿羀羂",7,"羋羍羏",4,"羕",4,"羛羜羠羢羣羥羦羨",6,"羱"], +["c180","羳",4,"羺羻羾翀翂翃翄翆翇翈翉翋翍翏",4,"翖翗翙",5,"翢翣痢立粒沥隶力璃哩俩联莲连镰廉怜涟帘敛脸链恋炼练粮凉梁粱良两辆量晾亮谅撩聊僚疗燎寥辽潦了撂镣廖料列裂烈劣猎琳林磷霖临邻鳞淋凛赁吝拎玲菱零龄铃伶羚凌灵陵岭领另令溜琉榴硫馏留刘瘤流柳六龙聋咙笼窿"], +["c240","翤翧翨翪翫翬翭翯翲翴",6,"翽翾翿耂耇耈耉耊耎耏耑耓耚耛耝耞耟耡耣耤耫",5,"耲耴耹耺耼耾聀聁聄聅聇聈聉聎聏聐聑聓聕聖聗"], +["c280","聙聛",13,"聫",5,"聲",11,"隆垄拢陇楼娄搂篓漏陋芦卢颅庐炉掳卤虏鲁麓碌露路赂鹿潞禄录陆戮驴吕铝侣旅履屡缕虑氯律率滤绿峦挛孪滦卵乱掠略抡轮伦仑沦纶论萝螺罗逻锣箩骡裸落洛骆络妈麻玛码蚂马骂嘛吗埋买麦卖迈脉瞒馒蛮满蔓曼慢漫"], +["c340","聾肁肂肅肈肊肍",5,"肔肕肗肙肞肣肦肧肨肬肰肳肵肶肸肹肻胅胇",4,"胏",6,"胘胟胠胢胣胦胮胵胷胹胻胾胿脀脁脃脄脅脇脈脋"], +["c380","脌脕脗脙脛脜脝脟",12,"脭脮脰脳脴脵脷脹",4,"脿谩芒茫盲氓忙莽猫茅锚毛矛铆卯茂冒帽貌贸么玫枚梅酶霉煤没眉媒镁每美昧寐妹媚门闷们萌蒙檬盟锰猛梦孟眯醚靡糜迷谜弥米秘觅泌蜜密幂棉眠绵冕免勉娩缅面苗描瞄藐秒渺庙妙蔑灭民抿皿敏悯闽明螟鸣铭名命谬摸"], +["c440","腀",5,"腇腉腍腎腏腒腖腗腘腛",4,"腡腢腣腤腦腨腪腫腬腯腲腳腵腶腷腸膁膃",4,"膉膋膌膍膎膐膒",5,"膙膚膞",4,"膤膥"], +["c480","膧膩膫",7,"膴",5,"膼膽膾膿臄臅臇臈臉臋臍",6,"摹蘑模膜磨摩魔抹末莫墨默沫漠寞陌谋牟某拇牡亩姆母墓暮幕募慕木目睦牧穆拿哪呐钠那娜纳氖乃奶耐奈南男难囊挠脑恼闹淖呢馁内嫩能妮霓倪泥尼拟你匿腻逆溺蔫拈年碾撵捻念娘酿鸟尿捏聂孽啮镊镍涅您柠狞凝宁"], +["c540","臔",14,"臤臥臦臨臩臫臮",4,"臵",5,"臽臿舃與",4,"舎舏舑舓舕",5,"舝舠舤舥舦舧舩舮舲舺舼舽舿"], +["c580","艀艁艂艃艅艆艈艊艌艍艎艐",7,"艙艛艜艝艞艠",7,"艩拧泞牛扭钮纽脓浓农弄奴努怒女暖虐疟挪懦糯诺哦欧鸥殴藕呕偶沤啪趴爬帕怕琶拍排牌徘湃派攀潘盘磐盼畔判叛乓庞旁耪胖抛咆刨炮袍跑泡呸胚培裴赔陪配佩沛喷盆砰抨烹澎彭蓬棚硼篷膨朋鹏捧碰坯砒霹批披劈琵毗"], +["c640","艪艫艬艭艱艵艶艷艸艻艼芀芁芃芅芆芇芉芌芐芓芔芕芖芚芛芞芠芢芣芧芲芵芶芺芻芼芿苀苂苃苅苆苉苐苖苙苚苝苢苧苨苩苪苬苭苮苰苲苳苵苶苸"], +["c680","苺苼",4,"茊茋茍茐茒茓茖茘茙茝",9,"茩茪茮茰茲茷茻茽啤脾疲皮匹痞僻屁譬篇偏片骗飘漂瓢票撇瞥拼频贫品聘乒坪苹萍平凭瓶评屏坡泼颇婆破魄迫粕剖扑铺仆莆葡菩蒲埔朴圃普浦谱曝瀑期欺栖戚妻七凄漆柒沏其棋奇歧畦崎脐齐旗祈祁骑起岂乞企启契砌器气迄弃汽泣讫掐"], +["c740","茾茿荁荂荄荅荈荊",4,"荓荕",4,"荝荢荰",6,"荹荺荾",6,"莇莈莊莋莌莍莏莐莑莔莕莖莗莙莚莝莟莡",6,"莬莭莮"], +["c780","莯莵莻莾莿菂菃菄菆菈菉菋菍菎菐菑菒菓菕菗菙菚菛菞菢菣菤菦菧菨菫菬菭恰洽牵扦钎铅千迁签仟谦乾黔钱钳前潜遣浅谴堑嵌欠歉枪呛腔羌墙蔷强抢橇锹敲悄桥瞧乔侨巧鞘撬翘峭俏窍切茄且怯窃钦侵亲秦琴勤芹擒禽寝沁青轻氢倾卿清擎晴氰情顷请庆琼穷秋丘邱球求囚酋泅趋区蛆曲躯屈驱渠"], +["c840","菮華菳",4,"菺菻菼菾菿萀萂萅萇萈萉萊萐萒",5,"萙萚萛萞",5,"萩",7,"萲",5,"萹萺萻萾",7,"葇葈葉"], +["c880","葊",6,"葒",4,"葘葝葞葟葠葢葤",4,"葪葮葯葰葲葴葷葹葻葼取娶龋趣去圈颧权醛泉全痊拳犬券劝缺炔瘸却鹊榷确雀裙群然燃冉染瓤壤攘嚷让饶扰绕惹热壬仁人忍韧任认刃妊纫扔仍日戎茸蓉荣融熔溶容绒冗揉柔肉茹蠕儒孺如辱乳汝入褥软阮蕊瑞锐闰润若弱撒洒萨腮鳃塞赛三叁"], +["c940","葽",4,"蒃蒄蒅蒆蒊蒍蒏",7,"蒘蒚蒛蒝蒞蒟蒠蒢",12,"蒰蒱蒳蒵蒶蒷蒻蒼蒾蓀蓂蓃蓅蓆蓇蓈蓋蓌蓎蓏蓒蓔蓕蓗"], +["c980","蓘",4,"蓞蓡蓢蓤蓧",4,"蓭蓮蓯蓱",10,"蓽蓾蔀蔁蔂伞散桑嗓丧搔骚扫嫂瑟色涩森僧莎砂杀刹沙纱傻啥煞筛晒珊苫杉山删煽衫闪陕擅赡膳善汕扇缮墒伤商赏晌上尚裳梢捎稍烧芍勺韶少哨邵绍奢赊蛇舌舍赦摄射慑涉社设砷申呻伸身深娠绅神沈审婶甚肾慎渗声生甥牲升绳"], +["ca40","蔃",8,"蔍蔎蔏蔐蔒蔔蔕蔖蔘蔙蔛蔜蔝蔞蔠蔢",8,"蔭",9,"蔾",4,"蕄蕅蕆蕇蕋",10], +["ca80","蕗蕘蕚蕛蕜蕝蕟",4,"蕥蕦蕧蕩",8,"蕳蕵蕶蕷蕸蕼蕽蕿薀薁省盛剩胜圣师失狮施湿诗尸虱十石拾时什食蚀实识史矢使屎驶始式示士世柿事拭誓逝势是嗜噬适仕侍释饰氏市恃室视试收手首守寿授售受瘦兽蔬枢梳殊抒输叔舒淑疏书赎孰熟薯暑曙署蜀黍鼠属术述树束戍竖墅庶数漱"], +["cb40","薂薃薆薈",6,"薐",10,"薝",6,"薥薦薧薩薫薬薭薱",5,"薸薺",6,"藂",6,"藊",4,"藑藒"], +["cb80","藔藖",5,"藝",6,"藥藦藧藨藪",14,"恕刷耍摔衰甩帅栓拴霜双爽谁水睡税吮瞬顺舜说硕朔烁斯撕嘶思私司丝死肆寺嗣四伺似饲巳松耸怂颂送宋讼诵搜艘擞嗽苏酥俗素速粟僳塑溯宿诉肃酸蒜算虽隋随绥髓碎岁穗遂隧祟孙损笋蓑梭唆缩琐索锁所塌他它她塔"], +["cc40","藹藺藼藽藾蘀",4,"蘆",10,"蘒蘓蘔蘕蘗",15,"蘨蘪",13,"蘹蘺蘻蘽蘾蘿虀"], +["cc80","虁",11,"虒虓處",4,"虛虜虝號虠虡虣",7,"獭挞蹋踏胎苔抬台泰酞太态汰坍摊贪瘫滩坛檀痰潭谭谈坦毯袒碳探叹炭汤塘搪堂棠膛唐糖倘躺淌趟烫掏涛滔绦萄桃逃淘陶讨套特藤腾疼誊梯剔踢锑提题蹄啼体替嚏惕涕剃屉天添填田甜恬舔腆挑条迢眺跳贴铁帖厅听烃"], +["cd40","虭虯虰虲",6,"蚃",6,"蚎",4,"蚔蚖",5,"蚞",4,"蚥蚦蚫蚭蚮蚲蚳蚷蚸蚹蚻",4,"蛁蛂蛃蛅蛈蛌蛍蛒蛓蛕蛖蛗蛚蛜"], +["cd80","蛝蛠蛡蛢蛣蛥蛦蛧蛨蛪蛫蛬蛯蛵蛶蛷蛺蛻蛼蛽蛿蜁蜄蜅蜆蜋蜌蜎蜏蜐蜑蜔蜖汀廷停亭庭挺艇通桐酮瞳同铜彤童桶捅筒统痛偷投头透凸秃突图徒途涂屠土吐兔湍团推颓腿蜕褪退吞屯臀拖托脱鸵陀驮驼椭妥拓唾挖哇蛙洼娃瓦袜歪外豌弯湾玩顽丸烷完碗挽晚皖惋宛婉万腕汪王亡枉网往旺望忘妄威"], +["ce40","蜙蜛蜝蜟蜠蜤蜦蜧蜨蜪蜫蜬蜭蜯蜰蜲蜳蜵蜶蜸蜹蜺蜼蜽蝀",6,"蝊蝋蝍蝏蝐蝑蝒蝔蝕蝖蝘蝚",5,"蝡蝢蝦",7,"蝯蝱蝲蝳蝵"], +["ce80","蝷蝸蝹蝺蝿螀螁螄螆螇螉螊螌螎",4,"螔螕螖螘",6,"螠",4,"巍微危韦违桅围唯惟为潍维苇萎委伟伪尾纬未蔚味畏胃喂魏位渭谓尉慰卫瘟温蚊文闻纹吻稳紊问嗡翁瓮挝蜗涡窝我斡卧握沃巫呜钨乌污诬屋无芜梧吾吴毋武五捂午舞伍侮坞戊雾晤物勿务悟误昔熙析西硒矽晰嘻吸锡牺"], +["cf40","螥螦螧螩螪螮螰螱螲螴螶螷螸螹螻螼螾螿蟁",4,"蟇蟈蟉蟌",4,"蟔",6,"蟜蟝蟞蟟蟡蟢蟣蟤蟦蟧蟨蟩蟫蟬蟭蟯",9], +["cf80","蟺蟻蟼蟽蟿蠀蠁蠂蠄",5,"蠋",7,"蠔蠗蠘蠙蠚蠜",4,"蠣稀息希悉膝夕惜熄烯溪汐犀檄袭席习媳喜铣洗系隙戏细瞎虾匣霞辖暇峡侠狭下厦夏吓掀锨先仙鲜纤咸贤衔舷闲涎弦嫌显险现献县腺馅羡宪陷限线相厢镶香箱襄湘乡翔祥详想响享项巷橡像向象萧硝霄削哮嚣销消宵淆晓"], +["d040","蠤",13,"蠳",5,"蠺蠻蠽蠾蠿衁衂衃衆",5,"衎",5,"衕衖衘衚",6,"衦衧衪衭衯衱衳衴衵衶衸衹衺"], 
+["d080","衻衼袀袃袆袇袉袊袌袎袏袐袑袓袔袕袗",4,"袝",4,"袣袥",5,"小孝校肖啸笑效楔些歇蝎鞋协挟携邪斜胁谐写械卸蟹懈泄泻谢屑薪芯锌欣辛新忻心信衅星腥猩惺兴刑型形邢行醒幸杏性姓兄凶胸匈汹雄熊休修羞朽嗅锈秀袖绣墟戌需虚嘘须徐许蓄酗叙旭序畜恤絮婿绪续轩喧宣悬旋玄"], +["d140","袬袮袯袰袲",4,"袸袹袺袻袽袾袿裀裃裄裇裈裊裋裌裍裏裐裑裓裖裗裚",4,"裠裡裦裧裩",6,"裲裵裶裷裺裻製裿褀褁褃",5], +["d180","褉褋",4,"褑褔",4,"褜",4,"褢褣褤褦褧褨褩褬褭褮褯褱褲褳褵褷选癣眩绚靴薛学穴雪血勋熏循旬询寻驯巡殉汛训讯逊迅压押鸦鸭呀丫芽牙蚜崖衙涯雅哑亚讶焉咽阉烟淹盐严研蜒岩延言颜阎炎沿奄掩眼衍演艳堰燕厌砚雁唁彦焰宴谚验殃央鸯秧杨扬佯疡羊洋阳氧仰痒养样漾邀腰妖瑶"], +["d240","褸",8,"襂襃襅",24,"襠",5,"襧",19,"襼"], +["d280","襽襾覀覂覄覅覇",26,"摇尧遥窑谣姚咬舀药要耀椰噎耶爷野冶也页掖业叶曳腋夜液一壹医揖铱依伊衣颐夷遗移仪胰疑沂宜姨彝椅蚁倚已乙矣以艺抑易邑屹亿役臆逸肄疫亦裔意毅忆义益溢诣议谊译异翼翌绎茵荫因殷音阴姻吟银淫寅饮尹引隐"], +["d340","覢",30,"觃觍觓觔觕觗觘觙觛觝觟觠觡觢觤觧觨觩觪觬觭觮觰觱觲觴",6], +["d380","觻",4,"訁",5,"計",21,"印英樱婴鹰应缨莹萤营荧蝇迎赢盈影颖硬映哟拥佣臃痈庸雍踊蛹咏泳涌永恿勇用幽优悠忧尤由邮铀犹油游酉有友右佑釉诱又幼迂淤于盂榆虞愚舆余俞逾鱼愉渝渔隅予娱雨与屿禹宇语羽玉域芋郁吁遇喻峪御愈欲狱育誉"], +["d440","訞",31,"訿",8,"詉",21], +["d480","詟",25,"詺",6,"浴寓裕预豫驭鸳渊冤元垣袁原援辕园员圆猿源缘远苑愿怨院曰约越跃钥岳粤月悦阅耘云郧匀陨允运蕴酝晕韵孕匝砸杂栽哉灾宰载再在咱攒暂赞赃脏葬遭糟凿藻枣早澡蚤躁噪造皂灶燥责择则泽贼怎增憎曾赠扎喳渣札轧"], +["d540","誁",7,"誋",7,"誔",46], +["d580","諃",32,"铡闸眨栅榨咋乍炸诈摘斋宅窄债寨瞻毡詹粘沾盏斩辗崭展蘸栈占战站湛绽樟章彰漳张掌涨杖丈帐账仗胀瘴障招昭找沼赵照罩兆肇召遮折哲蛰辙者锗蔗这浙珍斟真甄砧臻贞针侦枕疹诊震振镇阵蒸挣睁征狰争怔整拯正政"], +["d640","諤",34,"謈",27], +["d680","謤謥謧",30,"帧症郑证芝枝支吱蜘知肢脂汁之织职直植殖执值侄址指止趾只旨纸志挚掷至致置帜峙制智秩稚质炙痔滞治窒中盅忠钟衷终种肿重仲众舟周州洲诌粥轴肘帚咒皱宙昼骤珠株蛛朱猪诸诛逐竹烛煮拄瞩嘱主著柱助蛀贮铸筑"], +["d740","譆",31,"譧",4,"譭",25], +["d780","讇",24,"讬讱讻诇诐诪谉谞住注祝驻抓爪拽专砖转撰赚篆桩庄装妆撞壮状椎锥追赘坠缀谆准捉拙卓桌琢茁酌啄着灼浊兹咨资姿滋淄孜紫仔籽滓子自渍字鬃棕踪宗综总纵邹走奏揍租足卒族祖诅阻组钻纂嘴醉最罪尊遵昨左佐柞做作坐座"], +["d840","谸",8,"豂豃豄豅豈豊豋豍",7,"豖豗豘豙豛",5,"豣",6,"豬",6,"豴豵豶豷豻",6,"貃貄貆貇"], +["d880","貈貋貍",6,"貕貖貗貙",20,"亍丌兀丐廿卅丕亘丞鬲孬噩丨禺丿匕乇夭爻卮氐囟胤馗毓睾鼗丶亟鼐乜乩亓芈孛啬嘏仄厍厝厣厥厮靥赝匚叵匦匮匾赜卦卣刂刈刎刭刳刿剀剌剞剡剜蒯剽劂劁劐劓冂罔亻仃仉仂仨仡仫仞伛仳伢佤仵伥伧伉伫佞佧攸佚佝"], +["d940","貮",62], +["d980","賭",32,"佟佗伲伽佶佴侑侉侃侏佾佻侪佼侬侔俦俨俪俅俚俣俜俑俟俸倩偌俳倬倏倮倭俾倜倌倥倨偾偃偕偈偎偬偻傥傧傩傺僖儆僭僬僦僮儇儋仝氽佘佥俎龠汆籴兮巽黉馘冁夔勹匍訇匐凫夙兕亠兖亳衮袤亵脔裒禀嬴蠃羸冫冱冽冼"], +["da40","贎",14,"贠赑赒赗赟赥赨赩赪赬赮赯赱赲赸",8,"趂趃趆趇趈趉趌",4,"趒趓趕",9,"趠趡"], +["da80","趢趤",12,"趲趶趷趹趻趽跀跁跂跅跇跈跉跊跍跐跒跓跔凇冖冢冥讠讦讧讪讴讵讷诂诃诋诏诎诒诓诔诖诘诙诜诟诠诤诨诩诮诰诳诶诹诼诿谀谂谄谇谌谏谑谒谔谕谖谙谛谘谝谟谠谡谥谧谪谫谮谯谲谳谵谶卩卺阝阢阡阱阪阽阼陂陉陔陟陧陬陲陴隈隍隗隰邗邛邝邙邬邡邴邳邶邺"], +["db40","跕跘跙跜跠跡跢跥跦跧跩跭跮跰跱跲跴跶跼跾",6,"踆踇踈踋踍踎踐踑踒踓踕",7,"踠踡踤",4,"踫踭踰踲踳踴踶踷踸踻踼踾"], +["db80","踿蹃蹅蹆蹌",4,"蹓",5,"蹚",11,"蹧蹨蹪蹫蹮蹱邸邰郏郅邾郐郄郇郓郦郢郜郗郛郫郯郾鄄鄢鄞鄣鄱鄯鄹酃酆刍奂劢劬劭劾哿勐勖勰叟燮矍廴凵凼鬯厶弁畚巯坌垩垡塾墼壅壑圩圬圪圳圹圮圯坜圻坂坩垅坫垆坼坻坨坭坶坳垭垤垌垲埏垧垴垓垠埕埘埚埙埒垸埴埯埸埤埝"], +["dc40","蹳蹵蹷",4,"蹽蹾躀躂躃躄躆躈",6,"躑躒躓躕",6,"躝躟",11,"躭躮躰躱躳",6,"躻",7], +["dc80","軃",10,"軏",21,"堋堍埽埭堀堞堙塄堠塥塬墁墉墚墀馨鼙懿艹艽艿芏芊芨芄芎芑芗芙芫芸芾芰苈苊苣芘芷芮苋苌苁芩芴芡芪芟苄苎芤苡茉苷苤茏茇苜苴苒苘茌苻苓茑茚茆茔茕苠苕茜荑荛荜茈莒茼茴茱莛荞茯荏荇荃荟荀茗荠茭茺茳荦荥"], +["dd40","軥",62], +["dd80","輤",32,"荨茛荩荬荪荭荮莰荸莳莴莠莪莓莜莅荼莶莩荽莸荻莘莞莨莺莼菁萁菥菘堇萘萋菝菽菖萜萸萑萆菔菟萏萃菸菹菪菅菀萦菰菡葜葑葚葙葳蒇蒈葺蒉葸萼葆葩葶蒌蒎萱葭蓁蓍蓐蓦蒽蓓蓊蒿蒺蓠蒡蒹蒴蒗蓥蓣蔌甍蔸蓰蔹蔟蔺"], +["de40","轅",32,"轪辀辌辒辝辠辡辢辤辥辦辧辪辬辭辮辯農辳辴辵辷辸辺辻込辿迀迃迆"], +["de80","迉",4,"迏迒迖迗迚迠迡迣迧迬迯迱迲迴迵迶迺迻迼迾迿逇逈逌逎逓逕逘蕖蔻蓿蓼蕙蕈蕨蕤蕞蕺瞢蕃蕲蕻薤薨薇薏蕹薮薜薅薹薷薰藓藁藜藿蘧蘅蘩蘖蘼廾弈夼奁耷奕奚奘匏尢尥尬尴扌扪抟抻拊拚拗拮挢拶挹捋捃掭揶捱捺掎掴捭掬掊捩掮掼揲揸揠揿揄揞揎摒揆掾摅摁搋搛搠搌搦搡摞撄摭撖"], +["df40","這逜連逤逥逧",5,"逰",4,"逷逹逺逽逿遀遃遅遆遈",4,"過達違遖遙遚遜",5,"遤遦遧適遪遫遬遯",4,"遶",6,"遾邁"], +["df80","還邅邆邇邉邊邌",4,"邒邔邖邘邚邜邞邟邠邤邥邧邨邩邫邭邲邷邼邽邿郀摺撷撸撙撺擀擐擗擤擢攉攥攮弋忒甙弑卟叱叽叩叨叻吒吖吆呋呒呓呔呖呃吡呗呙吣吲咂咔呷呱呤咚咛咄呶呦咝哐咭哂咴哒咧咦哓哔呲咣哕咻咿哌哙哚哜咩咪咤哝哏哞唛哧唠哽唔哳唢唣唏唑唧唪啧喏喵啉啭啁啕唿啐唼"], +["e040","郂郃郆郈郉郋郌郍郒郔郕郖郘郙郚郞郟郠郣郤郥郩郪郬郮郰郱郲郳郵郶郷郹郺郻郼郿鄀鄁鄃鄅",19,"鄚鄛鄜"], +["e080","鄝鄟鄠鄡鄤",10,"鄰鄲",6,"鄺",8,"酄唷啖啵啶啷唳唰啜喋嗒喃喱喹喈喁喟啾嗖喑啻嗟喽喾喔喙嗪嗷嗉嘟嗑嗫嗬嗔嗦嗝嗄嗯嗥嗲嗳嗌嗍嗨嗵嗤辔嘞嘈嘌嘁嘤嘣嗾嘀嘧嘭噘嘹噗嘬噍噢噙噜噌噔嚆噤噱噫噻噼嚅嚓嚯囔囗囝囡囵囫囹囿圄圊圉圜帏帙帔帑帱帻帼"], +["e140","酅酇酈酑酓酔酕酖酘酙酛酜酟酠酦酧酨酫酭酳酺酻酼醀",4,"醆醈醊醎醏醓",6,"醜",5,"醤",5,"醫醬醰醱醲醳醶醷醸醹醻"], +["e180","醼",10,"釈釋釐釒",9,"針",8,"帷幄幔幛幞幡岌屺岍岐岖岈岘岙岑岚岜岵岢岽岬岫岱岣峁岷峄峒峤峋峥崂崃崧崦崮崤崞崆崛嵘崾崴崽嵬嵛嵯嵝嵫嵋嵊嵩嵴嶂嶙嶝豳嶷巅彳彷徂徇徉後徕徙徜徨徭徵徼衢彡犭犰犴犷犸狃狁狎狍狒狨狯狩狲狴狷猁狳猃狺"], +["e240","釦",62], +["e280","鈥",32,"狻猗猓猡猊猞猝猕猢猹猥猬猸猱獐獍獗獠獬獯獾舛夥飧夤夂饣饧",5,"饴饷饽馀馄馇馊馍馐馑馓馔馕庀庑庋庖庥庠庹庵庾庳赓廒廑廛廨廪膺忄忉忖忏怃忮怄忡忤忾怅怆忪忭忸怙怵怦怛怏怍怩怫怊怿怡恸恹恻恺恂"], +["e340","鉆",45,"鉵",16], +["e380","銆",7,"銏",24,"恪恽悖悚悭悝悃悒悌悛惬悻悱惝惘惆惚悴愠愦愕愣惴愀愎愫慊慵憬憔憧憷懔懵忝隳闩闫闱闳闵闶闼闾阃阄阆阈阊阋阌阍阏阒阕阖阗阙阚丬爿戕氵汔汜汊沣沅沐沔沌汨汩汴汶沆沩泐泔沭泷泸泱泗沲泠泖泺泫泮沱泓泯泾"], +["e440","銨",5,"銯",24,"鋉",31], 
+["e480","鋩",32,"洹洧洌浃浈洇洄洙洎洫浍洮洵洚浏浒浔洳涑浯涞涠浞涓涔浜浠浼浣渚淇淅淞渎涿淠渑淦淝淙渖涫渌涮渫湮湎湫溲湟溆湓湔渲渥湄滟溱溘滠漭滢溥溧溽溻溷滗溴滏溏滂溟潢潆潇漤漕滹漯漶潋潴漪漉漩澉澍澌潸潲潼潺濑"], +["e540","錊",51,"錿",10], +["e580","鍊",31,"鍫濉澧澹澶濂濡濮濞濠濯瀚瀣瀛瀹瀵灏灞宀宄宕宓宥宸甯骞搴寤寮褰寰蹇謇辶迓迕迥迮迤迩迦迳迨逅逄逋逦逑逍逖逡逵逶逭逯遄遑遒遐遨遘遢遛暹遴遽邂邈邃邋彐彗彖彘尻咫屐屙孱屣屦羼弪弩弭艴弼鬻屮妁妃妍妩妪妣"], +["e640","鍬",34,"鎐",27], +["e680","鎬",29,"鏋鏌鏍妗姊妫妞妤姒妲妯姗妾娅娆姝娈姣姘姹娌娉娲娴娑娣娓婀婧婊婕娼婢婵胬媪媛婷婺媾嫫媲嫒嫔媸嫠嫣嫱嫖嫦嫘嫜嬉嬗嬖嬲嬷孀尕尜孚孥孳孑孓孢驵驷驸驺驿驽骀骁骅骈骊骐骒骓骖骘骛骜骝骟骠骢骣骥骧纟纡纣纥纨纩"], +["e740","鏎",7,"鏗",54], +["e780","鐎",32,"纭纰纾绀绁绂绉绋绌绐绔绗绛绠绡绨绫绮绯绱绲缍绶绺绻绾缁缂缃缇缈缋缌缏缑缒缗缙缜缛缟缡",6,"缪缫缬缭缯",4,"缵幺畿巛甾邕玎玑玮玢玟珏珂珑玷玳珀珉珈珥珙顼琊珩珧珞玺珲琏琪瑛琦琥琨琰琮琬"], +["e840","鐯",14,"鐿",43,"鑬鑭鑮鑯"], +["e880","鑰",20,"钑钖钘铇铏铓铔铚铦铻锜锠琛琚瑁瑜瑗瑕瑙瑷瑭瑾璜璎璀璁璇璋璞璨璩璐璧瓒璺韪韫韬杌杓杞杈杩枥枇杪杳枘枧杵枨枞枭枋杷杼柰栉柘栊柩枰栌柙枵柚枳柝栀柃枸柢栎柁柽栲栳桠桡桎桢桄桤梃栝桕桦桁桧桀栾桊桉栩梵梏桴桷梓桫棂楮棼椟椠棹"], +["e940","锧锳锽镃镈镋镕镚镠镮镴镵長",7,"門",42], +["e980","閫",32,"椤棰椋椁楗棣椐楱椹楠楂楝榄楫榀榘楸椴槌榇榈槎榉楦楣楹榛榧榻榫榭槔榱槁槊槟榕槠榍槿樯槭樗樘橥槲橄樾檠橐橛樵檎橹樽樨橘橼檑檐檩檗檫猷獒殁殂殇殄殒殓殍殚殛殡殪轫轭轱轲轳轵轶轸轷轹轺轼轾辁辂辄辇辋"], +["ea40","闌",27,"闬闿阇阓阘阛阞阠阣",6,"阫阬阭阯阰阷阸阹阺阾陁陃陊陎陏陑陒陓陖陗"], +["ea80","陘陙陚陜陝陞陠陣陥陦陫陭",4,"陳陸",12,"隇隉隊辍辎辏辘辚軎戋戗戛戟戢戡戥戤戬臧瓯瓴瓿甏甑甓攴旮旯旰昊昙杲昃昕昀炅曷昝昴昱昶昵耆晟晔晁晏晖晡晗晷暄暌暧暝暾曛曜曦曩贲贳贶贻贽赀赅赆赈赉赇赍赕赙觇觊觋觌觎觏觐觑牮犟牝牦牯牾牿犄犋犍犏犒挈挲掰"], +["eb40","隌階隑隒隓隕隖隚際隝",9,"隨",7,"隱隲隴隵隷隸隺隻隿雂雃雈雊雋雐雑雓雔雖",9,"雡",6,"雫"], +["eb80","雬雭雮雰雱雲雴雵雸雺電雼雽雿霂霃霅霊霋霌霐霑霒霔霕霗",4,"霝霟霠搿擘耄毪毳毽毵毹氅氇氆氍氕氘氙氚氡氩氤氪氲攵敕敫牍牒牖爰虢刖肟肜肓肼朊肽肱肫肭肴肷胧胨胩胪胛胂胄胙胍胗朐胝胫胱胴胭脍脎胲胼朕脒豚脶脞脬脘脲腈腌腓腴腙腚腱腠腩腼腽腭腧塍媵膈膂膑滕膣膪臌朦臊膻"], +["ec40","霡",8,"霫霬霮霯霱霳",4,"霺霻霼霽霿",18,"靔靕靗靘靚靜靝靟靣靤靦靧靨靪",7], +["ec80","靲靵靷",4,"靽",7,"鞆",4,"鞌鞎鞏鞐鞓鞕鞖鞗鞙",4,"臁膦欤欷欹歃歆歙飑飒飓飕飙飚殳彀毂觳斐齑斓於旆旄旃旌旎旒旖炀炜炖炝炻烀炷炫炱烨烊焐焓焖焯焱煳煜煨煅煲煊煸煺熘熳熵熨熠燠燔燧燹爝爨灬焘煦熹戾戽扃扈扉礻祀祆祉祛祜祓祚祢祗祠祯祧祺禅禊禚禧禳忑忐"], +["ed40","鞞鞟鞡鞢鞤",6,"鞬鞮鞰鞱鞳鞵",46], +["ed80","韤韥韨韮",4,"韴韷",23,"怼恝恚恧恁恙恣悫愆愍慝憩憝懋懑戆肀聿沓泶淼矶矸砀砉砗砘砑斫砭砜砝砹砺砻砟砼砥砬砣砩硎硭硖硗砦硐硇硌硪碛碓碚碇碜碡碣碲碹碥磔磙磉磬磲礅磴礓礤礞礴龛黹黻黼盱眄眍盹眇眈眚眢眙眭眦眵眸睐睑睇睃睚睨"], +["ee40","頏",62], +["ee80","顎",32,"睢睥睿瞍睽瞀瞌瞑瞟瞠瞰瞵瞽町畀畎畋畈畛畲畹疃罘罡罟詈罨罴罱罹羁罾盍盥蠲钅钆钇钋钊钌钍钏钐钔钗钕钚钛钜钣钤钫钪钭钬钯钰钲钴钶",4,"钼钽钿铄铈",6,"铐铑铒铕铖铗铙铘铛铞铟铠铢铤铥铧铨铪"], +["ef40","顯",5,"颋颎颒颕颙颣風",37,"飏飐飔飖飗飛飜飝飠",4], +["ef80","飥飦飩",30,"铩铫铮铯铳铴铵铷铹铼铽铿锃锂锆锇锉锊锍锎锏锒",4,"锘锛锝锞锟锢锪锫锩锬锱锲锴锶锷锸锼锾锿镂锵镄镅镆镉镌镎镏镒镓镔镖镗镘镙镛镞镟镝镡镢镤",8,"镯镱镲镳锺矧矬雉秕秭秣秫稆嵇稃稂稞稔"], +["f040","餈",4,"餎餏餑",28,"餯",26], +["f080","饊",9,"饖",12,"饤饦饳饸饹饻饾馂馃馉稹稷穑黏馥穰皈皎皓皙皤瓞瓠甬鸠鸢鸨",4,"鸲鸱鸶鸸鸷鸹鸺鸾鹁鹂鹄鹆鹇鹈鹉鹋鹌鹎鹑鹕鹗鹚鹛鹜鹞鹣鹦",6,"鹱鹭鹳疒疔疖疠疝疬疣疳疴疸痄疱疰痃痂痖痍痣痨痦痤痫痧瘃痱痼痿瘐瘀瘅瘌瘗瘊瘥瘘瘕瘙"], +["f140","馌馎馚",10,"馦馧馩",47], +["f180","駙",32,"瘛瘼瘢瘠癀瘭瘰瘿瘵癃瘾瘳癍癞癔癜癖癫癯翊竦穸穹窀窆窈窕窦窠窬窨窭窳衤衩衲衽衿袂袢裆袷袼裉裢裎裣裥裱褚裼裨裾裰褡褙褓褛褊褴褫褶襁襦襻疋胥皲皴矜耒耔耖耜耠耢耥耦耧耩耨耱耋耵聃聆聍聒聩聱覃顸颀颃"], +["f240","駺",62], +["f280","騹",32,"颉颌颍颏颔颚颛颞颟颡颢颥颦虍虔虬虮虿虺虼虻蚨蚍蚋蚬蚝蚧蚣蚪蚓蚩蚶蛄蚵蛎蚰蚺蚱蚯蛉蛏蚴蛩蛱蛲蛭蛳蛐蜓蛞蛴蛟蛘蛑蜃蜇蛸蜈蜊蜍蜉蜣蜻蜞蜥蜮蜚蜾蝈蜴蜱蜩蜷蜿螂蜢蝽蝾蝻蝠蝰蝌蝮螋蝓蝣蝼蝤蝙蝥螓螯螨蟒"], +["f340","驚",17,"驲骃骉骍骎骔骕骙骦骩",6,"骲骳骴骵骹骻骽骾骿髃髄髆",4,"髍髎髏髐髒體髕髖髗髙髚髛髜"], +["f380","髝髞髠髢髣髤髥髧髨髩髪髬髮髰",8,"髺髼",6,"鬄鬅鬆蟆螈螅螭螗螃螫蟥螬螵螳蟋蟓螽蟑蟀蟊蟛蟪蟠蟮蠖蠓蟾蠊蠛蠡蠹蠼缶罂罄罅舐竺竽笈笃笄笕笊笫笏筇笸笪笙笮笱笠笥笤笳笾笞筘筚筅筵筌筝筠筮筻筢筲筱箐箦箧箸箬箝箨箅箪箜箢箫箴篑篁篌篝篚篥篦篪簌篾篼簏簖簋"], +["f440","鬇鬉",5,"鬐鬑鬒鬔",10,"鬠鬡鬢鬤",10,"鬰鬱鬳",7,"鬽鬾鬿魀魆魊魋魌魎魐魒魓魕",5], +["f480","魛",32,"簟簪簦簸籁籀臾舁舂舄臬衄舡舢舣舭舯舨舫舸舻舳舴舾艄艉艋艏艚艟艨衾袅袈裘裟襞羝羟羧羯羰羲籼敉粑粝粜粞粢粲粼粽糁糇糌糍糈糅糗糨艮暨羿翎翕翥翡翦翩翮翳糸絷綦綮繇纛麸麴赳趄趔趑趱赧赭豇豉酊酐酎酏酤"], +["f540","魼",62], +["f580","鮻",32,"酢酡酰酩酯酽酾酲酴酹醌醅醐醍醑醢醣醪醭醮醯醵醴醺豕鹾趸跫踅蹙蹩趵趿趼趺跄跖跗跚跞跎跏跛跆跬跷跸跣跹跻跤踉跽踔踝踟踬踮踣踯踺蹀踹踵踽踱蹉蹁蹂蹑蹒蹊蹰蹶蹼蹯蹴躅躏躔躐躜躞豸貂貊貅貘貔斛觖觞觚觜"], +["f640","鯜",62], +["f680","鰛",32,"觥觫觯訾謦靓雩雳雯霆霁霈霏霎霪霭霰霾龀龃龅",5,"龌黾鼋鼍隹隼隽雎雒瞿雠銎銮鋈錾鍪鏊鎏鐾鑫鱿鲂鲅鲆鲇鲈稣鲋鲎鲐鲑鲒鲔鲕鲚鲛鲞",5,"鲥",4,"鲫鲭鲮鲰",7,"鲺鲻鲼鲽鳄鳅鳆鳇鳊鳋"], +["f740","鰼",62], +["f780","鱻鱽鱾鲀鲃鲄鲉鲊鲌鲏鲓鲖鲗鲘鲙鲝鲪鲬鲯鲹鲾",4,"鳈鳉鳑鳒鳚鳛鳠鳡鳌",4,"鳓鳔鳕鳗鳘鳙鳜鳝鳟鳢靼鞅鞑鞒鞔鞯鞫鞣鞲鞴骱骰骷鹘骶骺骼髁髀髅髂髋髌髑魅魃魇魉魈魍魑飨餍餮饕饔髟髡髦髯髫髻髭髹鬈鬏鬓鬟鬣麽麾縻麂麇麈麋麒鏖麝麟黛黜黝黠黟黢黩黧黥黪黯鼢鼬鼯鼹鼷鼽鼾齄"], +["f840","鳣",62], +["f880","鴢",32], +["f940","鵃",62], +["f980","鶂",32], +["fa40","鶣",62], +["fa80","鷢",32], +["fb40","鸃",27,"鸤鸧鸮鸰鸴鸻鸼鹀鹍鹐鹒鹓鹔鹖鹙鹝鹟鹠鹡鹢鹥鹮鹯鹲鹴",9,"麀"], +["fb80","麁麃麄麅麆麉麊麌",5,"麔",8,"麞麠",5,"麧麨麩麪"], +["fc40","麫",8,"麵麶麷麹麺麼麿",4,"黅黆黇黈黊黋黌黐黒黓黕黖黗黙黚點黡黣黤黦黨黫黬黭黮黰",8,"黺黽黿",6], +["fc80","鼆",4,"鼌鼏鼑鼒鼔鼕鼖鼘鼚",5,"鼡鼣",8,"鼭鼮鼰鼱"], +["fd40","鼲",4,"鼸鼺鼼鼿",4,"齅",10,"齒",38], +["fd80","齹",5,"龁龂龍",11,"龜龝龞龡",4,"郎凉秊裏隣"], 
+["fe40","兀嗀﨎﨏﨑﨓﨔礼﨟蘒﨡﨣﨤﨧﨨﨩"] +] diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/tables/cp949.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/tables/cp949.json new file mode 100644 index 0000000000000000000000000000000000000000..2022a007ff7ac97ce51167903d116eec42bffd9a --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/tables/cp949.json @@ -0,0 +1,273 @@ +[ +["0","\u0000",127], +["8141","갂갃갅갆갋",4,"갘갞갟갡갢갣갥",6,"갮갲갳갴"], +["8161","갵갶갷갺갻갽갾갿걁",9,"걌걎",5,"걕"], +["8181","걖걗걙걚걛걝",18,"걲걳걵걶걹걻",4,"겂겇겈겍겎겏겑겒겓겕",6,"겞겢",5,"겫겭겮겱",6,"겺겾겿곀곂곃곅곆곇곉곊곋곍",7,"곖곘",7,"곢곣곥곦곩곫곭곮곲곴곷",4,"곾곿괁괂괃괅괇",4,"괎괐괒괓"], +["8241","괔괕괖괗괙괚괛괝괞괟괡",7,"괪괫괮",5], +["8261","괶괷괹괺괻괽",6,"굆굈굊",5,"굑굒굓굕굖굗"], +["8281","굙",7,"굢굤",7,"굮굯굱굲굷굸굹굺굾궀궃",4,"궊궋궍궎궏궑",10,"궞",5,"궥",17,"궸",7,"귂귃귅귆귇귉",6,"귒귔",7,"귝귞귟귡귢귣귥",18], +["8341","귺귻귽귾긂",5,"긊긌긎",5,"긕",7], +["8361","긝",18,"긲긳긵긶긹긻긼"], +["8381","긽긾긿깂깄깇깈깉깋깏깑깒깓깕깗",4,"깞깢깣깤깦깧깪깫깭깮깯깱",6,"깺깾",5,"꺆",5,"꺍",46,"꺿껁껂껃껅",6,"껎껒",5,"껚껛껝",8], +["8441","껦껧껩껪껬껮",5,"껵껶껷껹껺껻껽",8], +["8461","꼆꼉꼊꼋꼌꼎꼏꼑",18], +["8481","꼤",7,"꼮꼯꼱꼳꼵",6,"꼾꽀꽄꽅꽆꽇꽊",5,"꽑",10,"꽞",5,"꽦",18,"꽺",5,"꾁꾂꾃꾅꾆꾇꾉",6,"꾒꾓꾔꾖",5,"꾝",26,"꾺꾻꾽꾾"], +["8541","꾿꿁",5,"꿊꿌꿏",4,"꿕",6,"꿝",4], +["8561","꿢",5,"꿪",5,"꿲꿳꿵꿶꿷꿹",6,"뀂뀃"], +["8581","뀅",6,"뀍뀎뀏뀑뀒뀓뀕",6,"뀞",9,"뀩",26,"끆끇끉끋끍끏끐끑끒끖끘끚끛끜끞",29,"끾끿낁낂낃낅",6,"낎낐낒",5,"낛낝낞낣낤"], +["8641","낥낦낧낪낰낲낶낷낹낺낻낽",6,"냆냊",5,"냒"], +["8661","냓냕냖냗냙",6,"냡냢냣냤냦",10], +["8681","냱",22,"넊넍넎넏넑넔넕넖넗넚넞",4,"넦넧넩넪넫넭",6,"넶넺",5,"녂녃녅녆녇녉",6,"녒녓녖녗녙녚녛녝녞녟녡",22,"녺녻녽녾녿놁놃",4,"놊놌놎놏놐놑놕놖놗놙놚놛놝"], +["8741","놞",9,"놩",15], +["8761","놹",18,"뇍뇎뇏뇑뇒뇓뇕"], +["8781","뇖",5,"뇞뇠",7,"뇪뇫뇭뇮뇯뇱",7,"뇺뇼뇾",5,"눆눇눉눊눍",6,"눖눘눚",5,"눡",18,"눵",6,"눽",26,"뉙뉚뉛뉝뉞뉟뉡",6,"뉪",4], +["8841","뉯",4,"뉶",5,"뉽",6,"늆늇늈늊",4], +["8861","늏늒늓늕늖늗늛",4,"늢늤늧늨늩늫늭늮늯늱늲늳늵늶늷"], +["8881","늸",15,"닊닋닍닎닏닑닓",4,"닚닜닞닟닠닡닣닧닩닪닰닱닲닶닼닽닾댂댃댅댆댇댉",6,"댒댖",5,"댝",54,"덗덙덚덝덠덡덢덣"], +["8941","덦덨덪덬덭덯덲덳덵덶덷덹",6,"뎂뎆",5,"뎍"], +["8961","뎎뎏뎑뎒뎓뎕",10,"뎢",5,"뎩뎪뎫뎭"], +["8981","뎮",21,"돆돇돉돊돍돏돑돒돓돖돘돚돜돞돟돡돢돣돥돦돧돩",18,"돽",18,"됑",6,"됙됚됛됝됞됟됡",6,"됪됬",7,"됵",15], +["8a41","둅",10,"둒둓둕둖둗둙",6,"둢둤둦"], +["8a61","둧",4,"둭",18,"뒁뒂"], +["8a81","뒃",4,"뒉",19,"뒞",5,"뒥뒦뒧뒩뒪뒫뒭",7,"뒶뒸뒺",5,"듁듂듃듅듆듇듉",6,"듑듒듓듔듖",5,"듞듟듡듢듥듧",4,"듮듰듲",5,"듹",26,"딖딗딙딚딝"], +["8b41","딞",5,"딦딫",4,"딲딳딵딶딷딹",6,"땂땆"], +["8b61","땇땈땉땊땎땏땑땒땓땕",6,"땞땢",8], +["8b81","땫",52,"떢떣떥떦떧떩떬떭떮떯떲떶",4,"떾떿뗁뗂뗃뗅",6,"뗎뗒",5,"뗙",18,"뗭",18], +["8c41","똀",15,"똒똓똕똖똗똙",4], +["8c61","똞",6,"똦",5,"똭",6,"똵",5], +["8c81","똻",12,"뙉",26,"뙥뙦뙧뙩",50,"뚞뚟뚡뚢뚣뚥",5,"뚭뚮뚯뚰뚲",16], +["8d41","뛃",16,"뛕",8], +["8d61","뛞",17,"뛱뛲뛳뛵뛶뛷뛹뛺"], +["8d81","뛻",4,"뜂뜃뜄뜆",33,"뜪뜫뜭뜮뜱",6,"뜺뜼",7,"띅띆띇띉띊띋띍",6,"띖",9,"띡띢띣띥띦띧띩",6,"띲띴띶",5,"띾띿랁랂랃랅",6,"랎랓랔랕랚랛랝랞"], +["8e41","랟랡",6,"랪랮",5,"랶랷랹",8], +["8e61","럂",4,"럈럊",19], +["8e81","럞",13,"럮럯럱럲럳럵",6,"럾렂",4,"렊렋렍렎렏렑",6,"렚렜렞",5,"렦렧렩렪렫렭",6,"렶렺",5,"롁롂롃롅",11,"롒롔",7,"롞롟롡롢롣롥",6,"롮롰롲",5,"롹롺롻롽",7], +["8f41","뢅",7,"뢎",17], +["8f61","뢠",7,"뢩",6,"뢱뢲뢳뢵뢶뢷뢹",4], +["8f81","뢾뢿룂룄룆",5,"룍룎룏룑룒룓룕",7,"룞룠룢",5,"룪룫룭룮룯룱",6,"룺룼룾",5,"뤅",18,"뤙",6,"뤡",26,"뤾뤿륁륂륃륅",6,"륍륎륐륒",5], +["9041","륚륛륝륞륟륡",6,"륪륬륮",5,"륶륷륹륺륻륽"], +["9061","륾",5,"릆릈릋릌릏",15], +["9081","릟",12,"릮릯릱릲릳릵",6,"릾맀맂",5,"맊맋맍맓",4,"맚맜맟맠맢맦맧맩맪맫맭",6,"맶맻",4,"먂",5,"먉",11,"먖",33,"먺먻먽먾먿멁멃멄멅멆"], +["9141","멇멊멌멏멐멑멒멖멗멙멚멛멝",6,"멦멪",5], +["9161","멲멳멵멶멷멹",9,"몆몈몉몊몋몍",5], +["9181","몓",20,"몪몭몮몯몱몳",4,"몺몼몾",5,"뫅뫆뫇뫉",14,"뫚",33,"뫽뫾뫿묁묂묃묅",7,"묎묐묒",5,"묙묚묛묝묞묟묡",6], +["9241","묨묪묬",7,"묷묹묺묿",4,"뭆뭈뭊뭋뭌뭎뭑뭒"], +["9261","뭓뭕뭖뭗뭙",7,"뭢뭤",7,"뭭",4], +["9281","뭲",21,"뮉뮊뮋뮍뮎뮏뮑",18,"뮥뮦뮧뮩뮪뮫뮭",6,"뮵뮶뮸",7,"믁믂믃믅믆믇믉",6,"믑믒믔",35,"믺믻믽믾밁"], +["9341","밃",4,"밊밎밐밒밓밙밚밠밡밢밣밦밨밪밫밬밮밯밲밳밵"], +["9361","밶밷밹",6,"뱂뱆뱇뱈뱊뱋뱎뱏뱑",8], 
+["9381","뱚뱛뱜뱞",37,"벆벇벉벊벍벏",4,"벖벘벛",4,"벢벣벥벦벩",6,"벲벶",5,"벾벿볁볂볃볅",7,"볎볒볓볔볖볗볙볚볛볝",22,"볷볹볺볻볽"], +["9441","볾",5,"봆봈봊",5,"봑봒봓봕",8], +["9461","봞",5,"봥",6,"봭",12], +["9481","봺",5,"뵁",6,"뵊뵋뵍뵎뵏뵑",6,"뵚",9,"뵥뵦뵧뵩",22,"붂붃붅붆붋",4,"붒붔붖붗붘붛붝",6,"붥",10,"붱",6,"붹",24], +["9541","뷒뷓뷖뷗뷙뷚뷛뷝",11,"뷪",5,"뷱"], +["9561","뷲뷳뷵뷶뷷뷹",6,"븁븂븄븆",5,"븎븏븑븒븓"], +["9581","븕",6,"븞븠",35,"빆빇빉빊빋빍빏",4,"빖빘빜빝빞빟빢빣빥빦빧빩빫",4,"빲빶",4,"빾빿뺁뺂뺃뺅",6,"뺎뺒",5,"뺚",13,"뺩",14], +["9641","뺸",23,"뻒뻓"], +["9661","뻕뻖뻙",6,"뻡뻢뻦",5,"뻭",8], +["9681","뻶",10,"뼂",5,"뼊",13,"뼚뼞",33,"뽂뽃뽅뽆뽇뽉",6,"뽒뽓뽔뽖",44], +["9741","뾃",16,"뾕",8], +["9761","뾞",17,"뾱",7], +["9781","뾹",11,"뿆",5,"뿎뿏뿑뿒뿓뿕",6,"뿝뿞뿠뿢",89,"쀽쀾쀿"], +["9841","쁀",16,"쁒",5,"쁙쁚쁛"], +["9861","쁝쁞쁟쁡",6,"쁪",15], +["9881","쁺",21,"삒삓삕삖삗삙",6,"삢삤삦",5,"삮삱삲삷",4,"삾샂샃샄샆샇샊샋샍샎샏샑",6,"샚샞",5,"샦샧샩샪샫샭",6,"샶샸샺",5,"섁섂섃섅섆섇섉",6,"섑섒섓섔섖",5,"섡섢섥섨섩섪섫섮"], +["9941","섲섳섴섵섷섺섻섽섾섿셁",6,"셊셎",5,"셖셗"], +["9961","셙셚셛셝",6,"셦셪",5,"셱셲셳셵셶셷셹셺셻"], +["9981","셼",8,"솆",5,"솏솑솒솓솕솗",4,"솞솠솢솣솤솦솧솪솫솭솮솯솱",11,"솾",5,"쇅쇆쇇쇉쇊쇋쇍",6,"쇕쇖쇙",6,"쇡쇢쇣쇥쇦쇧쇩",6,"쇲쇴",7,"쇾쇿숁숂숃숅",6,"숎숐숒",5,"숚숛숝숞숡숢숣"], +["9a41","숤숥숦숧숪숬숮숰숳숵",16], +["9a61","쉆쉇쉉",6,"쉒쉓쉕쉖쉗쉙",6,"쉡쉢쉣쉤쉦"], +["9a81","쉧",4,"쉮쉯쉱쉲쉳쉵",6,"쉾슀슂",5,"슊",5,"슑",6,"슙슚슜슞",5,"슦슧슩슪슫슮",5,"슶슸슺",33,"싞싟싡싢싥",5,"싮싰싲싳싴싵싷싺싽싾싿쌁",6,"쌊쌋쌎쌏"], +["9b41","쌐쌑쌒쌖쌗쌙쌚쌛쌝",6,"쌦쌧쌪",8], +["9b61","쌳",17,"썆",7], +["9b81","썎",25,"썪썫썭썮썯썱썳",4,"썺썻썾",5,"쎅쎆쎇쎉쎊쎋쎍",50,"쏁",22,"쏚"], +["9c41","쏛쏝쏞쏡쏣",4,"쏪쏫쏬쏮",5,"쏶쏷쏹",5], +["9c61","쏿",8,"쐉",6,"쐑",9], +["9c81","쐛",8,"쐥",6,"쐭쐮쐯쐱쐲쐳쐵",6,"쐾",9,"쑉",26,"쑦쑧쑩쑪쑫쑭",6,"쑶쑷쑸쑺",5,"쒁",18,"쒕",6,"쒝",12], +["9d41","쒪",13,"쒹쒺쒻쒽",8], +["9d61","쓆",25], +["9d81","쓠",8,"쓪",5,"쓲쓳쓵쓶쓷쓹쓻쓼쓽쓾씂",9,"씍씎씏씑씒씓씕",6,"씝",10,"씪씫씭씮씯씱",6,"씺씼씾",5,"앆앇앋앏앐앑앒앖앚앛앜앟앢앣앥앦앧앩",6,"앲앶",5,"앾앿얁얂얃얅얆얈얉얊얋얎얐얒얓얔"], +["9e41","얖얙얚얛얝얞얟얡",7,"얪",9,"얶"], +["9e61","얷얺얿",4,"엋엍엏엒엓엕엖엗엙",6,"엢엤엦엧"], +["9e81","엨엩엪엫엯엱엲엳엵엸엹엺엻옂옃옄옉옊옋옍옎옏옑",6,"옚옝",6,"옦옧옩옪옫옯옱옲옶옸옺옼옽옾옿왂왃왅왆왇왉",6,"왒왖",5,"왞왟왡",10,"왭왮왰왲",5,"왺왻왽왾왿욁",6,"욊욌욎",5,"욖욗욙욚욛욝",6,"욦"], +["9f41","욨욪",5,"욲욳욵욶욷욻",4,"웂웄웆",5,"웎"], +["9f61","웏웑웒웓웕",6,"웞웟웢",5,"웪웫웭웮웯웱웲"], +["9f81","웳",4,"웺웻웼웾",5,"윆윇윉윊윋윍",6,"윖윘윚",5,"윢윣윥윦윧윩",6,"윲윴윶윸윹윺윻윾윿읁읂읃읅",4,"읋읎읐읙읚읛읝읞읟읡",6,"읩읪읬",7,"읶읷읹읺읻읿잀잁잂잆잋잌잍잏잒잓잕잙잛",4,"잢잧",4,"잮잯잱잲잳잵잶잷"], +["a041","잸잹잺잻잾쟂",5,"쟊쟋쟍쟏쟑",6,"쟙쟚쟛쟜"], +["a061","쟞",5,"쟥쟦쟧쟩쟪쟫쟭",13], +["a081","쟻",4,"젂젃젅젆젇젉젋",4,"젒젔젗",4,"젞젟젡젢젣젥",6,"젮젰젲",5,"젹젺젻젽젾젿졁",6,"졊졋졎",5,"졕",26,"졲졳졵졶졷졹졻",4,"좂좄좈좉좊좎",5,"좕",7,"좞좠좢좣좤"], +["a141","좥좦좧좩",18,"좾좿죀죁"], +["a161","죂죃죅죆죇죉죊죋죍",6,"죖죘죚",5,"죢죣죥"], +["a181","죦",14,"죶",5,"죾죿줁줂줃줇",4,"줎 、。·‥…¨〃­―∥\∼‘’“”〔〕〈",9,"±×÷≠≤≥∞∴°′″℃Å¢£¥♂♀∠⊥⌒∂∇≡≒§※☆★○●◎◇◆□■△▲▽▼→←↑↓↔〓≪≫√∽∝∵∫∬∈∋⊆⊇⊂⊃∪∩∧∨¬"], +["a241","줐줒",5,"줙",18], +["a261","줭",6,"줵",18], +["a281","쥈",7,"쥒쥓쥕쥖쥗쥙",6,"쥢쥤",7,"쥭쥮쥯⇒⇔∀∃´~ˇ˘˝˚˙¸˛¡¿ː∮∑∏¤℉‰◁◀▷▶♤♠♡♥♧♣⊙◈▣◐◑▒▤▥▨▧▦▩♨☏☎☜☞¶†‡↕↗↙↖↘♭♩♪♬㉿㈜№㏇™㏂㏘℡€®"], +["a341","쥱쥲쥳쥵",6,"쥽",10,"즊즋즍즎즏"], +["a361","즑",6,"즚즜즞",16], +["a381","즯",16,"짂짃짅짆짉짋",4,"짒짔짗짘짛!",58,"₩]",32," ̄"], +["a441","짞짟짡짣짥짦짨짩짪짫짮짲",5,"짺짻짽짾짿쨁쨂쨃쨄"], +["a461","쨅쨆쨇쨊쨎",5,"쨕쨖쨗쨙",12], +["a481","쨦쨧쨨쨪",28,"ㄱ",93], +["a541","쩇",4,"쩎쩏쩑쩒쩓쩕",6,"쩞쩢",5,"쩩쩪"], +["a561","쩫",17,"쩾",5,"쪅쪆"], +["a581","쪇",16,"쪙",14,"ⅰ",9], +["a5b0","Ⅰ",9], +["a5c1","Α",16,"Σ",6], +["a5e1","α",16,"σ",6], +["a641","쪨",19,"쪾쪿쫁쫂쫃쫅"], +["a661","쫆",5,"쫎쫐쫒쫔쫕쫖쫗쫚",5,"쫡",6], +["a681","쫨쫩쫪쫫쫭",6,"쫵",18,"쬉쬊─│┌┐┘└├┬┤┴┼━┃┏┓┛┗┣┳┫┻╋┠┯┨┷┿┝┰┥┸╂┒┑┚┙┖┕┎┍┞┟┡┢┦┧┩┪┭┮┱┲┵┶┹┺┽┾╀╁╃",7], +["a741","쬋",4,"쬑쬒쬓쬕쬖쬗쬙",6,"쬢",7], +["a761","쬪",22,"쭂쭃쭄"], +["a781","쭅쭆쭇쭊쭋쭍쭎쭏쭑",6,"쭚쭛쭜쭞",5,"쭥",7,"㎕㎖㎗ℓ㎘㏄㎣㎤㎥㎦㎙",9,"㏊㎍㎎㎏㏏㎈㎉㏈㎧㎨㎰",9,"㎀",4,"㎺",5,"㎐",4,"Ω㏀㏁㎊㎋㎌㏖㏅㎭㎮㎯㏛㎩㎪㎫㎬㏝㏐㏓㏃㏉㏜㏆"], +["a841","쭭",10,"쭺",14], +["a861","쮉",18,"쮝",6], +["a881","쮤",19,"쮹",11,"ÆÐªĦ"], +["a8a6","IJ"], +["a8a8","ĿŁØŒºÞŦŊ"], +["a8b1","㉠",27,"ⓐ",25,"①",14,"½⅓⅔¼¾⅛⅜⅝⅞"], +["a941","쯅",14,"쯕",10], +["a961","쯠쯡쯢쯣쯥쯦쯨쯪",18], 
+["a981","쯽",14,"찎찏찑찒찓찕",6,"찞찟찠찣찤æđðħıijĸŀłøœßþŧŋʼn㈀",27,"⒜",25,"⑴",14,"¹²³⁴ⁿ₁₂₃₄"], +["aa41","찥찦찪찫찭찯찱",6,"찺찿",4,"챆챇챉챊챋챍챎"], +["aa61","챏",4,"챖챚",5,"챡챢챣챥챧챩",6,"챱챲"], +["aa81","챳챴챶",29,"ぁ",82], +["ab41","첔첕첖첗첚첛첝첞첟첡",6,"첪첮",5,"첶첷첹"], +["ab61","첺첻첽",6,"쳆쳈쳊",5,"쳑쳒쳓쳕",5], +["ab81","쳛",8,"쳥",6,"쳭쳮쳯쳱",12,"ァ",85], +["ac41","쳾쳿촀촂",5,"촊촋촍촎촏촑",6,"촚촜촞촟촠"], +["ac61","촡촢촣촥촦촧촩촪촫촭",11,"촺",4], +["ac81","촿",28,"쵝쵞쵟А",5,"ЁЖ",25], +["acd1","а",5,"ёж",25], +["ad41","쵡쵢쵣쵥",6,"쵮쵰쵲",5,"쵹",7], +["ad61","춁",6,"춉",10,"춖춗춙춚춛춝춞춟"], +["ad81","춠춡춢춣춦춨춪",5,"춱",18,"췅"], +["ae41","췆",5,"췍췎췏췑",16], +["ae61","췢",5,"췩췪췫췭췮췯췱",6,"췺췼췾",4], +["ae81","츃츅츆츇츉츊츋츍",6,"츕츖츗츘츚",5,"츢츣츥츦츧츩츪츫"], +["af41","츬츭츮츯츲츴츶",19], +["af61","칊",13,"칚칛칝칞칢",5,"칪칬"], +["af81","칮",5,"칶칷칹칺칻칽",6,"캆캈캊",5,"캒캓캕캖캗캙"], +["b041","캚",5,"캢캦",5,"캮",12], +["b061","캻",5,"컂",19], +["b081","컖",13,"컦컧컩컪컭",6,"컶컺",5,"가각간갇갈갉갊감",7,"같",4,"갠갤갬갭갯갰갱갸갹갼걀걋걍걔걘걜거걱건걷걸걺검겁것겄겅겆겉겊겋게겐겔겜겝겟겠겡겨격겪견겯결겸겹겻겼경곁계곈곌곕곗고곡곤곧골곪곬곯곰곱곳공곶과곽관괄괆"], +["b141","켂켃켅켆켇켉",6,"켒켔켖",5,"켝켞켟켡켢켣"], +["b161","켥",6,"켮켲",5,"켹",11], +["b181","콅",14,"콖콗콙콚콛콝",6,"콦콨콪콫콬괌괍괏광괘괜괠괩괬괭괴괵괸괼굄굅굇굉교굔굘굡굣구국군굳굴굵굶굻굼굽굿궁궂궈궉권궐궜궝궤궷귀귁귄귈귐귑귓규균귤그극근귿글긁금급긋긍긔기긱긴긷길긺김깁깃깅깆깊까깍깎깐깔깖깜깝깟깠깡깥깨깩깬깰깸"], +["b241","콭콮콯콲콳콵콶콷콹",6,"쾁쾂쾃쾄쾆",5,"쾍"], +["b261","쾎",18,"쾢",5,"쾩"], +["b281","쾪",5,"쾱",18,"쿅",6,"깹깻깼깽꺄꺅꺌꺼꺽꺾껀껄껌껍껏껐껑께껙껜껨껫껭껴껸껼꼇꼈꼍꼐꼬꼭꼰꼲꼴꼼꼽꼿꽁꽂꽃꽈꽉꽐꽜꽝꽤꽥꽹꾀꾄꾈꾐꾑꾕꾜꾸꾹꾼꿀꿇꿈꿉꿋꿍꿎꿔꿜꿨꿩꿰꿱꿴꿸뀀뀁뀄뀌뀐뀔뀜뀝뀨끄끅끈끊끌끎끓끔끕끗끙"], +["b341","쿌",19,"쿢쿣쿥쿦쿧쿩"], +["b361","쿪",5,"쿲쿴쿶",5,"쿽쿾쿿퀁퀂퀃퀅",5], +["b381","퀋",5,"퀒",5,"퀙",19,"끝끼끽낀낄낌낍낏낑나낙낚난낟날낡낢남납낫",4,"낱낳내낵낸낼냄냅냇냈냉냐냑냔냘냠냥너넉넋넌널넒넓넘넙넛넜넝넣네넥넨넬넴넵넷넸넹녀녁년녈념녑녔녕녘녜녠노녹논놀놂놈놉놋농높놓놔놘놜놨뇌뇐뇔뇜뇝"], +["b441","퀮",5,"퀶퀷퀹퀺퀻퀽",6,"큆큈큊",5], +["b461","큑큒큓큕큖큗큙",6,"큡",10,"큮큯"], +["b481","큱큲큳큵",6,"큾큿킀킂",18,"뇟뇨뇩뇬뇰뇹뇻뇽누눅눈눋눌눔눕눗눙눠눴눼뉘뉜뉠뉨뉩뉴뉵뉼늄늅늉느늑는늘늙늚늠늡늣능늦늪늬늰늴니닉닌닐닒님닙닛닝닢다닥닦단닫",4,"닳담답닷",4,"닿대댁댄댈댐댑댓댔댕댜더덕덖던덛덜덞덟덤덥"], +["b541","킕",14,"킦킧킩킪킫킭",5], +["b561","킳킶킸킺",5,"탂탃탅탆탇탊",5,"탒탖",4], +["b581","탛탞탟탡탢탣탥",6,"탮탲",5,"탹",11,"덧덩덫덮데덱덴델뎀뎁뎃뎄뎅뎌뎐뎔뎠뎡뎨뎬도독돈돋돌돎돐돔돕돗동돛돝돠돤돨돼됐되된될됨됩됫됴두둑둔둘둠둡둣둥둬뒀뒈뒝뒤뒨뒬뒵뒷뒹듀듄듈듐듕드득든듣들듦듬듭듯등듸디딕딘딛딜딤딥딧딨딩딪따딱딴딸"], +["b641","턅",7,"턎",17], +["b661","턠",15,"턲턳턵턶턷턹턻턼턽턾"], +["b681","턿텂텆",5,"텎텏텑텒텓텕",6,"텞텠텢",5,"텩텪텫텭땀땁땃땄땅땋때땍땐땔땜땝땟땠땡떠떡떤떨떪떫떰떱떳떴떵떻떼떽뗀뗄뗌뗍뗏뗐뗑뗘뗬또똑똔똘똥똬똴뙈뙤뙨뚜뚝뚠뚤뚫뚬뚱뛔뛰뛴뛸뜀뜁뜅뜨뜩뜬뜯뜰뜸뜹뜻띄띈띌띔띕띠띤띨띰띱띳띵라락란랄람랍랏랐랑랒랖랗"], +["b741","텮",13,"텽",6,"톅톆톇톉톊"], +["b761","톋",20,"톢톣톥톦톧"], +["b781","톩",6,"톲톴톶톷톸톹톻톽톾톿퇁",14,"래랙랜랠램랩랫랬랭랴략랸럇량러럭런럴럼럽럿렀렁렇레렉렌렐렘렙렛렝려력련렬렴렵렷렸령례롄롑롓로록론롤롬롭롯롱롸롼뢍뢨뢰뢴뢸룀룁룃룅료룐룔룝룟룡루룩룬룰룸룹룻룽뤄뤘뤠뤼뤽륀륄륌륏륑류륙륜률륨륩"], +["b841","퇐",7,"퇙",17], +["b861","퇫",8,"퇵퇶퇷퇹",13], +["b881","툈툊",5,"툑",24,"륫륭르륵른를름릅릇릉릊릍릎리릭린릴림립릿링마막만많",4,"맘맙맛망맞맡맣매맥맨맬맴맵맷맸맹맺먀먁먈먕머먹먼멀멂멈멉멋멍멎멓메멕멘멜멤멥멧멨멩며멱면멸몃몄명몇몌모목몫몬몰몲몸몹못몽뫄뫈뫘뫙뫼"], +["b941","툪툫툮툯툱툲툳툵",6,"툾퉀퉂",5,"퉉퉊퉋퉌"], +["b961","퉍",14,"퉝",6,"퉥퉦퉧퉨"], +["b981","퉩",22,"튂튃튅튆튇튉튊튋튌묀묄묍묏묑묘묜묠묩묫무묵묶문묻물묽묾뭄뭅뭇뭉뭍뭏뭐뭔뭘뭡뭣뭬뮈뮌뮐뮤뮨뮬뮴뮷므믄믈믐믓미믹민믿밀밂밈밉밋밌밍및밑바",4,"받",4,"밤밥밧방밭배백밴밸뱀뱁뱃뱄뱅뱉뱌뱍뱐뱝버벅번벋벌벎범법벗"], +["ba41","튍튎튏튒튓튔튖",5,"튝튞튟튡튢튣튥",6,"튭"], +["ba61","튮튯튰튲",5,"튺튻튽튾틁틃",4,"틊틌",5], +["ba81","틒틓틕틖틗틙틚틛틝",6,"틦",9,"틲틳틵틶틷틹틺벙벚베벡벤벧벨벰벱벳벴벵벼벽변별볍볏볐병볕볘볜보복볶본볼봄봅봇봉봐봔봤봬뵀뵈뵉뵌뵐뵘뵙뵤뵨부북분붇불붉붊붐붑붓붕붙붚붜붤붰붸뷔뷕뷘뷜뷩뷰뷴뷸븀븃븅브븍븐블븜븝븟비빅빈빌빎빔빕빗빙빚빛빠빡빤"], +["bb41","틻",4,"팂팄팆",5,"팏팑팒팓팕팗",4,"팞팢팣"], +["bb61","팤팦팧팪팫팭팮팯팱",6,"팺팾",5,"퍆퍇퍈퍉"], +["bb81","퍊",31,"빨빪빰빱빳빴빵빻빼빽뺀뺄뺌뺍뺏뺐뺑뺘뺙뺨뻐뻑뻔뻗뻘뻠뻣뻤뻥뻬뼁뼈뼉뼘뼙뼛뼜뼝뽀뽁뽄뽈뽐뽑뽕뾔뾰뿅뿌뿍뿐뿔뿜뿟뿡쀼쁑쁘쁜쁠쁨쁩삐삑삔삘삠삡삣삥사삭삯산삳살삵삶삼삽삿샀상샅새색샌샐샘샙샛샜생샤"], +["bc41","퍪",17,"퍾퍿펁펂펃펅펆펇"], +["bc61","펈펉펊펋펎펒",5,"펚펛펝펞펟펡",6,"펪펬펮"], +["bc81","펯",4,"펵펶펷펹펺펻펽",6,"폆폇폊",5,"폑",5,"샥샨샬샴샵샷샹섀섄섈섐섕서",4,"섣설섦섧섬섭섯섰성섶세섹센셀셈셉셋셌셍셔셕션셜셤셥셧셨셩셰셴셸솅소속솎손솔솖솜솝솟송솥솨솩솬솰솽쇄쇈쇌쇔쇗쇘쇠쇤쇨쇰쇱쇳쇼쇽숀숄숌숍숏숑수숙순숟술숨숩숫숭"], +["bd41","폗폙",7,"폢폤",7,"폮폯폱폲폳폵폶폷"], +["bd61","폸폹폺폻폾퐀퐂",5,"퐉",13], +["bd81","퐗",5,"퐞",25,"숯숱숲숴쉈쉐쉑쉔쉘쉠쉥쉬쉭쉰쉴쉼쉽쉿슁슈슉슐슘슛슝스슥슨슬슭슴습슷승시식신싣실싫심십싯싱싶싸싹싻싼쌀쌈쌉쌌쌍쌓쌔쌕쌘쌜쌤쌥쌨쌩썅써썩썬썰썲썸썹썼썽쎄쎈쎌쏀쏘쏙쏜쏟쏠쏢쏨쏩쏭쏴쏵쏸쐈쐐쐤쐬쐰"], +["be41","퐸",7,"푁푂푃푅",14], +["be61","푔",7,"푝푞푟푡푢푣푥",7,"푮푰푱푲"], 
+["be81","푳",4,"푺푻푽푾풁풃",4,"풊풌풎",5,"풕",8,"쐴쐼쐽쑈쑤쑥쑨쑬쑴쑵쑹쒀쒔쒜쒸쒼쓩쓰쓱쓴쓸쓺쓿씀씁씌씐씔씜씨씩씬씰씸씹씻씽아악안앉않알앍앎앓암압앗았앙앝앞애액앤앨앰앱앳앴앵야약얀얄얇얌얍얏양얕얗얘얜얠얩어억언얹얻얼얽얾엄",6,"엌엎"], +["bf41","풞",10,"풪",14], +["bf61","풹",18,"퓍퓎퓏퓑퓒퓓퓕"], +["bf81","퓖",5,"퓝퓞퓠",7,"퓩퓪퓫퓭퓮퓯퓱",6,"퓹퓺퓼에엑엔엘엠엡엣엥여역엮연열엶엷염",5,"옅옆옇예옌옐옘옙옛옜오옥온올옭옮옰옳옴옵옷옹옻와왁완왈왐왑왓왔왕왜왝왠왬왯왱외왹왼욀욈욉욋욍요욕욘욜욤욥욧용우욱운울욹욺움웁웃웅워웍원월웜웝웠웡웨"], +["c041","퓾",5,"픅픆픇픉픊픋픍",6,"픖픘",5], +["c061","픞",25], +["c081","픸픹픺픻픾픿핁핂핃핅",6,"핎핐핒",5,"핚핛핝핞핟핡핢핣웩웬웰웸웹웽위윅윈윌윔윕윗윙유육윤율윰윱윳융윷으윽은을읊음읍읏응",7,"읜읠읨읫이익인일읽읾잃임입잇있잉잊잎자작잔잖잗잘잚잠잡잣잤장잦재잭잰잴잼잽잿쟀쟁쟈쟉쟌쟎쟐쟘쟝쟤쟨쟬저적전절젊"], +["c141","핤핦핧핪핬핮",5,"핶핷핹핺핻핽",6,"햆햊햋"], +["c161","햌햍햎햏햑",19,"햦햧"], +["c181","햨",31,"점접젓정젖제젝젠젤젬젭젯젱져젼졀졈졉졌졍졔조족존졸졺좀좁좃종좆좇좋좌좍좔좝좟좡좨좼좽죄죈죌죔죕죗죙죠죡죤죵주죽준줄줅줆줌줍줏중줘줬줴쥐쥑쥔쥘쥠쥡쥣쥬쥰쥴쥼즈즉즌즐즘즙즛증지직진짇질짊짐집짓"], +["c241","헊헋헍헎헏헑헓",4,"헚헜헞",5,"헦헧헩헪헫헭헮"], +["c261","헯",4,"헶헸헺",5,"혂혃혅혆혇혉",6,"혒"], +["c281","혖",5,"혝혞혟혡혢혣혥",7,"혮",9,"혺혻징짖짙짚짜짝짠짢짤짧짬짭짯짰짱째짹짼쨀쨈쨉쨋쨌쨍쨔쨘쨩쩌쩍쩐쩔쩜쩝쩟쩠쩡쩨쩽쪄쪘쪼쪽쫀쫄쫌쫍쫏쫑쫓쫘쫙쫠쫬쫴쬈쬐쬔쬘쬠쬡쭁쭈쭉쭌쭐쭘쭙쭝쭤쭸쭹쮜쮸쯔쯤쯧쯩찌찍찐찔찜찝찡찢찧차착찬찮찰참찹찻"], +["c341","혽혾혿홁홂홃홄홆홇홊홌홎홏홐홒홓홖홗홙홚홛홝",4], +["c361","홢",4,"홨홪",5,"홲홳홵",11], +["c381","횁횂횄횆",5,"횎횏횑횒횓횕",7,"횞횠횢",5,"횩횪찼창찾채책챈챌챔챕챗챘챙챠챤챦챨챰챵처척천철첨첩첫첬청체첵첸첼쳄쳅쳇쳉쳐쳔쳤쳬쳰촁초촉촌촐촘촙촛총촤촨촬촹최쵠쵤쵬쵭쵯쵱쵸춈추축춘출춤춥춧충춰췄췌췐취췬췰췸췹췻췽츄츈츌츔츙츠측츤츨츰츱츳층"], +["c441","횫횭횮횯횱",7,"횺횼",7,"훆훇훉훊훋"], +["c461","훍훎훏훐훒훓훕훖훘훚",5,"훡훢훣훥훦훧훩",4], +["c481","훮훯훱훲훳훴훶",5,"훾훿휁휂휃휅",11,"휒휓휔치칙친칟칠칡침칩칫칭카칵칸칼캄캅캇캉캐캑캔캘캠캡캣캤캥캬캭컁커컥컨컫컬컴컵컷컸컹케켁켄켈켐켑켓켕켜켠켤켬켭켯켰켱켸코콕콘콜콤콥콧콩콰콱콴콸쾀쾅쾌쾡쾨쾰쿄쿠쿡쿤쿨쿰쿱쿳쿵쿼퀀퀄퀑퀘퀭퀴퀵퀸퀼"], +["c541","휕휖휗휚휛휝휞휟휡",6,"휪휬휮",5,"휶휷휹"], +["c561","휺휻휽",6,"흅흆흈흊",5,"흒흓흕흚",4], +["c581","흟흢흤흦흧흨흪흫흭흮흯흱흲흳흵",6,"흾흿힀힂",5,"힊힋큄큅큇큉큐큔큘큠크큭큰클큼큽킁키킥킨킬킴킵킷킹타탁탄탈탉탐탑탓탔탕태택탠탤탬탭탯탰탱탸턍터턱턴털턺텀텁텃텄텅테텍텐텔템텝텟텡텨텬텼톄톈토톡톤톨톰톱톳통톺톼퇀퇘퇴퇸툇툉툐투툭툰툴툼툽툿퉁퉈퉜"], +["c641","힍힎힏힑",6,"힚힜힞",5], +["c6a1","퉤튀튁튄튈튐튑튕튜튠튤튬튱트특튼튿틀틂틈틉틋틔틘틜틤틥티틱틴틸팀팁팃팅파팍팎판팔팖팜팝팟팠팡팥패팩팬팰팸팹팻팼팽퍄퍅퍼퍽펀펄펌펍펏펐펑페펙펜펠펨펩펫펭펴편펼폄폅폈평폐폘폡폣포폭폰폴폼폽폿퐁"], +["c7a1","퐈퐝푀푄표푠푤푭푯푸푹푼푿풀풂품풉풋풍풔풩퓌퓐퓔퓜퓟퓨퓬퓰퓸퓻퓽프픈플픔픕픗피픽핀필핌핍핏핑하학한할핥함합핫항해핵핸핼햄햅햇했행햐향허헉헌헐헒험헙헛헝헤헥헨헬헴헵헷헹혀혁현혈혐협혓혔형혜혠"], +["c8a1","혤혭호혹혼홀홅홈홉홋홍홑화확환활홧황홰홱홴횃횅회획횐횔횝횟횡효횬횰횹횻후훅훈훌훑훔훗훙훠훤훨훰훵훼훽휀휄휑휘휙휜휠휨휩휫휭휴휵휸휼흄흇흉흐흑흔흖흗흘흙흠흡흣흥흩희흰흴흼흽힁히힉힌힐힘힙힛힝"], +["caa1","伽佳假價加可呵哥嘉嫁家暇架枷柯歌珂痂稼苛茄街袈訶賈跏軻迦駕刻却各恪慤殼珏脚覺角閣侃刊墾奸姦干幹懇揀杆柬桿澗癎看磵稈竿簡肝艮艱諫間乫喝曷渴碣竭葛褐蝎鞨勘坎堪嵌感憾戡敢柑橄減甘疳監瞰紺邯鑑鑒龕"], +["cba1","匣岬甲胛鉀閘剛堈姜岡崗康强彊慷江畺疆糠絳綱羌腔舡薑襁講鋼降鱇介价個凱塏愷愾慨改槪漑疥皆盖箇芥蓋豈鎧開喀客坑更粳羹醵倨去居巨拒据據擧渠炬祛距踞車遽鉅鋸乾件健巾建愆楗腱虔蹇鍵騫乞傑杰桀儉劍劒檢"], +["cca1","瞼鈐黔劫怯迲偈憩揭擊格檄激膈覡隔堅牽犬甄絹繭肩見譴遣鵑抉決潔結缺訣兼慊箝謙鉗鎌京俓倞傾儆勁勍卿坰境庚徑慶憬擎敬景暻更梗涇炅烱璟璥瓊痙硬磬竟競絅經耕耿脛莖警輕逕鏡頃頸驚鯨係啓堺契季屆悸戒桂械"], +["cda1","棨溪界癸磎稽系繫繼計誡谿階鷄古叩告呱固姑孤尻庫拷攷故敲暠枯槁沽痼皐睾稿羔考股膏苦苽菰藁蠱袴誥賈辜錮雇顧高鼓哭斛曲梏穀谷鵠困坤崑昆梱棍滾琨袞鯤汨滑骨供公共功孔工恐恭拱控攻珙空蚣貢鞏串寡戈果瓜"], +["cea1","科菓誇課跨過鍋顆廓槨藿郭串冠官寬慣棺款灌琯瓘管罐菅觀貫關館刮恝括适侊光匡壙廣曠洸炚狂珖筐胱鑛卦掛罫乖傀塊壞怪愧拐槐魁宏紘肱轟交僑咬喬嬌嶠巧攪敎校橋狡皎矯絞翹膠蕎蛟較轎郊餃驕鮫丘久九仇俱具勾"], +["cfa1","區口句咎嘔坵垢寇嶇廐懼拘救枸柩構歐毆毬求溝灸狗玖球瞿矩究絿耉臼舅舊苟衢謳購軀逑邱鉤銶駒驅鳩鷗龜國局菊鞠鞫麴君窘群裙軍郡堀屈掘窟宮弓穹窮芎躬倦券勸卷圈拳捲權淃眷厥獗蕨蹶闕机櫃潰詭軌饋句晷歸貴"], +["d0a1","鬼龜叫圭奎揆槻珪硅窺竅糾葵規赳逵閨勻均畇筠菌鈞龜橘克剋劇戟棘極隙僅劤勤懃斤根槿瑾筋芹菫覲謹近饉契今妗擒昑檎琴禁禽芩衾衿襟金錦伋及急扱汲級給亘兢矜肯企伎其冀嗜器圻基埼夔奇妓寄岐崎己幾忌技旗旣"], +["d1a1","朞期杞棋棄機欺氣汽沂淇玘琦琪璂璣畸畿碁磯祁祇祈祺箕紀綺羈耆耭肌記譏豈起錡錤飢饑騎騏驥麒緊佶吉拮桔金喫儺喇奈娜懦懶拏拿癩",5,"那樂",4,"諾酪駱亂卵暖欄煖爛蘭難鸞捏捺南嵐枏楠湳濫男藍襤拉"], +["d2a1","納臘蠟衲囊娘廊",4,"乃來內奈柰耐冷女年撚秊念恬拈捻寧寗努勞奴弩怒擄櫓爐瑙盧",5,"駑魯",10,"濃籠聾膿農惱牢磊腦賂雷尿壘",7,"嫩訥杻紐勒",5,"能菱陵尼泥匿溺多茶"], +["d3a1","丹亶但單團壇彖斷旦檀段湍短端簞緞蛋袒鄲鍛撻澾獺疸達啖坍憺擔曇淡湛潭澹痰聃膽蕁覃談譚錟沓畓答踏遝唐堂塘幢戇撞棠當糖螳黨代垈坮大對岱帶待戴擡玳臺袋貸隊黛宅德悳倒刀到圖堵塗導屠島嶋度徒悼挑掉搗桃"], +["d4a1","棹櫂淘渡滔濤燾盜睹禱稻萄覩賭跳蹈逃途道都鍍陶韜毒瀆牘犢獨督禿篤纛讀墩惇敦旽暾沌焞燉豚頓乭突仝冬凍動同憧東桐棟洞潼疼瞳童胴董銅兜斗杜枓痘竇荳讀豆逗頭屯臀芚遁遯鈍得嶝橙燈登等藤謄鄧騰喇懶拏癩羅"], +["d5a1","蘿螺裸邏樂洛烙珞絡落諾酪駱丹亂卵欄欒瀾爛蘭鸞剌辣嵐擥攬欖濫籃纜藍襤覽拉臘蠟廊朗浪狼琅瑯螂郞來崍徠萊冷掠略亮倆兩凉梁樑粮粱糧良諒輛量侶儷勵呂廬慮戾旅櫚濾礪藜蠣閭驢驪麗黎力曆歷瀝礫轢靂憐戀攣漣"], +["d6a1","煉璉練聯蓮輦連鍊冽列劣洌烈裂廉斂殮濂簾獵令伶囹寧岺嶺怜玲笭羚翎聆逞鈴零靈領齡例澧禮醴隷勞怒撈擄櫓潞瀘爐盧老蘆虜路輅露魯鷺鹵碌祿綠菉錄鹿麓論壟弄朧瀧瓏籠聾儡瀨牢磊賂賚賴雷了僚寮廖料燎療瞭聊蓼"], +["d7a1","遼鬧龍壘婁屢樓淚漏瘻累縷蔞褸鏤陋劉旒柳榴流溜瀏琉瑠留瘤硫謬類六戮陸侖倫崙淪綸輪律慄栗率隆勒肋凜凌楞稜綾菱陵俚利厘吏唎履悧李梨浬犁狸理璃異痢籬罹羸莉裏裡里釐離鯉吝潾燐璘藺躪隣鱗麟林淋琳臨霖砬"], +["d8a1","立笠粒摩瑪痲碼磨馬魔麻寞幕漠膜莫邈万卍娩巒彎慢挽晩曼滿漫灣瞞萬蔓蠻輓饅鰻唜抹末沫茉襪靺亡妄忘忙望網罔芒茫莽輞邙埋妹媒寐昧枚梅每煤罵買賣邁魅脈貊陌驀麥孟氓猛盲盟萌冪覓免冕勉棉沔眄眠綿緬面麵滅"], 
+["d9a1","蔑冥名命明暝椧溟皿瞑茗蓂螟酩銘鳴袂侮冒募姆帽慕摸摹暮某模母毛牟牡瑁眸矛耗芼茅謀謨貌木沐牧目睦穆鶩歿沒夢朦蒙卯墓妙廟描昴杳渺猫竗苗錨務巫憮懋戊拇撫无楙武毋無珷畝繆舞茂蕪誣貿霧鵡墨默們刎吻問文"], +["daa1","汶紊紋聞蚊門雯勿沕物味媚尾嵋彌微未梶楣渼湄眉米美薇謎迷靡黴岷悶愍憫敏旻旼民泯玟珉緡閔密蜜謐剝博拍搏撲朴樸泊珀璞箔粕縛膊舶薄迫雹駁伴半反叛拌搬攀斑槃泮潘班畔瘢盤盼磐磻礬絆般蟠返頒飯勃拔撥渤潑"], +["dba1","發跋醱鉢髮魃倣傍坊妨尨幇彷房放方旁昉枋榜滂磅紡肪膀舫芳蒡蚌訪謗邦防龐倍俳北培徘拜排杯湃焙盃背胚裴裵褙賠輩配陪伯佰帛柏栢白百魄幡樊煩燔番磻繁蕃藩飜伐筏罰閥凡帆梵氾汎泛犯範范法琺僻劈壁擘檗璧癖"], +["dca1","碧蘗闢霹便卞弁變辨辯邊別瞥鱉鼈丙倂兵屛幷昞昺柄棅炳甁病秉竝輧餠騈保堡報寶普步洑湺潽珤甫菩補褓譜輔伏僕匐卜宓復服福腹茯蔔複覆輹輻馥鰒本乶俸奉封峯峰捧棒烽熢琫縫蓬蜂逢鋒鳳不付俯傅剖副否咐埠夫婦"], +["dda1","孚孵富府復扶敷斧浮溥父符簿缶腐腑膚艀芙莩訃負賦賻赴趺部釜阜附駙鳧北分吩噴墳奔奮忿憤扮昐汾焚盆粉糞紛芬賁雰不佛弗彿拂崩朋棚硼繃鵬丕備匕匪卑妃婢庇悲憊扉批斐枇榧比毖毗毘沸泌琵痺砒碑秕秘粃緋翡肥"], +["dea1","脾臂菲蜚裨誹譬費鄙非飛鼻嚬嬪彬斌檳殯浜濱瀕牝玭貧賓頻憑氷聘騁乍事些仕伺似使俟僿史司唆嗣四士奢娑寫寺射巳師徙思捨斜斯柶査梭死沙泗渣瀉獅砂社祀祠私篩紗絲肆舍莎蓑蛇裟詐詞謝賜赦辭邪飼駟麝削數朔索"], +["dfa1","傘刪山散汕珊産疝算蒜酸霰乷撒殺煞薩三參杉森渗芟蔘衫揷澁鈒颯上傷像償商喪嘗孀尙峠常床庠廂想桑橡湘爽牀狀相祥箱翔裳觴詳象賞霜塞璽賽嗇塞穡索色牲生甥省笙墅壻嶼序庶徐恕抒捿敍暑曙書栖棲犀瑞筮絮緖署"], +["e0a1","胥舒薯西誓逝鋤黍鼠夕奭席惜昔晳析汐淅潟石碩蓆釋錫仙僊先善嬋宣扇敾旋渲煽琁瑄璇璿癬禪線繕羨腺膳船蘚蟬詵跣選銑鐥饍鮮卨屑楔泄洩渫舌薛褻設說雪齧剡暹殲纖蟾贍閃陝攝涉燮葉城姓宬性惺成星晟猩珹盛省筬"], +["e1a1","聖聲腥誠醒世勢歲洗稅笹細說貰召嘯塑宵小少巢所掃搔昭梳沼消溯瀟炤燒甦疏疎瘙笑篠簫素紹蔬蕭蘇訴逍遡邵銷韶騷俗屬束涑粟續謖贖速孫巽損蓀遜飡率宋悚松淞訟誦送頌刷殺灑碎鎖衰釗修受嗽囚垂壽嫂守岫峀帥愁"], +["e2a1","戍手授搜收數樹殊水洙漱燧狩獸琇璲瘦睡秀穗竪粹綏綬繡羞脩茱蒐蓚藪袖誰讐輸遂邃酬銖銹隋隧隨雖需須首髓鬚叔塾夙孰宿淑潚熟琡璹肅菽巡徇循恂旬栒楯橓殉洵淳珣盾瞬筍純脣舜荀蓴蕣詢諄醇錞順馴戌術述鉥崇崧"], +["e3a1","嵩瑟膝蝨濕拾習褶襲丞乘僧勝升承昇繩蠅陞侍匙嘶始媤尸屎屍市弑恃施是時枾柴猜矢示翅蒔蓍視試詩諡豕豺埴寔式息拭植殖湜熄篒蝕識軾食飾伸侁信呻娠宸愼新晨燼申神紳腎臣莘薪藎蜃訊身辛辰迅失室實悉審尋心沁"], +["e4a1","沈深瀋甚芯諶什十拾雙氏亞俄兒啞娥峨我牙芽莪蛾衙訝阿雅餓鴉鵝堊岳嶽幄惡愕握樂渥鄂鍔顎鰐齷安岸按晏案眼雁鞍顔鮟斡謁軋閼唵岩巖庵暗癌菴闇壓押狎鴨仰央怏昻殃秧鴦厓哀埃崖愛曖涯碍艾隘靄厄扼掖液縊腋額"], +["e5a1","櫻罌鶯鸚也倻冶夜惹揶椰爺耶若野弱掠略約若葯蒻藥躍亮佯兩凉壤孃恙揚攘敭暘梁楊樣洋瀁煬痒瘍禳穰糧羊良襄諒讓釀陽量養圄御於漁瘀禦語馭魚齬億憶抑檍臆偃堰彦焉言諺孼蘖俺儼嚴奄掩淹嶪業円予余勵呂女如廬"], +["e6a1","旅歟汝濾璵礖礪與艅茹輿轝閭餘驪麗黎亦力域役易曆歷疫繹譯轢逆驛嚥堧姸娟宴年延憐戀捐挻撚椽沇沿涎涓淵演漣烟然煙煉燃燕璉硏硯秊筵緣練縯聯衍軟輦蓮連鉛鍊鳶列劣咽悅涅烈熱裂說閱厭廉念捻染殮炎焰琰艶苒"], +["e7a1","簾閻髥鹽曄獵燁葉令囹塋寧嶺嶸影怜映暎楹榮永泳渶潁濚瀛瀯煐營獰玲瑛瑩瓔盈穎纓羚聆英詠迎鈴鍈零霙靈領乂倪例刈叡曳汭濊猊睿穢芮藝蘂禮裔詣譽豫醴銳隸霓預五伍俉傲午吾吳嗚塢墺奧娛寤悟惡懊敖旿晤梧汚澳"], +["e8a1","烏熬獒筽蜈誤鰲鼇屋沃獄玉鈺溫瑥瘟穩縕蘊兀壅擁瓮甕癰翁邕雍饔渦瓦窩窪臥蛙蝸訛婉完宛梡椀浣玩琓琬碗緩翫脘腕莞豌阮頑曰往旺枉汪王倭娃歪矮外嵬巍猥畏了僚僥凹堯夭妖姚寥寮尿嶢拗搖撓擾料曜樂橈燎燿瑤療"], +["e9a1","窈窯繇繞耀腰蓼蟯要謠遙遼邀饒慾欲浴縟褥辱俑傭冗勇埇墉容庸慂榕涌湧溶熔瑢用甬聳茸蓉踊鎔鏞龍于佑偶優又友右宇寓尤愚憂旴牛玗瑀盂祐禑禹紆羽芋藕虞迂遇郵釪隅雨雩勖彧旭昱栯煜稶郁頊云暈橒殞澐熉耘芸蕓"], +["eaa1","運隕雲韻蔚鬱亐熊雄元原員圓園垣媛嫄寃怨愿援沅洹湲源爰猿瑗苑袁轅遠阮院願鴛月越鉞位偉僞危圍委威尉慰暐渭爲瑋緯胃萎葦蔿蝟衛褘謂違韋魏乳侑儒兪劉唯喩孺宥幼幽庾悠惟愈愉揄攸有杻柔柚柳楡楢油洧流游溜"], +["eba1","濡猶猷琉瑜由留癒硫紐維臾萸裕誘諛諭踰蹂遊逾遺酉釉鍮類六堉戮毓肉育陸倫允奫尹崙淪潤玧胤贇輪鈗閏律慄栗率聿戎瀜絨融隆垠恩慇殷誾銀隱乙吟淫蔭陰音飮揖泣邑凝應膺鷹依倚儀宜意懿擬椅毅疑矣義艤薏蟻衣誼"], +["eca1","議醫二以伊利吏夷姨履已弛彛怡易李梨泥爾珥理異痍痢移罹而耳肄苡荑裏裡貽貳邇里離飴餌匿溺瀷益翊翌翼謚人仁刃印吝咽因姻寅引忍湮燐璘絪茵藺蚓認隣靭靷鱗麟一佚佾壹日溢逸鎰馹任壬妊姙恁林淋稔臨荏賃入卄"], +["eda1","立笠粒仍剩孕芿仔刺咨姉姿子字孜恣慈滋炙煮玆瓷疵磁紫者自茨蔗藉諮資雌作勺嚼斫昨灼炸爵綽芍酌雀鵲孱棧殘潺盞岑暫潛箴簪蠶雜丈仗匠場墻壯奬將帳庄張掌暲杖樟檣欌漿牆狀獐璋章粧腸臟臧莊葬蔣薔藏裝贓醬長"], +["eea1","障再哉在宰才材栽梓渽滓災縡裁財載齋齎爭箏諍錚佇低儲咀姐底抵杵楮樗沮渚狙猪疽箸紵苧菹著藷詛貯躇這邸雎齟勣吊嫡寂摘敵滴狄炙的積笛籍績翟荻謫賊赤跡蹟迪迹適鏑佃佺傳全典前剪塡塼奠專展廛悛戰栓殿氈澱"], +["efa1","煎琠田甸畑癲筌箋箭篆纏詮輾轉鈿銓錢鐫電顚顫餞切截折浙癤竊節絶占岾店漸点粘霑鮎點接摺蝶丁井亭停偵呈姃定幀庭廷征情挺政整旌晶晸柾楨檉正汀淀淨渟湞瀞炡玎珽町睛碇禎程穽精綎艇訂諪貞鄭酊釘鉦鋌錠霆靖"], +["f0a1","靜頂鼎制劑啼堤帝弟悌提梯濟祭第臍薺製諸蹄醍除際霽題齊俎兆凋助嘲弔彫措操早晁曺曹朝條棗槽漕潮照燥爪璪眺祖祚租稠窕粗糟組繰肇藻蚤詔調趙躁造遭釣阻雕鳥族簇足鏃存尊卒拙猝倧宗從悰慫棕淙琮種終綜縱腫"], +["f1a1","踪踵鍾鐘佐坐左座挫罪主住侏做姝胄呪周嗾奏宙州廚晝朱柱株注洲湊澍炷珠疇籌紂紬綢舟蛛註誅走躊輳週酎酒鑄駐竹粥俊儁准埈寯峻晙樽浚準濬焌畯竣蠢逡遵雋駿茁中仲衆重卽櫛楫汁葺增憎曾拯烝甑症繒蒸證贈之只"], +["f2a1","咫地址志持指摯支旨智枝枳止池沚漬知砥祉祗紙肢脂至芝芷蜘誌識贄趾遲直稙稷織職唇嗔塵振搢晉晋桭榛殄津溱珍瑨璡畛疹盡眞瞋秦縉縝臻蔯袗診賑軫辰進鎭陣陳震侄叱姪嫉帙桎瓆疾秩窒膣蛭質跌迭斟朕什執潗緝輯"], +["f3a1","鏶集徵懲澄且侘借叉嗟嵯差次此磋箚茶蹉車遮捉搾着窄錯鑿齪撰澯燦璨瓚竄簒纂粲纘讚贊鑽餐饌刹察擦札紮僭參塹慘慙懺斬站讒讖倉倡創唱娼廠彰愴敞昌昶暢槍滄漲猖瘡窓脹艙菖蒼債埰寀寨彩採砦綵菜蔡采釵冊柵策"], +["f4a1","責凄妻悽處倜刺剔尺慽戚拓擲斥滌瘠脊蹠陟隻仟千喘天川擅泉淺玔穿舛薦賤踐遷釧闡阡韆凸哲喆徹撤澈綴輟轍鐵僉尖沾添甛瞻簽籤詹諂堞妾帖捷牒疊睫諜貼輒廳晴淸聽菁請靑鯖切剃替涕滯締諦逮遞體初剿哨憔抄招梢"], +["f5a1","椒楚樵炒焦硝礁礎秒稍肖艸苕草蕉貂超酢醋醮促囑燭矗蜀觸寸忖村邨叢塚寵悤憁摠總聰蔥銃撮催崔最墜抽推椎楸樞湫皺秋芻萩諏趨追鄒酋醜錐錘鎚雛騶鰍丑畜祝竺筑築縮蓄蹙蹴軸逐春椿瑃出朮黜充忠沖蟲衝衷悴膵萃"], +["f6a1","贅取吹嘴娶就炊翠聚脆臭趣醉驟鷲側仄厠惻測層侈値嗤峙幟恥梔治淄熾痔痴癡稚穉緇緻置致蚩輜雉馳齒則勅飭親七柒漆侵寢枕沈浸琛砧針鍼蟄秤稱快他咤唾墮妥惰打拖朶楕舵陀馱駝倬卓啄坼度托拓擢晫柝濁濯琢琸託"], +["f7a1","鐸呑嘆坦彈憚歎灘炭綻誕奪脫探眈耽貪塔搭榻宕帑湯糖蕩兌台太怠態殆汰泰笞胎苔跆邰颱宅擇澤撑攄兎吐土討慟桶洞痛筒統通堆槌腿褪退頹偸套妬投透鬪慝特闖坡婆巴把播擺杷波派爬琶破罷芭跛頗判坂板版瓣販辦鈑"], +["f8a1","阪八叭捌佩唄悖敗沛浿牌狽稗覇貝彭澎烹膨愎便偏扁片篇編翩遍鞭騙貶坪平枰萍評吠嬖幣廢弊斃肺蔽閉陛佈包匍匏咆哺圃布怖抛抱捕暴泡浦疱砲胞脯苞葡蒲袍褒逋鋪飽鮑幅暴曝瀑爆輻俵剽彪慓杓標漂瓢票表豹飇飄驃"], 
+["f9a1","品稟楓諷豊風馮彼披疲皮被避陂匹弼必泌珌畢疋筆苾馝乏逼下何厦夏廈昰河瑕荷蝦賀遐霞鰕壑學虐謔鶴寒恨悍旱汗漢澣瀚罕翰閑閒限韓割轄函含咸啣喊檻涵緘艦銜陷鹹合哈盒蛤閤闔陜亢伉姮嫦巷恒抗杭桁沆港缸肛航"], +["faa1","行降項亥偕咳垓奚孩害懈楷海瀣蟹解該諧邂駭骸劾核倖幸杏荇行享向嚮珦鄕響餉饗香噓墟虛許憲櫶獻軒歇險驗奕爀赫革俔峴弦懸晛泫炫玄玹現眩睍絃絢縣舷衒見賢鉉顯孑穴血頁嫌俠協夾峽挾浹狹脅脇莢鋏頰亨兄刑型"], +["fba1","形泂滎瀅灐炯熒珩瑩荊螢衡逈邢鎣馨兮彗惠慧暳蕙蹊醯鞋乎互呼壕壺好岵弧戶扈昊晧毫浩淏湖滸澔濠濩灝狐琥瑚瓠皓祜糊縞胡芦葫蒿虎號蝴護豪鎬頀顥惑或酷婚昏混渾琿魂忽惚笏哄弘汞泓洪烘紅虹訌鴻化和嬅樺火畵"], +["fca1","禍禾花華話譁貨靴廓擴攫確碻穫丸喚奐宦幻患換歡晥桓渙煥環紈還驩鰥活滑猾豁闊凰幌徨恍惶愰慌晃晄榥況湟滉潢煌璜皇篁簧荒蝗遑隍黃匯回廻徊恢悔懷晦會檜淮澮灰獪繪膾茴蛔誨賄劃獲宖橫鐄哮嚆孝效斅曉梟涍淆"], +["fda1","爻肴酵驍侯候厚后吼喉嗅帿後朽煦珝逅勛勳塤壎焄熏燻薰訓暈薨喧暄煊萱卉喙毁彙徽揮暉煇諱輝麾休携烋畦虧恤譎鷸兇凶匈洶胸黑昕欣炘痕吃屹紇訖欠欽歆吸恰洽翕興僖凞喜噫囍姬嬉希憙憘戱晞曦熙熹熺犧禧稀羲詰"] +] diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/tables/cp950.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/tables/cp950.json new file mode 100644 index 0000000000000000000000000000000000000000..d8bc87178dd38fca1829b9e2109c6f71e9721bdf --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/tables/cp950.json @@ -0,0 +1,177 @@ +[ +["0","\u0000",127], +["a140"," ,、。.‧;:?!︰…‥﹐﹑﹒·﹔﹕﹖﹗|–︱—︳╴︴﹏()︵︶{}︷︸〔〕︹︺【】︻︼《》︽︾〈〉︿﹀「」﹁﹂『』﹃﹄﹙﹚"], +["a1a1","﹛﹜﹝﹞‘’“”〝〞‵′#&*※§〃○●△▲◎☆★◇◆□■▽▼㊣℅¯ ̄_ˍ﹉﹊﹍﹎﹋﹌﹟﹠﹡+-×÷±√<>=≦≧≠∞≒≡﹢",4,"~∩∪⊥∠∟⊿㏒㏑∫∮∵∴♀♂⊕⊙↑↓←→↖↗↙↘∥∣/"], +["a240","\∕﹨$¥〒¢£%@℃℉﹩﹪﹫㏕㎜㎝㎞㏎㎡㎎㎏㏄°兙兛兞兝兡兣嗧瓩糎▁",7,"▏▎▍▌▋▊▉┼┴┬┤├▔─│▕┌┐└┘╭"], +["a2a1","╮╰╯═╞╪╡◢◣◥◤╱╲╳0",9,"Ⅰ",9,"〡",8,"十卄卅A",25,"a",21], +["a340","wxyzΑ",16,"Σ",6,"α",16,"σ",6,"ㄅ",10], +["a3a1","ㄐ",25,"˙ˉˊˇˋ"], +["a3e1","€"], +["a440","一乙丁七乃九了二人儿入八几刀刁力匕十卜又三下丈上丫丸凡久么也乞于亡兀刃勺千叉口土士夕大女子孑孓寸小尢尸山川工己已巳巾干廾弋弓才"], +["a4a1","丑丐不中丰丹之尹予云井互五亢仁什仃仆仇仍今介仄元允內六兮公冗凶分切刈勻勾勿化匹午升卅卞厄友及反壬天夫太夭孔少尤尺屯巴幻廿弔引心戈戶手扎支文斗斤方日曰月木欠止歹毋比毛氏水火爪父爻片牙牛犬王丙"], +["a540","世丕且丘主乍乏乎以付仔仕他仗代令仙仞充兄冉冊冬凹出凸刊加功包匆北匝仟半卉卡占卯卮去可古右召叮叩叨叼司叵叫另只史叱台句叭叻四囚外"], +["a5a1","央失奴奶孕它尼巨巧左市布平幼弁弘弗必戊打扔扒扑斥旦朮本未末札正母民氐永汁汀氾犯玄玉瓜瓦甘生用甩田由甲申疋白皮皿目矛矢石示禾穴立丞丟乒乓乩亙交亦亥仿伉伙伊伕伍伐休伏仲件任仰仳份企伋光兇兆先全"], +["a640","共再冰列刑划刎刖劣匈匡匠印危吉吏同吊吐吁吋各向名合吃后吆吒因回囝圳地在圭圬圯圩夙多夷夸妄奸妃好她如妁字存宇守宅安寺尖屹州帆并年"], +["a6a1","式弛忙忖戎戌戍成扣扛托收早旨旬旭曲曳有朽朴朱朵次此死氖汝汗汙江池汐汕污汛汍汎灰牟牝百竹米糸缶羊羽老考而耒耳聿肉肋肌臣自至臼舌舛舟艮色艾虫血行衣西阡串亨位住佇佗佞伴佛何估佐佑伽伺伸佃佔似但佣"], +["a740","作你伯低伶余佝佈佚兌克免兵冶冷別判利刪刨劫助努劬匣即卵吝吭吞吾否呎吧呆呃吳呈呂君吩告吹吻吸吮吵吶吠吼呀吱含吟听囪困囤囫坊坑址坍"], +["a7a1","均坎圾坐坏圻壯夾妝妒妨妞妣妙妖妍妤妓妊妥孝孜孚孛完宋宏尬局屁尿尾岐岑岔岌巫希序庇床廷弄弟彤形彷役忘忌志忍忱快忸忪戒我抄抗抖技扶抉扭把扼找批扳抒扯折扮投抓抑抆改攻攸旱更束李杏材村杜杖杞杉杆杠"], +["a840","杓杗步每求汞沙沁沈沉沅沛汪決沐汰沌汨沖沒汽沃汲汾汴沆汶沍沔沘沂灶灼災灸牢牡牠狄狂玖甬甫男甸皂盯矣私秀禿究系罕肖肓肝肘肛肚育良芒"], +["a8a1","芋芍見角言谷豆豕貝赤走足身車辛辰迂迆迅迄巡邑邢邪邦那酉釆里防阮阱阪阬並乖乳事些亞享京佯依侍佳使佬供例來侃佰併侈佩佻侖佾侏侑佺兔兒兕兩具其典冽函刻券刷刺到刮制剁劾劻卒協卓卑卦卷卸卹取叔受味呵"], +["a940","咖呸咕咀呻呷咄咒咆呼咐呱呶和咚呢周咋命咎固垃坷坪坩坡坦坤坼夜奉奇奈奄奔妾妻委妹妮姑姆姐姍始姓姊妯妳姒姅孟孤季宗定官宜宙宛尚屈居"], +["a9a1","屆岷岡岸岩岫岱岳帘帚帖帕帛帑幸庚店府底庖延弦弧弩往征彿彼忝忠忽念忿怏怔怯怵怖怪怕怡性怩怫怛或戕房戾所承拉拌拄抿拂抹拒招披拓拔拋拈抨抽押拐拙拇拍抵拚抱拘拖拗拆抬拎放斧於旺昔易昌昆昂明昀昏昕昊"], +["aa40","昇服朋杭枋枕東果杳杷枇枝林杯杰板枉松析杵枚枓杼杪杲欣武歧歿氓氛泣注泳沱泌泥河沽沾沼波沫法泓沸泄油況沮泗泅泱沿治泡泛泊沬泯泜泖泠"], +["aaa1","炕炎炒炊炙爬爭爸版牧物狀狎狙狗狐玩玨玟玫玥甽疝疙疚的盂盲直知矽社祀祁秉秈空穹竺糾罔羌羋者肺肥肢肱股肫肩肴肪肯臥臾舍芳芝芙芭芽芟芹花芬芥芯芸芣芰芾芷虎虱初表軋迎返近邵邸邱邶采金長門阜陀阿阻附"], +["ab40","陂隹雨青非亟亭亮信侵侯便俠俑俏保促侶俘俟俊俗侮俐俄係俚俎俞侷兗冒冑冠剎剃削前剌剋則勇勉勃勁匍南卻厚叛咬哀咨哎哉咸咦咳哇哂咽咪品"], +["aba1","哄哈咯咫咱咻咩咧咿囿垂型垠垣垢城垮垓奕契奏奎奐姜姘姿姣姨娃姥姪姚姦威姻孩宣宦室客宥封屎屏屍屋峙峒巷帝帥帟幽庠度建弈弭彥很待徊律徇後徉怒思怠急怎怨恍恰恨恢恆恃恬恫恪恤扁拜挖按拼拭持拮拽指拱拷"], +["ac40","拯括拾拴挑挂政故斫施既春昭映昧是星昨昱昤曷柿染柱柔某柬架枯柵柩柯柄柑枴柚查枸柏柞柳枰柙柢柝柒歪殃殆段毒毗氟泉洋洲洪流津洌洱洞洗"], +["aca1","活洽派洶洛泵洹洧洸洩洮洵洎洫炫為炳炬炯炭炸炮炤爰牲牯牴狩狠狡玷珊玻玲珍珀玳甚甭畏界畎畋疫疤疥疢疣癸皆皇皈盈盆盃盅省盹相眉看盾盼眇矜砂研砌砍祆祉祈祇禹禺科秒秋穿突竿竽籽紂紅紀紉紇約紆缸美羿耄"], +["ad40","耐耍耑耶胖胥胚胃胄背胡胛胎胞胤胝致舢苧范茅苣苛苦茄若茂茉苒苗英茁苜苔苑苞苓苟苯茆虐虹虻虺衍衫要觔計訂訃貞負赴赳趴軍軌述迦迢迪迥"], +["ada1","迭迫迤迨郊郎郁郃酋酊重閂限陋陌降面革韋韭音頁風飛食首香乘亳倌倍倣俯倦倥俸倩倖倆值借倚倒們俺倀倔倨俱倡個候倘俳修倭倪俾倫倉兼冤冥冢凍凌准凋剖剜剔剛剝匪卿原厝叟哨唐唁唷哼哥哲唆哺唔哩哭員唉哮哪"], +["ae40","哦唧唇哽唏圃圄埂埔埋埃堉夏套奘奚娑娘娜娟娛娓姬娠娣娩娥娌娉孫屘宰害家宴宮宵容宸射屑展屐峭峽峻峪峨峰島崁峴差席師庫庭座弱徒徑徐恙"], +["aea1","恣恥恐恕恭恩息悄悟悚悍悔悌悅悖扇拳挈拿捎挾振捕捂捆捏捉挺捐挽挪挫挨捍捌效敉料旁旅時晉晏晃晒晌晅晁書朔朕朗校核案框桓根桂桔栩梳栗桌桑栽柴桐桀格桃株桅栓栘桁殊殉殷氣氧氨氦氤泰浪涕消涇浦浸海浙涓"], 
+["af40","浬涉浮浚浴浩涌涊浹涅浥涔烊烘烤烙烈烏爹特狼狹狽狸狷玆班琉珮珠珪珞畔畝畜畚留疾病症疲疳疽疼疹痂疸皋皰益盍盎眩真眠眨矩砰砧砸砝破砷"], +["afa1","砥砭砠砟砲祕祐祠祟祖神祝祗祚秤秣秧租秦秩秘窄窈站笆笑粉紡紗紋紊素索純紐紕級紜納紙紛缺罟羔翅翁耆耘耕耙耗耽耿胱脂胰脅胭胴脆胸胳脈能脊胼胯臭臬舀舐航舫舨般芻茫荒荔荊茸荐草茵茴荏茲茹茶茗荀茱茨荃"], +["b040","虔蚊蚪蚓蚤蚩蚌蚣蚜衰衷袁袂衽衹記訐討訌訕訊託訓訖訏訑豈豺豹財貢起躬軒軔軏辱送逆迷退迺迴逃追逅迸邕郡郝郢酒配酌釘針釗釜釙閃院陣陡"], +["b0a1","陛陝除陘陞隻飢馬骨高鬥鬲鬼乾偺偽停假偃偌做偉健偶偎偕偵側偷偏倏偯偭兜冕凰剪副勒務勘動匐匏匙匿區匾參曼商啪啦啄啞啡啃啊唱啖問啕唯啤唸售啜唬啣唳啁啗圈國圉域堅堊堆埠埤基堂堵執培夠奢娶婁婉婦婪婀"], +["b140","娼婢婚婆婊孰寇寅寄寂宿密尉專將屠屜屝崇崆崎崛崖崢崑崩崔崙崤崧崗巢常帶帳帷康庸庶庵庾張強彗彬彩彫得徙從徘御徠徜恿患悉悠您惋悴惦悽"], +["b1a1","情悻悵惜悼惘惕惆惟悸惚惇戚戛扈掠控捲掖探接捷捧掘措捱掩掉掃掛捫推掄授掙採掬排掏掀捻捩捨捺敝敖救教敗啟敏敘敕敔斜斛斬族旋旌旎晝晚晤晨晦晞曹勗望梁梯梢梓梵桿桶梱梧梗械梃棄梭梆梅梔條梨梟梡梂欲殺"], +["b240","毫毬氫涎涼淳淙液淡淌淤添淺清淇淋涯淑涮淞淹涸混淵淅淒渚涵淚淫淘淪深淮淨淆淄涪淬涿淦烹焉焊烽烯爽牽犁猜猛猖猓猙率琅琊球理現琍瓠瓶"], +["b2a1","瓷甜產略畦畢異疏痔痕疵痊痍皎盔盒盛眷眾眼眶眸眺硫硃硎祥票祭移窒窕笠笨笛第符笙笞笮粒粗粕絆絃統紮紹紼絀細紳組累終紲紱缽羞羚翌翎習耜聊聆脯脖脣脫脩脰脤舂舵舷舶船莎莞莘荸莢莖莽莫莒莊莓莉莠荷荻荼"], +["b340","莆莧處彪蛇蛀蚶蛄蚵蛆蛋蚱蚯蛉術袞袈被袒袖袍袋覓規訪訝訣訥許設訟訛訢豉豚販責貫貨貪貧赧赦趾趺軛軟這逍通逗連速逝逐逕逞造透逢逖逛途"], +["b3a1","部郭都酗野釵釦釣釧釭釩閉陪陵陳陸陰陴陶陷陬雀雪雩章竟頂頃魚鳥鹵鹿麥麻傢傍傅備傑傀傖傘傚最凱割剴創剩勞勝勛博厥啻喀喧啼喊喝喘喂喜喪喔喇喋喃喳單喟唾喲喚喻喬喱啾喉喫喙圍堯堪場堤堰報堡堝堠壹壺奠"], +["b440","婷媚婿媒媛媧孳孱寒富寓寐尊尋就嵌嵐崴嵇巽幅帽幀幃幾廊廁廂廄弼彭復循徨惑惡悲悶惠愜愣惺愕惰惻惴慨惱愎惶愉愀愒戟扉掣掌描揀揩揉揆揍"], +["b4a1","插揣提握揖揭揮捶援揪換摒揚揹敞敦敢散斑斐斯普晰晴晶景暑智晾晷曾替期朝棺棕棠棘棗椅棟棵森棧棹棒棲棣棋棍植椒椎棉棚楮棻款欺欽殘殖殼毯氮氯氬港游湔渡渲湧湊渠渥渣減湛湘渤湖湮渭渦湯渴湍渺測湃渝渾滋"], +["b540","溉渙湎湣湄湲湩湟焙焚焦焰無然煮焜牌犄犀猶猥猴猩琺琪琳琢琥琵琶琴琯琛琦琨甥甦畫番痢痛痣痙痘痞痠登發皖皓皴盜睏短硝硬硯稍稈程稅稀窘"], +["b5a1","窗窖童竣等策筆筐筒答筍筋筏筑粟粥絞結絨絕紫絮絲絡給絢絰絳善翔翕耋聒肅腕腔腋腑腎脹腆脾腌腓腴舒舜菩萃菸萍菠菅萋菁華菱菴著萊菰萌菌菽菲菊萸萎萄菜萇菔菟虛蛟蛙蛭蛔蛛蛤蛐蛞街裁裂袱覃視註詠評詞証詁"], +["b640","詔詛詐詆訴診訶詖象貂貯貼貳貽賁費賀貴買貶貿貸越超趁跎距跋跚跑跌跛跆軻軸軼辜逮逵週逸進逶鄂郵鄉郾酣酥量鈔鈕鈣鈉鈞鈍鈐鈇鈑閔閏開閑"], +["b6a1","間閒閎隊階隋陽隅隆隍陲隄雁雅雄集雇雯雲韌項順須飧飪飯飩飲飭馮馭黃黍黑亂傭債傲傳僅傾催傷傻傯僇剿剷剽募勦勤勢勣匯嗟嗨嗓嗦嗎嗜嗇嗑嗣嗤嗯嗚嗡嗅嗆嗥嗉園圓塞塑塘塗塚塔填塌塭塊塢塒塋奧嫁嫉嫌媾媽媼"], +["b740","媳嫂媲嵩嵯幌幹廉廈弒彙徬微愚意慈感想愛惹愁愈慎慌慄慍愾愴愧愍愆愷戡戢搓搾搞搪搭搽搬搏搜搔損搶搖搗搆敬斟新暗暉暇暈暖暄暘暍會榔業"], +["b7a1","楚楷楠楔極椰概楊楨楫楞楓楹榆楝楣楛歇歲毀殿毓毽溢溯滓溶滂源溝滇滅溥溘溼溺溫滑準溜滄滔溪溧溴煎煙煩煤煉照煜煬煦煌煥煞煆煨煖爺牒猷獅猿猾瑯瑚瑕瑟瑞瑁琿瑙瑛瑜當畸瘀痰瘁痲痱痺痿痴痳盞盟睛睫睦睞督"], +["b840","睹睪睬睜睥睨睢矮碎碰碗碘碌碉硼碑碓硿祺祿禁萬禽稜稚稠稔稟稞窟窠筷節筠筮筧粱粳粵經絹綑綁綏絛置罩罪署義羨群聖聘肆肄腱腰腸腥腮腳腫"], +["b8a1","腹腺腦舅艇蒂葷落萱葵葦葫葉葬葛萼萵葡董葩葭葆虞虜號蛹蜓蜈蜇蜀蛾蛻蜂蜃蜆蜊衙裟裔裙補裘裝裡裊裕裒覜解詫該詳試詩詰誇詼詣誠話誅詭詢詮詬詹詻訾詨豢貊貉賊資賈賄貲賃賂賅跡跟跨路跳跺跪跤跦躲較載軾輊"], +["b940","辟農運遊道遂達逼違遐遇遏過遍遑逾遁鄒鄗酬酪酩釉鈷鉗鈸鈽鉀鈾鉛鉋鉤鉑鈴鉉鉍鉅鈹鈿鉚閘隘隔隕雍雋雉雊雷電雹零靖靴靶預頑頓頊頒頌飼飴"], +["b9a1","飽飾馳馱馴髡鳩麂鼎鼓鼠僧僮僥僖僭僚僕像僑僱僎僩兢凳劃劂匱厭嗾嘀嘛嘗嗽嘔嘆嘉嘍嘎嗷嘖嘟嘈嘐嗶團圖塵塾境墓墊塹墅塽壽夥夢夤奪奩嫡嫦嫩嫗嫖嫘嫣孵寞寧寡寥實寨寢寤察對屢嶄嶇幛幣幕幗幔廓廖弊彆彰徹慇"], +["ba40","愿態慷慢慣慟慚慘慵截撇摘摔撤摸摟摺摑摧搴摭摻敲斡旗旖暢暨暝榜榨榕槁榮槓構榛榷榻榫榴槐槍榭槌榦槃榣歉歌氳漳演滾漓滴漩漾漠漬漏漂漢"], +["baa1","滿滯漆漱漸漲漣漕漫漯澈漪滬漁滲滌滷熔熙煽熊熄熒爾犒犖獄獐瑤瑣瑪瑰瑭甄疑瘧瘍瘋瘉瘓盡監瞄睽睿睡磁碟碧碳碩碣禎福禍種稱窪窩竭端管箕箋筵算箝箔箏箸箇箄粹粽精綻綰綜綽綾綠緊綴網綱綺綢綿綵綸維緒緇綬"], +["bb40","罰翠翡翟聞聚肇腐膀膏膈膊腿膂臧臺與舔舞艋蓉蒿蓆蓄蒙蒞蒲蒜蓋蒸蓀蓓蒐蒼蓑蓊蜿蜜蜻蜢蜥蜴蜘蝕蜷蜩裳褂裴裹裸製裨褚裯誦誌語誣認誡誓誤"], +["bba1","說誥誨誘誑誚誧豪貍貌賓賑賒赫趙趕跼輔輒輕輓辣遠遘遜遣遙遞遢遝遛鄙鄘鄞酵酸酷酴鉸銀銅銘銖鉻銓銜銨鉼銑閡閨閩閣閥閤隙障際雌雒需靼鞅韶頗領颯颱餃餅餌餉駁骯骰髦魁魂鳴鳶鳳麼鼻齊億儀僻僵價儂儈儉儅凜"], +["bc40","劇劈劉劍劊勰厲嘮嘻嘹嘲嘿嘴嘩噓噎噗噴嘶嘯嘰墀墟增墳墜墮墩墦奭嬉嫻嬋嫵嬌嬈寮寬審寫層履嶝嶔幢幟幡廢廚廟廝廣廠彈影德徵慶慧慮慝慕憂"], +["bca1","慼慰慫慾憧憐憫憎憬憚憤憔憮戮摩摯摹撞撲撈撐撰撥撓撕撩撒撮播撫撚撬撙撢撳敵敷數暮暫暴暱樣樟槨樁樞標槽模樓樊槳樂樅槭樑歐歎殤毅毆漿潼澄潑潦潔澆潭潛潸潮澎潺潰潤澗潘滕潯潠潟熟熬熱熨牖犛獎獗瑩璋璃"], +["bd40","瑾璀畿瘠瘩瘟瘤瘦瘡瘢皚皺盤瞎瞇瞌瞑瞋磋磅確磊碾磕碼磐稿稼穀稽稷稻窯窮箭箱範箴篆篇篁箠篌糊締練緯緻緘緬緝編緣線緞緩綞緙緲緹罵罷羯"], +["bda1","翩耦膛膜膝膠膚膘蔗蔽蔚蓮蔬蔭蔓蔑蔣蔡蔔蓬蔥蓿蔆螂蝴蝶蝠蝦蝸蝨蝙蝗蝌蝓衛衝褐複褒褓褕褊誼諒談諄誕請諸課諉諂調誰論諍誶誹諛豌豎豬賠賞賦賤賬賭賢賣賜質賡赭趟趣踫踐踝踢踏踩踟踡踞躺輝輛輟輩輦輪輜輞"], +["be40","輥適遮遨遭遷鄰鄭鄧鄱醇醉醋醃鋅銻銷鋪銬鋤鋁銳銼鋒鋇鋰銲閭閱霄霆震霉靠鞍鞋鞏頡頫頜颳養餓餒餘駝駐駟駛駑駕駒駙骷髮髯鬧魅魄魷魯鴆鴉"], +["bea1","鴃麩麾黎墨齒儒儘儔儐儕冀冪凝劑劓勳噙噫噹噩噤噸噪器噥噱噯噬噢噶壁墾壇壅奮嬝嬴學寰導彊憲憑憩憊懍憶憾懊懈戰擅擁擋撻撼據擄擇擂操撿擒擔撾整曆曉暹曄曇暸樽樸樺橙橫橘樹橄橢橡橋橇樵機橈歙歷氅濂澱澡"], +["bf40","濃澤濁澧澳激澹澶澦澠澴熾燉燐燒燈燕熹燎燙燜燃燄獨璜璣璘璟璞瓢甌甍瘴瘸瘺盧盥瞠瞞瞟瞥磨磚磬磧禦積穎穆穌穋窺篙簑築篤篛篡篩篦糕糖縊"], +["bfa1","縑縈縛縣縞縝縉縐罹羲翰翱翮耨膳膩膨臻興艘艙蕊蕙蕈蕨蕩蕃蕉蕭蕪蕞螃螟螞螢融衡褪褲褥褫褡親覦諦諺諫諱謀諜諧諮諾謁謂諷諭諳諶諼豫豭貓賴蹄踱踴蹂踹踵輻輯輸輳辨辦遵遴選遲遼遺鄴醒錠錶鋸錳錯錢鋼錫錄錚"], +["c040","錐錦錡錕錮錙閻隧隨險雕霎霑霖霍霓霏靛靜靦鞘頰頸頻頷頭頹頤餐館餞餛餡餚駭駢駱骸骼髻髭鬨鮑鴕鴣鴦鴨鴒鴛默黔龍龜優償儡儲勵嚎嚀嚐嚅嚇"], +["c0a1","嚏壕壓壑壎嬰嬪嬤孺尷屨嶼嶺嶽嶸幫彌徽應懂懇懦懋戲戴擎擊擘擠擰擦擬擱擢擭斂斃曙曖檀檔檄檢檜櫛檣橾檗檐檠歜殮毚氈濘濱濟濠濛濤濫濯澀濬濡濩濕濮濰燧營燮燦燥燭燬燴燠爵牆獰獲璩環璦璨癆療癌盪瞳瞪瞰瞬"], +["c140","瞧瞭矯磷磺磴磯礁禧禪穗窿簇簍篾篷簌篠糠糜糞糢糟糙糝縮績繆縷縲繃縫總縱繅繁縴縹繈縵縿縯罄翳翼聱聲聰聯聳臆臃膺臂臀膿膽臉膾臨舉艱薪"], +["c1a1","薄蕾薜薑薔薯薛薇薨薊虧蟀蟑螳蟒蟆螫螻螺蟈蟋褻褶襄褸褽覬謎謗謙講謊謠謝謄謐豁谿豳賺賽購賸賻趨蹉蹋蹈蹊轄輾轂轅輿避遽還邁邂邀鄹醣醞醜鍍鎂錨鍵鍊鍥鍋錘鍾鍬鍛鍰鍚鍔闊闋闌闈闆隱隸雖霜霞鞠韓顆颶餵騁"], 
+["c240","駿鮮鮫鮪鮭鴻鴿麋黏點黜黝黛鼾齋叢嚕嚮壙壘嬸彝懣戳擴擲擾攆擺擻擷斷曜朦檳檬櫃檻檸櫂檮檯歟歸殯瀉瀋濾瀆濺瀑瀏燻燼燾燸獷獵璧璿甕癖癘"], +["c2a1","癒瞽瞿瞻瞼礎禮穡穢穠竄竅簫簧簪簞簣簡糧織繕繞繚繡繒繙罈翹翻職聶臍臏舊藏薩藍藐藉薰薺薹薦蟯蟬蟲蟠覆覲觴謨謹謬謫豐贅蹙蹣蹦蹤蹟蹕軀轉轍邇邃邈醫醬釐鎔鎊鎖鎢鎳鎮鎬鎰鎘鎚鎗闔闖闐闕離雜雙雛雞霤鞣鞦"], +["c340","鞭韹額顏題顎顓颺餾餿餽餮馥騎髁鬃鬆魏魎魍鯊鯉鯽鯈鯀鵑鵝鵠黠鼕鼬儳嚥壞壟壢寵龐廬懲懷懶懵攀攏曠曝櫥櫝櫚櫓瀛瀟瀨瀚瀝瀕瀘爆爍牘犢獸"], +["c3a1","獺璽瓊瓣疇疆癟癡矇礙禱穫穩簾簿簸簽簷籀繫繭繹繩繪羅繳羶羹羸臘藩藝藪藕藤藥藷蟻蠅蠍蟹蟾襠襟襖襞譁譜識證譚譎譏譆譙贈贊蹼蹲躇蹶蹬蹺蹴轔轎辭邊邋醱醮鏡鏑鏟鏃鏈鏜鏝鏖鏢鏍鏘鏤鏗鏨關隴難霪霧靡韜韻類"], +["c440","願顛颼饅饉騖騙鬍鯨鯧鯖鯛鶉鵡鵲鵪鵬麒麗麓麴勸嚨嚷嚶嚴嚼壤孀孃孽寶巉懸懺攘攔攙曦朧櫬瀾瀰瀲爐獻瓏癢癥礦礪礬礫竇競籌籃籍糯糰辮繽繼"], +["c4a1","纂罌耀臚艦藻藹蘑藺蘆蘋蘇蘊蠔蠕襤覺觸議譬警譯譟譫贏贍躉躁躅躂醴釋鐘鐃鏽闡霰飄饒饑馨騫騰騷騵鰓鰍鹹麵黨鼯齟齣齡儷儸囁囀囂夔屬巍懼懾攝攜斕曩櫻欄櫺殲灌爛犧瓖瓔癩矓籐纏續羼蘗蘭蘚蠣蠢蠡蠟襪襬覽譴"], +["c540","護譽贓躊躍躋轟辯醺鐮鐳鐵鐺鐸鐲鐫闢霸霹露響顧顥饗驅驃驀騾髏魔魑鰭鰥鶯鶴鷂鶸麝黯鼙齜齦齧儼儻囈囊囉孿巔巒彎懿攤權歡灑灘玀瓤疊癮癬"], +["c5a1","禳籠籟聾聽臟襲襯觼讀贖贗躑躓轡酈鑄鑑鑒霽霾韃韁顫饕驕驍髒鬚鱉鰱鰾鰻鷓鷗鼴齬齪龔囌巖戀攣攫攪曬欐瓚竊籤籣籥纓纖纔臢蘸蘿蠱變邐邏鑣鑠鑤靨顯饜驚驛驗髓體髑鱔鱗鱖鷥麟黴囑壩攬灞癱癲矗罐羈蠶蠹衢讓讒"], +["c640","讖艷贛釀鑪靂靈靄韆顰驟鬢魘鱟鷹鷺鹼鹽鼇齷齲廳欖灣籬籮蠻觀躡釁鑲鑰顱饞髖鬣黌灤矚讚鑷韉驢驥纜讜躪釅鑽鑾鑼鱷鱸黷豔鑿鸚爨驪鬱鸛鸞籲"], +["c940","乂乜凵匚厂万丌乇亍囗兀屮彳丏冇与丮亓仂仉仈冘勼卬厹圠夃夬尐巿旡殳毌气爿丱丼仨仜仩仡仝仚刌匜卌圢圣夗夯宁宄尒尻屴屳帄庀庂忉戉扐氕"], +["c9a1","氶汃氿氻犮犰玊禸肊阞伎优伬仵伔仱伀价伈伝伂伅伢伓伄仴伒冱刓刉刐劦匢匟卍厊吇囡囟圮圪圴夼妀奼妅奻奾奷奿孖尕尥屼屺屻屾巟幵庄异弚彴忕忔忏扜扞扤扡扦扢扙扠扚扥旯旮朾朹朸朻机朿朼朳氘汆汒汜汏汊汔汋"], +["ca40","汌灱牞犴犵玎甪癿穵网艸艼芀艽艿虍襾邙邗邘邛邔阢阤阠阣佖伻佢佉体佤伾佧佒佟佁佘伭伳伿佡冏冹刜刞刡劭劮匉卣卲厎厏吰吷吪呔呅吙吜吥吘"], +["caa1","吽呏呁吨吤呇囮囧囥坁坅坌坉坋坒夆奀妦妘妠妗妎妢妐妏妧妡宎宒尨尪岍岏岈岋岉岒岊岆岓岕巠帊帎庋庉庌庈庍弅弝彸彶忒忑忐忭忨忮忳忡忤忣忺忯忷忻怀忴戺抃抌抎抏抔抇扱扻扺扰抁抈扷扽扲扴攷旰旴旳旲旵杅杇"], +["cb40","杙杕杌杈杝杍杚杋毐氙氚汸汧汫沄沋沏汱汯汩沚汭沇沕沜汦汳汥汻沎灴灺牣犿犽狃狆狁犺狅玕玗玓玔玒町甹疔疕皁礽耴肕肙肐肒肜芐芏芅芎芑芓"], +["cba1","芊芃芄豸迉辿邟邡邥邞邧邠阰阨阯阭丳侘佼侅佽侀侇佶佴侉侄佷佌侗佪侚佹侁佸侐侜侔侞侒侂侕佫佮冞冼冾刵刲刳剆刱劼匊匋匼厒厔咇呿咁咑咂咈呫呺呾呥呬呴呦咍呯呡呠咘呣呧呤囷囹坯坲坭坫坱坰坶垀坵坻坳坴坢"], +["cc40","坨坽夌奅妵妺姏姎妲姌姁妶妼姃姖妱妽姀姈妴姇孢孥宓宕屄屇岮岤岠岵岯岨岬岟岣岭岢岪岧岝岥岶岰岦帗帔帙弨弢弣弤彔徂彾彽忞忥怭怦怙怲怋"], +["cca1","怴怊怗怳怚怞怬怢怍怐怮怓怑怌怉怜戔戽抭抴拑抾抪抶拊抮抳抯抻抩抰抸攽斨斻昉旼昄昒昈旻昃昋昍昅旽昑昐曶朊枅杬枎枒杶杻枘枆构杴枍枌杺枟枑枙枃杽极杸杹枔欥殀歾毞氝沓泬泫泮泙沶泔沭泧沷泐泂沺泃泆泭泲"], +["cd40","泒泝沴沊沝沀泞泀洰泍泇沰泹泏泩泑炔炘炅炓炆炄炑炖炂炚炃牪狖狋狘狉狜狒狔狚狌狑玤玡玭玦玢玠玬玝瓝瓨甿畀甾疌疘皯盳盱盰盵矸矼矹矻矺"], +["cda1","矷祂礿秅穸穻竻籵糽耵肏肮肣肸肵肭舠芠苀芫芚芘芛芵芧芮芼芞芺芴芨芡芩苂芤苃芶芢虰虯虭虮豖迒迋迓迍迖迕迗邲邴邯邳邰阹阽阼阺陃俍俅俓侲俉俋俁俔俜俙侻侳俛俇俖侺俀侹俬剄剉勀勂匽卼厗厖厙厘咺咡咭咥哏"], +["ce40","哃茍咷咮哖咶哅哆咠呰咼咢咾呲哞咰垵垞垟垤垌垗垝垛垔垘垏垙垥垚垕壴复奓姡姞姮娀姱姝姺姽姼姶姤姲姷姛姩姳姵姠姾姴姭宨屌峐峘峌峗峋峛"], +["cea1","峞峚峉峇峊峖峓峔峏峈峆峎峟峸巹帡帢帣帠帤庰庤庢庛庣庥弇弮彖徆怷怹恔恲恞恅恓恇恉恛恌恀恂恟怤恄恘恦恮扂扃拏挍挋拵挎挃拫拹挏挌拸拶挀挓挔拺挕拻拰敁敃斪斿昶昡昲昵昜昦昢昳昫昺昝昴昹昮朏朐柁柲柈枺"], +["cf40","柜枻柸柘柀枷柅柫柤柟枵柍枳柷柶柮柣柂枹柎柧柰枲柼柆柭柌枮柦柛柺柉柊柃柪柋欨殂殄殶毖毘毠氠氡洨洴洭洟洼洿洒洊泚洳洄洙洺洚洑洀洝浂"], +["cfa1","洁洘洷洃洏浀洇洠洬洈洢洉洐炷炟炾炱炰炡炴炵炩牁牉牊牬牰牳牮狊狤狨狫狟狪狦狣玅珌珂珈珅玹玶玵玴珫玿珇玾珃珆玸珋瓬瓮甮畇畈疧疪癹盄眈眃眄眅眊盷盻盺矧矨砆砑砒砅砐砏砎砉砃砓祊祌祋祅祄秕种秏秖秎窀"], +["d040","穾竑笀笁籺籸籹籿粀粁紃紈紁罘羑羍羾耇耎耏耔耷胘胇胠胑胈胂胐胅胣胙胜胊胕胉胏胗胦胍臿舡芔苙苾苹茇苨茀苕茺苫苖苴苬苡苲苵茌苻苶苰苪"], +["d0a1","苤苠苺苳苭虷虴虼虳衁衎衧衪衩觓訄訇赲迣迡迮迠郱邽邿郕郅邾郇郋郈釔釓陔陏陑陓陊陎倞倅倇倓倢倰倛俵俴倳倷倬俶俷倗倜倠倧倵倯倱倎党冔冓凊凄凅凈凎剡剚剒剞剟剕剢勍匎厞唦哢唗唒哧哳哤唚哿唄唈哫唑唅哱"], +["d140","唊哻哷哸哠唎唃唋圁圂埌堲埕埒垺埆垽垼垸垶垿埇埐垹埁夎奊娙娖娭娮娕娏娗娊娞娳孬宧宭宬尃屖屔峬峿峮峱峷崀峹帩帨庨庮庪庬弳弰彧恝恚恧"], +["d1a1","恁悢悈悀悒悁悝悃悕悛悗悇悜悎戙扆拲挐捖挬捄捅挶捃揤挹捋捊挼挩捁挴捘捔捙挭捇挳捚捑挸捗捀捈敊敆旆旃旄旂晊晟晇晑朒朓栟栚桉栲栳栻桋桏栖栱栜栵栫栭栯桎桄栴栝栒栔栦栨栮桍栺栥栠欬欯欭欱欴歭肂殈毦毤"], +["d240","毨毣毢毧氥浺浣浤浶洍浡涒浘浢浭浯涑涍淯浿涆浞浧浠涗浰浼浟涂涘洯浨涋浾涀涄洖涃浻浽浵涐烜烓烑烝烋缹烢烗烒烞烠烔烍烅烆烇烚烎烡牂牸"], +["d2a1","牷牶猀狺狴狾狶狳狻猁珓珙珥珖玼珧珣珩珜珒珛珔珝珚珗珘珨瓞瓟瓴瓵甡畛畟疰痁疻痄痀疿疶疺皊盉眝眛眐眓眒眣眑眕眙眚眢眧砣砬砢砵砯砨砮砫砡砩砳砪砱祔祛祏祜祓祒祑秫秬秠秮秭秪秜秞秝窆窉窅窋窌窊窇竘笐"], +["d340","笄笓笅笏笈笊笎笉笒粄粑粊粌粈粍粅紞紝紑紎紘紖紓紟紒紏紌罜罡罞罠罝罛羖羒翃翂翀耖耾耹胺胲胹胵脁胻脀舁舯舥茳茭荄茙荑茥荖茿荁茦茜茢"], +["d3a1","荂荎茛茪茈茼荍茖茤茠茷茯茩荇荅荌荓茞茬荋茧荈虓虒蚢蚨蚖蚍蚑蚞蚇蚗蚆蚋蚚蚅蚥蚙蚡蚧蚕蚘蚎蚝蚐蚔衃衄衭衵衶衲袀衱衿衯袃衾衴衼訒豇豗豻貤貣赶赸趵趷趶軑軓迾迵适迿迻逄迼迶郖郠郙郚郣郟郥郘郛郗郜郤酐"], +["d440","酎酏釕釢釚陜陟隼飣髟鬯乿偰偪偡偞偠偓偋偝偲偈偍偁偛偊偢倕偅偟偩偫偣偤偆偀偮偳偗偑凐剫剭剬剮勖勓匭厜啵啶唼啍啐唴唪啑啢唶唵唰啒啅"], +["d4a1","唌唲啥啎唹啈唭唻啀啋圊圇埻堔埢埶埜埴堀埭埽堈埸堋埳埏堇埮埣埲埥埬埡堎埼堐埧堁堌埱埩埰堍堄奜婠婘婕婧婞娸娵婭婐婟婥婬婓婤婗婃婝婒婄婛婈媎娾婍娹婌婰婩婇婑婖婂婜孲孮寁寀屙崞崋崝崚崠崌崨崍崦崥崏"], +["d540","崰崒崣崟崮帾帴庱庴庹庲庳弶弸徛徖徟悊悐悆悾悰悺惓惔惏惤惙惝惈悱惛悷惊悿惃惍惀挲捥掊掂捽掽掞掭掝掗掫掎捯掇掐据掯捵掜捭掮捼掤挻掟"], +["d5a1","捸掅掁掑掍捰敓旍晥晡晛晙晜晢朘桹梇梐梜桭桮梮梫楖桯梣梬梩桵桴梲梏桷梒桼桫桲梪梀桱桾梛梖梋梠梉梤桸桻梑梌梊桽欶欳欷欸殑殏殍殎殌氪淀涫涴涳湴涬淩淢涷淶淔渀淈淠淟淖涾淥淜淝淛淴淊涽淭淰涺淕淂淏淉"], +["d640","淐淲淓淽淗淍淣涻烺焍烷焗烴焌烰焄烳焐烼烿焆焓焀烸烶焋焂焎牾牻牼牿猝猗猇猑猘猊猈狿猏猞玈珶珸珵琄琁珽琇琀珺珼珿琌琋珴琈畤畣痎痒痏"], +["d6a1","痋痌痑痐皏皉盓眹眯眭眱眲眴眳眽眥眻眵硈硒硉硍硊硌砦硅硐祤祧祩祪祣祫祡离秺秸秶秷窏窔窐笵筇笴笥笰笢笤笳笘笪笝笱笫笭笯笲笸笚笣粔粘粖粣紵紽紸紶紺絅紬紩絁絇紾紿絊紻紨罣羕羜羝羛翊翋翍翐翑翇翏翉耟"], +["d740","耞耛聇聃聈脘脥脙脛脭脟脬脞脡脕脧脝脢舑舸舳舺舴舲艴莐莣莨莍荺荳莤荴莏莁莕莙荵莔莩荽莃莌莝莛莪莋荾莥莯莈莗莰荿莦莇莮荶莚虙虖蚿蚷"], 
+["d7a1","蛂蛁蛅蚺蚰蛈蚹蚳蚸蛌蚴蚻蚼蛃蚽蚾衒袉袕袨袢袪袚袑袡袟袘袧袙袛袗袤袬袌袓袎覂觖觙觕訰訧訬訞谹谻豜豝豽貥赽赻赹趼跂趹趿跁軘軞軝軜軗軠軡逤逋逑逜逌逡郯郪郰郴郲郳郔郫郬郩酖酘酚酓酕釬釴釱釳釸釤釹釪"], +["d840","釫釷釨釮镺閆閈陼陭陫陱陯隿靪頄飥馗傛傕傔傞傋傣傃傌傎傝偨傜傒傂傇兟凔匒匑厤厧喑喨喥喭啷噅喢喓喈喏喵喁喣喒喤啽喌喦啿喕喡喎圌堩堷"], +["d8a1","堙堞堧堣堨埵塈堥堜堛堳堿堶堮堹堸堭堬堻奡媯媔媟婺媢媞婸媦婼媥媬媕媮娷媄媊媗媃媋媩婻婽媌媜媏媓媝寪寍寋寔寑寊寎尌尰崷嵃嵫嵁嵋崿崵嵑嵎嵕崳崺嵒崽崱嵙嵂崹嵉崸崼崲崶嵀嵅幄幁彘徦徥徫惉悹惌惢惎惄愔"], +["d940","惲愊愖愅惵愓惸惼惾惁愃愘愝愐惿愄愋扊掔掱掰揎揥揨揯揃撝揳揊揠揶揕揲揵摡揟掾揝揜揄揘揓揂揇揌揋揈揰揗揙攲敧敪敤敜敨敥斌斝斞斮旐旒"], +["d9a1","晼晬晻暀晱晹晪晲朁椌棓椄棜椪棬棪棱椏棖棷棫棤棶椓椐棳棡椇棌椈楰梴椑棯棆椔棸棐棽棼棨椋椊椗棎棈棝棞棦棴棑椆棔棩椕椥棇欹欻欿欼殔殗殙殕殽毰毲毳氰淼湆湇渟湉溈渼渽湅湢渫渿湁湝湳渜渳湋湀湑渻渃渮湞"], +["da40","湨湜湡渱渨湠湱湫渹渢渰湓湥渧湸湤湷湕湹湒湦渵渶湚焠焞焯烻焮焱焣焥焢焲焟焨焺焛牋牚犈犉犆犅犋猒猋猰猢猱猳猧猲猭猦猣猵猌琮琬琰琫琖"], +["daa1","琚琡琭琱琤琣琝琩琠琲瓻甯畯畬痧痚痡痦痝痟痤痗皕皒盚睆睇睄睍睅睊睎睋睌矞矬硠硤硥硜硭硱硪确硰硩硨硞硢祴祳祲祰稂稊稃稌稄窙竦竤筊笻筄筈筌筎筀筘筅粢粞粨粡絘絯絣絓絖絧絪絏絭絜絫絒絔絩絑絟絎缾缿罥"], +["db40","罦羢羠羡翗聑聏聐胾胔腃腊腒腏腇脽腍脺臦臮臷臸臹舄舼舽舿艵茻菏菹萣菀菨萒菧菤菼菶萐菆菈菫菣莿萁菝菥菘菿菡菋菎菖菵菉萉萏菞萑萆菂菳"], +["dba1","菕菺菇菑菪萓菃菬菮菄菻菗菢萛菛菾蛘蛢蛦蛓蛣蛚蛪蛝蛫蛜蛬蛩蛗蛨蛑衈衖衕袺裗袹袸裀袾袶袼袷袽袲褁裉覕覘覗觝觚觛詎詍訹詙詀詗詘詄詅詒詈詑詊詌詏豟貁貀貺貾貰貹貵趄趀趉跘跓跍跇跖跜跏跕跙跈跗跅軯軷軺"], +["dc40","軹軦軮軥軵軧軨軶軫軱軬軴軩逭逴逯鄆鄬鄄郿郼鄈郹郻鄁鄀鄇鄅鄃酡酤酟酢酠鈁鈊鈥鈃鈚鈦鈏鈌鈀鈒釿釽鈆鈄鈧鈂鈜鈤鈙鈗鈅鈖镻閍閌閐隇陾隈"], +["dca1","隉隃隀雂雈雃雱雰靬靰靮頇颩飫鳦黹亃亄亶傽傿僆傮僄僊傴僈僂傰僁傺傱僋僉傶傸凗剺剸剻剼嗃嗛嗌嗐嗋嗊嗝嗀嗔嗄嗩喿嗒喍嗏嗕嗢嗖嗈嗲嗍嗙嗂圔塓塨塤塏塍塉塯塕塎塝塙塥塛堽塣塱壼嫇嫄嫋媺媸媱媵媰媿嫈媻嫆"], +["dd40","媷嫀嫊媴媶嫍媹媐寖寘寙尟尳嵱嵣嵊嵥嵲嵬嵞嵨嵧嵢巰幏幎幊幍幋廅廌廆廋廇彀徯徭惷慉慊愫慅愶愲愮慆愯慏愩慀戠酨戣戥戤揅揱揫搐搒搉搠搤"], +["dda1","搳摃搟搕搘搹搷搢搣搌搦搰搨摁搵搯搊搚摀搥搧搋揧搛搮搡搎敯斒旓暆暌暕暐暋暊暙暔晸朠楦楟椸楎楢楱椿楅楪椹楂楗楙楺楈楉椵楬椳椽楥棰楸椴楩楀楯楄楶楘楁楴楌椻楋椷楜楏楑椲楒椯楻椼歆歅歃歂歈歁殛嗀毻毼"], +["de40","毹毷毸溛滖滈溏滀溟溓溔溠溱溹滆滒溽滁溞滉溷溰滍溦滏溲溾滃滜滘溙溒溎溍溤溡溿溳滐滊溗溮溣煇煔煒煣煠煁煝煢煲煸煪煡煂煘煃煋煰煟煐煓"], +["dea1","煄煍煚牏犍犌犑犐犎猼獂猻猺獀獊獉瑄瑊瑋瑒瑑瑗瑀瑏瑐瑎瑂瑆瑍瑔瓡瓿瓾瓽甝畹畷榃痯瘏瘃痷痾痼痹痸瘐痻痶痭痵痽皙皵盝睕睟睠睒睖睚睩睧睔睙睭矠碇碚碔碏碄碕碅碆碡碃硹碙碀碖硻祼禂祽祹稑稘稙稒稗稕稢稓"], +["df40","稛稐窣窢窞竫筦筤筭筴筩筲筥筳筱筰筡筸筶筣粲粴粯綈綆綀綍絿綅絺綎絻綃絼綌綔綄絽綒罭罫罧罨罬羦羥羧翛翜耡腤腠腷腜腩腛腢腲朡腞腶腧腯"], +["dfa1","腄腡舝艉艄艀艂艅蓱萿葖葶葹蒏蒍葥葑葀蒆葧萰葍葽葚葙葴葳葝蔇葞萷萺萴葺葃葸萲葅萩菙葋萯葂萭葟葰萹葎葌葒葯蓅蒎萻葇萶萳葨葾葄萫葠葔葮葐蜋蜄蛷蜌蛺蛖蛵蝍蛸蜎蜉蜁蛶蜍蜅裖裋裍裎裞裛裚裌裐覅覛觟觥觤"], +["e040","觡觠觢觜触詶誆詿詡訿詷誂誄詵誃誁詴詺谼豋豊豥豤豦貆貄貅賌赨赩趑趌趎趏趍趓趔趐趒跰跠跬跱跮跐跩跣跢跧跲跫跴輆軿輁輀輅輇輈輂輋遒逿"], +["e0a1","遄遉逽鄐鄍鄏鄑鄖鄔鄋鄎酮酯鉈鉒鈰鈺鉦鈳鉥鉞銃鈮鉊鉆鉭鉬鉏鉠鉧鉯鈶鉡鉰鈱鉔鉣鉐鉲鉎鉓鉌鉖鈲閟閜閞閛隒隓隑隗雎雺雽雸雵靳靷靸靲頏頍頎颬飶飹馯馲馰馵骭骫魛鳪鳭鳧麀黽僦僔僗僨僳僛僪僝僤僓僬僰僯僣僠"], +["e140","凘劀劁勩勫匰厬嘧嘕嘌嘒嗼嘏嘜嘁嘓嘂嗺嘝嘄嗿嗹墉塼墐墘墆墁塿塴墋塺墇墑墎塶墂墈塻墔墏壾奫嫜嫮嫥嫕嫪嫚嫭嫫嫳嫢嫠嫛嫬嫞嫝嫙嫨嫟孷寠"], +["e1a1","寣屣嶂嶀嵽嶆嵺嶁嵷嶊嶉嶈嵾嵼嶍嵹嵿幘幙幓廘廑廗廎廜廕廙廒廔彄彃彯徶愬愨慁慞慱慳慒慓慲慬憀慴慔慺慛慥愻慪慡慖戩戧戫搫摍摛摝摴摶摲摳摽摵摦撦摎撂摞摜摋摓摠摐摿搿摬摫摙摥摷敳斠暡暠暟朅朄朢榱榶槉"], +["e240","榠槎榖榰榬榼榑榙榎榧榍榩榾榯榿槄榽榤槔榹槊榚槏榳榓榪榡榞槙榗榐槂榵榥槆歊歍歋殞殟殠毃毄毾滎滵滱漃漥滸漷滻漮漉潎漙漚漧漘漻漒滭漊"], +["e2a1","漶潳滹滮漭潀漰漼漵滫漇漎潃漅滽滶漹漜滼漺漟漍漞漈漡熇熐熉熀熅熂熏煻熆熁熗牄牓犗犕犓獃獍獑獌瑢瑳瑱瑵瑲瑧瑮甀甂甃畽疐瘖瘈瘌瘕瘑瘊瘔皸瞁睼瞅瞂睮瞀睯睾瞃碲碪碴碭碨硾碫碞碥碠碬碢碤禘禊禋禖禕禔禓"], +["e340","禗禈禒禐稫穊稰稯稨稦窨窫窬竮箈箜箊箑箐箖箍箌箛箎箅箘劄箙箤箂粻粿粼粺綧綷緂綣綪緁緀緅綝緎緄緆緋緌綯綹綖綼綟綦綮綩綡緉罳翢翣翥翞"], +["e3a1","耤聝聜膉膆膃膇膍膌膋舕蒗蒤蒡蒟蒺蓎蓂蒬蒮蒫蒹蒴蓁蓍蒪蒚蒱蓐蒝蒧蒻蒢蒔蓇蓌蒛蒩蒯蒨蓖蒘蒶蓏蒠蓗蓔蓒蓛蒰蒑虡蜳蜣蜨蝫蝀蜮蜞蜡蜙蜛蝃蜬蝁蜾蝆蜠蜲蜪蜭蜼蜒蜺蜱蜵蝂蜦蜧蜸蜤蜚蜰蜑裷裧裱裲裺裾裮裼裶裻"], +["e440","裰裬裫覝覡覟覞觩觫觨誫誙誋誒誏誖谽豨豩賕賏賗趖踉踂跿踍跽踊踃踇踆踅跾踀踄輐輑輎輍鄣鄜鄠鄢鄟鄝鄚鄤鄡鄛酺酲酹酳銥銤鉶銛鉺銠銔銪銍"], +["e4a1","銦銚銫鉹銗鉿銣鋮銎銂銕銢鉽銈銡銊銆銌銙銧鉾銇銩銝銋鈭隞隡雿靘靽靺靾鞃鞀鞂靻鞄鞁靿韎韍頖颭颮餂餀餇馝馜駃馹馻馺駂馽駇骱髣髧鬾鬿魠魡魟鳱鳲鳵麧僿儃儰僸儆儇僶僾儋儌僽儊劋劌勱勯噈噂噌嘵噁噊噉噆噘"], +["e540","噚噀嘳嘽嘬嘾嘸嘪嘺圚墫墝墱墠墣墯墬墥墡壿嫿嫴嫽嫷嫶嬃嫸嬂嫹嬁嬇嬅嬏屧嶙嶗嶟嶒嶢嶓嶕嶠嶜嶡嶚嶞幩幝幠幜緳廛廞廡彉徲憋憃慹憱憰憢憉"], +["e5a1","憛憓憯憭憟憒憪憡憍慦憳戭摮摰撖撠撅撗撜撏撋撊撌撣撟摨撱撘敶敺敹敻斲斳暵暰暩暲暷暪暯樀樆樗槥槸樕槱槤樠槿槬槢樛樝槾樧槲槮樔槷槧橀樈槦槻樍槼槫樉樄樘樥樏槶樦樇槴樖歑殥殣殢殦氁氀毿氂潁漦潾澇濆澒"], +["e640","澍澉澌潢潏澅潚澖潶潬澂潕潲潒潐潗澔澓潝漀潡潫潽潧澐潓澋潩潿澕潣潷潪潻熲熯熛熰熠熚熩熵熝熥熞熤熡熪熜熧熳犘犚獘獒獞獟獠獝獛獡獚獙"], +["e6a1","獢璇璉璊璆璁瑽璅璈瑼瑹甈甇畾瘥瘞瘙瘝瘜瘣瘚瘨瘛皜皝皞皛瞍瞏瞉瞈磍碻磏磌磑磎磔磈磃磄磉禚禡禠禜禢禛歶稹窲窴窳箷篋箾箬篎箯箹篊箵糅糈糌糋緷緛緪緧緗緡縃緺緦緶緱緰緮緟罶羬羰羭翭翫翪翬翦翨聤聧膣膟"], +["e740","膞膕膢膙膗舖艏艓艒艐艎艑蔤蔻蔏蔀蔩蔎蔉蔍蔟蔊蔧蔜蓻蔫蓺蔈蔌蓴蔪蓲蔕蓷蓫蓳蓼蔒蓪蓩蔖蓾蔨蔝蔮蔂蓽蔞蓶蔱蔦蓧蓨蓰蓯蓹蔘蔠蔰蔋蔙蔯虢"], +["e7a1","蝖蝣蝤蝷蟡蝳蝘蝔蝛蝒蝡蝚蝑蝞蝭蝪蝐蝎蝟蝝蝯蝬蝺蝮蝜蝥蝏蝻蝵蝢蝧蝩衚褅褌褔褋褗褘褙褆褖褑褎褉覢覤覣觭觰觬諏諆誸諓諑諔諕誻諗誾諀諅諘諃誺誽諙谾豍貏賥賟賙賨賚賝賧趠趜趡趛踠踣踥踤踮踕踛踖踑踙踦踧"], +["e840","踔踒踘踓踜踗踚輬輤輘輚輠輣輖輗遳遰遯遧遫鄯鄫鄩鄪鄲鄦鄮醅醆醊醁醂醄醀鋐鋃鋄鋀鋙銶鋏鋱鋟鋘鋩鋗鋝鋌鋯鋂鋨鋊鋈鋎鋦鋍鋕鋉鋠鋞鋧鋑鋓"], +["e8a1","銵鋡鋆銴镼閬閫閮閰隤隢雓霅霈霂靚鞊鞎鞈韐韏頞頝頦頩頨頠頛頧颲餈飺餑餔餖餗餕駜駍駏駓駔駎駉駖駘駋駗駌骳髬髫髳髲髱魆魃魧魴魱魦魶魵魰魨魤魬鳼鳺鳽鳿鳷鴇鴀鳹鳻鴈鴅鴄麃黓鼏鼐儜儓儗儚儑凞匴叡噰噠噮"], +["e940","噳噦噣噭噲噞噷圜圛壈墽壉墿墺壂墼壆嬗嬙嬛嬡嬔嬓嬐嬖嬨嬚嬠嬞寯嶬嶱嶩嶧嶵嶰嶮嶪嶨嶲嶭嶯嶴幧幨幦幯廩廧廦廨廥彋徼憝憨憖懅憴懆懁懌憺"], +["e9a1","憿憸憌擗擖擐擏擉撽撉擃擛擳擙攳敿敼斢曈暾曀曊曋曏暽暻暺曌朣樴橦橉橧樲橨樾橝橭橶橛橑樨橚樻樿橁橪橤橐橏橔橯橩橠樼橞橖橕橍橎橆歕歔歖殧殪殫毈毇氄氃氆澭濋澣濇澼濎濈潞濄澽澞濊澨瀄澥澮澺澬澪濏澿澸"], +["ea40","澢濉澫濍澯澲澰燅燂熿熸燖燀燁燋燔燊燇燏熽燘熼燆燚燛犝犞獩獦獧獬獥獫獪瑿璚璠璔璒璕璡甋疀瘯瘭瘱瘽瘳瘼瘵瘲瘰皻盦瞚瞝瞡瞜瞛瞢瞣瞕瞙"], 
+["eaa1","瞗磝磩磥磪磞磣磛磡磢磭磟磠禤穄穈穇窶窸窵窱窷篞篣篧篝篕篥篚篨篹篔篪篢篜篫篘篟糒糔糗糐糑縒縡縗縌縟縠縓縎縜縕縚縢縋縏縖縍縔縥縤罃罻罼罺羱翯耪耩聬膱膦膮膹膵膫膰膬膴膲膷膧臲艕艖艗蕖蕅蕫蕍蕓蕡蕘"], +["eb40","蕀蕆蕤蕁蕢蕄蕑蕇蕣蔾蕛蕱蕎蕮蕵蕕蕧蕠薌蕦蕝蕔蕥蕬虣虥虤螛螏螗螓螒螈螁螖螘蝹螇螣螅螐螑螝螄螔螜螚螉褞褦褰褭褮褧褱褢褩褣褯褬褟觱諠"], +["eba1","諢諲諴諵諝謔諤諟諰諈諞諡諨諿諯諻貑貒貐賵賮賱賰賳赬赮趥趧踳踾踸蹀蹅踶踼踽蹁踰踿躽輶輮輵輲輹輷輴遶遹遻邆郺鄳鄵鄶醓醐醑醍醏錧錞錈錟錆錏鍺錸錼錛錣錒錁鍆錭錎錍鋋錝鋺錥錓鋹鋷錴錂錤鋿錩錹錵錪錔錌"], +["ec40","錋鋾錉錀鋻錖閼闍閾閹閺閶閿閵閽隩雔霋霒霐鞙鞗鞔韰韸頵頯頲餤餟餧餩馞駮駬駥駤駰駣駪駩駧骹骿骴骻髶髺髹髷鬳鮀鮅鮇魼魾魻鮂鮓鮒鮐魺鮕"], +["eca1","魽鮈鴥鴗鴠鴞鴔鴩鴝鴘鴢鴐鴙鴟麈麆麇麮麭黕黖黺鼒鼽儦儥儢儤儠儩勴嚓嚌嚍嚆嚄嚃噾嚂噿嚁壖壔壏壒嬭嬥嬲嬣嬬嬧嬦嬯嬮孻寱寲嶷幬幪徾徻懃憵憼懧懠懥懤懨懞擯擩擣擫擤擨斁斀斶旚曒檍檖檁檥檉檟檛檡檞檇檓檎"], +["ed40","檕檃檨檤檑橿檦檚檅檌檒歛殭氉濌澩濴濔濣濜濭濧濦濞濲濝濢濨燡燱燨燲燤燰燢獳獮獯璗璲璫璐璪璭璱璥璯甐甑甒甏疄癃癈癉癇皤盩瞵瞫瞲瞷瞶"], +["eda1","瞴瞱瞨矰磳磽礂磻磼磲礅磹磾礄禫禨穜穛穖穘穔穚窾竀竁簅簏篲簀篿篻簎篴簋篳簂簉簃簁篸篽簆篰篱簐簊糨縭縼繂縳顈縸縪繉繀繇縩繌縰縻縶繄縺罅罿罾罽翴翲耬膻臄臌臊臅臇膼臩艛艚艜薃薀薏薧薕薠薋薣蕻薤薚薞"], +["ee40","蕷蕼薉薡蕺蕸蕗薎薖薆薍薙薝薁薢薂薈薅蕹蕶薘薐薟虨螾螪螭蟅螰螬螹螵螼螮蟉蟃蟂蟌螷螯蟄蟊螴螶螿螸螽蟞螲褵褳褼褾襁襒褷襂覭覯覮觲觳謞"], +["eea1","謘謖謑謅謋謢謏謒謕謇謍謈謆謜謓謚豏豰豲豱豯貕貔賹赯蹎蹍蹓蹐蹌蹇轃轀邅遾鄸醚醢醛醙醟醡醝醠鎡鎃鎯鍤鍖鍇鍼鍘鍜鍶鍉鍐鍑鍠鍭鎏鍌鍪鍹鍗鍕鍒鍏鍱鍷鍻鍡鍞鍣鍧鎀鍎鍙闇闀闉闃闅閷隮隰隬霠霟霘霝霙鞚鞡鞜"], +["ef40","鞞鞝韕韔韱顁顄顊顉顅顃餥餫餬餪餳餲餯餭餱餰馘馣馡騂駺駴駷駹駸駶駻駽駾駼騃骾髾髽鬁髼魈鮚鮨鮞鮛鮦鮡鮥鮤鮆鮢鮠鮯鴳鵁鵧鴶鴮鴯鴱鴸鴰"], +["efa1","鵅鵂鵃鴾鴷鵀鴽翵鴭麊麉麍麰黈黚黻黿鼤鼣鼢齔龠儱儭儮嚘嚜嚗嚚嚝嚙奰嬼屩屪巀幭幮懘懟懭懮懱懪懰懫懖懩擿攄擽擸攁攃擼斔旛曚曛曘櫅檹檽櫡櫆檺檶檷櫇檴檭歞毉氋瀇瀌瀍瀁瀅瀔瀎濿瀀濻瀦濼濷瀊爁燿燹爃燽獶"], +["f040","璸瓀璵瓁璾璶璻瓂甔甓癜癤癙癐癓癗癚皦皽盬矂瞺磿礌礓礔礉礐礒礑禭禬穟簜簩簙簠簟簭簝簦簨簢簥簰繜繐繖繣繘繢繟繑繠繗繓羵羳翷翸聵臑臒"], +["f0a1","臐艟艞薴藆藀藃藂薳薵薽藇藄薿藋藎藈藅薱薶藒蘤薸薷薾虩蟧蟦蟢蟛蟫蟪蟥蟟蟳蟤蟔蟜蟓蟭蟘蟣螤蟗蟙蠁蟴蟨蟝襓襋襏襌襆襐襑襉謪謧謣謳謰謵譇謯謼謾謱謥謷謦謶謮謤謻謽謺豂豵貙貘貗賾贄贂贀蹜蹢蹠蹗蹖蹞蹥蹧"], +["f140","蹛蹚蹡蹝蹩蹔轆轇轈轋鄨鄺鄻鄾醨醥醧醯醪鎵鎌鎒鎷鎛鎝鎉鎧鎎鎪鎞鎦鎕鎈鎙鎟鎍鎱鎑鎲鎤鎨鎴鎣鎥闒闓闑隳雗雚巂雟雘雝霣霢霥鞬鞮鞨鞫鞤鞪"], +["f1a1","鞢鞥韗韙韖韘韺顐顑顒颸饁餼餺騏騋騉騍騄騑騊騅騇騆髀髜鬈鬄鬅鬩鬵魊魌魋鯇鯆鯃鮿鯁鮵鮸鯓鮶鯄鮹鮽鵜鵓鵏鵊鵛鵋鵙鵖鵌鵗鵒鵔鵟鵘鵚麎麌黟鼁鼀鼖鼥鼫鼪鼩鼨齌齕儴儵劖勷厴嚫嚭嚦嚧嚪嚬壚壝壛夒嬽嬾嬿巃幰"], +["f240","徿懻攇攐攍攉攌攎斄旞旝曞櫧櫠櫌櫑櫙櫋櫟櫜櫐櫫櫏櫍櫞歠殰氌瀙瀧瀠瀖瀫瀡瀢瀣瀩瀗瀤瀜瀪爌爊爇爂爅犥犦犤犣犡瓋瓅璷瓃甖癠矉矊矄矱礝礛"], +["f2a1","礡礜礗礞禰穧穨簳簼簹簬簻糬糪繶繵繸繰繷繯繺繲繴繨罋罊羃羆羷翽翾聸臗臕艤艡艣藫藱藭藙藡藨藚藗藬藲藸藘藟藣藜藑藰藦藯藞藢蠀蟺蠃蟶蟷蠉蠌蠋蠆蟼蠈蟿蠊蠂襢襚襛襗襡襜襘襝襙覈覷覶觶譐譈譊譀譓譖譔譋譕"], +["f340","譑譂譒譗豃豷豶貚贆贇贉趬趪趭趫蹭蹸蹳蹪蹯蹻軂轒轑轏轐轓辴酀鄿醰醭鏞鏇鏏鏂鏚鏐鏹鏬鏌鏙鎩鏦鏊鏔鏮鏣鏕鏄鏎鏀鏒鏧镽闚闛雡霩霫霬霨霦"], +["f3a1","鞳鞷鞶韝韞韟顜顙顝顗颿颽颻颾饈饇饃馦馧騚騕騥騝騤騛騢騠騧騣騞騜騔髂鬋鬊鬎鬌鬷鯪鯫鯠鯞鯤鯦鯢鯰鯔鯗鯬鯜鯙鯥鯕鯡鯚鵷鶁鶊鶄鶈鵱鶀鵸鶆鶋鶌鵽鵫鵴鵵鵰鵩鶅鵳鵻鶂鵯鵹鵿鶇鵨麔麑黀黼鼭齀齁齍齖齗齘匷嚲"], +["f440","嚵嚳壣孅巆巇廮廯忀忁懹攗攖攕攓旟曨曣曤櫳櫰櫪櫨櫹櫱櫮櫯瀼瀵瀯瀷瀴瀱灂瀸瀿瀺瀹灀瀻瀳灁爓爔犨獽獼璺皫皪皾盭矌矎矏矍矲礥礣礧礨礤礩"], +["f4a1","禲穮穬穭竷籉籈籊籇籅糮繻繾纁纀羺翿聹臛臙舋艨艩蘢藿蘁藾蘛蘀藶蘄蘉蘅蘌藽蠙蠐蠑蠗蠓蠖襣襦覹觷譠譪譝譨譣譥譧譭趮躆躈躄轙轖轗轕轘轚邍酃酁醷醵醲醳鐋鐓鏻鐠鐏鐔鏾鐕鐐鐨鐙鐍鏵鐀鏷鐇鐎鐖鐒鏺鐉鏸鐊鏿"], +["f540","鏼鐌鏶鐑鐆闞闠闟霮霯鞹鞻韽韾顠顢顣顟飁飂饐饎饙饌饋饓騲騴騱騬騪騶騩騮騸騭髇髊髆鬐鬒鬑鰋鰈鯷鰅鰒鯸鱀鰇鰎鰆鰗鰔鰉鶟鶙鶤鶝鶒鶘鶐鶛"], +["f5a1","鶠鶔鶜鶪鶗鶡鶚鶢鶨鶞鶣鶿鶩鶖鶦鶧麙麛麚黥黤黧黦鼰鼮齛齠齞齝齙龑儺儹劘劗囃嚽嚾孈孇巋巏廱懽攛欂櫼欃櫸欀灃灄灊灈灉灅灆爝爚爙獾甗癪矐礭礱礯籔籓糲纊纇纈纋纆纍罍羻耰臝蘘蘪蘦蘟蘣蘜蘙蘧蘮蘡蘠蘩蘞蘥"], +["f640","蠩蠝蠛蠠蠤蠜蠫衊襭襩襮襫觺譹譸譅譺譻贐贔趯躎躌轞轛轝酆酄酅醹鐿鐻鐶鐩鐽鐼鐰鐹鐪鐷鐬鑀鐱闥闤闣霵霺鞿韡顤飉飆飀饘饖騹騽驆驄驂驁騺"], +["f6a1","騿髍鬕鬗鬘鬖鬺魒鰫鰝鰜鰬鰣鰨鰩鰤鰡鶷鶶鶼鷁鷇鷊鷏鶾鷅鷃鶻鶵鷎鶹鶺鶬鷈鶱鶭鷌鶳鷍鶲鹺麜黫黮黭鼛鼘鼚鼱齎齥齤龒亹囆囅囋奱孋孌巕巑廲攡攠攦攢欋欈欉氍灕灖灗灒爞爟犩獿瓘瓕瓙瓗癭皭礵禴穰穱籗籜籙籛籚"], +["f740","糴糱纑罏羇臞艫蘴蘵蘳蘬蘲蘶蠬蠨蠦蠪蠥襱覿覾觻譾讄讂讆讅譿贕躕躔躚躒躐躖躗轠轢酇鑌鑐鑊鑋鑏鑇鑅鑈鑉鑆霿韣顪顩飋饔饛驎驓驔驌驏驈驊"], +["f7a1","驉驒驐髐鬙鬫鬻魖魕鱆鱈鰿鱄鰹鰳鱁鰼鰷鰴鰲鰽鰶鷛鷒鷞鷚鷋鷐鷜鷑鷟鷩鷙鷘鷖鷵鷕鷝麶黰鼵鼳鼲齂齫龕龢儽劙壨壧奲孍巘蠯彏戁戃戄攩攥斖曫欑欒欏毊灛灚爢玂玁玃癰矔籧籦纕艬蘺虀蘹蘼蘱蘻蘾蠰蠲蠮蠳襶襴襳觾"], +["f840","讌讎讋讈豅贙躘轤轣醼鑢鑕鑝鑗鑞韄韅頀驖驙鬞鬟鬠鱒鱘鱐鱊鱍鱋鱕鱙鱌鱎鷻鷷鷯鷣鷫鷸鷤鷶鷡鷮鷦鷲鷰鷢鷬鷴鷳鷨鷭黂黐黲黳鼆鼜鼸鼷鼶齃齏"], +["f8a1","齱齰齮齯囓囍孎屭攭曭曮欓灟灡灝灠爣瓛瓥矕礸禷禶籪纗羉艭虃蠸蠷蠵衋讔讕躞躟躠躝醾醽釂鑫鑨鑩雥靆靃靇韇韥驞髕魙鱣鱧鱦鱢鱞鱠鸂鷾鸇鸃鸆鸅鸀鸁鸉鷿鷽鸄麠鼞齆齴齵齶囔攮斸欘欙欗欚灢爦犪矘矙礹籩籫糶纚"], +["f940","纘纛纙臠臡虆虇虈襹襺襼襻觿讘讙躥躤躣鑮鑭鑯鑱鑳靉顲饟鱨鱮鱭鸋鸍鸐鸏鸒鸑麡黵鼉齇齸齻齺齹圞灦籯蠼趲躦釃鑴鑸鑶鑵驠鱴鱳鱱鱵鸔鸓黶鼊"], +["f9a1","龤灨灥糷虪蠾蠽蠿讞貜躩軉靋顳顴飌饡馫驤驦驧鬤鸕鸗齈戇欞爧虌躨钂钀钁驩驨鬮鸙爩虋讟钃鱹麷癵驫鱺鸝灩灪麤齾齉龘碁銹裏墻恒粧嫺╔╦╗╠╬╣╚╩╝╒╤╕╞╪╡╘╧╛╓╥╖╟╫╢╙╨╜║═╭╮╰╯▓"] +] diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/tables/eucjp.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/tables/eucjp.json new file mode 100644 index 0000000000000000000000000000000000000000..4fa61ca116009efc18ecbd1531538f31234ad103 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/tables/eucjp.json @@ -0,0 +1,182 @@ +[ +["0","\u0000",127], +["8ea1","。",62], +["a1a1"," 、。,.・:;?!゛゜´`¨^ ̄_ヽヾゝゞ〃仝々〆〇ー―‐/\~∥|…‥‘’“”()〔〕[]{}〈",9,"+-±×÷=≠<>≦≧∞∴♂♀°′″℃¥$¢£%#&*@§☆★○●◎◇"], +["a2a1","◆□■△▲▽▼※〒→←↑↓〓"], 
+["a2ba","∈∋⊆⊇⊂⊃∪∩"], +["a2ca","∧∨¬⇒⇔∀∃"], +["a2dc","∠⊥⌒∂∇≡≒≪≫√∽∝∵∫∬"], +["a2f2","ʼn♯♭♪†‡¶"], +["a2fe","◯"], +["a3b0","0",9], +["a3c1","A",25], +["a3e1","a",25], +["a4a1","ぁ",82], +["a5a1","ァ",85], +["a6a1","Α",16,"Σ",6], +["a6c1","α",16,"σ",6], +["a7a1","А",5,"ЁЖ",25], +["a7d1","а",5,"ёж",25], +["a8a1","─│┌┐┘└├┬┤┴┼━┃┏┓┛┗┣┳┫┻╋┠┯┨┷┿┝┰┥┸╂"], +["ada1","①",19,"Ⅰ",9], +["adc0","㍉㌔㌢㍍㌘㌧㌃㌶㍑㍗㌍㌦㌣㌫㍊㌻㎜㎝㎞㎎㎏㏄㎡"], +["addf","㍻〝〟№㏍℡㊤",4,"㈱㈲㈹㍾㍽㍼≒≡∫∮∑√⊥∠∟⊿∵∩∪"], +["b0a1","亜唖娃阿哀愛挨姶逢葵茜穐悪握渥旭葦芦鯵梓圧斡扱宛姐虻飴絢綾鮎或粟袷安庵按暗案闇鞍杏以伊位依偉囲夷委威尉惟意慰易椅為畏異移維緯胃萎衣謂違遺医井亥域育郁磯一壱溢逸稲茨芋鰯允印咽員因姻引飲淫胤蔭"], +["b1a1","院陰隠韻吋右宇烏羽迂雨卯鵜窺丑碓臼渦嘘唄欝蔚鰻姥厩浦瓜閏噂云運雲荏餌叡営嬰影映曳栄永泳洩瑛盈穎頴英衛詠鋭液疫益駅悦謁越閲榎厭円園堰奄宴延怨掩援沿演炎焔煙燕猿縁艶苑薗遠鉛鴛塩於汚甥凹央奥往応"], +["b2a1","押旺横欧殴王翁襖鴬鴎黄岡沖荻億屋憶臆桶牡乙俺卸恩温穏音下化仮何伽価佳加可嘉夏嫁家寡科暇果架歌河火珂禍禾稼箇花苛茄荷華菓蝦課嘩貨迦過霞蚊俄峨我牙画臥芽蛾賀雅餓駕介会解回塊壊廻快怪悔恢懐戒拐改"], +["b3a1","魁晦械海灰界皆絵芥蟹開階貝凱劾外咳害崖慨概涯碍蓋街該鎧骸浬馨蛙垣柿蛎鈎劃嚇各廓拡撹格核殻獲確穫覚角赫較郭閣隔革学岳楽額顎掛笠樫橿梶鰍潟割喝恰括活渇滑葛褐轄且鰹叶椛樺鞄株兜竃蒲釜鎌噛鴨栢茅萱"], +["b4a1","粥刈苅瓦乾侃冠寒刊勘勧巻喚堪姦完官寛干幹患感慣憾換敢柑桓棺款歓汗漢澗潅環甘監看竿管簡緩缶翰肝艦莞観諌貫還鑑間閑関陥韓館舘丸含岸巌玩癌眼岩翫贋雁頑顔願企伎危喜器基奇嬉寄岐希幾忌揮机旗既期棋棄"], +["b5a1","機帰毅気汽畿祈季稀紀徽規記貴起軌輝飢騎鬼亀偽儀妓宜戯技擬欺犠疑祇義蟻誼議掬菊鞠吉吃喫桔橘詰砧杵黍却客脚虐逆丘久仇休及吸宮弓急救朽求汲泣灸球究窮笈級糾給旧牛去居巨拒拠挙渠虚許距鋸漁禦魚亨享京"], +["b6a1","供侠僑兇競共凶協匡卿叫喬境峡強彊怯恐恭挟教橋況狂狭矯胸脅興蕎郷鏡響饗驚仰凝尭暁業局曲極玉桐粁僅勤均巾錦斤欣欽琴禁禽筋緊芹菌衿襟謹近金吟銀九倶句区狗玖矩苦躯駆駈駒具愚虞喰空偶寓遇隅串櫛釧屑屈"], +["b7a1","掘窟沓靴轡窪熊隈粂栗繰桑鍬勲君薫訓群軍郡卦袈祁係傾刑兄啓圭珪型契形径恵慶慧憩掲携敬景桂渓畦稽系経継繋罫茎荊蛍計詣警軽頚鶏芸迎鯨劇戟撃激隙桁傑欠決潔穴結血訣月件倹倦健兼券剣喧圏堅嫌建憲懸拳捲"], +["b8a1","検権牽犬献研硯絹県肩見謙賢軒遣鍵険顕験鹸元原厳幻弦減源玄現絃舷言諺限乎個古呼固姑孤己庫弧戸故枯湖狐糊袴股胡菰虎誇跨鈷雇顧鼓五互伍午呉吾娯後御悟梧檎瑚碁語誤護醐乞鯉交佼侯候倖光公功効勾厚口向"], +["b9a1","后喉坑垢好孔孝宏工巧巷幸広庚康弘恒慌抗拘控攻昂晃更杭校梗構江洪浩港溝甲皇硬稿糠紅紘絞綱耕考肯肱腔膏航荒行衡講貢購郊酵鉱砿鋼閤降項香高鴻剛劫号合壕拷濠豪轟麹克刻告国穀酷鵠黒獄漉腰甑忽惚骨狛込"], +["baa1","此頃今困坤墾婚恨懇昏昆根梱混痕紺艮魂些佐叉唆嵯左差査沙瑳砂詐鎖裟坐座挫債催再最哉塞妻宰彩才採栽歳済災采犀砕砦祭斎細菜裁載際剤在材罪財冴坂阪堺榊肴咲崎埼碕鷺作削咋搾昨朔柵窄策索錯桜鮭笹匙冊刷"], +["bba1","察拶撮擦札殺薩雑皐鯖捌錆鮫皿晒三傘参山惨撒散桟燦珊産算纂蚕讃賛酸餐斬暫残仕仔伺使刺司史嗣四士始姉姿子屍市師志思指支孜斯施旨枝止死氏獅祉私糸紙紫肢脂至視詞詩試誌諮資賜雌飼歯事似侍児字寺慈持時"], +["bca1","次滋治爾璽痔磁示而耳自蒔辞汐鹿式識鴫竺軸宍雫七叱執失嫉室悉湿漆疾質実蔀篠偲柴芝屡蕊縞舎写射捨赦斜煮社紗者謝車遮蛇邪借勺尺杓灼爵酌釈錫若寂弱惹主取守手朱殊狩珠種腫趣酒首儒受呪寿授樹綬需囚収周"], +["bda1","宗就州修愁拾洲秀秋終繍習臭舟蒐衆襲讐蹴輯週酋酬集醜什住充十従戎柔汁渋獣縦重銃叔夙宿淑祝縮粛塾熟出術述俊峻春瞬竣舜駿准循旬楯殉淳準潤盾純巡遵醇順処初所暑曙渚庶緒署書薯藷諸助叙女序徐恕鋤除傷償"], +["bea1","勝匠升召哨商唱嘗奨妾娼宵将小少尚庄床廠彰承抄招掌捷昇昌昭晶松梢樟樵沼消渉湘焼焦照症省硝礁祥称章笑粧紹肖菖蒋蕉衝裳訟証詔詳象賞醤鉦鍾鐘障鞘上丈丞乗冗剰城場壌嬢常情擾条杖浄状畳穣蒸譲醸錠嘱埴飾"], +["bfa1","拭植殖燭織職色触食蝕辱尻伸信侵唇娠寝審心慎振新晋森榛浸深申疹真神秦紳臣芯薪親診身辛進針震人仁刃塵壬尋甚尽腎訊迅陣靭笥諏須酢図厨逗吹垂帥推水炊睡粋翠衰遂酔錐錘随瑞髄崇嵩数枢趨雛据杉椙菅頗雀裾"], +["c0a1","澄摺寸世瀬畝是凄制勢姓征性成政整星晴棲栖正清牲生盛精聖声製西誠誓請逝醒青静斉税脆隻席惜戚斥昔析石積籍績脊責赤跡蹟碩切拙接摂折設窃節説雪絶舌蝉仙先千占宣専尖川戦扇撰栓栴泉浅洗染潜煎煽旋穿箭線"], +["c1a1","繊羨腺舛船薦詮賎践選遷銭銑閃鮮前善漸然全禅繕膳糎噌塑岨措曾曽楚狙疏疎礎祖租粗素組蘇訴阻遡鼠僧創双叢倉喪壮奏爽宋層匝惣想捜掃挿掻操早曹巣槍槽漕燥争痩相窓糟総綜聡草荘葬蒼藻装走送遭鎗霜騒像増憎"], +["c2a1","臓蔵贈造促側則即息捉束測足速俗属賊族続卒袖其揃存孫尊損村遜他多太汰詑唾堕妥惰打柁舵楕陀駄騨体堆対耐岱帯待怠態戴替泰滞胎腿苔袋貸退逮隊黛鯛代台大第醍題鷹滝瀧卓啄宅托択拓沢濯琢託鐸濁諾茸凧蛸只"], +["c3a1","叩但達辰奪脱巽竪辿棚谷狸鱈樽誰丹単嘆坦担探旦歎淡湛炭短端箪綻耽胆蛋誕鍛団壇弾断暖檀段男談値知地弛恥智池痴稚置致蜘遅馳築畜竹筑蓄逐秩窒茶嫡着中仲宙忠抽昼柱注虫衷註酎鋳駐樗瀦猪苧著貯丁兆凋喋寵"], +["c4a1","帖帳庁弔張彫徴懲挑暢朝潮牒町眺聴脹腸蝶調諜超跳銚長頂鳥勅捗直朕沈珍賃鎮陳津墜椎槌追鎚痛通塚栂掴槻佃漬柘辻蔦綴鍔椿潰坪壷嬬紬爪吊釣鶴亭低停偵剃貞呈堤定帝底庭廷弟悌抵挺提梯汀碇禎程締艇訂諦蹄逓"], +["c5a1","邸鄭釘鼎泥摘擢敵滴的笛適鏑溺哲徹撤轍迭鉄典填天展店添纏甜貼転顛点伝殿澱田電兎吐堵塗妬屠徒斗杜渡登菟賭途都鍍砥砺努度土奴怒倒党冬凍刀唐塔塘套宕島嶋悼投搭東桃梼棟盗淘湯涛灯燈当痘祷等答筒糖統到"], +["c6a1","董蕩藤討謄豆踏逃透鐙陶頭騰闘働動同堂導憧撞洞瞳童胴萄道銅峠鴇匿得徳涜特督禿篤毒独読栃橡凸突椴届鳶苫寅酉瀞噸屯惇敦沌豚遁頓呑曇鈍奈那内乍凪薙謎灘捺鍋楢馴縄畷南楠軟難汝二尼弐迩匂賑肉虹廿日乳入"], +["c7a1","如尿韮任妊忍認濡禰祢寧葱猫熱年念捻撚燃粘乃廼之埜嚢悩濃納能脳膿農覗蚤巴把播覇杷波派琶破婆罵芭馬俳廃拝排敗杯盃牌背肺輩配倍培媒梅楳煤狽買売賠陪這蝿秤矧萩伯剥博拍柏泊白箔粕舶薄迫曝漠爆縛莫駁麦"], +["c8a1","函箱硲箸肇筈櫨幡肌畑畠八鉢溌発醗髪伐罰抜筏閥鳩噺塙蛤隼伴判半反叛帆搬斑板氾汎版犯班畔繁般藩販範釆煩頒飯挽晩番盤磐蕃蛮匪卑否妃庇彼悲扉批披斐比泌疲皮碑秘緋罷肥被誹費避非飛樋簸備尾微枇毘琵眉美"], +["c9a1","鼻柊稗匹疋髭彦膝菱肘弼必畢筆逼桧姫媛紐百謬俵彪標氷漂瓢票表評豹廟描病秒苗錨鋲蒜蛭鰭品彬斌浜瀕貧賓頻敏瓶不付埠夫婦富冨布府怖扶敷斧普浮父符腐膚芙譜負賦赴阜附侮撫武舞葡蕪部封楓風葺蕗伏副復幅服"], +["caa1","福腹複覆淵弗払沸仏物鮒分吻噴墳憤扮焚奮粉糞紛雰文聞丙併兵塀幣平弊柄並蔽閉陛米頁僻壁癖碧別瞥蔑箆偏変片篇編辺返遍便勉娩弁鞭保舗鋪圃捕歩甫補輔穂募墓慕戊暮母簿菩倣俸包呆報奉宝峰峯崩庖抱捧放方朋"], +["cba1","法泡烹砲縫胞芳萌蓬蜂褒訪豊邦鋒飽鳳鵬乏亡傍剖坊妨帽忘忙房暴望某棒冒紡肪膨謀貌貿鉾防吠頬北僕卜墨撲朴牧睦穆釦勃没殆堀幌奔本翻凡盆摩磨魔麻埋妹昧枚毎哩槙幕膜枕鮪柾鱒桝亦俣又抹末沫迄侭繭麿万慢満"], 
+["cca1","漫蔓味未魅巳箕岬密蜜湊蓑稔脈妙粍民眠務夢無牟矛霧鵡椋婿娘冥名命明盟迷銘鳴姪牝滅免棉綿緬面麺摸模茂妄孟毛猛盲網耗蒙儲木黙目杢勿餅尤戻籾貰問悶紋門匁也冶夜爺耶野弥矢厄役約薬訳躍靖柳薮鑓愉愈油癒"], +["cda1","諭輸唯佑優勇友宥幽悠憂揖有柚湧涌猶猷由祐裕誘遊邑郵雄融夕予余与誉輿預傭幼妖容庸揚揺擁曜楊様洋溶熔用窯羊耀葉蓉要謡踊遥陽養慾抑欲沃浴翌翼淀羅螺裸来莱頼雷洛絡落酪乱卵嵐欄濫藍蘭覧利吏履李梨理璃"], +["cea1","痢裏裡里離陸律率立葎掠略劉流溜琉留硫粒隆竜龍侶慮旅虜了亮僚両凌寮料梁涼猟療瞭稜糧良諒遼量陵領力緑倫厘林淋燐琳臨輪隣鱗麟瑠塁涙累類令伶例冷励嶺怜玲礼苓鈴隷零霊麗齢暦歴列劣烈裂廉恋憐漣煉簾練聯"], +["cfa1","蓮連錬呂魯櫓炉賂路露労婁廊弄朗楼榔浪漏牢狼篭老聾蝋郎六麓禄肋録論倭和話歪賄脇惑枠鷲亙亘鰐詫藁蕨椀湾碗腕"], +["d0a1","弌丐丕个丱丶丼丿乂乖乘亂亅豫亊舒弍于亞亟亠亢亰亳亶从仍仄仆仂仗仞仭仟价伉佚估佛佝佗佇佶侈侏侘佻佩佰侑佯來侖儘俔俟俎俘俛俑俚俐俤俥倚倨倔倪倥倅伜俶倡倩倬俾俯們倆偃假會偕偐偈做偖偬偸傀傚傅傴傲"], +["d1a1","僉僊傳僂僖僞僥僭僣僮價僵儉儁儂儖儕儔儚儡儺儷儼儻儿兀兒兌兔兢竸兩兪兮冀冂囘册冉冏冑冓冕冖冤冦冢冩冪冫决冱冲冰况冽凅凉凛几處凩凭凰凵凾刄刋刔刎刧刪刮刳刹剏剄剋剌剞剔剪剴剩剳剿剽劍劔劒剱劈劑辨"], +["d2a1","辧劬劭劼劵勁勍勗勞勣勦飭勠勳勵勸勹匆匈甸匍匐匏匕匚匣匯匱匳匸區卆卅丗卉卍凖卞卩卮夘卻卷厂厖厠厦厥厮厰厶參簒雙叟曼燮叮叨叭叺吁吽呀听吭吼吮吶吩吝呎咏呵咎呟呱呷呰咒呻咀呶咄咐咆哇咢咸咥咬哄哈咨"], +["d3a1","咫哂咤咾咼哘哥哦唏唔哽哮哭哺哢唹啀啣啌售啜啅啖啗唸唳啝喙喀咯喊喟啻啾喘喞單啼喃喩喇喨嗚嗅嗟嗄嗜嗤嗔嘔嗷嘖嗾嗽嘛嗹噎噐營嘴嘶嘲嘸噫噤嘯噬噪嚆嚀嚊嚠嚔嚏嚥嚮嚶嚴囂嚼囁囃囀囈囎囑囓囗囮囹圀囿圄圉"], +["d4a1","圈國圍圓團圖嗇圜圦圷圸坎圻址坏坩埀垈坡坿垉垓垠垳垤垪垰埃埆埔埒埓堊埖埣堋堙堝塲堡塢塋塰毀塒堽塹墅墹墟墫墺壞墻墸墮壅壓壑壗壙壘壥壜壤壟壯壺壹壻壼壽夂夊夐夛梦夥夬夭夲夸夾竒奕奐奎奚奘奢奠奧奬奩"], +["d5a1","奸妁妝佞侫妣妲姆姨姜妍姙姚娥娟娑娜娉娚婀婬婉娵娶婢婪媚媼媾嫋嫂媽嫣嫗嫦嫩嫖嫺嫻嬌嬋嬖嬲嫐嬪嬶嬾孃孅孀孑孕孚孛孥孩孰孳孵學斈孺宀它宦宸寃寇寉寔寐寤實寢寞寥寫寰寶寳尅將專對尓尠尢尨尸尹屁屆屎屓"], +["d6a1","屐屏孱屬屮乢屶屹岌岑岔妛岫岻岶岼岷峅岾峇峙峩峽峺峭嶌峪崋崕崗嵜崟崛崑崔崢崚崙崘嵌嵒嵎嵋嵬嵳嵶嶇嶄嶂嶢嶝嶬嶮嶽嶐嶷嶼巉巍巓巒巖巛巫已巵帋帚帙帑帛帶帷幄幃幀幎幗幔幟幢幤幇幵并幺麼广庠廁廂廈廐廏"], +["d7a1","廖廣廝廚廛廢廡廨廩廬廱廳廰廴廸廾弃弉彝彜弋弑弖弩弭弸彁彈彌彎弯彑彖彗彙彡彭彳彷徃徂彿徊很徑徇從徙徘徠徨徭徼忖忻忤忸忱忝悳忿怡恠怙怐怩怎怱怛怕怫怦怏怺恚恁恪恷恟恊恆恍恣恃恤恂恬恫恙悁悍惧悃悚"], +["d8a1","悄悛悖悗悒悧悋惡悸惠惓悴忰悽惆悵惘慍愕愆惶惷愀惴惺愃愡惻惱愍愎慇愾愨愧慊愿愼愬愴愽慂慄慳慷慘慙慚慫慴慯慥慱慟慝慓慵憙憖憇憬憔憚憊憑憫憮懌懊應懷懈懃懆憺懋罹懍懦懣懶懺懴懿懽懼懾戀戈戉戍戌戔戛"], +["d9a1","戞戡截戮戰戲戳扁扎扞扣扛扠扨扼抂抉找抒抓抖拔抃抔拗拑抻拏拿拆擔拈拜拌拊拂拇抛拉挌拮拱挧挂挈拯拵捐挾捍搜捏掖掎掀掫捶掣掏掉掟掵捫捩掾揩揀揆揣揉插揶揄搖搴搆搓搦搶攝搗搨搏摧摯摶摎攪撕撓撥撩撈撼"], +["daa1","據擒擅擇撻擘擂擱擧舉擠擡抬擣擯攬擶擴擲擺攀擽攘攜攅攤攣攫攴攵攷收攸畋效敖敕敍敘敞敝敲數斂斃變斛斟斫斷旃旆旁旄旌旒旛旙无旡旱杲昊昃旻杳昵昶昴昜晏晄晉晁晞晝晤晧晨晟晢晰暃暈暎暉暄暘暝曁暹曉暾暼"], +["dba1","曄暸曖曚曠昿曦曩曰曵曷朏朖朞朦朧霸朮朿朶杁朸朷杆杞杠杙杣杤枉杰枩杼杪枌枋枦枡枅枷柯枴柬枳柩枸柤柞柝柢柮枹柎柆柧檜栞框栩桀桍栲桎梳栫桙档桷桿梟梏梭梔條梛梃檮梹桴梵梠梺椏梍桾椁棊椈棘椢椦棡椌棍"], +["dca1","棔棧棕椶椒椄棗棣椥棹棠棯椨椪椚椣椡棆楹楷楜楸楫楔楾楮椹楴椽楙椰楡楞楝榁楪榲榮槐榿槁槓榾槎寨槊槝榻槃榧樮榑榠榜榕榴槞槨樂樛槿權槹槲槧樅榱樞槭樔槫樊樒櫁樣樓橄樌橲樶橸橇橢橙橦橈樸樢檐檍檠檄檢檣"], +["dda1","檗蘗檻櫃櫂檸檳檬櫞櫑櫟檪櫚櫪櫻欅蘖櫺欒欖鬱欟欸欷盜欹飮歇歃歉歐歙歔歛歟歡歸歹歿殀殄殃殍殘殕殞殤殪殫殯殲殱殳殷殼毆毋毓毟毬毫毳毯麾氈氓气氛氤氣汞汕汢汪沂沍沚沁沛汾汨汳沒沐泄泱泓沽泗泅泝沮沱沾"], +["dea1","沺泛泯泙泪洟衍洶洫洽洸洙洵洳洒洌浣涓浤浚浹浙涎涕濤涅淹渕渊涵淇淦涸淆淬淞淌淨淒淅淺淙淤淕淪淮渭湮渮渙湲湟渾渣湫渫湶湍渟湃渺湎渤滿渝游溂溪溘滉溷滓溽溯滄溲滔滕溏溥滂溟潁漑灌滬滸滾漿滲漱滯漲滌"], +["dfa1","漾漓滷澆潺潸澁澀潯潛濳潭澂潼潘澎澑濂潦澳澣澡澤澹濆澪濟濕濬濔濘濱濮濛瀉瀋濺瀑瀁瀏濾瀛瀚潴瀝瀘瀟瀰瀾瀲灑灣炙炒炯烱炬炸炳炮烟烋烝烙焉烽焜焙煥煕熈煦煢煌煖煬熏燻熄熕熨熬燗熹熾燒燉燔燎燠燬燧燵燼"], +["e0a1","燹燿爍爐爛爨爭爬爰爲爻爼爿牀牆牋牘牴牾犂犁犇犒犖犢犧犹犲狃狆狄狎狒狢狠狡狹狷倏猗猊猜猖猝猴猯猩猥猾獎獏默獗獪獨獰獸獵獻獺珈玳珎玻珀珥珮珞璢琅瑯琥珸琲琺瑕琿瑟瑙瑁瑜瑩瑰瑣瑪瑶瑾璋璞璧瓊瓏瓔珱"], +["e1a1","瓠瓣瓧瓩瓮瓲瓰瓱瓸瓷甄甃甅甌甎甍甕甓甞甦甬甼畄畍畊畉畛畆畚畩畤畧畫畭畸當疆疇畴疊疉疂疔疚疝疥疣痂疳痃疵疽疸疼疱痍痊痒痙痣痞痾痿痼瘁痰痺痲痳瘋瘍瘉瘟瘧瘠瘡瘢瘤瘴瘰瘻癇癈癆癜癘癡癢癨癩癪癧癬癰"], +["e2a1","癲癶癸發皀皃皈皋皎皖皓皙皚皰皴皸皹皺盂盍盖盒盞盡盥盧盪蘯盻眈眇眄眩眤眞眥眦眛眷眸睇睚睨睫睛睥睿睾睹瞎瞋瞑瞠瞞瞰瞶瞹瞿瞼瞽瞻矇矍矗矚矜矣矮矼砌砒礦砠礪硅碎硴碆硼碚碌碣碵碪碯磑磆磋磔碾碼磅磊磬"], +["e3a1","磧磚磽磴礇礒礑礙礬礫祀祠祗祟祚祕祓祺祿禊禝禧齋禪禮禳禹禺秉秕秧秬秡秣稈稍稘稙稠稟禀稱稻稾稷穃穗穉穡穢穩龝穰穹穽窈窗窕窘窖窩竈窰窶竅竄窿邃竇竊竍竏竕竓站竚竝竡竢竦竭竰笂笏笊笆笳笘笙笞笵笨笶筐"], +["e4a1","筺笄筍笋筌筅筵筥筴筧筰筱筬筮箝箘箟箍箜箚箋箒箏筝箙篋篁篌篏箴篆篝篩簑簔篦篥籠簀簇簓篳篷簗簍篶簣簧簪簟簷簫簽籌籃籔籏籀籐籘籟籤籖籥籬籵粃粐粤粭粢粫粡粨粳粲粱粮粹粽糀糅糂糘糒糜糢鬻糯糲糴糶糺紆"], +["e5a1","紂紜紕紊絅絋紮紲紿紵絆絳絖絎絲絨絮絏絣經綉絛綏絽綛綺綮綣綵緇綽綫總綢綯緜綸綟綰緘緝緤緞緻緲緡縅縊縣縡縒縱縟縉縋縢繆繦縻縵縹繃縷縲縺繧繝繖繞繙繚繹繪繩繼繻纃緕繽辮繿纈纉續纒纐纓纔纖纎纛纜缸缺"], +["e6a1","罅罌罍罎罐网罕罔罘罟罠罨罩罧罸羂羆羃羈羇羌羔羞羝羚羣羯羲羹羮羶羸譱翅翆翊翕翔翡翦翩翳翹飜耆耄耋耒耘耙耜耡耨耿耻聊聆聒聘聚聟聢聨聳聲聰聶聹聽聿肄肆肅肛肓肚肭冐肬胛胥胙胝胄胚胖脉胯胱脛脩脣脯腋"], +["e7a1","隋腆脾腓腑胼腱腮腥腦腴膃膈膊膀膂膠膕膤膣腟膓膩膰膵膾膸膽臀臂膺臉臍臑臙臘臈臚臟臠臧臺臻臾舁舂舅與舊舍舐舖舩舫舸舳艀艙艘艝艚艟艤艢艨艪艫舮艱艷艸艾芍芒芫芟芻芬苡苣苟苒苴苳苺莓范苻苹苞茆苜茉苙"], +["e8a1","茵茴茖茲茱荀茹荐荅茯茫茗茘莅莚莪莟莢莖茣莎莇莊荼莵荳荵莠莉莨菴萓菫菎菽萃菘萋菁菷萇菠菲萍萢萠莽萸蔆菻葭萪萼蕚蒄葷葫蒭葮蒂葩葆萬葯葹萵蓊葢蒹蒿蒟蓙蓍蒻蓚蓐蓁蓆蓖蒡蔡蓿蓴蔗蔘蔬蔟蔕蔔蓼蕀蕣蕘蕈"], +["e9a1","蕁蘂蕋蕕薀薤薈薑薊薨蕭薔薛藪薇薜蕷蕾薐藉薺藏薹藐藕藝藥藜藹蘊蘓蘋藾藺蘆蘢蘚蘰蘿虍乕虔號虧虱蚓蚣蚩蚪蚋蚌蚶蚯蛄蛆蚰蛉蠣蚫蛔蛞蛩蛬蛟蛛蛯蜒蜆蜈蜀蜃蛻蜑蜉蜍蛹蜊蜴蜿蜷蜻蜥蜩蜚蝠蝟蝸蝌蝎蝴蝗蝨蝮蝙"], +["eaa1","蝓蝣蝪蠅螢螟螂螯蟋螽蟀蟐雖螫蟄螳蟇蟆螻蟯蟲蟠蠏蠍蟾蟶蟷蠎蟒蠑蠖蠕蠢蠡蠱蠶蠹蠧蠻衄衂衒衙衞衢衫袁衾袞衵衽袵衲袂袗袒袮袙袢袍袤袰袿袱裃裄裔裘裙裝裹褂裼裴裨裲褄褌褊褓襃褞褥褪褫襁襄褻褶褸襌褝襠襞"], +["eba1","襦襤襭襪襯襴襷襾覃覈覊覓覘覡覩覦覬覯覲覺覽覿觀觚觜觝觧觴觸訃訖訐訌訛訝訥訶詁詛詒詆詈詼詭詬詢誅誂誄誨誡誑誥誦誚誣諄諍諂諚諫諳諧諤諱謔諠諢諷諞諛謌謇謚諡謖謐謗謠謳鞫謦謫謾謨譁譌譏譎證譖譛譚譫"], +["eca1","譟譬譯譴譽讀讌讎讒讓讖讙讚谺豁谿豈豌豎豐豕豢豬豸豺貂貉貅貊貍貎貔豼貘戝貭貪貽貲貳貮貶賈賁賤賣賚賽賺賻贄贅贊贇贏贍贐齎贓賍贔贖赧赭赱赳趁趙跂趾趺跏跚跖跌跛跋跪跫跟跣跼踈踉跿踝踞踐踟蹂踵踰踴蹊"], 
+["eda1","蹇蹉蹌蹐蹈蹙蹤蹠踪蹣蹕蹶蹲蹼躁躇躅躄躋躊躓躑躔躙躪躡躬躰軆躱躾軅軈軋軛軣軼軻軫軾輊輅輕輒輙輓輜輟輛輌輦輳輻輹轅轂輾轌轉轆轎轗轜轢轣轤辜辟辣辭辯辷迚迥迢迪迯邇迴逅迹迺逑逕逡逍逞逖逋逧逶逵逹迸"], +["eea1","遏遐遑遒逎遉逾遖遘遞遨遯遶隨遲邂遽邁邀邊邉邏邨邯邱邵郢郤扈郛鄂鄒鄙鄲鄰酊酖酘酣酥酩酳酲醋醉醂醢醫醯醪醵醴醺釀釁釉釋釐釖釟釡釛釼釵釶鈞釿鈔鈬鈕鈑鉞鉗鉅鉉鉤鉈銕鈿鉋鉐銜銖銓銛鉚鋏銹銷鋩錏鋺鍄錮"], +["efa1","錙錢錚錣錺錵錻鍜鍠鍼鍮鍖鎰鎬鎭鎔鎹鏖鏗鏨鏥鏘鏃鏝鏐鏈鏤鐚鐔鐓鐃鐇鐐鐶鐫鐵鐡鐺鑁鑒鑄鑛鑠鑢鑞鑪鈩鑰鑵鑷鑽鑚鑼鑾钁鑿閂閇閊閔閖閘閙閠閨閧閭閼閻閹閾闊濶闃闍闌闕闔闖關闡闥闢阡阨阮阯陂陌陏陋陷陜陞"], +["f0a1","陝陟陦陲陬隍隘隕隗險隧隱隲隰隴隶隸隹雎雋雉雍襍雜霍雕雹霄霆霈霓霎霑霏霖霙霤霪霰霹霽霾靄靆靈靂靉靜靠靤靦靨勒靫靱靹鞅靼鞁靺鞆鞋鞏鞐鞜鞨鞦鞣鞳鞴韃韆韈韋韜韭齏韲竟韶韵頏頌頸頤頡頷頽顆顏顋顫顯顰"], +["f1a1","顱顴顳颪颯颱颶飄飃飆飩飫餃餉餒餔餘餡餝餞餤餠餬餮餽餾饂饉饅饐饋饑饒饌饕馗馘馥馭馮馼駟駛駝駘駑駭駮駱駲駻駸騁騏騅駢騙騫騷驅驂驀驃騾驕驍驛驗驟驢驥驤驩驫驪骭骰骼髀髏髑髓體髞髟髢髣髦髯髫髮髴髱髷"], +["f2a1","髻鬆鬘鬚鬟鬢鬣鬥鬧鬨鬩鬪鬮鬯鬲魄魃魏魍魎魑魘魴鮓鮃鮑鮖鮗鮟鮠鮨鮴鯀鯊鮹鯆鯏鯑鯒鯣鯢鯤鯔鯡鰺鯲鯱鯰鰕鰔鰉鰓鰌鰆鰈鰒鰊鰄鰮鰛鰥鰤鰡鰰鱇鰲鱆鰾鱚鱠鱧鱶鱸鳧鳬鳰鴉鴈鳫鴃鴆鴪鴦鶯鴣鴟鵄鴕鴒鵁鴿鴾鵆鵈"], +["f3a1","鵝鵞鵤鵑鵐鵙鵲鶉鶇鶫鵯鵺鶚鶤鶩鶲鷄鷁鶻鶸鶺鷆鷏鷂鷙鷓鷸鷦鷭鷯鷽鸚鸛鸞鹵鹹鹽麁麈麋麌麒麕麑麝麥麩麸麪麭靡黌黎黏黐黔黜點黝黠黥黨黯黴黶黷黹黻黼黽鼇鼈皷鼕鼡鼬鼾齊齒齔齣齟齠齡齦齧齬齪齷齲齶龕龜龠"], +["f4a1","堯槇遙瑤凜熙"], +["f9a1","纊褜鍈銈蓜俉炻昱棈鋹曻彅丨仡仼伀伃伹佖侒侊侚侔俍偀倢俿倞偆偰偂傔僴僘兊兤冝冾凬刕劜劦勀勛匀匇匤卲厓厲叝﨎咜咊咩哿喆坙坥垬埈埇﨏塚增墲夋奓奛奝奣妤妺孖寀甯寘寬尞岦岺峵崧嵓﨑嵂嵭嶸嶹巐弡弴彧德"], +["faa1","忞恝悅悊惞惕愠惲愑愷愰憘戓抦揵摠撝擎敎昀昕昻昉昮昞昤晥晗晙晴晳暙暠暲暿曺朎朗杦枻桒柀栁桄棏﨓楨﨔榘槢樰橫橆橳橾櫢櫤毖氿汜沆汯泚洄涇浯涖涬淏淸淲淼渹湜渧渼溿澈澵濵瀅瀇瀨炅炫焏焄煜煆煇凞燁燾犱"], +["fba1","犾猤猪獷玽珉珖珣珒琇珵琦琪琩琮瑢璉璟甁畯皂皜皞皛皦益睆劯砡硎硤硺礰礼神祥禔福禛竑竧靖竫箞精絈絜綷綠緖繒罇羡羽茁荢荿菇菶葈蒴蕓蕙蕫﨟薰蘒﨡蠇裵訒訷詹誧誾諟諸諶譓譿賰賴贒赶﨣軏﨤逸遧郞都鄕鄧釚"], +["fca1","釗釞釭釮釤釥鈆鈐鈊鈺鉀鈼鉎鉙鉑鈹鉧銧鉷鉸鋧鋗鋙鋐﨧鋕鋠鋓錥錡鋻﨨錞鋿錝錂鍰鍗鎤鏆鏞鏸鐱鑅鑈閒隆﨩隝隯霳霻靃靍靏靑靕顗顥飯飼餧館馞驎髙髜魵魲鮏鮱鮻鰀鵰鵫鶴鸙黑"], +["fcf1","ⅰ",9,"¬¦'""], +["8fa2af","˘ˇ¸˙˝¯˛˚~΄΅"], +["8fa2c2","¡¦¿"], +["8fa2eb","ºª©®™¤№"], +["8fa6e1","ΆΈΉΊΪ"], +["8fa6e7","Ό"], +["8fa6e9","ΎΫ"], +["8fa6ec","Ώ"], +["8fa6f1","άέήίϊΐόςύϋΰώ"], +["8fa7c2","Ђ",10,"ЎЏ"], +["8fa7f2","ђ",10,"ўџ"], +["8fa9a1","ÆĐ"], +["8fa9a4","Ħ"], +["8fa9a6","IJ"], +["8fa9a8","ŁĿ"], +["8fa9ab","ŊØŒ"], +["8fa9af","ŦÞ"], +["8fa9c1","æđðħıijĸłŀʼnŋøœßŧþ"], +["8faaa1","ÁÀÄÂĂǍĀĄÅÃĆĈČÇĊĎÉÈËÊĚĖĒĘ"], +["8faaba","ĜĞĢĠĤÍÌÏÎǏİĪĮĨĴĶĹĽĻŃŇŅÑÓÒÖÔǑŐŌÕŔŘŖŚŜŠŞŤŢÚÙÜÛŬǓŰŪŲŮŨǗǛǙǕŴÝŸŶŹŽŻ"], +["8faba1","áàäâăǎāąåãćĉčçċďéèëêěėēęǵĝğ"], +["8fabbd","ġĥíìïîǐ"], +["8fabc5","īįĩĵķĺľļńňņñóòöôǒőōõŕřŗśŝšşťţúùüûŭǔűūųůũǘǜǚǖŵýÿŷźžż"], +["8fb0a1","丂丄丅丌丒丟丣两丨丫丮丯丰丵乀乁乄乇乑乚乜乣乨乩乴乵乹乿亍亖亗亝亯亹仃仐仚仛仠仡仢仨仯仱仳仵份仾仿伀伂伃伈伋伌伒伕伖众伙伮伱你伳伵伷伹伻伾佀佂佈佉佋佌佒佔佖佘佟佣佪佬佮佱佷佸佹佺佽佾侁侂侄"], +["8fb1a1","侅侉侊侌侎侐侒侓侔侗侙侚侞侟侲侷侹侻侼侽侾俀俁俅俆俈俉俋俌俍俏俒俜俠俢俰俲俼俽俿倀倁倄倇倊倌倎倐倓倗倘倛倜倝倞倢倧倮倰倲倳倵偀偁偂偅偆偊偌偎偑偒偓偗偙偟偠偢偣偦偧偪偭偰偱倻傁傃傄傆傊傎傏傐"], +["8fb2a1","傒傓傔傖傛傜傞",4,"傪傯傰傹傺傽僀僃僄僇僌僎僐僓僔僘僜僝僟僢僤僦僨僩僯僱僶僺僾儃儆儇儈儋儌儍儎僲儐儗儙儛儜儝儞儣儧儨儬儭儯儱儳儴儵儸儹兂兊兏兓兕兗兘兟兤兦兾冃冄冋冎冘冝冡冣冭冸冺冼冾冿凂"], +["8fb3a1","凈减凑凒凓凕凘凞凢凥凮凲凳凴凷刁刂刅划刓刕刖刘刢刨刱刲刵刼剅剉剕剗剘剚剜剟剠剡剦剮剷剸剹劀劂劅劊劌劓劕劖劗劘劚劜劤劥劦劧劯劰劶劷劸劺劻劽勀勄勆勈勌勏勑勔勖勛勜勡勥勨勩勪勬勰勱勴勶勷匀匃匊匋"], +["8fb4a1","匌匑匓匘匛匜匞匟匥匧匨匩匫匬匭匰匲匵匼匽匾卂卌卋卙卛卡卣卥卬卭卲卹卾厃厇厈厎厓厔厙厝厡厤厪厫厯厲厴厵厷厸厺厽叀叅叏叒叓叕叚叝叞叠另叧叵吂吓吚吡吧吨吪启吱吴吵呃呄呇呍呏呞呢呤呦呧呩呫呭呮呴呿"], +["8fb5a1","咁咃咅咈咉咍咑咕咖咜咟咡咦咧咩咪咭咮咱咷咹咺咻咿哆哊响哎哠哪哬哯哶哼哾哿唀唁唅唈唉唌唍唎唕唪唫唲唵唶唻唼唽啁啇啉啊啍啐啑啘啚啛啞啠啡啤啦啿喁喂喆喈喎喏喑喒喓喔喗喣喤喭喲喿嗁嗃嗆嗉嗋嗌嗎嗑嗒"], +["8fb6a1","嗓嗗嗘嗛嗞嗢嗩嗶嗿嘅嘈嘊嘍",5,"嘙嘬嘰嘳嘵嘷嘹嘻嘼嘽嘿噀噁噃噄噆噉噋噍噏噔噞噠噡噢噣噦噩噭噯噱噲噵嚄嚅嚈嚋嚌嚕嚙嚚嚝嚞嚟嚦嚧嚨嚩嚫嚬嚭嚱嚳嚷嚾囅囉囊囋囏囐囌囍囙囜囝囟囡囤",4,"囱囫园"], +["8fb7a1","囶囷圁圂圇圊圌圑圕圚圛圝圠圢圣圤圥圩圪圬圮圯圳圴圽圾圿坅坆坌坍坒坢坥坧坨坫坭",4,"坳坴坵坷坹坺坻坼坾垁垃垌垔垗垙垚垜垝垞垟垡垕垧垨垩垬垸垽埇埈埌埏埕埝埞埤埦埧埩埭埰埵埶埸埽埾埿堃堄堈堉埡"], +["8fb8a1","堌堍堛堞堟堠堦堧堭堲堹堿塉塌塍塏塐塕塟塡塤塧塨塸塼塿墀墁墇墈墉墊墌墍墏墐墔墖墝墠墡墢墦墩墱墲壄墼壂壈壍壎壐壒壔壖壚壝壡壢壩壳夅夆夋夌夒夓夔虁夝夡夣夤夨夯夰夳夵夶夿奃奆奒奓奙奛奝奞奟奡奣奫奭"], +["8fb9a1","奯奲奵奶她奻奼妋妌妎妒妕妗妟妤妧妭妮妯妰妳妷妺妼姁姃姄姈姊姍姒姝姞姟姣姤姧姮姯姱姲姴姷娀娄娌娍娎娒娓娞娣娤娧娨娪娭娰婄婅婇婈婌婐婕婞婣婥婧婭婷婺婻婾媋媐媓媖媙媜媞媟媠媢媧媬媱媲媳媵媸媺媻媿"], +["8fbaa1","嫄嫆嫈嫏嫚嫜嫠嫥嫪嫮嫵嫶嫽嬀嬁嬈嬗嬴嬙嬛嬝嬡嬥嬭嬸孁孋孌孒孖孞孨孮孯孼孽孾孿宁宄宆宊宎宐宑宓宔宖宨宩宬宭宯宱宲宷宺宼寀寁寍寏寖",4,"寠寯寱寴寽尌尗尞尟尣尦尩尫尬尮尰尲尵尶屙屚屜屢屣屧屨屩"], +["8fbba1","屭屰屴屵屺屻屼屽岇岈岊岏岒岝岟岠岢岣岦岪岲岴岵岺峉峋峒峝峗峮峱峲峴崁崆崍崒崫崣崤崦崧崱崴崹崽崿嵂嵃嵆嵈嵕嵑嵙嵊嵟嵠嵡嵢嵤嵪嵭嵰嵹嵺嵾嵿嶁嶃嶈嶊嶒嶓嶔嶕嶙嶛嶟嶠嶧嶫嶰嶴嶸嶹巃巇巋巐巎巘巙巠巤"], +["8fbca1","巩巸巹帀帇帍帒帔帕帘帟帠帮帨帲帵帾幋幐幉幑幖幘幛幜幞幨幪",4,"幰庀庋庎庢庤庥庨庪庬庱庳庽庾庿廆廌廋廎廑廒廔廕廜廞廥廫异弆弇弈弎弙弜弝弡弢弣弤弨弫弬弮弰弴弶弻弽弿彀彄彅彇彍彐彔彘彛彠彣彤彧"], +["8fbda1","彯彲彴彵彸彺彽彾徉徍徏徖徜徝徢徧徫徤徬徯徰徱徸忄忇忈忉忋忐",4,"忞忡忢忨忩忪忬忭忮忯忲忳忶忺忼怇怊怍怓怔怗怘怚怟怤怭怳怵恀恇恈恉恌恑恔恖恗恝恡恧恱恾恿悂悆悈悊悎悑悓悕悘悝悞悢悤悥您悰悱悷"], +["8fbea1","悻悾惂惄惈惉惊惋惎惏惔惕惙惛惝惞惢惥惲惵惸惼惽愂愇愊愌愐",4,"愖愗愙愜愞愢愪愫愰愱愵愶愷愹慁慅慆慉慞慠慬慲慸慻慼慿憀憁憃憄憋憍憒憓憗憘憜憝憟憠憥憨憪憭憸憹憼懀懁懂懎懏懕懜懝懞懟懡懢懧懩懥"], 
+["8fbfa1","懬懭懯戁戃戄戇戓戕戜戠戢戣戧戩戫戹戽扂扃扄扆扌扐扑扒扔扖扚扜扤扭扯扳扺扽抍抎抏抐抦抨抳抶抷抺抾抿拄拎拕拖拚拪拲拴拼拽挃挄挊挋挍挐挓挖挘挩挪挭挵挶挹挼捁捂捃捄捆捊捋捎捒捓捔捘捛捥捦捬捭捱捴捵"], +["8fc0a1","捸捼捽捿掂掄掇掊掐掔掕掙掚掞掤掦掭掮掯掽揁揅揈揎揑揓揔揕揜揠揥揪揬揲揳揵揸揹搉搊搐搒搔搘搞搠搢搤搥搩搪搯搰搵搽搿摋摏摑摒摓摔摚摛摜摝摟摠摡摣摭摳摴摻摽撅撇撏撐撑撘撙撛撝撟撡撣撦撨撬撳撽撾撿"], +["8fc1a1","擄擉擊擋擌擎擐擑擕擗擤擥擩擪擭擰擵擷擻擿攁攄攈攉攊攏攓攔攖攙攛攞攟攢攦攩攮攱攺攼攽敃敇敉敐敒敔敟敠敧敫敺敽斁斅斊斒斕斘斝斠斣斦斮斲斳斴斿旂旈旉旎旐旔旖旘旟旰旲旴旵旹旾旿昀昄昈昉昍昑昒昕昖昝"], +["8fc2a1","昞昡昢昣昤昦昩昪昫昬昮昰昱昳昹昷晀晅晆晊晌晑晎晗晘晙晛晜晠晡曻晪晫晬晾晳晵晿晷晸晹晻暀晼暋暌暍暐暒暙暚暛暜暟暠暤暭暱暲暵暻暿曀曂曃曈曌曎曏曔曛曟曨曫曬曮曺朅朇朎朓朙朜朠朢朳朾杅杇杈杌杔杕杝"], +["8fc3a1","杦杬杮杴杶杻极构枎枏枑枓枖枘枙枛枰枱枲枵枻枼枽柹柀柂柃柅柈柉柒柗柙柜柡柦柰柲柶柷桒栔栙栝栟栨栧栬栭栯栰栱栳栻栿桄桅桊桌桕桗桘桛桫桮",4,"桵桹桺桻桼梂梄梆梈梖梘梚梜梡梣梥梩梪梮梲梻棅棈棌棏"], +["8fc4a1","棐棑棓棖棙棜棝棥棨棪棫棬棭棰棱棵棶棻棼棽椆椉椊椐椑椓椖椗椱椳椵椸椻楂楅楉楎楗楛楣楤楥楦楨楩楬楰楱楲楺楻楿榀榍榒榖榘榡榥榦榨榫榭榯榷榸榺榼槅槈槑槖槗槢槥槮槯槱槳槵槾樀樁樃樏樑樕樚樝樠樤樨樰樲"], +["8fc5a1","樴樷樻樾樿橅橆橉橊橎橐橑橒橕橖橛橤橧橪橱橳橾檁檃檆檇檉檋檑檛檝檞檟檥檫檯檰檱檴檽檾檿櫆櫉櫈櫌櫐櫔櫕櫖櫜櫝櫤櫧櫬櫰櫱櫲櫼櫽欂欃欆欇欉欏欐欑欗欛欞欤欨欫欬欯欵欶欻欿歆歊歍歒歖歘歝歠歧歫歮歰歵歽"], +["8fc6a1","歾殂殅殗殛殟殠殢殣殨殩殬殭殮殰殸殹殽殾毃毄毉毌毖毚毡毣毦毧毮毱毷毹毿氂氄氅氉氍氎氐氒氙氟氦氧氨氬氮氳氵氶氺氻氿汊汋汍汏汒汔汙汛汜汫汭汯汴汶汸汹汻沅沆沇沉沔沕沗沘沜沟沰沲沴泂泆泍泏泐泑泒泔泖"], +["8fc7a1","泚泜泠泧泩泫泬泮泲泴洄洇洊洎洏洑洓洚洦洧洨汧洮洯洱洹洼洿浗浞浟浡浥浧浯浰浼涂涇涑涒涔涖涗涘涪涬涴涷涹涽涿淄淈淊淎淏淖淛淝淟淠淢淥淩淯淰淴淶淼渀渄渞渢渧渲渶渹渻渼湄湅湈湉湋湏湑湒湓湔湗湜湝湞"], +["8fc8a1","湢湣湨湳湻湽溍溓溙溠溧溭溮溱溳溻溿滀滁滃滇滈滊滍滎滏滫滭滮滹滻滽漄漈漊漌漍漖漘漚漛漦漩漪漯漰漳漶漻漼漭潏潑潒潓潗潙潚潝潞潡潢潨潬潽潾澃澇澈澋澌澍澐澒澓澔澖澚澟澠澥澦澧澨澮澯澰澵澶澼濅濇濈濊"], +["8fc9a1","濚濞濨濩濰濵濹濼濽瀀瀅瀆瀇瀍瀗瀠瀣瀯瀴瀷瀹瀼灃灄灈灉灊灋灔灕灝灞灎灤灥灬灮灵灶灾炁炅炆炔",4,"炛炤炫炰炱炴炷烊烑烓烔烕烖烘烜烤烺焃",4,"焋焌焏焞焠焫焭焯焰焱焸煁煅煆煇煊煋煐煒煗煚煜煞煠"], +["8fcaa1","煨煹熀熅熇熌熒熚熛熠熢熯熰熲熳熺熿燀燁燄燋燌燓燖燙燚燜燸燾爀爇爈爉爓爗爚爝爟爤爫爯爴爸爹牁牂牃牅牎牏牐牓牕牖牚牜牞牠牣牨牫牮牯牱牷牸牻牼牿犄犉犍犎犓犛犨犭犮犱犴犾狁狇狉狌狕狖狘狟狥狳狴狺狻"], +["8fcba1","狾猂猄猅猇猋猍猒猓猘猙猞猢猤猧猨猬猱猲猵猺猻猽獃獍獐獒獖獘獝獞獟獠獦獧獩獫獬獮獯獱獷獹獼玀玁玃玅玆玎玐玓玕玗玘玜玞玟玠玢玥玦玪玫玭玵玷玹玼玽玿珅珆珉珋珌珏珒珓珖珙珝珡珣珦珧珩珴珵珷珹珺珻珽"], +["8fcca1","珿琀琁琄琇琊琑琚琛琤琦琨",9,"琹瑀瑃瑄瑆瑇瑋瑍瑑瑒瑗瑝瑢瑦瑧瑨瑫瑭瑮瑱瑲璀璁璅璆璇璉璏璐璑璒璘璙璚璜璟璠璡璣璦璨璩璪璫璮璯璱璲璵璹璻璿瓈瓉瓌瓐瓓瓘瓚瓛瓞瓟瓤瓨瓪瓫瓯瓴瓺瓻瓼瓿甆"], +["8fcda1","甒甖甗甠甡甤甧甩甪甯甶甹甽甾甿畀畃畇畈畎畐畒畗畞畟畡畯畱畹",5,"疁疅疐疒疓疕疙疜疢疤疴疺疿痀痁痄痆痌痎痏痗痜痟痠痡痤痧痬痮痯痱痹瘀瘂瘃瘄瘇瘈瘊瘌瘏瘒瘓瘕瘖瘙瘛瘜瘝瘞瘣瘥瘦瘩瘭瘲瘳瘵瘸瘹"], +["8fcea1","瘺瘼癊癀癁癃癄癅癉癋癕癙癟癤癥癭癮癯癱癴皁皅皌皍皕皛皜皝皟皠皢",6,"皪皭皽盁盅盉盋盌盎盔盙盠盦盨盬盰盱盶盹盼眀眆眊眎眒眔眕眗眙眚眜眢眨眭眮眯眴眵眶眹眽眾睂睅睆睊睍睎睏睒睖睗睜睞睟睠睢"], +["8fcfa1","睤睧睪睬睰睲睳睴睺睽瞀瞄瞌瞍瞔瞕瞖瞚瞟瞢瞧瞪瞮瞯瞱瞵瞾矃矉矑矒矕矙矞矟矠矤矦矪矬矰矱矴矸矻砅砆砉砍砎砑砝砡砢砣砭砮砰砵砷硃硄硇硈硌硎硒硜硞硠硡硣硤硨硪确硺硾碊碏碔碘碡碝碞碟碤碨碬碭碰碱碲碳"], +["8fd0a1","碻碽碿磇磈磉磌磎磒磓磕磖磤磛磟磠磡磦磪磲磳礀磶磷磺磻磿礆礌礐礚礜礞礟礠礥礧礩礭礱礴礵礻礽礿祄祅祆祊祋祏祑祔祘祛祜祧祩祫祲祹祻祼祾禋禌禑禓禔禕禖禘禛禜禡禨禩禫禯禱禴禸离秂秄秇秈秊秏秔秖秚秝秞"], +["8fd1a1","秠秢秥秪秫秭秱秸秼稂稃稇稉稊稌稑稕稛稞稡稧稫稭稯稰稴稵稸稹稺穄穅穇穈穌穕穖穙穜穝穟穠穥穧穪穭穵穸穾窀窂窅窆窊窋窐窑窔窞窠窣窬窳窵窹窻窼竆竉竌竎竑竛竨竩竫竬竱竴竻竽竾笇笔笟笣笧笩笪笫笭笮笯笰"], +["8fd2a1","笱笴笽笿筀筁筇筎筕筠筤筦筩筪筭筯筲筳筷箄箉箎箐箑箖箛箞箠箥箬箯箰箲箵箶箺箻箼箽篂篅篈篊篔篖篗篙篚篛篨篪篲篴篵篸篹篺篼篾簁簂簃簄簆簉簋簌簎簏簙簛簠簥簦簨簬簱簳簴簶簹簺籆籊籕籑籒籓籙",5], +["8fd3a1","籡籣籧籩籭籮籰籲籹籼籽粆粇粏粔粞粠粦粰粶粷粺粻粼粿糄糇糈糉糍糏糓糔糕糗糙糚糝糦糩糫糵紃紇紈紉紏紑紒紓紖紝紞紣紦紪紭紱紼紽紾絀絁絇絈絍絑絓絗絙絚絜絝絥絧絪絰絸絺絻絿綁綂綃綅綆綈綋綌綍綑綖綗綝"], +["8fd4a1","綞綦綧綪綳綶綷綹緂",4,"緌緍緎緗緙縀緢緥緦緪緫緭緱緵緶緹緺縈縐縑縕縗縜縝縠縧縨縬縭縯縳縶縿繄繅繇繎繐繒繘繟繡繢繥繫繮繯繳繸繾纁纆纇纊纍纑纕纘纚纝纞缼缻缽缾缿罃罄罇罏罒罓罛罜罝罡罣罤罥罦罭"], +["8fd5a1","罱罽罾罿羀羋羍羏羐羑羖羗羜羡羢羦羪羭羴羼羿翀翃翈翎翏翛翟翣翥翨翬翮翯翲翺翽翾翿耇耈耊耍耎耏耑耓耔耖耝耞耟耠耤耦耬耮耰耴耵耷耹耺耼耾聀聄聠聤聦聭聱聵肁肈肎肜肞肦肧肫肸肹胈胍胏胒胔胕胗胘胠胭胮"], +["8fd6a1","胰胲胳胶胹胺胾脃脋脖脗脘脜脞脠脤脧脬脰脵脺脼腅腇腊腌腒腗腠腡腧腨腩腭腯腷膁膐膄膅膆膋膎膖膘膛膞膢膮膲膴膻臋臃臅臊臎臏臕臗臛臝臞臡臤臫臬臰臱臲臵臶臸臹臽臿舀舃舏舓舔舙舚舝舡舢舨舲舴舺艃艄艅艆"], +["8fd7a1","艋艎艏艑艖艜艠艣艧艭艴艻艽艿芀芁芃芄芇芉芊芎芑芔芖芘芚芛芠芡芣芤芧芨芩芪芮芰芲芴芷芺芼芾芿苆苐苕苚苠苢苤苨苪苭苯苶苷苽苾茀茁茇茈茊茋荔茛茝茞茟茡茢茬茭茮茰茳茷茺茼茽荂荃荄荇荍荎荑荕荖荗荰荸"], +["8fd8a1","荽荿莀莂莄莆莍莒莔莕莘莙莛莜莝莦莧莩莬莾莿菀菇菉菏菐菑菔菝荓菨菪菶菸菹菼萁萆萊萏萑萕萙莭萯萹葅葇葈葊葍葏葑葒葖葘葙葚葜葠葤葥葧葪葰葳葴葶葸葼葽蒁蒅蒒蒓蒕蒞蒦蒨蒩蒪蒯蒱蒴蒺蒽蒾蓀蓂蓇蓈蓌蓏蓓"], +["8fd9a1","蓜蓧蓪蓯蓰蓱蓲蓷蔲蓺蓻蓽蔂蔃蔇蔌蔎蔐蔜蔞蔢蔣蔤蔥蔧蔪蔫蔯蔳蔴蔶蔿蕆蕏",4,"蕖蕙蕜",6,"蕤蕫蕯蕹蕺蕻蕽蕿薁薅薆薉薋薌薏薓薘薝薟薠薢薥薧薴薶薷薸薼薽薾薿藂藇藊藋藎薭藘藚藟藠藦藨藭藳藶藼"], +["8fdaa1","藿蘀蘄蘅蘍蘎蘐蘑蘒蘘蘙蘛蘞蘡蘧蘩蘶蘸蘺蘼蘽虀虂虆虒虓虖虗虘虙虝虠",4,"虩虬虯虵虶虷虺蚍蚑蚖蚘蚚蚜蚡蚦蚧蚨蚭蚱蚳蚴蚵蚷蚸蚹蚿蛀蛁蛃蛅蛑蛒蛕蛗蛚蛜蛠蛣蛥蛧蚈蛺蛼蛽蜄蜅蜇蜋蜎蜏蜐蜓蜔蜙蜞蜟蜡蜣"], +["8fdba1","蜨蜮蜯蜱蜲蜹蜺蜼蜽蜾蝀蝃蝅蝍蝘蝝蝡蝤蝥蝯蝱蝲蝻螃",6,"螋螌螐螓螕螗螘螙螞螠螣螧螬螭螮螱螵螾螿蟁蟈蟉蟊蟎蟕蟖蟙蟚蟜蟟蟢蟣蟤蟪蟫蟭蟱蟳蟸蟺蟿蠁蠃蠆蠉蠊蠋蠐蠙蠒蠓蠔蠘蠚蠛蠜蠞蠟蠨蠭蠮蠰蠲蠵"], +["8fdca1","蠺蠼衁衃衅衈衉衊衋衎衑衕衖衘衚衜衟衠衤衩衱衹衻袀袘袚袛袜袟袠袨袪袺袽袾裀裊",4,"裑裒裓裛裞裧裯裰裱裵裷褁褆褍褎褏褕褖褘褙褚褜褠褦褧褨褰褱褲褵褹褺褾襀襂襅襆襉襏襒襗襚襛襜襡襢襣襫襮襰襳襵襺"], +["8fdda1","襻襼襽覉覍覐覔覕覛覜覟覠覥覰覴覵覶覷覼觔",4,"觥觩觫觭觱觳觶觹觽觿訄訅訇訏訑訒訔訕訞訠訢訤訦訫訬訯訵訷訽訾詀詃詅詇詉詍詎詓詖詗詘詜詝詡詥詧詵詶詷詹詺詻詾詿誀誃誆誋誏誐誒誖誗誙誟誧誩誮誯誳"], +["8fdea1","誶誷誻誾諃諆諈諉諊諑諓諔諕諗諝諟諬諰諴諵諶諼諿謅謆謋謑謜謞謟謊謭謰謷謼譂",4,"譈譒譓譔譙譍譞譣譭譶譸譹譼譾讁讄讅讋讍讏讔讕讜讞讟谸谹谽谾豅豇豉豋豏豑豓豔豗豘豛豝豙豣豤豦豨豩豭豳豵豶豻豾貆"], 
+["8fdfa1","貇貋貐貒貓貙貛貜貤貹貺賅賆賉賋賏賖賕賙賝賡賨賬賯賰賲賵賷賸賾賿贁贃贉贒贗贛赥赩赬赮赿趂趄趈趍趐趑趕趞趟趠趦趫趬趯趲趵趷趹趻跀跅跆跇跈跊跎跑跔跕跗跙跤跥跧跬跰趼跱跲跴跽踁踄踅踆踋踑踔踖踠踡踢"], +["8fe0a1","踣踦踧踱踳踶踷踸踹踽蹀蹁蹋蹍蹎蹏蹔蹛蹜蹝蹞蹡蹢蹩蹬蹭蹯蹰蹱蹹蹺蹻躂躃躉躐躒躕躚躛躝躞躢躧躩躭躮躳躵躺躻軀軁軃軄軇軏軑軔軜軨軮軰軱軷軹軺軭輀輂輇輈輏輐輖輗輘輞輠輡輣輥輧輨輬輭輮輴輵輶輷輺轀轁"], +["8fe1a1","轃轇轏轑",4,"轘轝轞轥辝辠辡辤辥辦辵辶辸达迀迁迆迊迋迍运迒迓迕迠迣迤迨迮迱迵迶迻迾适逄逈逌逘逛逨逩逯逪逬逭逳逴逷逿遃遄遌遛遝遢遦遧遬遰遴遹邅邈邋邌邎邐邕邗邘邙邛邠邡邢邥邰邲邳邴邶邽郌邾郃"], +["8fe2a1","郄郅郇郈郕郗郘郙郜郝郟郥郒郶郫郯郰郴郾郿鄀鄄鄅鄆鄈鄍鄐鄔鄖鄗鄘鄚鄜鄞鄠鄥鄢鄣鄧鄩鄮鄯鄱鄴鄶鄷鄹鄺鄼鄽酃酇酈酏酓酗酙酚酛酡酤酧酭酴酹酺酻醁醃醅醆醊醎醑醓醔醕醘醞醡醦醨醬醭醮醰醱醲醳醶醻醼醽醿"], +["8fe3a1","釂釃釅釓釔釗釙釚釞釤釥釩釪釬",5,"釷釹釻釽鈀鈁鈄鈅鈆鈇鈉鈊鈌鈐鈒鈓鈖鈘鈜鈝鈣鈤鈥鈦鈨鈮鈯鈰鈳鈵鈶鈸鈹鈺鈼鈾鉀鉂鉃鉆鉇鉊鉍鉎鉏鉑鉘鉙鉜鉝鉠鉡鉥鉧鉨鉩鉮鉯鉰鉵",4,"鉻鉼鉽鉿銈銉銊銍銎銒銗"], +["8fe4a1","銙銟銠銤銥銧銨銫銯銲銶銸銺銻銼銽銿",4,"鋅鋆鋇鋈鋋鋌鋍鋎鋐鋓鋕鋗鋘鋙鋜鋝鋟鋠鋡鋣鋥鋧鋨鋬鋮鋰鋹鋻鋿錀錂錈錍錑錔錕錜錝錞錟錡錤錥錧錩錪錳錴錶錷鍇鍈鍉鍐鍑鍒鍕鍗鍘鍚鍞鍤鍥鍧鍩鍪鍭鍯鍰鍱鍳鍴鍶"], +["8fe5a1","鍺鍽鍿鎀鎁鎂鎈鎊鎋鎍鎏鎒鎕鎘鎛鎞鎡鎣鎤鎦鎨鎫鎴鎵鎶鎺鎩鏁鏄鏅鏆鏇鏉",4,"鏓鏙鏜鏞鏟鏢鏦鏧鏹鏷鏸鏺鏻鏽鐁鐂鐄鐈鐉鐍鐎鐏鐕鐖鐗鐟鐮鐯鐱鐲鐳鐴鐻鐿鐽鑃鑅鑈鑊鑌鑕鑙鑜鑟鑡鑣鑨鑫鑭鑮鑯鑱鑲钄钃镸镹"], +["8fe6a1","镾閄閈閌閍閎閝閞閟閡閦閩閫閬閴閶閺閽閿闆闈闉闋闐闑闒闓闙闚闝闞闟闠闤闦阝阞阢阤阥阦阬阱阳阷阸阹阺阼阽陁陒陔陖陗陘陡陮陴陻陼陾陿隁隂隃隄隉隑隖隚隝隟隤隥隦隩隮隯隳隺雊雒嶲雘雚雝雞雟雩雯雱雺霂"], +["8fe7a1","霃霅霉霚霛霝霡霢霣霨霱霳靁靃靊靎靏靕靗靘靚靛靣靧靪靮靳靶靷靸靻靽靿鞀鞉鞕鞖鞗鞙鞚鞞鞟鞢鞬鞮鞱鞲鞵鞶鞸鞹鞺鞼鞾鞿韁韄韅韇韉韊韌韍韎韐韑韔韗韘韙韝韞韠韛韡韤韯韱韴韷韸韺頇頊頙頍頎頔頖頜頞頠頣頦"], +["8fe8a1","頫頮頯頰頲頳頵頥頾顄顇顊顑顒顓顖顗顙顚顢顣顥顦顪顬颫颭颮颰颴颷颸颺颻颿飂飅飈飌飡飣飥飦飧飪飳飶餂餇餈餑餕餖餗餚餛餜餟餢餦餧餫餱",4,"餹餺餻餼饀饁饆饇饈饍饎饔饘饙饛饜饞饟饠馛馝馟馦馰馱馲馵"], +["8fe9a1","馹馺馽馿駃駉駓駔駙駚駜駞駧駪駫駬駰駴駵駹駽駾騂騃騄騋騌騐騑騖騞騠騢騣騤騧騭騮騳騵騶騸驇驁驄驊驋驌驎驑驔驖驝骪骬骮骯骲骴骵骶骹骻骾骿髁髃髆髈髎髐髒髕髖髗髛髜髠髤髥髧髩髬髲髳髵髹髺髽髿",4], +["8feaa1","鬄鬅鬈鬉鬋鬌鬍鬎鬐鬒鬖鬙鬛鬜鬠鬦鬫鬭鬳鬴鬵鬷鬹鬺鬽魈魋魌魕魖魗魛魞魡魣魥魦魨魪",4,"魳魵魷魸魹魿鮀鮄鮅鮆鮇鮉鮊鮋鮍鮏鮐鮔鮚鮝鮞鮦鮧鮩鮬鮰鮱鮲鮷鮸鮻鮼鮾鮿鯁鯇鯈鯎鯐鯗鯘鯝鯟鯥鯧鯪鯫鯯鯳鯷鯸"], +["8feba1","鯹鯺鯽鯿鰀鰂鰋鰏鰑鰖鰘鰙鰚鰜鰞鰢鰣鰦",4,"鰱鰵鰶鰷鰽鱁鱃鱄鱅鱉鱊鱎鱏鱐鱓鱔鱖鱘鱛鱝鱞鱟鱣鱩鱪鱜鱫鱨鱮鱰鱲鱵鱷鱻鳦鳲鳷鳹鴋鴂鴑鴗鴘鴜鴝鴞鴯鴰鴲鴳鴴鴺鴼鵅鴽鵂鵃鵇鵊鵓鵔鵟鵣鵢鵥鵩鵪鵫鵰鵶鵷鵻"], +["8feca1","鵼鵾鶃鶄鶆鶊鶍鶎鶒鶓鶕鶖鶗鶘鶡鶪鶬鶮鶱鶵鶹鶼鶿鷃鷇鷉鷊鷔鷕鷖鷗鷚鷞鷟鷠鷥鷧鷩鷫鷮鷰鷳鷴鷾鸊鸂鸇鸎鸐鸑鸒鸕鸖鸙鸜鸝鹺鹻鹼麀麂麃麄麅麇麎麏麖麘麛麞麤麨麬麮麯麰麳麴麵黆黈黋黕黟黤黧黬黭黮黰黱黲黵"], +["8feda1","黸黿鼂鼃鼉鼏鼐鼑鼒鼔鼖鼗鼙鼚鼛鼟鼢鼦鼪鼫鼯鼱鼲鼴鼷鼹鼺鼼鼽鼿齁齃",4,"齓齕齖齗齘齚齝齞齨齩齭",4,"齳齵齺齽龏龐龑龒龔龖龗龞龡龢龣龥"] +] diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/tables/gb18030-ranges.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/tables/gb18030-ranges.json new file mode 100644 index 0000000000000000000000000000000000000000..85c6934757761e98580abf0c26c351b6fdfd6ad5 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/tables/gb18030-ranges.json @@ -0,0 +1 @@ 
+{"uChars":[128,165,169,178,184,216,226,235,238,244,248,251,253,258,276,284,300,325,329,334,364,463,465,467,469,471,473,475,477,506,594,610,712,716,730,930,938,962,970,1026,1104,1106,8209,8215,8218,8222,8231,8241,8244,8246,8252,8365,8452,8454,8458,8471,8482,8556,8570,8596,8602,8713,8720,8722,8726,8731,8737,8740,8742,8748,8751,8760,8766,8777,8781,8787,8802,8808,8816,8854,8858,8870,8896,8979,9322,9372,9548,9588,9616,9622,9634,9652,9662,9672,9676,9680,9702,9735,9738,9793,9795,11906,11909,11913,11917,11928,11944,11947,11951,11956,11960,11964,11979,12284,12292,12312,12319,12330,12351,12436,12447,12535,12543,12586,12842,12850,12964,13200,13215,13218,13253,13263,13267,13270,13384,13428,13727,13839,13851,14617,14703,14801,14816,14964,15183,15471,15585,16471,16736,17208,17325,17330,17374,17623,17997,18018,18212,18218,18301,18318,18760,18811,18814,18820,18823,18844,18848,18872,19576,19620,19738,19887,40870,59244,59336,59367,59413,59417,59423,59431,59437,59443,59452,59460,59478,59493,63789,63866,63894,63976,63986,64016,64018,64021,64025,64034,64037,64042,65074,65093,65107,65112,65127,65132,65375,65510,65536],"gbChars":[0,36,38,45,50,81,89,95,96,100,103,104,105,109,126,133,148,172,175,179,208,306,307,308,309,310,311,312,313,341,428,443,544,545,558,741,742,749,750,805,819,820,7922,7924,7925,7927,7934,7943,7944,7945,7950,8062,8148,8149,8152,8164,8174,8236,8240,8262,8264,8374,8380,8381,8384,8388,8390,8392,8393,8394,8396,8401,8406,8416,8419,8424,8437,8439,8445,8482,8485,8496,8521,8603,8936,8946,9046,9050,9063,9066,9076,9092,9100,9108,9111,9113,9131,9162,9164,9218,9219,11329,11331,11334,11336,11346,11361,11363,11366,11370,11372,11375,11389,11682,11686,11687,11692,11694,11714,11716,11723,11725,11730,11736,11982,11989,12102,12336,12348,12350,12384,12393,12395,12397,12510,12553,12851,12962,12973,13738,13823,13919,13933,14080,14298,14585,14698,15583,15847,16318,16434,16438,16481,16729,17102,17122,17315,17320,17402,17418,17859,17909,17911,17915,17916,17936,17939,17961,18664,18703,18814,18962,19043,33469,33470,33471,33484,33485,33490,33497,33501,33505,33513,33520,33536,33550,37845,37921,37948,38029,38038,38064,38065,38066,38069,38075,38076,38078,39108,39109,39113,39114,39115,39116,39265,39394,189000]} \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/tables/gbk-added.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/tables/gbk-added.json new file mode 100644 index 0000000000000000000000000000000000000000..b742e368f560003b9c95c3ae9b96633034d6f9e9 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/tables/gbk-added.json @@ -0,0 +1,56 @@ +[ +["a140","",62], +["a180","",32], +["a240","",62], +["a280","",32], +["a2ab","",5], +["a2e3","€"], +["a2ef",""], +["a2fd",""], +["a340","",62], +["a380","",31," "], +["a440","",62], +["a480","",32], +["a4f4","",10], +["a540","",62], +["a580","",32], +["a5f7","",7], +["a640","",62], +["a680","",32], +["a6b9","",7], +["a6d9","",6], +["a6ec",""], +["a6f3",""], +["a6f6","",8], +["a740","",62], +["a780","",32], +["a7c2","",14], +["a7f2","",12], +["a896","",10], +["a8bc","ḿ"], +["a8bf","ǹ"], +["a8c1",""], +["a8ea","",20], +["a958",""], +["a95b",""], +["a95d",""], +["a989","〾⿰",11], +["a997","",12], +["a9f0","",14], +["aaa1","",93], +["aba1","",93], +["aca1","",93], +["ada1","",93], +["aea1","",93], +["afa1","",93], +["d7fa","",4], 
+["f8a1","",93], +["f9a1","",93], +["faa1","",93], +["fba1","",93], +["fca1","",93], +["fda1","",93], +["fe50","⺁⺄㑳㑇⺈⺋㖞㘚㘎⺌⺗㥮㤘㧏㧟㩳㧐㭎㱮㳠⺧⺪䁖䅟⺮䌷⺳⺶⺷䎱䎬⺻䏝䓖䙡䙌"], +["fe80","䜣䜩䝼䞍⻊䥇䥺䥽䦂䦃䦅䦆䦟䦛䦷䦶䲣䲟䲠䲡䱷䲢䴓",6,"䶮",93], +["8135f437",""] +] diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/tables/shiftjis.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/tables/shiftjis.json new file mode 100644 index 0000000000000000000000000000000000000000..5a3a43cf8cf6d20324a49b75aff87d1bf902d108 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/tables/shiftjis.json @@ -0,0 +1,125 @@ +[ +["0","\u0000",128], +["a1","。",62], +["8140"," 、。,.・:;?!゛゜´`¨^ ̄_ヽヾゝゞ〃仝々〆〇ー―‐/\~∥|…‥‘’“”()〔〕[]{}〈",9,"+-±×"], +["8180","÷=≠<>≦≧∞∴♂♀°′″℃¥$¢£%#&*@§☆★○●◎◇◆□■△▲▽▼※〒→←↑↓〓"], +["81b8","∈∋⊆⊇⊂⊃∪∩"], +["81c8","∧∨¬⇒⇔∀∃"], +["81da","∠⊥⌒∂∇≡≒≪≫√∽∝∵∫∬"], +["81f0","ʼn♯♭♪†‡¶"], +["81fc","◯"], +["824f","0",9], +["8260","A",25], +["8281","a",25], +["829f","ぁ",82], +["8340","ァ",62], +["8380","ム",22], +["839f","Α",16,"Σ",6], +["83bf","α",16,"σ",6], +["8440","А",5,"ЁЖ",25], +["8470","а",5,"ёж",7], +["8480","о",17], +["849f","─│┌┐┘└├┬┤┴┼━┃┏┓┛┗┣┳┫┻╋┠┯┨┷┿┝┰┥┸╂"], +["8740","①",19,"Ⅰ",9], +["875f","㍉㌔㌢㍍㌘㌧㌃㌶㍑㍗㌍㌦㌣㌫㍊㌻㎜㎝㎞㎎㎏㏄㎡"], +["877e","㍻"], +["8780","〝〟№㏍℡㊤",4,"㈱㈲㈹㍾㍽㍼≒≡∫∮∑√⊥∠∟⊿∵∩∪"], +["889f","亜唖娃阿哀愛挨姶逢葵茜穐悪握渥旭葦芦鯵梓圧斡扱宛姐虻飴絢綾鮎或粟袷安庵按暗案闇鞍杏以伊位依偉囲夷委威尉惟意慰易椅為畏異移維緯胃萎衣謂違遺医井亥域育郁磯一壱溢逸稲茨芋鰯允印咽員因姻引飲淫胤蔭"], +["8940","院陰隠韻吋右宇烏羽迂雨卯鵜窺丑碓臼渦嘘唄欝蔚鰻姥厩浦瓜閏噂云運雲荏餌叡営嬰影映曳栄永泳洩瑛盈穎頴英衛詠鋭液疫益駅悦謁越閲榎厭円"], +["8980","園堰奄宴延怨掩援沿演炎焔煙燕猿縁艶苑薗遠鉛鴛塩於汚甥凹央奥往応押旺横欧殴王翁襖鴬鴎黄岡沖荻億屋憶臆桶牡乙俺卸恩温穏音下化仮何伽価佳加可嘉夏嫁家寡科暇果架歌河火珂禍禾稼箇花苛茄荷華菓蝦課嘩貨迦過霞蚊俄峨我牙画臥芽蛾賀雅餓駕介会解回塊壊廻快怪悔恢懐戒拐改"], +["8a40","魁晦械海灰界皆絵芥蟹開階貝凱劾外咳害崖慨概涯碍蓋街該鎧骸浬馨蛙垣柿蛎鈎劃嚇各廓拡撹格核殻獲確穫覚角赫較郭閣隔革学岳楽額顎掛笠樫"], +["8a80","橿梶鰍潟割喝恰括活渇滑葛褐轄且鰹叶椛樺鞄株兜竃蒲釜鎌噛鴨栢茅萱粥刈苅瓦乾侃冠寒刊勘勧巻喚堪姦完官寛干幹患感慣憾換敢柑桓棺款歓汗漢澗潅環甘監看竿管簡緩缶翰肝艦莞観諌貫還鑑間閑関陥韓館舘丸含岸巌玩癌眼岩翫贋雁頑顔願企伎危喜器基奇嬉寄岐希幾忌揮机旗既期棋棄"], +["8b40","機帰毅気汽畿祈季稀紀徽規記貴起軌輝飢騎鬼亀偽儀妓宜戯技擬欺犠疑祇義蟻誼議掬菊鞠吉吃喫桔橘詰砧杵黍却客脚虐逆丘久仇休及吸宮弓急救"], +["8b80","朽求汲泣灸球究窮笈級糾給旧牛去居巨拒拠挙渠虚許距鋸漁禦魚亨享京供侠僑兇競共凶協匡卿叫喬境峡強彊怯恐恭挟教橋況狂狭矯胸脅興蕎郷鏡響饗驚仰凝尭暁業局曲極玉桐粁僅勤均巾錦斤欣欽琴禁禽筋緊芹菌衿襟謹近金吟銀九倶句区狗玖矩苦躯駆駈駒具愚虞喰空偶寓遇隅串櫛釧屑屈"], +["8c40","掘窟沓靴轡窪熊隈粂栗繰桑鍬勲君薫訓群軍郡卦袈祁係傾刑兄啓圭珪型契形径恵慶慧憩掲携敬景桂渓畦稽系経継繋罫茎荊蛍計詣警軽頚鶏芸迎鯨"], +["8c80","劇戟撃激隙桁傑欠決潔穴結血訣月件倹倦健兼券剣喧圏堅嫌建憲懸拳捲検権牽犬献研硯絹県肩見謙賢軒遣鍵険顕験鹸元原厳幻弦減源玄現絃舷言諺限乎個古呼固姑孤己庫弧戸故枯湖狐糊袴股胡菰虎誇跨鈷雇顧鼓五互伍午呉吾娯後御悟梧檎瑚碁語誤護醐乞鯉交佼侯候倖光公功効勾厚口向"], +["8d40","后喉坑垢好孔孝宏工巧巷幸広庚康弘恒慌抗拘控攻昂晃更杭校梗構江洪浩港溝甲皇硬稿糠紅紘絞綱耕考肯肱腔膏航荒行衡講貢購郊酵鉱砿鋼閤降"], +["8d80","項香高鴻剛劫号合壕拷濠豪轟麹克刻告国穀酷鵠黒獄漉腰甑忽惚骨狛込此頃今困坤墾婚恨懇昏昆根梱混痕紺艮魂些佐叉唆嵯左差査沙瑳砂詐鎖裟坐座挫債催再最哉塞妻宰彩才採栽歳済災采犀砕砦祭斎細菜裁載際剤在材罪財冴坂阪堺榊肴咲崎埼碕鷺作削咋搾昨朔柵窄策索錯桜鮭笹匙冊刷"], +["8e40","察拶撮擦札殺薩雑皐鯖捌錆鮫皿晒三傘参山惨撒散桟燦珊産算纂蚕讃賛酸餐斬暫残仕仔伺使刺司史嗣四士始姉姿子屍市師志思指支孜斯施旨枝止"], +["8e80","死氏獅祉私糸紙紫肢脂至視詞詩試誌諮資賜雌飼歯事似侍児字寺慈持時次滋治爾璽痔磁示而耳自蒔辞汐鹿式識鴫竺軸宍雫七叱執失嫉室悉湿漆疾質実蔀篠偲柴芝屡蕊縞舎写射捨赦斜煮社紗者謝車遮蛇邪借勺尺杓灼爵酌釈錫若寂弱惹主取守手朱殊狩珠種腫趣酒首儒受呪寿授樹綬需囚収周"], +["8f40","宗就州修愁拾洲秀秋終繍習臭舟蒐衆襲讐蹴輯週酋酬集醜什住充十従戎柔汁渋獣縦重銃叔夙宿淑祝縮粛塾熟出術述俊峻春瞬竣舜駿准循旬楯殉淳"], +["8f80","準潤盾純巡遵醇順処初所暑曙渚庶緒署書薯藷諸助叙女序徐恕鋤除傷償勝匠升召哨商唱嘗奨妾娼宵将小少尚庄床廠彰承抄招掌捷昇昌昭晶松梢樟樵沼消渉湘焼焦照症省硝礁祥称章笑粧紹肖菖蒋蕉衝裳訟証詔詳象賞醤鉦鍾鐘障鞘上丈丞乗冗剰城場壌嬢常情擾条杖浄状畳穣蒸譲醸錠嘱埴飾"], +["9040","拭植殖燭織職色触食蝕辱尻伸信侵唇娠寝審心慎振新晋森榛浸深申疹真神秦紳臣芯薪親診身辛進針震人仁刃塵壬尋甚尽腎訊迅陣靭笥諏須酢図厨"], +["9080","逗吹垂帥推水炊睡粋翠衰遂酔錐錘随瑞髄崇嵩数枢趨雛据杉椙菅頗雀裾澄摺寸世瀬畝是凄制勢姓征性成政整星晴棲栖正清牲生盛精聖声製西誠誓請逝醒青静斉税脆隻席惜戚斥昔析石積籍績脊責赤跡蹟碩切拙接摂折設窃節説雪絶舌蝉仙先千占宣専尖川戦扇撰栓栴泉浅洗染潜煎煽旋穿箭線"], +["9140","繊羨腺舛船薦詮賎践選遷銭銑閃鮮前善漸然全禅繕膳糎噌塑岨措曾曽楚狙疏疎礎祖租粗素組蘇訴阻遡鼠僧創双叢倉喪壮奏爽宋層匝惣想捜掃挿掻"], +["9180","操早曹巣槍槽漕燥争痩相窓糟総綜聡草荘葬蒼藻装走送遭鎗霜騒像増憎臓蔵贈造促側則即息捉束測足速俗属賊族続卒袖其揃存孫尊損村遜他多太汰詑唾堕妥惰打柁舵楕陀駄騨体堆対耐岱帯待怠態戴替泰滞胎腿苔袋貸退逮隊黛鯛代台大第醍題鷹滝瀧卓啄宅托択拓沢濯琢託鐸濁諾茸凧蛸只"], +["9240","叩但達辰奪脱巽竪辿棚谷狸鱈樽誰丹単嘆坦担探旦歎淡湛炭短端箪綻耽胆蛋誕鍛団壇弾断暖檀段男談値知地弛恥智池痴稚置致蜘遅馳築畜竹筑蓄"], 
+["9280","逐秩窒茶嫡着中仲宙忠抽昼柱注虫衷註酎鋳駐樗瀦猪苧著貯丁兆凋喋寵帖帳庁弔張彫徴懲挑暢朝潮牒町眺聴脹腸蝶調諜超跳銚長頂鳥勅捗直朕沈珍賃鎮陳津墜椎槌追鎚痛通塚栂掴槻佃漬柘辻蔦綴鍔椿潰坪壷嬬紬爪吊釣鶴亭低停偵剃貞呈堤定帝底庭廷弟悌抵挺提梯汀碇禎程締艇訂諦蹄逓"], +["9340","邸鄭釘鼎泥摘擢敵滴的笛適鏑溺哲徹撤轍迭鉄典填天展店添纏甜貼転顛点伝殿澱田電兎吐堵塗妬屠徒斗杜渡登菟賭途都鍍砥砺努度土奴怒倒党冬"], +["9380","凍刀唐塔塘套宕島嶋悼投搭東桃梼棟盗淘湯涛灯燈当痘祷等答筒糖統到董蕩藤討謄豆踏逃透鐙陶頭騰闘働動同堂導憧撞洞瞳童胴萄道銅峠鴇匿得徳涜特督禿篤毒独読栃橡凸突椴届鳶苫寅酉瀞噸屯惇敦沌豚遁頓呑曇鈍奈那内乍凪薙謎灘捺鍋楢馴縄畷南楠軟難汝二尼弐迩匂賑肉虹廿日乳入"], +["9440","如尿韮任妊忍認濡禰祢寧葱猫熱年念捻撚燃粘乃廼之埜嚢悩濃納能脳膿農覗蚤巴把播覇杷波派琶破婆罵芭馬俳廃拝排敗杯盃牌背肺輩配倍培媒梅"], +["9480","楳煤狽買売賠陪這蝿秤矧萩伯剥博拍柏泊白箔粕舶薄迫曝漠爆縛莫駁麦函箱硲箸肇筈櫨幡肌畑畠八鉢溌発醗髪伐罰抜筏閥鳩噺塙蛤隼伴判半反叛帆搬斑板氾汎版犯班畔繁般藩販範釆煩頒飯挽晩番盤磐蕃蛮匪卑否妃庇彼悲扉批披斐比泌疲皮碑秘緋罷肥被誹費避非飛樋簸備尾微枇毘琵眉美"], +["9540","鼻柊稗匹疋髭彦膝菱肘弼必畢筆逼桧姫媛紐百謬俵彪標氷漂瓢票表評豹廟描病秒苗錨鋲蒜蛭鰭品彬斌浜瀕貧賓頻敏瓶不付埠夫婦富冨布府怖扶敷"], +["9580","斧普浮父符腐膚芙譜負賦赴阜附侮撫武舞葡蕪部封楓風葺蕗伏副復幅服福腹複覆淵弗払沸仏物鮒分吻噴墳憤扮焚奮粉糞紛雰文聞丙併兵塀幣平弊柄並蔽閉陛米頁僻壁癖碧別瞥蔑箆偏変片篇編辺返遍便勉娩弁鞭保舗鋪圃捕歩甫補輔穂募墓慕戊暮母簿菩倣俸包呆報奉宝峰峯崩庖抱捧放方朋"], +["9640","法泡烹砲縫胞芳萌蓬蜂褒訪豊邦鋒飽鳳鵬乏亡傍剖坊妨帽忘忙房暴望某棒冒紡肪膨謀貌貿鉾防吠頬北僕卜墨撲朴牧睦穆釦勃没殆堀幌奔本翻凡盆"], +["9680","摩磨魔麻埋妹昧枚毎哩槙幕膜枕鮪柾鱒桝亦俣又抹末沫迄侭繭麿万慢満漫蔓味未魅巳箕岬密蜜湊蓑稔脈妙粍民眠務夢無牟矛霧鵡椋婿娘冥名命明盟迷銘鳴姪牝滅免棉綿緬面麺摸模茂妄孟毛猛盲網耗蒙儲木黙目杢勿餅尤戻籾貰問悶紋門匁也冶夜爺耶野弥矢厄役約薬訳躍靖柳薮鑓愉愈油癒"], +["9740","諭輸唯佑優勇友宥幽悠憂揖有柚湧涌猶猷由祐裕誘遊邑郵雄融夕予余与誉輿預傭幼妖容庸揚揺擁曜楊様洋溶熔用窯羊耀葉蓉要謡踊遥陽養慾抑欲"], +["9780","沃浴翌翼淀羅螺裸来莱頼雷洛絡落酪乱卵嵐欄濫藍蘭覧利吏履李梨理璃痢裏裡里離陸律率立葎掠略劉流溜琉留硫粒隆竜龍侶慮旅虜了亮僚両凌寮料梁涼猟療瞭稜糧良諒遼量陵領力緑倫厘林淋燐琳臨輪隣鱗麟瑠塁涙累類令伶例冷励嶺怜玲礼苓鈴隷零霊麗齢暦歴列劣烈裂廉恋憐漣煉簾練聯"], +["9840","蓮連錬呂魯櫓炉賂路露労婁廊弄朗楼榔浪漏牢狼篭老聾蝋郎六麓禄肋録論倭和話歪賄脇惑枠鷲亙亘鰐詫藁蕨椀湾碗腕"], +["989f","弌丐丕个丱丶丼丿乂乖乘亂亅豫亊舒弍于亞亟亠亢亰亳亶从仍仄仆仂仗仞仭仟价伉佚估佛佝佗佇佶侈侏侘佻佩佰侑佯來侖儘俔俟俎俘俛俑俚俐俤俥倚倨倔倪倥倅伜俶倡倩倬俾俯們倆偃假會偕偐偈做偖偬偸傀傚傅傴傲"], +["9940","僉僊傳僂僖僞僥僭僣僮價僵儉儁儂儖儕儔儚儡儺儷儼儻儿兀兒兌兔兢竸兩兪兮冀冂囘册冉冏冑冓冕冖冤冦冢冩冪冫决冱冲冰况冽凅凉凛几處凩凭"], +["9980","凰凵凾刄刋刔刎刧刪刮刳刹剏剄剋剌剞剔剪剴剩剳剿剽劍劔劒剱劈劑辨辧劬劭劼劵勁勍勗勞勣勦飭勠勳勵勸勹匆匈甸匍匐匏匕匚匣匯匱匳匸區卆卅丗卉卍凖卞卩卮夘卻卷厂厖厠厦厥厮厰厶參簒雙叟曼燮叮叨叭叺吁吽呀听吭吼吮吶吩吝呎咏呵咎呟呱呷呰咒呻咀呶咄咐咆哇咢咸咥咬哄哈咨"], +["9a40","咫哂咤咾咼哘哥哦唏唔哽哮哭哺哢唹啀啣啌售啜啅啖啗唸唳啝喙喀咯喊喟啻啾喘喞單啼喃喩喇喨嗚嗅嗟嗄嗜嗤嗔嘔嗷嘖嗾嗽嘛嗹噎噐營嘴嘶嘲嘸"], +["9a80","噫噤嘯噬噪嚆嚀嚊嚠嚔嚏嚥嚮嚶嚴囂嚼囁囃囀囈囎囑囓囗囮囹圀囿圄圉圈國圍圓團圖嗇圜圦圷圸坎圻址坏坩埀垈坡坿垉垓垠垳垤垪垰埃埆埔埒埓堊埖埣堋堙堝塲堡塢塋塰毀塒堽塹墅墹墟墫墺壞墻墸墮壅壓壑壗壙壘壥壜壤壟壯壺壹壻壼壽夂夊夐夛梦夥夬夭夲夸夾竒奕奐奎奚奘奢奠奧奬奩"], +["9b40","奸妁妝佞侫妣妲姆姨姜妍姙姚娥娟娑娜娉娚婀婬婉娵娶婢婪媚媼媾嫋嫂媽嫣嫗嫦嫩嫖嫺嫻嬌嬋嬖嬲嫐嬪嬶嬾孃孅孀孑孕孚孛孥孩孰孳孵學斈孺宀"], +["9b80","它宦宸寃寇寉寔寐寤實寢寞寥寫寰寶寳尅將專對尓尠尢尨尸尹屁屆屎屓屐屏孱屬屮乢屶屹岌岑岔妛岫岻岶岼岷峅岾峇峙峩峽峺峭嶌峪崋崕崗嵜崟崛崑崔崢崚崙崘嵌嵒嵎嵋嵬嵳嵶嶇嶄嶂嶢嶝嶬嶮嶽嶐嶷嶼巉巍巓巒巖巛巫已巵帋帚帙帑帛帶帷幄幃幀幎幗幔幟幢幤幇幵并幺麼广庠廁廂廈廐廏"], +["9c40","廖廣廝廚廛廢廡廨廩廬廱廳廰廴廸廾弃弉彝彜弋弑弖弩弭弸彁彈彌彎弯彑彖彗彙彡彭彳彷徃徂彿徊很徑徇從徙徘徠徨徭徼忖忻忤忸忱忝悳忿怡恠"], +["9c80","怙怐怩怎怱怛怕怫怦怏怺恚恁恪恷恟恊恆恍恣恃恤恂恬恫恙悁悍惧悃悚悄悛悖悗悒悧悋惡悸惠惓悴忰悽惆悵惘慍愕愆惶惷愀惴惺愃愡惻惱愍愎慇愾愨愧慊愿愼愬愴愽慂慄慳慷慘慙慚慫慴慯慥慱慟慝慓慵憙憖憇憬憔憚憊憑憫憮懌懊應懷懈懃懆憺懋罹懍懦懣懶懺懴懿懽懼懾戀戈戉戍戌戔戛"], +["9d40","戞戡截戮戰戲戳扁扎扞扣扛扠扨扼抂抉找抒抓抖拔抃抔拗拑抻拏拿拆擔拈拜拌拊拂拇抛拉挌拮拱挧挂挈拯拵捐挾捍搜捏掖掎掀掫捶掣掏掉掟掵捫"], +["9d80","捩掾揩揀揆揣揉插揶揄搖搴搆搓搦搶攝搗搨搏摧摯摶摎攪撕撓撥撩撈撼據擒擅擇撻擘擂擱擧舉擠擡抬擣擯攬擶擴擲擺攀擽攘攜攅攤攣攫攴攵攷收攸畋效敖敕敍敘敞敝敲數斂斃變斛斟斫斷旃旆旁旄旌旒旛旙无旡旱杲昊昃旻杳昵昶昴昜晏晄晉晁晞晝晤晧晨晟晢晰暃暈暎暉暄暘暝曁暹曉暾暼"], +["9e40","曄暸曖曚曠昿曦曩曰曵曷朏朖朞朦朧霸朮朿朶杁朸朷杆杞杠杙杣杤枉杰枩杼杪枌枋枦枡枅枷柯枴柬枳柩枸柤柞柝柢柮枹柎柆柧檜栞框栩桀桍栲桎"], +["9e80","梳栫桙档桷桿梟梏梭梔條梛梃檮梹桴梵梠梺椏梍桾椁棊椈棘椢椦棡椌棍棔棧棕椶椒椄棗棣椥棹棠棯椨椪椚椣椡棆楹楷楜楸楫楔楾楮椹楴椽楙椰楡楞楝榁楪榲榮槐榿槁槓榾槎寨槊槝榻槃榧樮榑榠榜榕榴槞槨樂樛槿權槹槲槧樅榱樞槭樔槫樊樒櫁樣樓橄樌橲樶橸橇橢橙橦橈樸樢檐檍檠檄檢檣"], +["9f40","檗蘗檻櫃櫂檸檳檬櫞櫑櫟檪櫚櫪櫻欅蘖櫺欒欖鬱欟欸欷盜欹飮歇歃歉歐歙歔歛歟歡歸歹歿殀殄殃殍殘殕殞殤殪殫殯殲殱殳殷殼毆毋毓毟毬毫毳毯"], +["9f80","麾氈氓气氛氤氣汞汕汢汪沂沍沚沁沛汾汨汳沒沐泄泱泓沽泗泅泝沮沱沾沺泛泯泙泪洟衍洶洫洽洸洙洵洳洒洌浣涓浤浚浹浙涎涕濤涅淹渕渊涵淇淦涸淆淬淞淌淨淒淅淺淙淤淕淪淮渭湮渮渙湲湟渾渣湫渫湶湍渟湃渺湎渤滿渝游溂溪溘滉溷滓溽溯滄溲滔滕溏溥滂溟潁漑灌滬滸滾漿滲漱滯漲滌"], +["e040","漾漓滷澆潺潸澁澀潯潛濳潭澂潼潘澎澑濂潦澳澣澡澤澹濆澪濟濕濬濔濘濱濮濛瀉瀋濺瀑瀁瀏濾瀛瀚潴瀝瀘瀟瀰瀾瀲灑灣炙炒炯烱炬炸炳炮烟烋烝"], +["e080","烙焉烽焜焙煥煕熈煦煢煌煖煬熏燻熄熕熨熬燗熹熾燒燉燔燎燠燬燧燵燼燹燿爍爐爛爨爭爬爰爲爻爼爿牀牆牋牘牴牾犂犁犇犒犖犢犧犹犲狃狆狄狎狒狢狠狡狹狷倏猗猊猜猖猝猴猯猩猥猾獎獏默獗獪獨獰獸獵獻獺珈玳珎玻珀珥珮珞璢琅瑯琥珸琲琺瑕琿瑟瑙瑁瑜瑩瑰瑣瑪瑶瑾璋璞璧瓊瓏瓔珱"], +["e140","瓠瓣瓧瓩瓮瓲瓰瓱瓸瓷甄甃甅甌甎甍甕甓甞甦甬甼畄畍畊畉畛畆畚畩畤畧畫畭畸當疆疇畴疊疉疂疔疚疝疥疣痂疳痃疵疽疸疼疱痍痊痒痙痣痞痾痿"], +["e180","痼瘁痰痺痲痳瘋瘍瘉瘟瘧瘠瘡瘢瘤瘴瘰瘻癇癈癆癜癘癡癢癨癩癪癧癬癰癲癶癸發皀皃皈皋皎皖皓皙皚皰皴皸皹皺盂盍盖盒盞盡盥盧盪蘯盻眈眇眄眩眤眞眥眦眛眷眸睇睚睨睫睛睥睿睾睹瞎瞋瞑瞠瞞瞰瞶瞹瞿瞼瞽瞻矇矍矗矚矜矣矮矼砌砒礦砠礪硅碎硴碆硼碚碌碣碵碪碯磑磆磋磔碾碼磅磊磬"], +["e240","磧磚磽磴礇礒礑礙礬礫祀祠祗祟祚祕祓祺祿禊禝禧齋禪禮禳禹禺秉秕秧秬秡秣稈稍稘稙稠稟禀稱稻稾稷穃穗穉穡穢穩龝穰穹穽窈窗窕窘窖窩竈窰"], +["e280","窶竅竄窿邃竇竊竍竏竕竓站竚竝竡竢竦竭竰笂笏笊笆笳笘笙笞笵笨笶筐筺笄筍笋筌筅筵筥筴筧筰筱筬筮箝箘箟箍箜箚箋箒箏筝箙篋篁篌篏箴篆篝篩簑簔篦篥籠簀簇簓篳篷簗簍篶簣簧簪簟簷簫簽籌籃籔籏籀籐籘籟籤籖籥籬籵粃粐粤粭粢粫粡粨粳粲粱粮粹粽糀糅糂糘糒糜糢鬻糯糲糴糶糺紆"], 
+["e340","紂紜紕紊絅絋紮紲紿紵絆絳絖絎絲絨絮絏絣經綉絛綏絽綛綺綮綣綵緇綽綫總綢綯緜綸綟綰緘緝緤緞緻緲緡縅縊縣縡縒縱縟縉縋縢繆繦縻縵縹繃縷"], +["e380","縲縺繧繝繖繞繙繚繹繪繩繼繻纃緕繽辮繿纈纉續纒纐纓纔纖纎纛纜缸缺罅罌罍罎罐网罕罔罘罟罠罨罩罧罸羂羆羃羈羇羌羔羞羝羚羣羯羲羹羮羶羸譱翅翆翊翕翔翡翦翩翳翹飜耆耄耋耒耘耙耜耡耨耿耻聊聆聒聘聚聟聢聨聳聲聰聶聹聽聿肄肆肅肛肓肚肭冐肬胛胥胙胝胄胚胖脉胯胱脛脩脣脯腋"], +["e440","隋腆脾腓腑胼腱腮腥腦腴膃膈膊膀膂膠膕膤膣腟膓膩膰膵膾膸膽臀臂膺臉臍臑臙臘臈臚臟臠臧臺臻臾舁舂舅與舊舍舐舖舩舫舸舳艀艙艘艝艚艟艤"], +["e480","艢艨艪艫舮艱艷艸艾芍芒芫芟芻芬苡苣苟苒苴苳苺莓范苻苹苞茆苜茉苙茵茴茖茲茱荀茹荐荅茯茫茗茘莅莚莪莟莢莖茣莎莇莊荼莵荳荵莠莉莨菴萓菫菎菽萃菘萋菁菷萇菠菲萍萢萠莽萸蔆菻葭萪萼蕚蒄葷葫蒭葮蒂葩葆萬葯葹萵蓊葢蒹蒿蒟蓙蓍蒻蓚蓐蓁蓆蓖蒡蔡蓿蓴蔗蔘蔬蔟蔕蔔蓼蕀蕣蕘蕈"], +["e540","蕁蘂蕋蕕薀薤薈薑薊薨蕭薔薛藪薇薜蕷蕾薐藉薺藏薹藐藕藝藥藜藹蘊蘓蘋藾藺蘆蘢蘚蘰蘿虍乕虔號虧虱蚓蚣蚩蚪蚋蚌蚶蚯蛄蛆蚰蛉蠣蚫蛔蛞蛩蛬"], +["e580","蛟蛛蛯蜒蜆蜈蜀蜃蛻蜑蜉蜍蛹蜊蜴蜿蜷蜻蜥蜩蜚蝠蝟蝸蝌蝎蝴蝗蝨蝮蝙蝓蝣蝪蠅螢螟螂螯蟋螽蟀蟐雖螫蟄螳蟇蟆螻蟯蟲蟠蠏蠍蟾蟶蟷蠎蟒蠑蠖蠕蠢蠡蠱蠶蠹蠧蠻衄衂衒衙衞衢衫袁衾袞衵衽袵衲袂袗袒袮袙袢袍袤袰袿袱裃裄裔裘裙裝裹褂裼裴裨裲褄褌褊褓襃褞褥褪褫襁襄褻褶褸襌褝襠襞"], +["e640","襦襤襭襪襯襴襷襾覃覈覊覓覘覡覩覦覬覯覲覺覽覿觀觚觜觝觧觴觸訃訖訐訌訛訝訥訶詁詛詒詆詈詼詭詬詢誅誂誄誨誡誑誥誦誚誣諄諍諂諚諫諳諧"], +["e680","諤諱謔諠諢諷諞諛謌謇謚諡謖謐謗謠謳鞫謦謫謾謨譁譌譏譎證譖譛譚譫譟譬譯譴譽讀讌讎讒讓讖讙讚谺豁谿豈豌豎豐豕豢豬豸豺貂貉貅貊貍貎貔豼貘戝貭貪貽貲貳貮貶賈賁賤賣賚賽賺賻贄贅贊贇贏贍贐齎贓賍贔贖赧赭赱赳趁趙跂趾趺跏跚跖跌跛跋跪跫跟跣跼踈踉跿踝踞踐踟蹂踵踰踴蹊"], +["e740","蹇蹉蹌蹐蹈蹙蹤蹠踪蹣蹕蹶蹲蹼躁躇躅躄躋躊躓躑躔躙躪躡躬躰軆躱躾軅軈軋軛軣軼軻軫軾輊輅輕輒輙輓輜輟輛輌輦輳輻輹轅轂輾轌轉轆轎轗轜"], +["e780","轢轣轤辜辟辣辭辯辷迚迥迢迪迯邇迴逅迹迺逑逕逡逍逞逖逋逧逶逵逹迸遏遐遑遒逎遉逾遖遘遞遨遯遶隨遲邂遽邁邀邊邉邏邨邯邱邵郢郤扈郛鄂鄒鄙鄲鄰酊酖酘酣酥酩酳酲醋醉醂醢醫醯醪醵醴醺釀釁釉釋釐釖釟釡釛釼釵釶鈞釿鈔鈬鈕鈑鉞鉗鉅鉉鉤鉈銕鈿鉋鉐銜銖銓銛鉚鋏銹銷鋩錏鋺鍄錮"], +["e840","錙錢錚錣錺錵錻鍜鍠鍼鍮鍖鎰鎬鎭鎔鎹鏖鏗鏨鏥鏘鏃鏝鏐鏈鏤鐚鐔鐓鐃鐇鐐鐶鐫鐵鐡鐺鑁鑒鑄鑛鑠鑢鑞鑪鈩鑰鑵鑷鑽鑚鑼鑾钁鑿閂閇閊閔閖閘閙"], +["e880","閠閨閧閭閼閻閹閾闊濶闃闍闌闕闔闖關闡闥闢阡阨阮阯陂陌陏陋陷陜陞陝陟陦陲陬隍隘隕隗險隧隱隲隰隴隶隸隹雎雋雉雍襍雜霍雕雹霄霆霈霓霎霑霏霖霙霤霪霰霹霽霾靄靆靈靂靉靜靠靤靦靨勒靫靱靹鞅靼鞁靺鞆鞋鞏鞐鞜鞨鞦鞣鞳鞴韃韆韈韋韜韭齏韲竟韶韵頏頌頸頤頡頷頽顆顏顋顫顯顰"], +["e940","顱顴顳颪颯颱颶飄飃飆飩飫餃餉餒餔餘餡餝餞餤餠餬餮餽餾饂饉饅饐饋饑饒饌饕馗馘馥馭馮馼駟駛駝駘駑駭駮駱駲駻駸騁騏騅駢騙騫騷驅驂驀驃"], +["e980","騾驕驍驛驗驟驢驥驤驩驫驪骭骰骼髀髏髑髓體髞髟髢髣髦髯髫髮髴髱髷髻鬆鬘鬚鬟鬢鬣鬥鬧鬨鬩鬪鬮鬯鬲魄魃魏魍魎魑魘魴鮓鮃鮑鮖鮗鮟鮠鮨鮴鯀鯊鮹鯆鯏鯑鯒鯣鯢鯤鯔鯡鰺鯲鯱鯰鰕鰔鰉鰓鰌鰆鰈鰒鰊鰄鰮鰛鰥鰤鰡鰰鱇鰲鱆鰾鱚鱠鱧鱶鱸鳧鳬鳰鴉鴈鳫鴃鴆鴪鴦鶯鴣鴟鵄鴕鴒鵁鴿鴾鵆鵈"], +["ea40","鵝鵞鵤鵑鵐鵙鵲鶉鶇鶫鵯鵺鶚鶤鶩鶲鷄鷁鶻鶸鶺鷆鷏鷂鷙鷓鷸鷦鷭鷯鷽鸚鸛鸞鹵鹹鹽麁麈麋麌麒麕麑麝麥麩麸麪麭靡黌黎黏黐黔黜點黝黠黥黨黯"], +["ea80","黴黶黷黹黻黼黽鼇鼈皷鼕鼡鼬鼾齊齒齔齣齟齠齡齦齧齬齪齷齲齶龕龜龠堯槇遙瑤凜熙"], +["ed40","纊褜鍈銈蓜俉炻昱棈鋹曻彅丨仡仼伀伃伹佖侒侊侚侔俍偀倢俿倞偆偰偂傔僴僘兊兤冝冾凬刕劜劦勀勛匀匇匤卲厓厲叝﨎咜咊咩哿喆坙坥垬埈埇﨏"], +["ed80","塚增墲夋奓奛奝奣妤妺孖寀甯寘寬尞岦岺峵崧嵓﨑嵂嵭嶸嶹巐弡弴彧德忞恝悅悊惞惕愠惲愑愷愰憘戓抦揵摠撝擎敎昀昕昻昉昮昞昤晥晗晙晴晳暙暠暲暿曺朎朗杦枻桒柀栁桄棏﨓楨﨔榘槢樰橫橆橳橾櫢櫤毖氿汜沆汯泚洄涇浯涖涬淏淸淲淼渹湜渧渼溿澈澵濵瀅瀇瀨炅炫焏焄煜煆煇凞燁燾犱"], +["ee40","犾猤猪獷玽珉珖珣珒琇珵琦琪琩琮瑢璉璟甁畯皂皜皞皛皦益睆劯砡硎硤硺礰礼神祥禔福禛竑竧靖竫箞精絈絜綷綠緖繒罇羡羽茁荢荿菇菶葈蒴蕓蕙"], +["ee80","蕫﨟薰蘒﨡蠇裵訒訷詹誧誾諟諸諶譓譿賰賴贒赶﨣軏﨤逸遧郞都鄕鄧釚釗釞釭釮釤釥鈆鈐鈊鈺鉀鈼鉎鉙鉑鈹鉧銧鉷鉸鋧鋗鋙鋐﨧鋕鋠鋓錥錡鋻﨨錞鋿錝錂鍰鍗鎤鏆鏞鏸鐱鑅鑈閒隆﨩隝隯霳霻靃靍靏靑靕顗顥飯飼餧館馞驎髙髜魵魲鮏鮱鮻鰀鵰鵫鶴鸙黑"], +["eeef","ⅰ",9,"¬¦'""], +["f040","",62], +["f080","",124], +["f140","",62], +["f180","",124], +["f240","",62], +["f280","",124], +["f340","",62], +["f380","",124], +["f440","",62], +["f480","",124], +["f540","",62], +["f580","",124], +["f640","",62], +["f680","",124], +["f740","",62], +["f780","",124], +["f840","",62], +["f880","",124], +["f940",""], +["fa40","ⅰ",9,"Ⅰ",9,"¬¦'"㈱№℡∵纊褜鍈銈蓜俉炻昱棈鋹曻彅丨仡仼伀伃伹佖侒侊侚侔俍偀倢俿倞偆偰偂傔僴僘兊"], +["fa80","兤冝冾凬刕劜劦勀勛匀匇匤卲厓厲叝﨎咜咊咩哿喆坙坥垬埈埇﨏塚增墲夋奓奛奝奣妤妺孖寀甯寘寬尞岦岺峵崧嵓﨑嵂嵭嶸嶹巐弡弴彧德忞恝悅悊惞惕愠惲愑愷愰憘戓抦揵摠撝擎敎昀昕昻昉昮昞昤晥晗晙晴晳暙暠暲暿曺朎朗杦枻桒柀栁桄棏﨓楨﨔榘槢樰橫橆橳橾櫢櫤毖氿汜沆汯泚洄涇浯"], +["fb40","涖涬淏淸淲淼渹湜渧渼溿澈澵濵瀅瀇瀨炅炫焏焄煜煆煇凞燁燾犱犾猤猪獷玽珉珖珣珒琇珵琦琪琩琮瑢璉璟甁畯皂皜皞皛皦益睆劯砡硎硤硺礰礼神"], +["fb80","祥禔福禛竑竧靖竫箞精絈絜綷綠緖繒罇羡羽茁荢荿菇菶葈蒴蕓蕙蕫﨟薰蘒﨡蠇裵訒訷詹誧誾諟諸諶譓譿賰賴贒赶﨣軏﨤逸遧郞都鄕鄧釚釗釞釭釮釤釥鈆鈐鈊鈺鉀鈼鉎鉙鉑鈹鉧銧鉷鉸鋧鋗鋙鋐﨧鋕鋠鋓錥錡鋻﨨錞鋿錝錂鍰鍗鎤鏆鏞鏸鐱鑅鑈閒隆﨩隝隯霳霻靃靍靏靑靕顗顥飯飼餧館馞驎髙"], +["fc40","髜魵魲鮏鮱鮻鰀鵰鵫鶴鸙黑"] +] diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/utf16.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/utf16.js new file mode 100644 index 0000000000000000000000000000000000000000..97d066925bbd5dfaa7213e0433570a113c461f3e --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/utf16.js @@ -0,0 +1,197 @@ +"use strict"; +var Buffer = require("safer-buffer").Buffer; + +// Note: UTF16-LE (or UCS2) codec is Node.js native. See encodings/internal.js + +// == UTF16-BE codec. 
========================================================== + +exports.utf16be = Utf16BECodec; +function Utf16BECodec() { +} + +Utf16BECodec.prototype.encoder = Utf16BEEncoder; +Utf16BECodec.prototype.decoder = Utf16BEDecoder; +Utf16BECodec.prototype.bomAware = true; + + +// -- Encoding + +function Utf16BEEncoder() { +} + +Utf16BEEncoder.prototype.write = function(str) { + var buf = Buffer.from(str, 'ucs2'); + for (var i = 0; i < buf.length; i += 2) { + var tmp = buf[i]; buf[i] = buf[i+1]; buf[i+1] = tmp; + } + return buf; +} + +Utf16BEEncoder.prototype.end = function() { +} + + +// -- Decoding + +function Utf16BEDecoder() { + this.overflowByte = -1; +} + +Utf16BEDecoder.prototype.write = function(buf) { + if (buf.length == 0) + return ''; + + var buf2 = Buffer.alloc(buf.length + 1), + i = 0, j = 0; + + if (this.overflowByte !== -1) { + buf2[0] = buf[0]; + buf2[1] = this.overflowByte; + i = 1; j = 2; + } + + for (; i < buf.length-1; i += 2, j+= 2) { + buf2[j] = buf[i+1]; + buf2[j+1] = buf[i]; + } + + this.overflowByte = (i == buf.length-1) ? buf[buf.length-1] : -1; + + return buf2.slice(0, j).toString('ucs2'); +} + +Utf16BEDecoder.prototype.end = function() { + this.overflowByte = -1; +} + + +// == UTF-16 codec ============================================================= +// Decoder chooses automatically from UTF-16LE and UTF-16BE using BOM and space-based heuristic. +// Defaults to UTF-16LE, as it's prevalent and default in Node. +// http://en.wikipedia.org/wiki/UTF-16 and http://encoding.spec.whatwg.org/#utf-16le +// Decoder default can be changed: iconv.decode(buf, 'utf16', {defaultEncoding: 'utf-16be'}); + +// Encoder uses UTF-16LE and prepends BOM (which can be overridden with addBOM: false). + +exports.utf16 = Utf16Codec; +function Utf16Codec(codecOptions, iconv) { + this.iconv = iconv; +} + +Utf16Codec.prototype.encoder = Utf16Encoder; +Utf16Codec.prototype.decoder = Utf16Decoder; + + +// -- Encoding (pass-through) + +function Utf16Encoder(options, codec) { + options = options || {}; + if (options.addBOM === undefined) + options.addBOM = true; + this.encoder = codec.iconv.getEncoder('utf-16le', options); +} + +Utf16Encoder.prototype.write = function(str) { + return this.encoder.write(str); +} + +Utf16Encoder.prototype.end = function() { + return this.encoder.end(); +} + + +// -- Decoding + +function Utf16Decoder(options, codec) { + this.decoder = null; + this.initialBufs = []; + this.initialBufsLen = 0; + + this.options = options || {}; + this.iconv = codec.iconv; +} + +Utf16Decoder.prototype.write = function(buf) { + if (!this.decoder) { + // Codec is not chosen yet. Accumulate initial bytes. + this.initialBufs.push(buf); + this.initialBufsLen += buf.length; + + if (this.initialBufsLen < 16) // We need more bytes to use space heuristic (see below) + return ''; + + // We have enough bytes -> detect endianness. 
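+ // Illustrative note (editorial sketch, not upstream code): the space-based
+ // heuristic below works because ASCII-heavy text such as "hi" is 68 00 69 00
+ // in UTF-16LE but 00 68 00 69 in UTF-16BE, so counting which byte of each
+ // pair is zero reveals the endianness when no BOM is present. The fallback
+ // can be forced per the header comment above, e.g.:
+ //   iconv.decode(buf, 'utf16', { defaultEncoding: 'utf-16be' });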
+ var encoding = detectEncoding(this.initialBufs, this.options.defaultEncoding); + this.decoder = this.iconv.getDecoder(encoding, this.options); + + var resStr = ''; + for (var i = 0; i < this.initialBufs.length; i++) + resStr += this.decoder.write(this.initialBufs[i]); + + this.initialBufs.length = this.initialBufsLen = 0; + return resStr; + } + + return this.decoder.write(buf); +} + +Utf16Decoder.prototype.end = function() { + if (!this.decoder) { + var encoding = detectEncoding(this.initialBufs, this.options.defaultEncoding); + this.decoder = this.iconv.getDecoder(encoding, this.options); + + var resStr = ''; + for (var i = 0; i < this.initialBufs.length; i++) + resStr += this.decoder.write(this.initialBufs[i]); + + var trail = this.decoder.end(); + if (trail) + resStr += trail; + + this.initialBufs.length = this.initialBufsLen = 0; + return resStr; + } + return this.decoder.end(); +} + +function detectEncoding(bufs, defaultEncoding) { + var b = []; + var charsProcessed = 0; + var asciiCharsLE = 0, asciiCharsBE = 0; // Number of ASCII chars when decoded as LE or BE. + + outer_loop: + for (var i = 0; i < bufs.length; i++) { + var buf = bufs[i]; + for (var j = 0; j < buf.length; j++) { + b.push(buf[j]); + if (b.length === 2) { + if (charsProcessed === 0) { + // Check BOM first. + if (b[0] === 0xFF && b[1] === 0xFE) return 'utf-16le'; + if (b[0] === 0xFE && b[1] === 0xFF) return 'utf-16be'; + } + + if (b[0] === 0 && b[1] !== 0) asciiCharsBE++; + if (b[0] !== 0 && b[1] === 0) asciiCharsLE++; + + b.length = 0; + charsProcessed++; + + if (charsProcessed >= 100) { + break outer_loop; + } + } + } + } + + // Make decisions. + // Most of the time, the content has ASCII chars (U+00**), but the opposite (U+**00) is uncommon. + // So, we count ASCII as if it was LE or BE, and decide from that. + if (asciiCharsBE > asciiCharsLE) return 'utf-16be'; + if (asciiCharsBE < asciiCharsLE) return 'utf-16le'; + + // Couldn't decide (likely all zeros or not enough data). + return defaultEncoding || 'utf-16le'; +} + + diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/utf32.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/utf32.js new file mode 100644 index 0000000000000000000000000000000000000000..2fa900a12eb3562e38fc9442dd3f57ea919b3c74 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/utf32.js @@ -0,0 +1,319 @@ +'use strict'; + +var Buffer = require('safer-buffer').Buffer; + +// == UTF32-LE/BE codec. ========================================================== + +exports._utf32 = Utf32Codec; + +function Utf32Codec(codecOptions, iconv) { + this.iconv = iconv; + this.bomAware = true; + this.isLE = codecOptions.isLE; +} + +exports.utf32le = { type: '_utf32', isLE: true }; +exports.utf32be = { type: '_utf32', isLE: false }; + +// Aliases +exports.ucs4le = 'utf32le'; +exports.ucs4be = 'utf32be'; + +Utf32Codec.prototype.encoder = Utf32Encoder; +Utf32Codec.prototype.decoder = Utf32Decoder; + +// -- Encoding + +function Utf32Encoder(options, codec) { + this.isLE = codec.isLE; + this.highSurrogate = 0; +} + +Utf32Encoder.prototype.write = function(str) { + var src = Buffer.from(str, 'ucs2'); + var dst = Buffer.alloc(src.length * 2); + var write32 = this.isLE ? 
dst.writeUInt32LE : dst.writeUInt32BE; + var offset = 0; + + for (var i = 0; i < src.length; i += 2) { + var code = src.readUInt16LE(i); + var isHighSurrogate = (0xD800 <= code && code < 0xDC00); + var isLowSurrogate = (0xDC00 <= code && code < 0xE000); + + if (this.highSurrogate) { + if (isHighSurrogate || !isLowSurrogate) { + // There shouldn't be two high surrogates in a row, nor a high surrogate which isn't followed by a low + // surrogate. If this happens, keep the pending high surrogate as a stand-alone semi-invalid character + // (technically wrong, but expected by some applications, like Windows file names). + write32.call(dst, this.highSurrogate, offset); + offset += 4; + } + else { + // Create 32-bit value from high and low surrogates; + var codepoint = (((this.highSurrogate - 0xD800) << 10) | (code - 0xDC00)) + 0x10000; + + write32.call(dst, codepoint, offset); + offset += 4; + this.highSurrogate = 0; + + continue; + } + } + + if (isHighSurrogate) + this.highSurrogate = code; + else { + // Even if the current character is a low surrogate, with no previous high surrogate, we'll + // encode it as a semi-invalid stand-alone character for the same reasons expressed above for + // unpaired high surrogates. + write32.call(dst, code, offset); + offset += 4; + this.highSurrogate = 0; + } + } + + if (offset < dst.length) + dst = dst.slice(0, offset); + + return dst; +}; + +Utf32Encoder.prototype.end = function() { + // Treat any leftover high surrogate as a semi-valid independent character. + if (!this.highSurrogate) + return; + + var buf = Buffer.alloc(4); + + if (this.isLE) + buf.writeUInt32LE(this.highSurrogate, 0); + else + buf.writeUInt32BE(this.highSurrogate, 0); + + this.highSurrogate = 0; + + return buf; +}; + +// -- Decoding + +function Utf32Decoder(options, codec) { + this.isLE = codec.isLE; + this.badChar = codec.iconv.defaultCharUnicode.charCodeAt(0); + this.overflow = []; +} + +Utf32Decoder.prototype.write = function(src) { + if (src.length === 0) + return ''; + + var i = 0; + var codepoint = 0; + var dst = Buffer.alloc(src.length + 4); + var offset = 0; + var isLE = this.isLE; + var overflow = this.overflow; + var badChar = this.badChar; + + if (overflow.length > 0) { + for (; i < src.length && overflow.length < 4; i++) + overflow.push(src[i]); + + if (overflow.length === 4) { + // NOTE: codepoint is a signed int32 and can be negative. + // NOTE: We copied this block from below to help V8 optimize it (it works with array, not buffer). + if (isLE) { + codepoint = overflow[i] | (overflow[i+1] << 8) | (overflow[i+2] << 16) | (overflow[i+3] << 24); + } else { + codepoint = overflow[i+3] | (overflow[i+2] << 8) | (overflow[i+1] << 16) | (overflow[i] << 24); + } + overflow.length = 0; + + offset = _writeCodepoint(dst, offset, codepoint, badChar); + } + } + + // Main loop. Should be as optimized as possible. + for (; i < src.length - 3; i += 4) { + // NOTE: codepoint is a signed int32 and can be negative. + if (isLE) { + codepoint = src[i] | (src[i+1] << 8) | (src[i+2] << 16) | (src[i+3] << 24); + } else { + codepoint = src[i+3] | (src[i+2] << 8) | (src[i+1] << 16) | (src[i] << 24); + } + offset = _writeCodepoint(dst, offset, codepoint, badChar); + } + + // Keep overflowing bytes. + for (; i < src.length; i++) { + overflow.push(src[i]); + } + + return dst.slice(0, offset).toString('ucs2'); +}; + +function _writeCodepoint(dst, offset, codepoint, badChar) { + // NOTE: codepoint is signed int32 and can be negative. We keep it that way to help V8 with optimizations. 
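+ // Worked example (illustrative): for U+1F600, 0x1F600 - 0x10000 = 0xF600;
+ // high = 0xD800 | (0xF600 >> 10) = 0xD83D; low = 0xDC00 | (0xF600 & 0x3FF)
+ // = 0xDE00. Both code units are then written little-endian below, matching
+ // the final dst.toString('ucs2') in write().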
+ if (codepoint < 0 || codepoint > 0x10FFFF) { + // Not a valid Unicode codepoint + codepoint = badChar; + } + + // Ephemeral Planes: Write high surrogate. + if (codepoint >= 0x10000) { + codepoint -= 0x10000; + + var high = 0xD800 | (codepoint >> 10); + dst[offset++] = high & 0xff; + dst[offset++] = high >> 8; + + // Low surrogate is written below. + var codepoint = 0xDC00 | (codepoint & 0x3FF); + } + + // Write BMP char or low surrogate. + dst[offset++] = codepoint & 0xff; + dst[offset++] = codepoint >> 8; + + return offset; +}; + +Utf32Decoder.prototype.end = function() { + this.overflow.length = 0; +}; + +// == UTF-32 Auto codec ============================================================= +// Decoder chooses automatically from UTF-32LE and UTF-32BE using BOM and space-based heuristic. +// Defaults to UTF-32LE. http://en.wikipedia.org/wiki/UTF-32 +// Encoder/decoder default can be changed: iconv.decode(buf, 'utf32', {defaultEncoding: 'utf-32be'}); + +// Encoder prepends BOM (which can be overridden with (addBOM: false}). + +exports.utf32 = Utf32AutoCodec; +exports.ucs4 = 'utf32'; + +function Utf32AutoCodec(options, iconv) { + this.iconv = iconv; +} + +Utf32AutoCodec.prototype.encoder = Utf32AutoEncoder; +Utf32AutoCodec.prototype.decoder = Utf32AutoDecoder; + +// -- Encoding + +function Utf32AutoEncoder(options, codec) { + options = options || {}; + + if (options.addBOM === undefined) + options.addBOM = true; + + this.encoder = codec.iconv.getEncoder(options.defaultEncoding || 'utf-32le', options); +} + +Utf32AutoEncoder.prototype.write = function(str) { + return this.encoder.write(str); +}; + +Utf32AutoEncoder.prototype.end = function() { + return this.encoder.end(); +}; + +// -- Decoding + +function Utf32AutoDecoder(options, codec) { + this.decoder = null; + this.initialBufs = []; + this.initialBufsLen = 0; + this.options = options || {}; + this.iconv = codec.iconv; +} + +Utf32AutoDecoder.prototype.write = function(buf) { + if (!this.decoder) { + // Codec is not chosen yet. Accumulate initial bytes. + this.initialBufs.push(buf); + this.initialBufsLen += buf.length; + + if (this.initialBufsLen < 32) // We need more bytes to use space heuristic (see below) + return ''; + + // We have enough bytes -> detect endianness. + var encoding = detectEncoding(this.initialBufs, this.options.defaultEncoding); + this.decoder = this.iconv.getDecoder(encoding, this.options); + + var resStr = ''; + for (var i = 0; i < this.initialBufs.length; i++) + resStr += this.decoder.write(this.initialBufs[i]); + + this.initialBufs.length = this.initialBufsLen = 0; + return resStr; + } + + return this.decoder.write(buf); +}; + +Utf32AutoDecoder.prototype.end = function() { + if (!this.decoder) { + var encoding = detectEncoding(this.initialBufs, this.options.defaultEncoding); + this.decoder = this.iconv.getDecoder(encoding, this.options); + + var resStr = ''; + for (var i = 0; i < this.initialBufs.length; i++) + resStr += this.decoder.write(this.initialBufs[i]); + + var trail = this.decoder.end(); + if (trail) + resStr += trail; + + this.initialBufs.length = this.initialBufsLen = 0; + return resStr; + } + + return this.decoder.end(); +}; + +function detectEncoding(bufs, defaultEncoding) { + var b = []; + var charsProcessed = 0; + var invalidLE = 0, invalidBE = 0; // Number of invalid chars when decoded as LE or BE. + var bmpCharsLE = 0, bmpCharsBE = 0; // Number of BMP chars when decoded as LE or BE. 
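+ // Worked example (illustrative): "A" (U+0041) is 41 00 00 00 in UTF-32LE and
+ // 00 00 00 41 in UTF-32BE. A quad counts as invalid for a given endianness
+ // when its would-be top byte is non-zero or the next byte exceeds 0x10,
+ // since codepoints are capped at 0x10FFFF.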
+ + outer_loop: + for (var i = 0; i < bufs.length; i++) { + var buf = bufs[i]; + for (var j = 0; j < buf.length; j++) { + b.push(buf[j]); + if (b.length === 4) { + if (charsProcessed === 0) { + // Check BOM first. + if (b[0] === 0xFF && b[1] === 0xFE && b[2] === 0 && b[3] === 0) { + return 'utf-32le'; + } + if (b[0] === 0 && b[1] === 0 && b[2] === 0xFE && b[3] === 0xFF) { + return 'utf-32be'; + } + } + + if (b[0] !== 0 || b[1] > 0x10) invalidBE++; + if (b[3] !== 0 || b[2] > 0x10) invalidLE++; + + if (b[0] === 0 && b[1] === 0 && (b[2] !== 0 || b[3] !== 0)) bmpCharsBE++; + if ((b[0] !== 0 || b[1] !== 0) && b[2] === 0 && b[3] === 0) bmpCharsLE++; + + b.length = 0; + charsProcessed++; + + if (charsProcessed >= 100) { + break outer_loop; + } + } + } + } + + // Make decisions. + if (bmpCharsBE - invalidBE > bmpCharsLE - invalidLE) return 'utf-32be'; + if (bmpCharsBE - invalidBE < bmpCharsLE - invalidLE) return 'utf-32le'; + + // Couldn't decide (likely all zeros or not enough data). + return defaultEncoding || 'utf-32le'; +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/utf7.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/utf7.js new file mode 100644 index 0000000000000000000000000000000000000000..eacae34d5f80d0b406ad63104406ddd5f3232f4a --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/encodings/utf7.js @@ -0,0 +1,290 @@ +"use strict"; +var Buffer = require("safer-buffer").Buffer; + +// UTF-7 codec, according to https://tools.ietf.org/html/rfc2152 +// See also below a UTF-7-IMAP codec, according to http://tools.ietf.org/html/rfc3501#section-5.1.3 + +exports.utf7 = Utf7Codec; +exports.unicode11utf7 = 'utf7'; // Alias UNICODE-1-1-UTF-7 +function Utf7Codec(codecOptions, iconv) { + this.iconv = iconv; +}; + +Utf7Codec.prototype.encoder = Utf7Encoder; +Utf7Codec.prototype.decoder = Utf7Decoder; +Utf7Codec.prototype.bomAware = true; + + +// -- Encoding + +var nonDirectChars = /[^A-Za-z0-9'\(\),-\.\/:\? \n\r\t]+/g; + +function Utf7Encoder(options, codec) { + this.iconv = codec.iconv; +} + +Utf7Encoder.prototype.write = function(str) { + // Naive implementation. + // Non-direct chars are encoded as "+-"; single "+" char is encoded as "+-". + return Buffer.from(str.replace(nonDirectChars, function(chunk) { + return "+" + (chunk === '+' ? '' : + this.iconv.encode(chunk, 'utf16-be').toString('base64').replace(/=+$/, '')) + + "-"; + }.bind(this))); +} + +Utf7Encoder.prototype.end = function() { +} + + +// -- Decoding + +function Utf7Decoder(options, codec) { + this.iconv = codec.iconv; + this.inBase64 = false; + this.base64Accum = ''; +} + +var base64Regex = /[A-Za-z0-9\/+]/; +var base64Chars = []; +for (var i = 0; i < 256; i++) + base64Chars[i] = base64Regex.test(String.fromCharCode(i)); + +var plusChar = '+'.charCodeAt(0), + minusChar = '-'.charCodeAt(0), + andChar = '&'.charCodeAt(0); + +Utf7Decoder.prototype.write = function(buf) { + var res = "", lastI = 0, + inBase64 = this.inBase64, + base64Accum = this.base64Accum; + + // The decoder is more involved as we must handle chunks in stream. + + for (var i = 0; i < buf.length; i++) { + if (!inBase64) { // We're in direct mode. + // Write direct chars until '+' + if (buf[i] == plusChar) { + res += this.iconv.decode(buf.slice(lastI, i), "ascii"); // Write direct chars. + lastI = i+1; + inBase64 = true; + } + } else { // We decode base64. + if (!base64Chars[buf[i]]) { // Base64 ended. 
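+ // Worked example (illustrative): per RFC 2152, "+-" decodes to a literal
+ // "+", while "+ACE-" decodes to "!" — U+0021 is 00 21 in UTF-16BE, and the
+ // base64 of those two bytes (with padding stripped) is "ACE".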
+ if (i == lastI && buf[i] == minusChar) {// "+-" -> "+" + res += "+"; + } else { + var b64str = base64Accum + this.iconv.decode(buf.slice(lastI, i), "ascii"); + res += this.iconv.decode(Buffer.from(b64str, 'base64'), "utf16-be"); + } + + if (buf[i] != minusChar) // Minus is absorbed after base64. + i--; + + lastI = i+1; + inBase64 = false; + base64Accum = ''; + } + } + } + + if (!inBase64) { + res += this.iconv.decode(buf.slice(lastI), "ascii"); // Write direct chars. + } else { + var b64str = base64Accum + this.iconv.decode(buf.slice(lastI), "ascii"); + + var canBeDecoded = b64str.length - (b64str.length % 8); // Minimal chunk: 2 quads -> 2x3 bytes -> 3 chars. + base64Accum = b64str.slice(canBeDecoded); // The rest will be decoded in future. + b64str = b64str.slice(0, canBeDecoded); + + res += this.iconv.decode(Buffer.from(b64str, 'base64'), "utf16-be"); + } + + this.inBase64 = inBase64; + this.base64Accum = base64Accum; + + return res; +} + +Utf7Decoder.prototype.end = function() { + var res = ""; + if (this.inBase64 && this.base64Accum.length > 0) + res = this.iconv.decode(Buffer.from(this.base64Accum, 'base64'), "utf16-be"); + + this.inBase64 = false; + this.base64Accum = ''; + return res; +} + + +// UTF-7-IMAP codec. +// RFC3501 Sec. 5.1.3 Modified UTF-7 (http://tools.ietf.org/html/rfc3501#section-5.1.3) +// Differences: +// * Base64 part is started by "&" instead of "+" +// * Direct characters are 0x20-0x7E, except "&" (0x26) +// * In Base64, "," is used instead of "/" +// * Base64 must not be used to represent direct characters. +// * No implicit shift back from Base64 (should always end with '-') +// * String must end in non-shifted position. +// * "-&" while in base64 is not allowed. + + +exports.utf7imap = Utf7IMAPCodec; +function Utf7IMAPCodec(codecOptions, iconv) { + this.iconv = iconv; +}; + +Utf7IMAPCodec.prototype.encoder = Utf7IMAPEncoder; +Utf7IMAPCodec.prototype.decoder = Utf7IMAPDecoder; +Utf7IMAPCodec.prototype.bomAware = true; + + +// -- Encoding + +function Utf7IMAPEncoder(options, codec) { + this.iconv = codec.iconv; + this.inBase64 = false; + this.base64Accum = Buffer.alloc(6); + this.base64AccumIdx = 0; +} + +Utf7IMAPEncoder.prototype.write = function(str) { + var inBase64 = this.inBase64, + base64Accum = this.base64Accum, + base64AccumIdx = this.base64AccumIdx, + buf = Buffer.alloc(str.length*5 + 10), bufIdx = 0; + + for (var i = 0; i < str.length; i++) { + var uChar = str.charCodeAt(i); + if (0x20 <= uChar && uChar <= 0x7E) { // Direct character or '&'. + if (inBase64) { + if (base64AccumIdx > 0) { + bufIdx += buf.write(base64Accum.slice(0, base64AccumIdx).toString('base64').replace(/\//g, ',').replace(/=+$/, ''), bufIdx); + base64AccumIdx = 0; + } + + buf[bufIdx++] = minusChar; // Write '-', then go to direct mode. + inBase64 = false; + } + + if (!inBase64) { + buf[bufIdx++] = uChar; // Write direct character + + if (uChar === andChar) // Ampersand -> '&-' + buf[bufIdx++] = minusChar; + } + + } else { // Non-direct character + if (!inBase64) { + buf[bufIdx++] = andChar; // Write '&', then go to base64 mode. 
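+ // Worked example (illustrative): in modified UTF-7 the mailbox name
+ // "Entwürfe" encodes as "Entw&APw-rfe" — U+00FC is 00 FC in UTF-16BE, whose
+ // base64 form is "APw" (with ',' substituted for '/' where it occurs),
+ // shifted in by '&' and closed by '-'.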
+ inBase64 = true; + } + if (inBase64) { + base64Accum[base64AccumIdx++] = uChar >> 8; + base64Accum[base64AccumIdx++] = uChar & 0xFF; + + if (base64AccumIdx == base64Accum.length) { + bufIdx += buf.write(base64Accum.toString('base64').replace(/\//g, ','), bufIdx); + base64AccumIdx = 0; + } + } + } + } + + this.inBase64 = inBase64; + this.base64AccumIdx = base64AccumIdx; + + return buf.slice(0, bufIdx); +} + +Utf7IMAPEncoder.prototype.end = function() { + var buf = Buffer.alloc(10), bufIdx = 0; + if (this.inBase64) { + if (this.base64AccumIdx > 0) { + bufIdx += buf.write(this.base64Accum.slice(0, this.base64AccumIdx).toString('base64').replace(/\//g, ',').replace(/=+$/, ''), bufIdx); + this.base64AccumIdx = 0; + } + + buf[bufIdx++] = minusChar; // Write '-', then go to direct mode. + this.inBase64 = false; + } + + return buf.slice(0, bufIdx); +} + + +// -- Decoding + +function Utf7IMAPDecoder(options, codec) { + this.iconv = codec.iconv; + this.inBase64 = false; + this.base64Accum = ''; +} + +var base64IMAPChars = base64Chars.slice(); +base64IMAPChars[','.charCodeAt(0)] = true; + +Utf7IMAPDecoder.prototype.write = function(buf) { + var res = "", lastI = 0, + inBase64 = this.inBase64, + base64Accum = this.base64Accum; + + // The decoder is more involved as we must handle chunks in stream. + // It is forgiving, closer to standard UTF-7 (for example, '-' is optional at the end). + + for (var i = 0; i < buf.length; i++) { + if (!inBase64) { // We're in direct mode. + // Write direct chars until '&' + if (buf[i] == andChar) { + res += this.iconv.decode(buf.slice(lastI, i), "ascii"); // Write direct chars. + lastI = i+1; + inBase64 = true; + } + } else { // We decode base64. + if (!base64IMAPChars[buf[i]]) { // Base64 ended. + if (i == lastI && buf[i] == minusChar) { // "&-" -> "&" + res += "&"; + } else { + var b64str = base64Accum + this.iconv.decode(buf.slice(lastI, i), "ascii").replace(/,/g, '/'); + res += this.iconv.decode(Buffer.from(b64str, 'base64'), "utf16-be"); + } + + if (buf[i] != minusChar) // Minus may be absorbed after base64. + i--; + + lastI = i+1; + inBase64 = false; + base64Accum = ''; + } + } + } + + if (!inBase64) { + res += this.iconv.decode(buf.slice(lastI), "ascii"); // Write direct chars. + } else { + var b64str = base64Accum + this.iconv.decode(buf.slice(lastI), "ascii").replace(/,/g, '/'); + + var canBeDecoded = b64str.length - (b64str.length % 8); // Minimal chunk: 2 quads -> 2x3 bytes -> 3 chars. + base64Accum = b64str.slice(canBeDecoded); // The rest will be decoded in future. 
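+ // Arithmetic note (illustrative): 8 base64 chars = 48 bits = 6 bytes = 3
+ // UTF-16BE code units, so only whole 8-char quanta are decoded per chunk;
+ // e.g. a 13-char accumulation decodes 8 chars now and carries 5 forward.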
+ b64str = b64str.slice(0, canBeDecoded); + + res += this.iconv.decode(Buffer.from(b64str, 'base64'), "utf16-be"); + } + + this.inBase64 = inBase64; + this.base64Accum = base64Accum; + + return res; +} + +Utf7IMAPDecoder.prototype.end = function() { + var res = ""; + if (this.inBase64 && this.base64Accum.length > 0) + res = this.iconv.decode(Buffer.from(this.base64Accum, 'base64'), "utf16-be"); + + this.inBase64 = false; + this.base64Accum = ''; + return res; +} + + diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/lib/index.js new file mode 100644 index 0000000000000000000000000000000000000000..657701c38d243b8af1cd3d4a67056e095a0ede5e --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/iconv-lite/lib/index.js @@ -0,0 +1,180 @@ +"use strict"; + +var Buffer = require("safer-buffer").Buffer; + +var bomHandling = require("./bom-handling"), + iconv = module.exports; + +// All codecs and aliases are kept here, keyed by encoding name/alias. +// They are lazy loaded in `iconv.getCodec` from `encodings/index.js`. +iconv.encodings = null; + +// Characters emitted in case of error. +iconv.defaultCharUnicode = '�'; +iconv.defaultCharSingleByte = '?'; + +// Public API. +iconv.encode = function encode(str, encoding, options) { + str = "" + (str || ""); // Ensure string. + + var encoder = iconv.getEncoder(encoding, options); + + var res = encoder.write(str); + var trail = encoder.end(); + + return (trail && trail.length > 0) ? Buffer.concat([res, trail]) : res; +} + +iconv.decode = function decode(buf, encoding, options) { + if (typeof buf === 'string') { + if (!iconv.skipDecodeWarning) { + console.error('Iconv-lite warning: decode()-ing strings is deprecated. Refer to https://github.com/ashtuchkin/iconv-lite/wiki/Use-Buffers-when-decoding'); + iconv.skipDecodeWarning = true; + } + + buf = Buffer.from("" + (buf || ""), "binary"); // Ensure buffer. + } + + var decoder = iconv.getDecoder(encoding, options); + + var res = decoder.write(buf); + var trail = decoder.end(); + + return trail ? (res + trail) : res; +} + +iconv.encodingExists = function encodingExists(enc) { + try { + iconv.getCodec(enc); + return true; + } catch (e) { + return false; + } +} + +// Legacy aliases to convert functions +iconv.toEncoding = iconv.encode; +iconv.fromEncoding = iconv.decode; + +// Search for a codec in iconv.encodings. Cache codec data in iconv._codecDataCache. +iconv._codecDataCache = {}; +iconv.getCodec = function getCodec(encoding) { + if (!iconv.encodings) + iconv.encodings = require("../encodings"); // Lazy load all encoding definitions. + + // Canonicalize encoding name: strip all non-alphanumeric chars and appended year. + var enc = iconv._canonicalizeEncoding(encoding); + + // Traverse iconv.encodings to find actual codec. + var codecOptions = {}; + while (true) { + var codec = iconv._codecDataCache[enc]; + if (codec) + return codec; + + var codecDef = iconv.encodings[enc]; + + switch (typeof codecDef) { + case "string": // Direct alias to other encoding. + enc = codecDef; + break; + + case "object": // Alias with options. Can be layered. + for (var key in codecDef) + codecOptions[key] = codecDef[key]; + + if (!codecOptions.encodingName) + codecOptions.encodingName = enc; + + enc = codecDef.type; + break; + + case "function": // Codec itself. 
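+ // Illustrative note: all three cases of this switch are exercised by the
+ // encodings in this package — 'unicode11utf7' is a plain string alias for
+ // 'utf7', 'utf32le' is an options object ({ type: '_utf32', isLE: true })
+ // layered onto the '_utf32' codec, and 'utf16be' maps directly to a codec
+ // constructor function.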
+ if (!codecOptions.encodingName) + codecOptions.encodingName = enc; + + // The codec function must load all tables and return object with .encoder and .decoder methods. + // It'll be called only once (for each different options object). + codec = new codecDef(codecOptions, iconv); + + iconv._codecDataCache[codecOptions.encodingName] = codec; // Save it to be reused later. + return codec; + + default: + throw new Error("Encoding not recognized: '" + encoding + "' (searched as: '"+enc+"')"); + } + } +} + +iconv._canonicalizeEncoding = function(encoding) { + // Canonicalize encoding name: strip all non-alphanumeric chars and appended year. + return (''+encoding).toLowerCase().replace(/:\d{4}$|[^0-9a-z]/g, ""); +} + +iconv.getEncoder = function getEncoder(encoding, options) { + var codec = iconv.getCodec(encoding), + encoder = new codec.encoder(options, codec); + + if (codec.bomAware && options && options.addBOM) + encoder = new bomHandling.PrependBOM(encoder, options); + + return encoder; +} + +iconv.getDecoder = function getDecoder(encoding, options) { + var codec = iconv.getCodec(encoding), + decoder = new codec.decoder(options, codec); + + if (codec.bomAware && !(options && options.stripBOM === false)) + decoder = new bomHandling.StripBOM(decoder, options); + + return decoder; +} + +// Streaming API +// NOTE: Streaming API naturally depends on 'stream' module from Node.js. Unfortunately in browser environments this module can add +// up to 100Kb to the output bundle. To avoid unnecessary code bloat, we don't enable Streaming API in browser by default. +// If you would like to enable it explicitly, please add the following code to your app: +// > iconv.enableStreamingAPI(require('stream')); +iconv.enableStreamingAPI = function enableStreamingAPI(stream_module) { + if (iconv.supportsStreams) + return; + + // Dependency-inject stream module to create IconvLite stream classes. + var streams = require("./streams")(stream_module); + + // Not public API yet, but expose the stream classes. + iconv.IconvLiteEncoderStream = streams.IconvLiteEncoderStream; + iconv.IconvLiteDecoderStream = streams.IconvLiteDecoderStream; + + // Streaming API. + iconv.encodeStream = function encodeStream(encoding, options) { + return new iconv.IconvLiteEncoderStream(iconv.getEncoder(encoding, options), options); + } + + iconv.decodeStream = function decodeStream(encoding, options) { + return new iconv.IconvLiteDecoderStream(iconv.getDecoder(encoding, options), options); + } + + iconv.supportsStreams = true; +} + +// Enable Streaming API automatically if 'stream' module is available and non-empty (the majority of environments). +var stream_module; +try { + stream_module = require("stream"); +} catch (e) {} + +if (stream_module && stream_module.Transform) { + iconv.enableStreamingAPI(stream_module); + +} else { + // In rare cases where 'stream' module is not available by default, throw a helpful exception. + iconv.encodeStream = iconv.decodeStream = function() { + throw new Error("iconv-lite Streaming API is not enabled. Use iconv.enableStreamingAPI(require('stream')); to enable it."); + }; +} + +if ("Ā" != "\u0100") { + console.error("iconv-lite warning: js files use non-utf8 encoding. 
See https://github.com/ashtuchkin/iconv-lite/wiki/Javascript-source-file-encodings for more info."); +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/ignore-walk/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/ignore-walk/lib/index.js new file mode 100644 index 0000000000000000000000000000000000000000..366d95e2d516cb185182a45cfb89f4d65369bee2 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/ignore-walk/lib/index.js @@ -0,0 +1,310 @@ +'use strict' + +const fs = require('fs') +const path = require('path') +const EE = require('events').EventEmitter +const Minimatch = require('minimatch').Minimatch + +class Walker extends EE { + constructor (opts) { + opts = opts || {} + super(opts) + // set to true if this.path is a symlink, whether follow is true or not + this.isSymbolicLink = opts.isSymbolicLink + this.path = opts.path || process.cwd() + this.basename = path.basename(this.path) + this.ignoreFiles = opts.ignoreFiles || ['.ignore'] + this.ignoreRules = {} + this.parent = opts.parent || null + this.includeEmpty = !!opts.includeEmpty + this.root = this.parent ? this.parent.root : this.path + this.follow = !!opts.follow + this.result = this.parent ? this.parent.result : new Set() + this.entries = null + this.sawError = false + this.exact = opts.exact + } + + sort (a, b) { + return a.localeCompare(b, 'en') + } + + emit (ev, data) { + let ret = false + if (!(this.sawError && ev === 'error')) { + if (ev === 'error') { + this.sawError = true + } else if (ev === 'done' && !this.parent) { + data = Array.from(data) + .map(e => /^@/.test(e) ? `./${e}` : e).sort(this.sort) + this.result = data + } + + if (ev === 'error' && this.parent) { + ret = this.parent.emit('error', data) + } else { + ret = super.emit(ev, data) + } + } + return ret + } + + start () { + fs.readdir(this.path, (er, entries) => + er ? this.emit('error', er) : this.onReaddir(entries)) + return this + } + + isIgnoreFile (e) { + return e !== '.' && + e !== '..' && + this.ignoreFiles.indexOf(e) !== -1 + } + + onReaddir (entries) { + this.entries = entries + if (entries.length === 0) { + if (this.includeEmpty) { + this.result.add(this.path.slice(this.root.length + 1)) + } + this.emit('done', this.result) + } else { + const hasIg = this.entries.some(e => + this.isIgnoreFile(e)) + + if (hasIg) { + this.addIgnoreFiles() + } else { + this.filterEntries() + } + } + } + + addIgnoreFiles () { + const newIg = this.entries + .filter(e => this.isIgnoreFile(e)) + + let igCount = newIg.length + const then = () => { + if (--igCount === 0) { + this.filterEntries() + } + } + + newIg.forEach(e => this.addIgnoreFile(e, then)) + } + + addIgnoreFile (file, then) { + const ig = path.resolve(this.path, file) + fs.readFile(ig, 'utf8', (er, data) => + er ? this.emit('error', er) : this.onReadIgnoreFile(file, data, then)) + } + + onReadIgnoreFile (file, data, then) { + const mmopt = { + matchBase: true, + dot: true, + flipNegate: true, + nocase: true, + } + const rules = data.split(/\r?\n/) + .filter(line => !/^#|^$/.test(line.trim())) + .map(rule => { + return new Minimatch(rule.trim(), mmopt) + }) + + this.ignoreRules[file] = rules + + then() + } + + filterEntries () { + // at this point we either have ignore rules, or just inheriting + // this exclusion is at the point where we know the list of + // entries in the dir, but don't know what they are. 
since + // some of them *might* be directories, we have to run the + // match in dir-mode as well, so that we'll pick up partials + // of files that will be included later. Anything included + // at this point will be checked again later once we know + // what it is. + const filtered = this.entries.map(entry => { + // at this point, we don't know if it's a dir or not. + const passFile = this.filterEntry(entry) + const passDir = this.filterEntry(entry, true) + return (passFile || passDir) ? [entry, passFile, passDir] : false + }).filter(e => e) + + // now we stat them all + // if it's a dir, and passes as a dir, then recurse + // if it's not a dir, but passes as a file, add to set + let entryCount = filtered.length + if (entryCount === 0) { + this.emit('done', this.result) + } else { + const then = () => { + if (--entryCount === 0) { + this.emit('done', this.result) + } + } + filtered.forEach(filt => { + const entry = filt[0] + const file = filt[1] + const dir = filt[2] + this.stat({ entry, file, dir }, then) + }) + } + } + + onstat ({ st, entry, file, dir, isSymbolicLink }, then) { + const abs = this.path + '/' + entry + if (!st.isDirectory()) { + if (file) { + this.result.add(abs.slice(this.root.length + 1)) + } + then() + } else { + // is a directory + if (dir) { + this.walker(entry, { isSymbolicLink, exact: file || this.filterEntry(entry + '/') }, then) + } else { + then() + } + } + } + + stat ({ entry, file, dir }, then) { + const abs = this.path + '/' + entry + fs.lstat(abs, (lstatErr, lstatResult) => { + if (lstatErr) { + this.emit('error', lstatErr) + } else { + const isSymbolicLink = lstatResult.isSymbolicLink() + if (this.follow && isSymbolicLink) { + fs.stat(abs, (statErr, statResult) => { + if (statErr) { + this.emit('error', statErr) + } else { + this.onstat({ st: statResult, entry, file, dir, isSymbolicLink }, then) + } + }) + } else { + this.onstat({ st: lstatResult, entry, file, dir, isSymbolicLink }, then) + } + } + }) + } + + walkerOpt (entry, opts) { + return { + path: this.path + '/' + entry, + parent: this, + ignoreFiles: this.ignoreFiles, + follow: this.follow, + includeEmpty: this.includeEmpty, + ...opts, + } + } + + walker (entry, opts, then) { + new Walker(this.walkerOpt(entry, opts)).on('done', then).start() + } + + filterEntry (entry, partial, entryBasename) { + let included = true + + // this = /a/b/c + // entry = d + // parent /a/b sees c/d + if (this.parent && this.parent.filterEntry) { + const parentEntry = this.basename + '/' + entry + const parentBasename = entryBasename || entry + included = this.parent.filterEntry(parentEntry, partial, parentBasename) + if (!included && !this.exact) { + return false + } + } + + this.ignoreFiles.forEach(f => { + if (this.ignoreRules[f]) { + this.ignoreRules[f].forEach(rule => { + // negation means inclusion + // so if it's negated, and already included, no need to check + // likewise if it's neither negated nor included + if (rule.negate !== included) { + const isRelativeRule = entryBasename && rule.globParts.some(part => + part.length <= (part.slice(-1)[0] ? 
1 : 2) + ) + + // first, match against /foo/bar + // then, against foo/bar + // then, in the case of partials, match with a / + // then, if also the rule is relative, match against basename + const match = rule.match('/' + entry) || + rule.match(entry) || + !!partial && ( + rule.match('/' + entry + '/') || + rule.match(entry + '/') || + rule.negate && ( + rule.match('/' + entry, true) || + rule.match(entry, true)) || + isRelativeRule && ( + rule.match('/' + entryBasename + '/') || + rule.match(entryBasename + '/') || + rule.negate && ( + rule.match('/' + entryBasename, true) || + rule.match(entryBasename, true)))) + + if (match) { + included = rule.negate + } + } + }) + } + }) + + return included + } +} + +class WalkerSync extends Walker { + start () { + this.onReaddir(fs.readdirSync(this.path)) + return this + } + + addIgnoreFile (file, then) { + const ig = path.resolve(this.path, file) + this.onReadIgnoreFile(file, fs.readFileSync(ig, 'utf8'), then) + } + + stat ({ entry, file, dir }, then) { + const abs = this.path + '/' + entry + let st = fs.lstatSync(abs) + const isSymbolicLink = st.isSymbolicLink() + if (this.follow && isSymbolicLink) { + st = fs.statSync(abs) + } + + // console.error('STAT SYNC', {st, entry, file, dir, isSymbolicLink, then}) + this.onstat({ st, entry, file, dir, isSymbolicLink }, then) + } + + walker (entry, opts, then) { + new WalkerSync(this.walkerOpt(entry, opts)).start() + then() + } +} + +const walk = (opts, callback) => { + const p = new Promise((resolve, reject) => { + new Walker(opts).on('done', resolve).on('error', reject).start() + }) + return callback ? p.then(res => callback(null, res), callback) : p +} + +const walkSync = opts => new WalkerSync(opts).start().result + +module.exports = walk +walk.sync = walkSync +walk.Walker = Walker +walk.WalkerSync = WalkerSync diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/imurmurhash/imurmurhash.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/imurmurhash/imurmurhash.js new file mode 100644 index 0000000000000000000000000000000000000000..e63146a2b7e70b6e270fb277fb933b1657669ab8 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/imurmurhash/imurmurhash.js @@ -0,0 +1,138 @@ +/** + * @preserve + * JS Implementation of incremental MurmurHash3 (r150) (as of May 10, 2013) + * + * @author Jens Taylor + * @see http://github.com/homebrewing/brauhaus-diff + * @author Gary Court + * @see http://github.com/garycourt/murmurhash-js + * @author Austin Appleby + * @see http://sites.google.com/site/murmurhash/ + */ +(function(){ + var cache; + + // Call this function without `new` to use the cached object (good for + // single-threaded environments), or with `new` to create a new object. + // + // @param {string} key A UTF-16 or ASCII string + // @param {number} seed An optional positive integer + // @return {object} A MurmurHash3 object for incremental hashing + function MurmurHash3(key, seed) { + var m = this instanceof MurmurHash3 ? this : cache; + m.reset(seed) + if (typeof key === 'string' && key.length > 0) { + m.hash(key); + } + + if (m !== this) { + return m; + } + }; + + // Incrementally add a string to this hash + // + // @param {string} key A UTF-16 or ASCII string + // @return {object} this + MurmurHash3.prototype.hash = function(key) { + var h1, k1, i, top, len; + + len = key.length; + this.len += len; + + k1 = this.k1; + i = 0; + switch (this.rem) { + case 0: k1 ^= len > i ? 
(key.charCodeAt(i++) & 0xffff) : 0; + case 1: k1 ^= len > i ? (key.charCodeAt(i++) & 0xffff) << 8 : 0; + case 2: k1 ^= len > i ? (key.charCodeAt(i++) & 0xffff) << 16 : 0; + case 3: + k1 ^= len > i ? (key.charCodeAt(i) & 0xff) << 24 : 0; + k1 ^= len > i ? (key.charCodeAt(i++) & 0xff00) >> 8 : 0; + } + + this.rem = (len + this.rem) & 3; // & 3 is same as % 4 + len -= this.rem; + if (len > 0) { + h1 = this.h1; + while (1) { + k1 = (k1 * 0x2d51 + (k1 & 0xffff) * 0xcc9e0000) & 0xffffffff; + k1 = (k1 << 15) | (k1 >>> 17); + k1 = (k1 * 0x3593 + (k1 & 0xffff) * 0x1b870000) & 0xffffffff; + + h1 ^= k1; + h1 = (h1 << 13) | (h1 >>> 19); + h1 = (h1 * 5 + 0xe6546b64) & 0xffffffff; + + if (i >= len) { + break; + } + + k1 = ((key.charCodeAt(i++) & 0xffff)) ^ + ((key.charCodeAt(i++) & 0xffff) << 8) ^ + ((key.charCodeAt(i++) & 0xffff) << 16); + top = key.charCodeAt(i++); + k1 ^= ((top & 0xff) << 24) ^ + ((top & 0xff00) >> 8); + } + + k1 = 0; + switch (this.rem) { + case 3: k1 ^= (key.charCodeAt(i + 2) & 0xffff) << 16; + case 2: k1 ^= (key.charCodeAt(i + 1) & 0xffff) << 8; + case 1: k1 ^= (key.charCodeAt(i) & 0xffff); + } + + this.h1 = h1; + } + + this.k1 = k1; + return this; + }; + + // Get the result of this hash + // + // @return {number} The 32-bit hash + MurmurHash3.prototype.result = function() { + var k1, h1; + + k1 = this.k1; + h1 = this.h1; + + if (k1 > 0) { + k1 = (k1 * 0x2d51 + (k1 & 0xffff) * 0xcc9e0000) & 0xffffffff; + k1 = (k1 << 15) | (k1 >>> 17); + k1 = (k1 * 0x3593 + (k1 & 0xffff) * 0x1b870000) & 0xffffffff; + h1 ^= k1; + } + + h1 ^= this.len; + + h1 ^= h1 >>> 16; + h1 = (h1 * 0xca6b + (h1 & 0xffff) * 0x85eb0000) & 0xffffffff; + h1 ^= h1 >>> 13; + h1 = (h1 * 0xae35 + (h1 & 0xffff) * 0xc2b20000) & 0xffffffff; + h1 ^= h1 >>> 16; + + return h1 >>> 0; + }; + + // Reset the hash object for reuse + // + // @param {number} seed An optional positive integer + MurmurHash3.prototype.reset = function(seed) { + this.h1 = typeof seed === 'number' ? seed : 0; + this.rem = this.k1 = this.len = 0; + return this; + }; + + // A cached object to use. This can be safely used if you're in a single- + // threaded environment, otherwise you need to create new hashes to use. 
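+ // Usage sketch (illustrative, based on the API comments above): calling
+ // without `new` reuses the shared cached object —
+ //   MurmurHash3('hello').hash(' world').result(); // 32-bit unsigned int
+ // while `new MurmurHash3()` creates an independent incremental hasher.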
+ cache = new MurmurHash3(); + + if (typeof(module) != 'undefined') { + module.exports = MurmurHash3; + } else { + this.MurmurHash3 = MurmurHash3; + } +}()); diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/imurmurhash/imurmurhash.min.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/imurmurhash/imurmurhash.min.js new file mode 100644 index 0000000000000000000000000000000000000000..dc0ee88d6b69c97b7e2389a58fc09e675e6f415f --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/imurmurhash/imurmurhash.min.js @@ -0,0 +1,12 @@ +/** + * @preserve + * JS Implementation of incremental MurmurHash3 (r150) (as of May 10, 2013) + * + * @author Jens Taylor + * @see http://github.com/homebrewing/brauhaus-diff + * @author Gary Court + * @see http://github.com/garycourt/murmurhash-js + * @author Austin Appleby + * @see http://sites.google.com/site/murmurhash/ + */ +!function(){function t(h,r){var s=this instanceof t?this:e;return s.reset(r),"string"==typeof h&&h.length>0&&s.hash(h),s!==this?s:void 0}var e;t.prototype.hash=function(t){var e,h,r,s,i;switch(i=t.length,this.len+=i,h=this.k1,r=0,this.rem){case 0:h^=i>r?65535&t.charCodeAt(r++):0;case 1:h^=i>r?(65535&t.charCodeAt(r++))<<8:0;case 2:h^=i>r?(65535&t.charCodeAt(r++))<<16:0;case 3:h^=i>r?(255&t.charCodeAt(r))<<24:0,h^=i>r?(65280&t.charCodeAt(r++))>>8:0}if(this.rem=3&i+this.rem,i-=this.rem,i>0){for(e=this.h1;;){if(h=4294967295&11601*h+3432906752*(65535&h),h=h<<15|h>>>17,h=4294967295&13715*h+461832192*(65535&h),e^=h,e=e<<13|e>>>19,e=4294967295&5*e+3864292196,r>=i)break;h=65535&t.charCodeAt(r++)^(65535&t.charCodeAt(r++))<<8^(65535&t.charCodeAt(r++))<<16,s=t.charCodeAt(r++),h^=(255&s)<<24^(65280&s)>>8}switch(h=0,this.rem){case 3:h^=(65535&t.charCodeAt(r+2))<<16;case 2:h^=(65535&t.charCodeAt(r+1))<<8;case 1:h^=65535&t.charCodeAt(r)}this.h1=e}return this.k1=h,this},t.prototype.result=function(){var t,e;return t=this.k1,e=this.h1,t>0&&(t=4294967295&11601*t+3432906752*(65535&t),t=t<<15|t>>>17,t=4294967295&13715*t+461832192*(65535&t),e^=t),e^=this.len,e^=e>>>16,e=4294967295&51819*e+2246770688*(65535&e),e^=e>>>13,e=4294967295&44597*e+3266445312*(65535&e),e^=e>>>16,e>>>0},t.prototype.reset=function(t){return this.h1="number"==typeof t?t:0,this.rem=this.k1=this.len=0,this},e=new t,"undefined"!=typeof module?module.exports=t:this.MurmurHash3=t}(); \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/imurmurhash/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/imurmurhash/package.json new file mode 100644 index 0000000000000000000000000000000000000000..8a93edb55a22452c643c85e6ad8e8a8bf0a3fad9 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/imurmurhash/package.json @@ -0,0 +1,40 @@ +{ + "name": "imurmurhash", + "version": "0.1.4", + "description": "An incremental implementation of MurmurHash3", + "homepage": "https://github.com/jensyt/imurmurhash-js", + "main": "imurmurhash.js", + "files": [ + "imurmurhash.js", + "imurmurhash.min.js", + "package.json", + "README.md" + ], + "repository": { + "type": "git", + "url": "https://github.com/jensyt/imurmurhash-js" + }, + "bugs": { + "url": "https://github.com/jensyt/imurmurhash-js/issues" + }, + "keywords": [ + "murmur", + "murmurhash", + "murmurhash3", + "hash", + "incremental" + ], + "author": { + "name": "Jens Taylor", + "email": "jensyt@gmail.com", + "url": 
"https://github.com/homebrewing" + }, + "license": "MIT", + "dependencies": { + }, + "devDependencies": { + }, + "engines": { + "node": ">=0.8.19" + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/ini/lib/ini.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/ini/lib/ini.js new file mode 100644 index 0000000000000000000000000000000000000000..beb390d0b0ee2c0ec8c67a9d5d6d68025730353b --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/ini/lib/ini.js @@ -0,0 +1,280 @@ +const { hasOwnProperty } = Object.prototype + +const encode = (obj, opt = {}) => { + if (typeof opt === 'string') { + opt = { section: opt } + } + opt.align = opt.align === true + opt.newline = opt.newline === true + opt.sort = opt.sort === true + opt.whitespace = opt.whitespace === true || opt.align === true + // The `typeof` check is required because accessing the `process` directly fails on browsers. + /* istanbul ignore next */ + opt.platform = opt.platform || (typeof process !== 'undefined' && process.platform) + opt.bracketedArray = opt.bracketedArray !== false + + /* istanbul ignore next */ + const eol = opt.platform === 'win32' ? '\r\n' : '\n' + const separator = opt.whitespace ? ' = ' : '=' + const children = [] + + const keys = opt.sort ? Object.keys(obj).sort() : Object.keys(obj) + + let padToChars = 0 + // If aligning on the separator, then padToChars is determined as follows: + // 1. Get the keys + // 2. Exclude keys pointing to objects unless the value is null or an array + // 3. Add `[]` to array keys + // 4. Ensure non empty set of keys + // 5. Reduce the set to the longest `safe` key + // 6. Get the `safe` length + if (opt.align) { + padToChars = safe( + ( + keys + .filter(k => obj[k] === null || Array.isArray(obj[k]) || typeof obj[k] !== 'object') + .map(k => Array.isArray(obj[k]) ? `${k}[]` : k) + ) + .concat(['']) + .reduce((a, b) => safe(a).length >= safe(b).length ? a : b) + ).length + } + + let out = '' + const arraySuffix = opt.bracketedArray ? '[]' : '' + + for (const k of keys) { + const val = obj[k] + if (val && Array.isArray(val)) { + for (const item of val) { + out += safe(`${k}${arraySuffix}`).padEnd(padToChars, ' ') + separator + safe(item) + eol + } + } else if (val && typeof val === 'object') { + children.push(k) + } else { + out += safe(k).padEnd(padToChars, ' ') + separator + safe(val) + eol + } + } + + if (opt.section && out.length) { + out = '[' + safe(opt.section) + ']' + (opt.newline ? eol + eol : eol) + out + } + + for (const k of children) { + const nk = splitSections(k, '.').join('\\.') + const section = (opt.section ? opt.section + '.' 
: '') + nk + const child = encode(obj[k], { + ...opt, + section, + }) + if (out.length && child.length) { + out += eol + } + + out += child + } + + return out +} + +function splitSections (str, separator) { + var lastMatchIndex = 0 + var lastSeparatorIndex = 0 + var nextIndex = 0 + var sections = [] + + do { + nextIndex = str.indexOf(separator, lastMatchIndex) + + if (nextIndex !== -1) { + lastMatchIndex = nextIndex + separator.length + + if (nextIndex > 0 && str[nextIndex - 1] === '\\') { + continue + } + + sections.push(str.slice(lastSeparatorIndex, nextIndex)) + lastSeparatorIndex = nextIndex + separator.length + } + } while (nextIndex !== -1) + + sections.push(str.slice(lastSeparatorIndex)) + + return sections +} + +const decode = (str, opt = {}) => { + opt.bracketedArray = opt.bracketedArray !== false + const out = Object.create(null) + let p = out + let section = null + // section |key = value + const re = /^\[([^\]]*)\]\s*$|^([^=]+)(=(.*))?$/i + const lines = str.split(/[\r\n]+/g) + const duplicates = {} + + for (const line of lines) { + if (!line || line.match(/^\s*[;#]/) || line.match(/^\s*$/)) { + continue + } + const match = line.match(re) + if (!match) { + continue + } + if (match[1] !== undefined) { + section = unsafe(match[1]) + if (section === '__proto__') { + // not allowed + // keep parsing the section, but don't attach it. + p = Object.create(null) + continue + } + p = out[section] = out[section] || Object.create(null) + continue + } + const keyRaw = unsafe(match[2]) + let isArray + if (opt.bracketedArray) { + isArray = keyRaw.length > 2 && keyRaw.slice(-2) === '[]' + } else { + duplicates[keyRaw] = (duplicates?.[keyRaw] || 0) + 1 + isArray = duplicates[keyRaw] > 1 + } + const key = isArray && keyRaw.endsWith('[]') + ? keyRaw.slice(0, -2) : keyRaw + + if (key === '__proto__') { + continue + } + const valueRaw = match[3] ? unsafe(match[4]) : true + const value = valueRaw === 'true' || + valueRaw === 'false' || + valueRaw === 'null' ? JSON.parse(valueRaw) + : valueRaw + + // Convert keys with '[]' suffix to an array + if (isArray) { + if (!hasOwnProperty.call(p, key)) { + p[key] = [] + } else if (!Array.isArray(p[key])) { + p[key] = [p[key]] + } + } + + // safeguard against resetting a previously defined + // array by accidentally forgetting the brackets + if (Array.isArray(p[key])) { + p[key].push(value) + } else { + p[key] = value + } + } + + // {a:{y:1},"a.b":{x:2}} --> {a:{y:1,b:{x:2}}} + // use a filter to return the keys that have to be deleted. + const remove = [] + for (const k of Object.keys(out)) { + if (!hasOwnProperty.call(out, k) || + typeof out[k] !== 'object' || + Array.isArray(out[k])) { + continue + } + + // see if the parent section is also an object. 
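+ // (Worked example of this merge: "[a]\ny=1\n[a.b]\nx=2" parses to
+ // { a: { y: 1, b: { x: 2 } } }, matching the transform sketched above.)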
+ // if so, add it to that, and mark this one for deletion + const parts = splitSections(k, '.') + p = out + const l = parts.pop() + const nl = l.replace(/\\\./g, '.') + for (const part of parts) { + if (part === '__proto__') { + continue + } + if (!hasOwnProperty.call(p, part) || typeof p[part] !== 'object') { + p[part] = Object.create(null) + } + p = p[part] + } + if (p === out && nl === l) { + continue + } + + p[nl] = out[k] + remove.push(k) + } + for (const del of remove) { + delete out[del] + } + + return out +} + +const isQuoted = val => { + return (val.startsWith('"') && val.endsWith('"')) || + (val.startsWith("'") && val.endsWith("'")) +} + +const safe = val => { + if ( + typeof val !== 'string' || + val.match(/[=\r\n]/) || + val.match(/^\[/) || + (val.length > 1 && isQuoted(val)) || + val !== val.trim() + ) { + return JSON.stringify(val) + } + return val.split(';').join('\\;').split('#').join('\\#') +} + +const unsafe = val => { + val = (val || '').trim() + if (isQuoted(val)) { + // remove the single quotes before calling JSON.parse + if (val.charAt(0) === "'") { + val = val.slice(1, -1) + } + try { + val = JSON.parse(val) + } catch { + // ignore errors + } + } else { + // walk the val to find the first not-escaped ; character + let esc = false + let unesc = '' + for (let i = 0, l = val.length; i < l; i++) { + const c = val.charAt(i) + if (esc) { + if ('\\;#'.indexOf(c) !== -1) { + unesc += c + } else { + unesc += '\\' + c + } + + esc = false + } else if (';#'.indexOf(c) !== -1) { + break + } else if (c === '\\') { + esc = true + } else { + unesc += c + } + } + if (esc) { + unesc += '\\' + } + + return unesc.trim() + } + return val +} + +module.exports = { + parse: decode, + decode, + stringify: encode, + encode, + safe, + unsafe, +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/is-cidr/dist/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/is-cidr/dist/index.js new file mode 100644 index 0000000000000000000000000000000000000000..35fba31c48c663455b3e7a6fd5612bf967828459 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/is-cidr/dist/index.js @@ -0,0 +1,11 @@ +import { v4 as v4$1, v6 as v6$1 } from "cidr-regex"; +const re4 = v4$1({ exact: true }); +const re6 = v6$1({ exact: true }); +const isCidr = (str) => re4.test(str) ? 4 : re6.test(str) ? 6 : 0; +const v4 = isCidr.v4 = (str) => re4.test(str); +const v6 = isCidr.v6 = (str) => re6.test(str); +export { + isCidr as default, + v4, + v6 +}; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/isexe/dist/cjs/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/isexe/dist/cjs/index.js new file mode 100644 index 0000000000000000000000000000000000000000..cefcb66b5c54349019fe75d984bb1787ac2e8829 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/isexe/dist/cjs/index.js @@ -0,0 +1,46 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.sync = exports.isexe = exports.posix = exports.win32 = void 0; +const posix = __importStar(require("./posix.js")); +exports.posix = posix; +const win32 = __importStar(require("./win32.js")); +exports.win32 = win32; +__exportStar(require("./options.js"), exports); +const platform = process.env._ISEXE_TEST_PLATFORM_ || process.platform; +const impl = platform === 'win32' ? win32 : posix; +/** + * Determine whether a path is executable on the current platform. + */ +exports.isexe = impl.isexe; +/** + * Synchronously determine whether a path is executable on the + * current platform. + */ +exports.sync = impl.sync; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/isexe/dist/cjs/options.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/isexe/dist/cjs/options.js new file mode 100644 index 0000000000000000000000000000000000000000..0dfad0762cc32cc745c1ceb730b92061fcfe42fb --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/isexe/dist/cjs/options.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=options.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/isexe/dist/cjs/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/isexe/dist/cjs/package.json new file mode 100644 index 0000000000000000000000000000000000000000..5bbefffbabee392d1855491b84dc0a716b6a3bf2 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/isexe/dist/cjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/isexe/dist/cjs/posix.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/isexe/dist/cjs/posix.js new file mode 100644 index 0000000000000000000000000000000000000000..3bc5e79d7007e9cf03a2c9c536f0af9d3db1147c --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/isexe/dist/cjs/posix.js @@ -0,0 +1,67 @@ +"use strict"; +/** + * This is the Posix implementation of isexe, which uses the file + * mode and uid/gid values. + * + * @module + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.sync = exports.isexe = void 0; +const fs_1 = require("fs"); +const promises_1 = require("fs/promises"); +/** + * Determine whether a path is executable according to the mode and + * current (or specified) user and group IDs. 
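+ *
+ * A minimal usage sketch (the path is illustrative only, not part of this
+ * module):
+ *
+ *     const { isexe } = require('isexe')
+ *     isexe('/bin/sh').then(ok => console.log(ok))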
+ */ +const isexe = async (path, options = {}) => { + const { ignoreErrors = false } = options; + try { + return checkStat(await (0, promises_1.stat)(path), options); + } + catch (e) { + const er = e; + if (ignoreErrors || er.code === 'EACCES') + return false; + throw er; + } +}; +exports.isexe = isexe; +/** + * Synchronously determine whether a path is executable according to + * the mode and current (or specified) user and group IDs. + */ +const sync = (path, options = {}) => { + const { ignoreErrors = false } = options; + try { + return checkStat((0, fs_1.statSync)(path), options); + } + catch (e) { + const er = e; + if (ignoreErrors || er.code === 'EACCES') + return false; + throw er; + } +}; +exports.sync = sync; +const checkStat = (stat, options) => stat.isFile() && checkMode(stat, options); +const checkMode = (stat, options) => { + const myUid = options.uid ?? process.getuid?.(); + const myGroups = options.groups ?? process.getgroups?.() ?? []; + const myGid = options.gid ?? process.getgid?.() ?? myGroups[0]; + if (myUid === undefined || myGid === undefined) { + throw new Error('cannot get uid or gid'); + } + const groups = new Set([myGid, ...myGroups]); + const mod = stat.mode; + const uid = stat.uid; + const gid = stat.gid; + const u = parseInt('100', 8); + const g = parseInt('010', 8); + const o = parseInt('001', 8); + const ug = u | g; + return !!(mod & o || + (mod & g && groups.has(gid)) || + (mod & u && uid === myUid) || + (mod & ug && myUid === 0)); +}; +//# sourceMappingURL=posix.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/isexe/dist/cjs/win32.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/isexe/dist/cjs/win32.js new file mode 100644 index 0000000000000000000000000000000000000000..fa7a4d2f7d240dd0ca98bb4608098c3d07372e3d --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/isexe/dist/cjs/win32.js @@ -0,0 +1,62 @@ +"use strict"; +/** + * This is the Windows implementation of isexe, which uses the file + * extension and PATHEXT setting. 
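+ *
+ * For example, with a hypothetical `PATHEXT=".EXE;.CMD"`:
+ *
+ *     // isexe('app.exe') resolves true
+ *     // isexe('app.txt') resolves false
+ *
+ * An empty entry in PATHEXT makes any existing file count as executable
+ * (see `checkPathExt` below).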
+ * + * @module + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.sync = exports.isexe = void 0; +const fs_1 = require("fs"); +const promises_1 = require("fs/promises"); +/** + * Determine whether a path is executable based on the file extension + * and PATHEXT environment variable (or specified pathExt option) + */ +const isexe = async (path, options = {}) => { + const { ignoreErrors = false } = options; + try { + return checkStat(await (0, promises_1.stat)(path), path, options); + } + catch (e) { + const er = e; + if (ignoreErrors || er.code === 'EACCES') + return false; + throw er; + } +}; +exports.isexe = isexe; +/** + * Synchronously determine whether a path is executable based on the file + * extension and PATHEXT environment variable (or specified pathExt option) + */ +const sync = (path, options = {}) => { + const { ignoreErrors = false } = options; + try { + return checkStat((0, fs_1.statSync)(path), path, options); + } + catch (e) { + const er = e; + if (ignoreErrors || er.code === 'EACCES') + return false; + throw er; + } +}; +exports.sync = sync; +const checkPathExt = (path, options) => { + const { pathExt = process.env.PATHEXT || '' } = options; + const peSplit = pathExt.split(';'); + if (peSplit.indexOf('') !== -1) { + return true; + } + for (let i = 0; i < peSplit.length; i++) { + const p = peSplit[i].toLowerCase(); + const ext = path.substring(path.length - p.length).toLowerCase(); + if (p && ext === p) { + return true; + } + } + return false; +}; +const checkStat = (stat, path, options) => stat.isFile() && checkPathExt(path, options); +//# sourceMappingURL=win32.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/isexe/dist/mjs/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/isexe/dist/mjs/index.js new file mode 100644 index 0000000000000000000000000000000000000000..1e309acd7355ec0f43608e7d49d5761ea10aebf2 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/isexe/dist/mjs/index.js @@ -0,0 +1,16 @@ +import * as posix from './posix.js'; +import * as win32 from './win32.js'; +export * from './options.js'; +export { win32, posix }; +const platform = process.env._ISEXE_TEST_PLATFORM_ || process.platform; +const impl = platform === 'win32' ? win32 : posix; +/** + * Determine whether a path is executable on the current platform. + */ +export const isexe = impl.isexe; +/** + * Synchronously determine whether a path is executable on the + * current platform. 
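+ *
+ * A minimal sketch (the path and option shown are illustrative):
+ *
+ *     import { sync } from 'isexe'
+ *     // ignoreErrors: a stat failure yields false instead of throwing
+ *     const ok = sync('./scripts/build.sh', { ignoreErrors: true })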
+ */ +export const sync = impl.sync; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/isexe/dist/mjs/options.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/isexe/dist/mjs/options.js new file mode 100644 index 0000000000000000000000000000000000000000..e9ded40bd5b2cdf3aeef9a444b534f004f92489b --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/isexe/dist/mjs/options.js @@ -0,0 +1,2 @@ +export {}; +//# sourceMappingURL=options.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/isexe/dist/mjs/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/isexe/dist/mjs/package.json new file mode 100644 index 0000000000000000000000000000000000000000..3dbc1ca591c0557e35b6004aeba250e6a70b56e3 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/isexe/dist/mjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/isexe/dist/mjs/posix.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/isexe/dist/mjs/posix.js new file mode 100644 index 0000000000000000000000000000000000000000..c453776c0452f743a4a077ed98ab0f298861badf --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/isexe/dist/mjs/posix.js @@ -0,0 +1,62 @@ +/** + * This is the Posix implementation of isexe, which uses the file + * mode and uid/gid values. + * + * @module + */ +import { statSync } from 'fs'; +import { stat } from 'fs/promises'; +/** + * Determine whether a path is executable according to the mode and + * current (or specified) user and group IDs. + */ +export const isexe = async (path, options = {}) => { + const { ignoreErrors = false } = options; + try { + return checkStat(await stat(path), options); + } + catch (e) { + const er = e; + if (ignoreErrors || er.code === 'EACCES') + return false; + throw er; + } +}; +/** + * Synchronously determine whether a path is executable according to + * the mode and current (or specified) user and group IDs. + */ +export const sync = (path, options = {}) => { + const { ignoreErrors = false } = options; + try { + return checkStat(statSync(path), options); + } + catch (e) { + const er = e; + if (ignoreErrors || er.code === 'EACCES') + return false; + throw er; + } +}; +const checkStat = (stat, options) => stat.isFile() && checkMode(stat, options); +const checkMode = (stat, options) => { + const myUid = options.uid ?? process.getuid?.(); + const myGroups = options.groups ?? process.getgroups?.() ?? []; + const myGid = options.gid ?? process.getgid?.() ?? 
myGroups[0]; + if (myUid === undefined || myGid === undefined) { + throw new Error('cannot get uid or gid'); + } + const groups = new Set([myGid, ...myGroups]); + const mod = stat.mode; + const uid = stat.uid; + const gid = stat.gid; + const u = parseInt('100', 8); + const g = parseInt('010', 8); + const o = parseInt('001', 8); + const ug = u | g; + return !!(mod & o || + (mod & g && groups.has(gid)) || + (mod & u && uid === myUid) || + (mod & ug && myUid === 0)); +}; +//# sourceMappingURL=posix.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/isexe/dist/mjs/win32.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/isexe/dist/mjs/win32.js new file mode 100644 index 0000000000000000000000000000000000000000..a354ee2a5115c7f71e68fc22278ac43cf96493ee --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/isexe/dist/mjs/win32.js @@ -0,0 +1,57 @@ +/** + * This is the Windows implementation of isexe, which uses the file + * extension and PATHEXT setting. + * + * @module + */ +import { statSync } from 'fs'; +import { stat } from 'fs/promises'; +/** + * Determine whether a path is executable based on the file extension + * and PATHEXT environment variable (or specified pathExt option) + */ +export const isexe = async (path, options = {}) => { + const { ignoreErrors = false } = options; + try { + return checkStat(await stat(path), path, options); + } + catch (e) { + const er = e; + if (ignoreErrors || er.code === 'EACCES') + return false; + throw er; + } +}; +/** + * Synchronously determine whether a path is executable based on the file + * extension and PATHEXT environment variable (or specified pathExt option) + */ +export const sync = (path, options = {}) => { + const { ignoreErrors = false } = options; + try { + return checkStat(statSync(path), path, options); + } + catch (e) { + const er = e; + if (ignoreErrors || er.code === 'EACCES') + return false; + throw er; + } +}; +const checkPathExt = (path, options) => { + const { pathExt = process.env.PATHEXT || '' } = options; + const peSplit = pathExt.split(';'); + if (peSplit.indexOf('') !== -1) { + return true; + } + for (let i = 0; i < peSplit.length; i++) { + const p = peSplit[i].toLowerCase(); + const ext = path.substring(path.length - p.length).toLowerCase(); + if (p && ext === p) { + return true; + } + } + return false; +}; +const checkStat = (stat, path, options) => stat.isFile() && checkPathExt(path, options); +//# sourceMappingURL=win32.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/jackspeak/dist/commonjs/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/jackspeak/dist/commonjs/index.js new file mode 100644 index 0000000000000000000000000000000000000000..543412746cc8feaaeef830911925518bbd53cadc --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/jackspeak/dist/commonjs/index.js @@ -0,0 +1,947 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.jack = exports.Jack = exports.isConfigOption = exports.isConfigOptionOfType = exports.isConfigType = void 0; +const node_util_1 = require("node:util"); +// it's a tiny API, just cast it inline, it's fine +//@ts-ignore +const cliui_1 = __importDefault(require("@isaacs/cliui")); +const node_path_1 = require("node:path"); +const isConfigType = (t) => typeof t === 'string' && + (t === 'string' || t === 'number' || t === 'boolean'); +exports.isConfigType = isConfigType; +const isValidValue = (v, type, multi) => { + if (multi) { + if (!Array.isArray(v)) + return false; + return !v.some((v) => !isValidValue(v, type, false)); + } + if (Array.isArray(v)) + return false; + return typeof v === type; +}; +const isValidOption = (v, vo) => !!vo && + (Array.isArray(v) ? v.every(x => isValidOption(x, vo)) : vo.includes(v)); +/** + * Determine whether an unknown object is a {@link ConfigOption} based only + * on its `type` and `multiple` property + */ +const isConfigOptionOfType = (o, type, multi) => !!o && + typeof o === 'object' && + (0, exports.isConfigType)(o.type) && + o.type === type && + !!o.multiple === multi; +exports.isConfigOptionOfType = isConfigOptionOfType; +/** + * Determine whether an unknown object is a {@link ConfigOption} based on + * it having all valid properties + */ +const isConfigOption = (o, type, multi) => (0, exports.isConfigOptionOfType)(o, type, multi) && + undefOrType(o.short, 'string') && + undefOrType(o.description, 'string') && + undefOrType(o.hint, 'string') && + undefOrType(o.validate, 'function') && + (o.type === 'boolean' ? + o.validOptions === undefined + : undefOrTypeArray(o.validOptions, o.type)) && + (o.default === undefined || isValidValue(o.default, type, multi)); +exports.isConfigOption = isConfigOption; +const isHeading = (r) => r.type === 'heading'; +const isDescription = (r) => r.type === 'description'; +const width = Math.min(process?.stdout?.columns ?? 80, 80); +// indentation spaces from heading level +const indent = (n) => (n - 1) * 2; +const toEnvKey = (pref, key) => [pref, key.replace(/[^a-zA-Z0-9]+/g, ' ')] + .join(' ') + .trim() + .toUpperCase() + .replace(/ /g, '_'); +const toEnvVal = (value, delim = '\n') => { + const str = typeof value === 'string' ? value + : typeof value === 'boolean' ? + value ? '1' + : '0' + : typeof value === 'number' ? String(value) + : Array.isArray(value) ? + value.map((v) => toEnvVal(v)).join(delim) + : /* c8 ignore start */ undefined; + if (typeof str !== 'string') { + throw new Error(`could not serialize value to environment: ${JSON.stringify(value)}`, { cause: { code: 'JACKSPEAK' } }); + } + /* c8 ignore stop */ + return str; +}; +const fromEnvVal = (env, type, multiple, delim = '\n') => (multiple ? + env ? env.split(delim).map(v => fromEnvVal(v, type, false)) + : [] + : type === 'string' ? env + : type === 'boolean' ? env === '1' + : +env.trim()); +const undefOrType = (v, t) => v === undefined || typeof v === t; +const undefOrTypeArray = (v, t) => v === undefined || (Array.isArray(v) && v.every(x => typeof x === t)); +// print the value type, for error message reporting +const valueType = (v) => typeof v === 'string' ? 'string' + : typeof v === 'boolean' ? 'boolean' + : typeof v === 'number' ? 'number' + : Array.isArray(v) ? + `${joinTypes([...new Set(v.map(v => valueType(v)))])}[]` + : `${v.type}${v.multiple ? '[]' : ''}`; +const joinTypes = (types) => types.length === 1 && typeof types[0] === 'string' ? 
+ types[0] + : `(${types.join('|')})`; +const validateFieldMeta = (field, fieldMeta) => { + if (fieldMeta) { + if (field.type !== undefined && field.type !== fieldMeta.type) { + throw new TypeError(`invalid type`, { + cause: { + found: field.type, + wanted: [fieldMeta.type, undefined], + }, + }); + } + if (field.multiple !== undefined && + !!field.multiple !== fieldMeta.multiple) { + throw new TypeError(`invalid multiple`, { + cause: { + found: field.multiple, + wanted: [fieldMeta.multiple, undefined], + }, + }); + } + return fieldMeta; + } + if (!(0, exports.isConfigType)(field.type)) { + throw new TypeError(`invalid type`, { + cause: { + found: field.type, + wanted: ['string', 'number', 'boolean'], + }, + }); + } + return { + type: field.type, + multiple: !!field.multiple, + }; +}; +const validateField = (o, type, multiple) => { + const validateValidOptions = (def, validOptions) => { + if (!undefOrTypeArray(validOptions, type)) { + throw new TypeError('invalid validOptions', { + cause: { + found: validOptions, + wanted: valueType({ type, multiple: true }), + }, + }); + } + if (def !== undefined && validOptions !== undefined) { + const valid = Array.isArray(def) ? + def.every(v => validOptions.includes(v)) + : validOptions.includes(def); + if (!valid) { + throw new TypeError('invalid default value not in validOptions', { + cause: { + found: def, + wanted: validOptions, + }, + }); + } + } + }; + if (o.default !== undefined && + !isValidValue(o.default, type, multiple)) { + throw new TypeError('invalid default value', { + cause: { + found: o.default, + wanted: valueType({ type, multiple }), + }, + }); + } + if ((0, exports.isConfigOptionOfType)(o, 'number', false) || + (0, exports.isConfigOptionOfType)(o, 'number', true)) { + validateValidOptions(o.default, o.validOptions); + } + else if ((0, exports.isConfigOptionOfType)(o, 'string', false) || + (0, exports.isConfigOptionOfType)(o, 'string', true)) { + validateValidOptions(o.default, o.validOptions); + } + else if ((0, exports.isConfigOptionOfType)(o, 'boolean', false) || + (0, exports.isConfigOptionOfType)(o, 'boolean', true)) { + if (o.hint !== undefined) { + throw new TypeError('cannot provide hint for flag'); + } + if (o.validOptions !== undefined) { + throw new TypeError('cannot provide validOptions for flag'); + } + } + return o; +}; +const toParseArgsOptionsConfig = (options) => { + return Object.entries(options).reduce((acc, [longOption, o]) => { + const p = { + type: 'string', + multiple: !!o.multiple, + ...(typeof o.short === 'string' ? 
{ short: o.short } : undefined), + }; + const setNoBool = () => { + if (!longOption.startsWith('no-') && !options[`no-${longOption}`]) { + acc[`no-${longOption}`] = { + type: 'boolean', + multiple: !!o.multiple, + }; + } + }; + const setDefault = (def, fn) => { + if (def !== undefined) { + p.default = fn(def); + } + }; + if ((0, exports.isConfigOption)(o, 'number', false)) { + setDefault(o.default, String); + } + else if ((0, exports.isConfigOption)(o, 'number', true)) { + setDefault(o.default, d => d.map(v => String(v))); + } + else if ((0, exports.isConfigOption)(o, 'string', false) || + (0, exports.isConfigOption)(o, 'string', true)) { + setDefault(o.default, v => v); + } + else if ((0, exports.isConfigOption)(o, 'boolean', false) || + (0, exports.isConfigOption)(o, 'boolean', true)) { + p.type = 'boolean'; + setDefault(o.default, v => v); + setNoBool(); + } + acc[longOption] = p; + return acc; + }, {}); +}; +/** + * Class returned by the {@link jack} function and all configuration + * definition methods. This is what gets chained together. + */ +class Jack { + #configSet; + #shorts; + #options; + #fields = []; + #env; + #envPrefix; + #allowPositionals; + #usage; + #usageMarkdown; + constructor(options = {}) { + this.#options = options; + this.#allowPositionals = options.allowPositionals !== false; + this.#env = + this.#options.env === undefined ? process.env : this.#options.env; + this.#envPrefix = options.envPrefix; + // We need to fib a little, because it's always the same object, but it + // starts out as having an empty config set. Then each method that adds + // fields returns `this as Jack` + this.#configSet = Object.create(null); + this.#shorts = Object.create(null); + } + /** + * Resulting definitions, suitable to be passed to Node's `util.parseArgs`, + * but also including `description` and `short` fields, if set. + */ + get definitions() { + return this.#configSet; + } + /** map of `{ : }` strings for each short name defined */ + get shorts() { + return this.#shorts; + } + /** + * options passed to the {@link Jack} constructor + */ + get jackOptions() { + return this.#options; + } + /** + * the data used to generate {@link Jack#usage} and + * {@link Jack#usageMarkdown} content. + */ + get usageFields() { + return this.#fields; + } + /** + * Set the default value (which will still be overridden by env or cli) + * as if from a parsed config file. The optional `source` param, if + * provided, will be included in error messages if a value is invalid or + * unknown. + */ + setConfigValues(values, source = '') { + try { + this.validate(values); + } + catch (er) { + if (source && er instanceof Error) { + /* c8 ignore next */ + const cause = typeof er.cause === 'object' ? er.cause : {}; + er.cause = { ...cause, path: source }; + Error.captureStackTrace(er, this.setConfigValues); + } + throw er; + } + for (const [field, value] of Object.entries(values)) { + const my = this.#configSet[field]; + // already validated, just for TS's benefit + /* c8 ignore start */ + if (!my) { + throw new Error('unexpected field in config set: ' + field, { + cause: { + code: 'JACKSPEAK', + found: field, + }, + }); + } + /* c8 ignore stop */ + my.default = value; + } + return this; + } + /** + * Parse a string of arguments, and return the resulting + * `{ values, positionals }` object. + * + * If an {@link JackOptions#envPrefix} is set, then it will read default + * values from the environment, and write the resulting values back + * to the environment as well. 
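+ *
+ * For example, with a hypothetical `envPrefix: 'MYAPP'`, a boolean `debug`
+ * field reads its default from `MYAPP_DEBUG=1`, and the parsed result is
+ * written back to `process.env.MYAPP_DEBUG` as `'1'` or `'0'`.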
+ * + * Environment values always take precedence over any other value, except + * an explicit CLI setting. + */ + parse(args = process.argv) { + this.loadEnvDefaults(); + const p = this.parseRaw(args); + this.applyDefaults(p); + this.writeEnv(p); + return p; + } + loadEnvDefaults() { + if (this.#envPrefix) { + for (const [field, my] of Object.entries(this.#configSet)) { + const ek = toEnvKey(this.#envPrefix, field); + const env = this.#env[ek]; + if (env !== undefined) { + my.default = fromEnvVal(env, my.type, !!my.multiple, my.delim); + } + } + } + } + applyDefaults(p) { + for (const [field, c] of Object.entries(this.#configSet)) { + if (c.default !== undefined && !(field in p.values)) { + //@ts-ignore + p.values[field] = c.default; + } + } + } + /** + * Only parse the command line arguments passed in. + * Does not strip off the `node script.js` bits, so it must be just the + * arguments you wish to have parsed. + * Does not read from or write to the environment, or set defaults. + */ + parseRaw(args) { + if (args === process.argv) { + args = args.slice(process._eval !== undefined ? 1 : 2); + } + const result = (0, node_util_1.parseArgs)({ + args, + options: toParseArgsOptionsConfig(this.#configSet), + // always strict, but using our own logic + strict: false, + allowPositionals: this.#allowPositionals, + tokens: true, + }); + const p = { + values: {}, + positionals: [], + }; + for (const token of result.tokens) { + if (token.kind === 'positional') { + p.positionals.push(token.value); + if (this.#options.stopAtPositional || + this.#options.stopAtPositionalTest?.(token.value)) { + p.positionals.push(...args.slice(token.index + 1)); + break; + } + } + else if (token.kind === 'option') { + let value = undefined; + if (token.name.startsWith('no-')) { + const my = this.#configSet[token.name]; + const pname = token.name.substring('no-'.length); + const pos = this.#configSet[pname]; + if (pos && + pos.type === 'boolean' && + (!my || + (my.type === 'boolean' && !!my.multiple === !!pos.multiple))) { + value = false; + token.name = pname; + } + } + const my = this.#configSet[token.name]; + if (!my) { + throw new Error(`Unknown option '${token.rawName}'. ` + + `To specify a positional argument starting with a '-', ` + + `place it at the end of the command after '--', as in ` + + `'-- ${token.rawName}'`, { + cause: { + code: 'JACKSPEAK', + found: token.rawName + (token.value ? `=${token.value}` : ''), + }, + }); + } + if (value === undefined) { + if (token.value === undefined) { + if (my.type !== 'boolean') { + throw new Error(`No value provided for ${token.rawName}, expected ${my.type}`, { + cause: { + code: 'JACKSPEAK', + name: token.rawName, + wanted: valueType(my), + }, + }); + } + value = true; + } + else { + if (my.type === 'boolean') { + throw new Error(`Flag ${token.rawName} does not take a value, received '${token.value}'`, { cause: { code: 'JACKSPEAK', found: token } }); + } + if (my.type === 'string') { + value = token.value; + } + else { + value = +token.value; + if (value !== value) { + throw new Error(`Invalid value '${token.value}' provided for ` + + `'${token.rawName}' option, expected number`, { + cause: { + code: 'JACKSPEAK', + name: token.rawName, + found: token.value, + wanted: 'number', + }, + }); + } + } + } + } + if (my.multiple) { + const pv = p.values; + const tn = pv[token.name] ?? 
[]; + pv[token.name] = tn; + tn.push(value); + } + else { + const pv = p.values; + pv[token.name] = value; + } + } + } + for (const [field, value] of Object.entries(p.values)) { + const valid = this.#configSet[field]?.validate; + const validOptions = this.#configSet[field]?.validOptions; + const cause = validOptions && !isValidOption(value, validOptions) ? + { name: field, found: value, validOptions } + : valid && !valid(value) ? { name: field, found: value } + : undefined; + if (cause) { + throw new Error(`Invalid value provided for --${field}: ${JSON.stringify(value)}`, { cause: { ...cause, code: 'JACKSPEAK' } }); + } + } + return p; + } + /** + * do not set fields as 'no-foo' if 'foo' exists and both are bools + * just set foo. + */ + #noNoFields(f, val, s = f) { + if (!f.startsWith('no-') || typeof val !== 'boolean') + return; + const yes = f.substring('no-'.length); + // recurse so we get the core config key we care about. + this.#noNoFields(yes, val, s); + if (this.#configSet[yes]?.type === 'boolean') { + throw new Error(`do not set '${s}', instead set '${yes}' as desired.`, { cause: { code: 'JACKSPEAK', found: s, wanted: yes } }); + } + } + /** + * Validate that any arbitrary object is a valid configuration `values` + * object. Useful when loading config files or other sources. + */ + validate(o) { + if (!o || typeof o !== 'object') { + throw new Error('Invalid config: not an object', { + cause: { code: 'JACKSPEAK', found: o }, + }); + } + const opts = o; + for (const field in o) { + const value = opts[field]; + /* c8 ignore next - for TS */ + if (value === undefined) + continue; + this.#noNoFields(field, value); + const config = this.#configSet[field]; + if (!config) { + throw new Error(`Unknown config option: ${field}`, { + cause: { code: 'JACKSPEAK', found: field }, + }); + } + if (!isValidValue(value, config.type, !!config.multiple)) { + throw new Error(`Invalid value ${valueType(value)} for ${field}, expected ${valueType(config)}`, { + cause: { + code: 'JACKSPEAK', + name: field, + found: value, + wanted: valueType(config), + }, + }); + } + const cause = config.validOptions && !isValidOption(value, config.validOptions) ? + { name: field, found: value, validOptions: config.validOptions } + : config.validate && !config.validate(value) ? + { name: field, found: value } + : undefined; + if (cause) { + throw new Error(`Invalid config value for ${field}: ${value}`, { + cause: { ...cause, code: 'JACKSPEAK' }, + }); + } + } + } + writeEnv(p) { + if (!this.#env || !this.#envPrefix) + return; + for (const [field, value] of Object.entries(p.values)) { + const my = this.#configSet[field]; + this.#env[toEnvKey(this.#envPrefix, field)] = toEnvVal(value, my?.delim); + } + } + /** + * Add a heading to the usage output banner + */ + heading(text, level, { pre = false } = {}) { + if (level === undefined) { + level = this.#fields.some(r => isHeading(r)) ? 2 : 1; + } + this.#fields.push({ type: 'heading', text, level, pre }); + return this; + } + /** + * Add a long-form description to the usage output at this position. + */ + description(text, { pre } = {}) { + this.#fields.push({ type: 'description', text, pre }); + return this; + } + /** + * Add one or more number fields. + */ + num(fields) { + return this.#addFieldsWith(fields, 'number', false); + } + /** + * Add one or more multiple number fields. + */ + numList(fields) { + return this.#addFieldsWith(fields, 'number', true); + } + /** + * Add one or more string option fields. 
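+ *
+ * For example (field name, short flag, and hint are illustrative):
+ *
+ *     jack().opt({ output: { short: 'o', hint: 'file',
+ *       description: 'where to write results' } })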
+ */ + opt(fields) { + return this.#addFieldsWith(fields, 'string', false); + } + /** + * Add one or more multiple string option fields. + */ + optList(fields) { + return this.#addFieldsWith(fields, 'string', true); + } + /** + * Add one or more flag fields. + */ + flag(fields) { + return this.#addFieldsWith(fields, 'boolean', false); + } + /** + * Add one or more multiple flag fields. + */ + flagList(fields) { + return this.#addFieldsWith(fields, 'boolean', true); + } + /** + * Generic field definition method. Similar to flag/flagList/number/etc, + * but you must specify the `type` (and optionally `multiple` and `delim`) + * fields on each one, or Jack won't know how to define them. + */ + addFields(fields) { + return this.#addFields(this, fields); + } + #addFieldsWith(fields, type, multiple) { + return this.#addFields(this, fields, { + type, + multiple, + }); + } + #addFields(next, fields, opt) { + Object.assign(next.#configSet, Object.fromEntries(Object.entries(fields).map(([name, field]) => { + this.#validateName(name, field); + const { type, multiple } = validateFieldMeta(field, opt); + const value = { ...field, type, multiple }; + validateField(value, type, multiple); + next.#fields.push({ type: 'config', name, value }); + return [name, value]; + }))); + return next; + } + #validateName(name, field) { + if (!/^[a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?$/.test(name)) { + throw new TypeError(`Invalid option name: ${name}, ` + + `must be '-' delimited ASCII alphanumeric`); + } + if (this.#configSet[name]) { + throw new TypeError(`Cannot redefine option ${field}`); + } + if (this.#shorts[name]) { + throw new TypeError(`Cannot redefine option ${name}, already ` + + `in use for ${this.#shorts[name]}`); + } + if (field.short) { + if (!/^[a-zA-Z0-9]$/.test(field.short)) { + throw new TypeError(`Invalid ${name} short option: ${field.short}, ` + + 'must be 1 ASCII alphanumeric character'); + } + if (this.#shorts[field.short]) { + throw new TypeError(`Invalid ${name} short option: ${field.short}, ` + + `already in use for ${this.#shorts[field.short]}`); + } + this.#shorts[field.short] = name; + this.#shorts[name] = name; + } + } + /** + * Return the usage banner for the given configuration + */ + usage() { + if (this.#usage) + return this.#usage; + let headingLevel = 1; + //@ts-ignore + const ui = (0, cliui_1.default)({ width }); + const first = this.#fields[0]; + let start = first?.type === 'heading' ? 1 : 0; + if (first?.type === 'heading') { + ui.div({ + padding: [0, 0, 0, 0], + text: normalize(first.text), + }); + } + ui.div({ padding: [0, 0, 0, 0], text: 'Usage:' }); + if (this.#options.usage) { + ui.div({ + text: this.#options.usage, + padding: [0, 0, 0, 2], + }); + } + else { + const cmd = (0, node_path_1.basename)(String(process.argv[1])); + const shortFlags = []; + const shorts = []; + const flags = []; + const opts = []; + for (const [field, config] of Object.entries(this.#configSet)) { + if (config.short) { + if (config.type === 'boolean') + shortFlags.push(config.short); + else + shorts.push([config.short, config.hint || field]); + } + else { + if (config.type === 'boolean') + flags.push(field); + else + opts.push([field, config.hint || field]); + } + } + const sf = shortFlags.length ? 
' -' + shortFlags.join('') : ''; + const so = shorts.map(([k, v]) => ` --${k}=<${v}>`).join(''); + const lf = flags.map(k => ` --${k}`).join(''); + const lo = opts.map(([k, v]) => ` --${k}=<${v}>`).join(''); + const usage = `${cmd}${sf}${so}${lf}${lo}`.trim(); + ui.div({ + text: usage, + padding: [0, 0, 0, 2], + }); + } + ui.div({ padding: [0, 0, 0, 0], text: '' }); + const maybeDesc = this.#fields[start]; + if (maybeDesc && isDescription(maybeDesc)) { + const print = normalize(maybeDesc.text, maybeDesc.pre); + start++; + ui.div({ padding: [0, 0, 0, 0], text: print }); + ui.div({ padding: [0, 0, 0, 0], text: '' }); + } + const { rows, maxWidth } = this.#usageRows(start); + // every heading/description after the first gets indented by 2 + // extra spaces. + for (const row of rows) { + if (row.left) { + // If the row is too long, don't wrap it + // Bump the right-hand side down a line to make room + const configIndent = indent(Math.max(headingLevel, 2)); + if (row.left.length > maxWidth - 3) { + ui.div({ text: row.left, padding: [0, 0, 0, configIndent] }); + ui.div({ text: row.text, padding: [0, 0, 0, maxWidth] }); + } + else { + ui.div({ + text: row.left, + padding: [0, 1, 0, configIndent], + width: maxWidth, + }, { padding: [0, 0, 0, 0], text: row.text }); + } + if (row.skipLine) { + ui.div({ padding: [0, 0, 0, 0], text: '' }); + } + } + else { + if (isHeading(row)) { + const { level } = row; + headingLevel = level; + // only h1 and h2 have bottom padding + // h3-h6 do not + const b = level <= 2 ? 1 : 0; + ui.div({ ...row, padding: [0, 0, b, indent(level)] }); + } + else { + ui.div({ ...row, padding: [0, 0, 1, indent(headingLevel + 1)] }); + } + } + } + return (this.#usage = ui.toString()); + } + /** + * Return the usage banner markdown for the given configuration + */ + usageMarkdown() { + if (this.#usageMarkdown) + return this.#usageMarkdown; + const out = []; + let headingLevel = 1; + const first = this.#fields[0]; + let start = first?.type === 'heading' ? 1 : 0; + if (first?.type === 'heading') { + out.push(`# ${normalizeOneLine(first.text)}`); + } + out.push('Usage:'); + if (this.#options.usage) { + out.push(normalizeMarkdown(this.#options.usage, true)); + } + else { + const cmd = (0, node_path_1.basename)(String(process.argv[1])); + const shortFlags = []; + const shorts = []; + const flags = []; + const opts = []; + for (const [field, config] of Object.entries(this.#configSet)) { + if (config.short) { + if (config.type === 'boolean') + shortFlags.push(config.short); + else + shorts.push([config.short, config.hint || field]); + } + else { + if (config.type === 'boolean') + flags.push(field); + else + opts.push([field, config.hint || field]); + } + } + const sf = shortFlags.length ? 
' -' + shortFlags.join('') : ''; + const so = shorts.map(([k, v]) => ` --${k}=<${v}>`).join(''); + const lf = flags.map(k => ` --${k}`).join(''); + const lo = opts.map(([k, v]) => ` --${k}=<${v}>`).join(''); + const usage = `${cmd}${sf}${so}${lf}${lo}`.trim(); + out.push(normalizeMarkdown(usage, true)); + } + const maybeDesc = this.#fields[start]; + if (maybeDesc && isDescription(maybeDesc)) { + out.push(normalizeMarkdown(maybeDesc.text, maybeDesc.pre)); + start++; + } + const { rows } = this.#usageRows(start); + // heading level in markdown is number of # ahead of text + for (const row of rows) { + if (row.left) { + out.push('#'.repeat(headingLevel + 1) + + ' ' + + normalizeOneLine(row.left, true)); + if (row.text) + out.push(normalizeMarkdown(row.text)); + } + else if (isHeading(row)) { + const { level } = row; + headingLevel = level; + out.push(`${'#'.repeat(headingLevel)} ${normalizeOneLine(row.text, row.pre)}`); + } + else { + out.push(normalizeMarkdown(row.text, !!row.pre)); + } + } + return (this.#usageMarkdown = out.join('\n\n') + '\n'); + } + #usageRows(start) { + // turn each config type into a row, and figure out the width of the + // left hand indentation for the option descriptions. + let maxMax = Math.max(12, Math.min(26, Math.floor(width / 3))); + let maxWidth = 8; + let prev = undefined; + const rows = []; + for (const field of this.#fields.slice(start)) { + if (field.type !== 'config') { + if (prev?.type === 'config') + prev.skipLine = true; + prev = undefined; + field.text = normalize(field.text, !!field.pre); + rows.push(field); + continue; + } + const { value } = field; + const desc = value.description || ''; + const mult = value.multiple ? 'Can be set multiple times' : ''; + const opts = value.validOptions?.length ? + `Valid options:${value.validOptions.map(v => ` ${JSON.stringify(v)}`)}` + : ''; + const dmDelim = desc.includes('\n') ? '\n\n' : '\n'; + const extra = [opts, mult].join(dmDelim).trim(); + const text = (normalize(desc) + dmDelim + extra).trim(); + const hint = value.hint || + (value.type === 'number' ? 'n' + : value.type === 'string' ? field.name + : undefined); + const short = !value.short ? '' + : value.type === 'boolean' ? `-${value.short} ` + : `-${value.short}<${hint}> `; + const left = value.type === 'boolean' ? + `${short}--${field.name}` + : `${short}--${field.name}=<${hint}>`; + const row = { text, left, type: 'config' }; + if (text.length > width - maxMax) { + row.skipLine = true; + } + if (prev && left.length > maxMax) + prev.skipLine = true; + prev = row; + const len = left.length + 4; + if (len > maxWidth && len < maxMax) { + maxWidth = len; + } + rows.push(row); + } + return { rows, maxWidth }; + } + /** + * Return the configuration options as a plain object + */ + toJSON() { + return Object.fromEntries(Object.entries(this.#configSet).map(([field, def]) => [ + field, + { + type: def.type, + ...(def.multiple ? { multiple: true } : {}), + ...(def.delim ? { delim: def.delim } : {}), + ...(def.short ? { short: def.short } : {}), + ...(def.description ? + { description: normalize(def.description) } + : {}), + ...(def.validate ? { validate: def.validate } : {}), + ...(def.validOptions ? { validOptions: def.validOptions } : {}), + ...(def.default !== undefined ? { default: def.default } : {}), + ...(def.hint ? 
{ hint: def.hint } : {}), + }, + ])); + } + /** + * Custom printer for `util.inspect` + */ + [node_util_1.inspect.custom](_, options) { + return `Jack ${(0, node_util_1.inspect)(this.toJSON(), options)}`; + } +} +exports.Jack = Jack; +/** + * Main entry point. Create and return a {@link Jack} object. + */ +const jack = (options = {}) => new Jack(options); +exports.jack = jack; +// Unwrap and un-indent, so we can wrap description +// strings however makes them look nice in the code. +const normalize = (s, pre = false) => { + if (pre) + // prepend a ZWSP to each line so cliui doesn't strip it. + return s + .split('\n') + .map(l => `\u200b${l}`) + .join('\n'); + return s + .split(/^\s*```\s*$/gm) + .map((s, i) => { + if (i % 2 === 1) { + if (!s.trim()) { + return `\`\`\`\n\`\`\`\n`; + } + // outdent the ``` blocks, but preserve whitespace otherwise. + const split = s.split('\n'); + // throw out the \n at the start and end + split.pop(); + split.shift(); + const si = split.reduce((shortest, l) => { + /* c8 ignore next */ + const ind = l.match(/^\s*/)?.[0] ?? ''; + if (ind.length) + return Math.min(ind.length, shortest); + else + return shortest; + }, Infinity); + /* c8 ignore next */ + const i = isFinite(si) ? si : 0; + return ('\n```\n' + + split.map(s => `\u200b${s.substring(i)}`).join('\n') + + '\n```\n'); + } + return (s + // remove single line breaks, except for lists + .replace(/([^\n])\n[ \t]*([^\n])/g, (_, $1, $2) => !/^[-*]/.test($2) ? `${$1} ${$2}` : `${$1}\n${$2}`) + // normalize mid-line whitespace + .replace(/([^\n])[ \t]+([^\n])/g, '$1 $2') + // two line breaks are enough + .replace(/\n{3,}/g, '\n\n') + // remove any spaces at the start of a line + .replace(/\n[ \t]+/g, '\n') + .trim()); + }) + .join('\n'); +}; +// normalize for markdown printing, remove leading spaces on lines +const normalizeMarkdown = (s, pre = false) => { + const n = normalize(s, pre).replace(/\\/g, '\\\\'); + return pre ? + `\`\`\`\n${n.replace(/\u200b/g, '')}\n\`\`\`` + : n.replace(/\n +/g, '\n').trim(); +}; +const normalizeOneLine = (s, pre = false) => { + const n = normalize(s, pre) + .replace(/[\s\u200b]+/g, ' ') + .trim(); + return pre ? 
`\`${n}\`` : n; +}; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/jackspeak/dist/commonjs/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/jackspeak/dist/commonjs/package.json new file mode 100644 index 0000000000000000000000000000000000000000..5bbefffbabee392d1855491b84dc0a716b6a3bf2 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/jackspeak/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/jackspeak/dist/esm/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/jackspeak/dist/esm/index.js new file mode 100644 index 0000000000000000000000000000000000000000..b959f5126423c0701982c782cf15ef118341ba53 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/jackspeak/dist/esm/index.js @@ -0,0 +1,936 @@ +import { inspect, parseArgs, } from 'node:util'; +// it's a tiny API, just cast it inline, it's fine +//@ts-ignore +import cliui from '@isaacs/cliui'; +import { basename } from 'node:path'; +export const isConfigType = (t) => typeof t === 'string' && + (t === 'string' || t === 'number' || t === 'boolean'); +const isValidValue = (v, type, multi) => { + if (multi) { + if (!Array.isArray(v)) + return false; + return !v.some((v) => !isValidValue(v, type, false)); + } + if (Array.isArray(v)) + return false; + return typeof v === type; +}; +const isValidOption = (v, vo) => !!vo && + (Array.isArray(v) ? v.every(x => isValidOption(x, vo)) : vo.includes(v)); +/** + * Determine whether an unknown object is a {@link ConfigOption} based only + * on its `type` and `multiple` property + */ +export const isConfigOptionOfType = (o, type, multi) => !!o && + typeof o === 'object' && + isConfigType(o.type) && + o.type === type && + !!o.multiple === multi; +/** + * Determine whether an unknown object is a {@link ConfigOption} based on + * it having all valid properties + */ +export const isConfigOption = (o, type, multi) => isConfigOptionOfType(o, type, multi) && + undefOrType(o.short, 'string') && + undefOrType(o.description, 'string') && + undefOrType(o.hint, 'string') && + undefOrType(o.validate, 'function') && + (o.type === 'boolean' ? + o.validOptions === undefined + : undefOrTypeArray(o.validOptions, o.type)) && + (o.default === undefined || isValidValue(o.default, type, multi)); +const isHeading = (r) => r.type === 'heading'; +const isDescription = (r) => r.type === 'description'; +const width = Math.min(process?.stdout?.columns ?? 80, 80); +// indentation spaces from heading level +const indent = (n) => (n - 1) * 2; +const toEnvKey = (pref, key) => [pref, key.replace(/[^a-zA-Z0-9]+/g, ' ')] + .join(' ') + .trim() + .toUpperCase() + .replace(/ /g, '_'); +const toEnvVal = (value, delim = '\n') => { + const str = typeof value === 'string' ? value + : typeof value === 'boolean' ? + value ? '1' + : '0' + : typeof value === 'number' ? String(value) + : Array.isArray(value) ? + value.map((v) => toEnvVal(v)).join(delim) + : /* c8 ignore start */ undefined; + if (typeof str !== 'string') { + throw new Error(`could not serialize value to environment: ${JSON.stringify(value)}`, { cause: { code: 'JACKSPEAK' } }); + } + /* c8 ignore stop */ + return str; +}; +const fromEnvVal = (env, type, multiple, delim = '\n') => (multiple ? + env ? 
env.split(delim).map(v => fromEnvVal(v, type, false)) + : [] + : type === 'string' ? env + : type === 'boolean' ? env === '1' + : +env.trim()); +const undefOrType = (v, t) => v === undefined || typeof v === t; +const undefOrTypeArray = (v, t) => v === undefined || (Array.isArray(v) && v.every(x => typeof x === t)); +// print the value type, for error message reporting +const valueType = (v) => typeof v === 'string' ? 'string' + : typeof v === 'boolean' ? 'boolean' + : typeof v === 'number' ? 'number' + : Array.isArray(v) ? + `${joinTypes([...new Set(v.map(v => valueType(v)))])}[]` + : `${v.type}${v.multiple ? '[]' : ''}`; +const joinTypes = (types) => types.length === 1 && typeof types[0] === 'string' ? + types[0] + : `(${types.join('|')})`; +const validateFieldMeta = (field, fieldMeta) => { + if (fieldMeta) { + if (field.type !== undefined && field.type !== fieldMeta.type) { + throw new TypeError(`invalid type`, { + cause: { + found: field.type, + wanted: [fieldMeta.type, undefined], + }, + }); + } + if (field.multiple !== undefined && + !!field.multiple !== fieldMeta.multiple) { + throw new TypeError(`invalid multiple`, { + cause: { + found: field.multiple, + wanted: [fieldMeta.multiple, undefined], + }, + }); + } + return fieldMeta; + } + if (!isConfigType(field.type)) { + throw new TypeError(`invalid type`, { + cause: { + found: field.type, + wanted: ['string', 'number', 'boolean'], + }, + }); + } + return { + type: field.type, + multiple: !!field.multiple, + }; +}; +const validateField = (o, type, multiple) => { + const validateValidOptions = (def, validOptions) => { + if (!undefOrTypeArray(validOptions, type)) { + throw new TypeError('invalid validOptions', { + cause: { + found: validOptions, + wanted: valueType({ type, multiple: true }), + }, + }); + } + if (def !== undefined && validOptions !== undefined) { + const valid = Array.isArray(def) ? + def.every(v => validOptions.includes(v)) + : validOptions.includes(def); + if (!valid) { + throw new TypeError('invalid default value not in validOptions', { + cause: { + found: def, + wanted: validOptions, + }, + }); + } + } + }; + if (o.default !== undefined && + !isValidValue(o.default, type, multiple)) { + throw new TypeError('invalid default value', { + cause: { + found: o.default, + wanted: valueType({ type, multiple }), + }, + }); + } + if (isConfigOptionOfType(o, 'number', false) || + isConfigOptionOfType(o, 'number', true)) { + validateValidOptions(o.default, o.validOptions); + } + else if (isConfigOptionOfType(o, 'string', false) || + isConfigOptionOfType(o, 'string', true)) { + validateValidOptions(o.default, o.validOptions); + } + else if (isConfigOptionOfType(o, 'boolean', false) || + isConfigOptionOfType(o, 'boolean', true)) { + if (o.hint !== undefined) { + throw new TypeError('cannot provide hint for flag'); + } + if (o.validOptions !== undefined) { + throw new TypeError('cannot provide validOptions for flag'); + } + } + return o; +}; +const toParseArgsOptionsConfig = (options) => { + return Object.entries(options).reduce((acc, [longOption, o]) => { + const p = { + type: 'string', + multiple: !!o.multiple, + ...(typeof o.short === 'string' ? 
{ short: o.short } : undefined), + }; + const setNoBool = () => { + if (!longOption.startsWith('no-') && !options[`no-${longOption}`]) { + acc[`no-${longOption}`] = { + type: 'boolean', + multiple: !!o.multiple, + }; + } + }; + const setDefault = (def, fn) => { + if (def !== undefined) { + p.default = fn(def); + } + }; + if (isConfigOption(o, 'number', false)) { + setDefault(o.default, String); + } + else if (isConfigOption(o, 'number', true)) { + setDefault(o.default, d => d.map(v => String(v))); + } + else if (isConfigOption(o, 'string', false) || + isConfigOption(o, 'string', true)) { + setDefault(o.default, v => v); + } + else if (isConfigOption(o, 'boolean', false) || + isConfigOption(o, 'boolean', true)) { + p.type = 'boolean'; + setDefault(o.default, v => v); + setNoBool(); + } + acc[longOption] = p; + return acc; + }, {}); +}; +/** + * Class returned by the {@link jack} function and all configuration + * definition methods. This is what gets chained together. + */ +export class Jack { + #configSet; + #shorts; + #options; + #fields = []; + #env; + #envPrefix; + #allowPositionals; + #usage; + #usageMarkdown; + constructor(options = {}) { + this.#options = options; + this.#allowPositionals = options.allowPositionals !== false; + this.#env = + this.#options.env === undefined ? process.env : this.#options.env; + this.#envPrefix = options.envPrefix; + // We need to fib a little, because it's always the same object, but it + // starts out as having an empty config set. Then each method that adds + // fields returns `this as Jack` + this.#configSet = Object.create(null); + this.#shorts = Object.create(null); + } + /** + * Resulting definitions, suitable to be passed to Node's `util.parseArgs`, + * but also including `description` and `short` fields, if set. + */ + get definitions() { + return this.#configSet; + } + /** map of `{ : }` strings for each short name defined */ + get shorts() { + return this.#shorts; + } + /** + * options passed to the {@link Jack} constructor + */ + get jackOptions() { + return this.#options; + } + /** + * the data used to generate {@link Jack#usage} and + * {@link Jack#usageMarkdown} content. + */ + get usageFields() { + return this.#fields; + } + /** + * Set the default value (which will still be overridden by env or cli) + * as if from a parsed config file. The optional `source` param, if + * provided, will be included in error messages if a value is invalid or + * unknown. + */ + setConfigValues(values, source = '') { + try { + this.validate(values); + } + catch (er) { + if (source && er instanceof Error) { + /* c8 ignore next */ + const cause = typeof er.cause === 'object' ? er.cause : {}; + er.cause = { ...cause, path: source }; + Error.captureStackTrace(er, this.setConfigValues); + } + throw er; + } + for (const [field, value] of Object.entries(values)) { + const my = this.#configSet[field]; + // already validated, just for TS's benefit + /* c8 ignore start */ + if (!my) { + throw new Error('unexpected field in config set: ' + field, { + cause: { + code: 'JACKSPEAK', + found: field, + }, + }); + } + /* c8 ignore stop */ + my.default = value; + } + return this; + } + /** + * Parse a string of arguments, and return the resulting + * `{ values, positionals }` object. + * + * If an {@link JackOptions#envPrefix} is set, then it will read default + * values from the environment, and write the resulting values back + * to the environment as well. + * + * Environment values always take precedence over any other value, except + * an explicit CLI setting. 
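+ *
+ * A minimal sketch (flag name and arguments are illustrative). Note that
+ * an explicit args array is parsed as-is, without slicing off `node` and
+ * the script path:
+ *
+ *     import { jack } from 'jackspeak'
+ *     const { values, positionals } = jack()
+ *       .flag({ verbose: { short: 'v' } })
+ *       .parse(['--verbose', 'file.txt'])
+ *     // values.verbose === true, positionals === ['file.txt']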
+ */ + parse(args = process.argv) { + this.loadEnvDefaults(); + const p = this.parseRaw(args); + this.applyDefaults(p); + this.writeEnv(p); + return p; + } + loadEnvDefaults() { + if (this.#envPrefix) { + for (const [field, my] of Object.entries(this.#configSet)) { + const ek = toEnvKey(this.#envPrefix, field); + const env = this.#env[ek]; + if (env !== undefined) { + my.default = fromEnvVal(env, my.type, !!my.multiple, my.delim); + } + } + } + } + applyDefaults(p) { + for (const [field, c] of Object.entries(this.#configSet)) { + if (c.default !== undefined && !(field in p.values)) { + //@ts-ignore + p.values[field] = c.default; + } + } + } + /** + * Only parse the command line arguments passed in. + * Does not strip off the `node script.js` bits, so it must be just the + * arguments you wish to have parsed. + * Does not read from or write to the environment, or set defaults. + */ + parseRaw(args) { + if (args === process.argv) { + args = args.slice(process._eval !== undefined ? 1 : 2); + } + const result = parseArgs({ + args, + options: toParseArgsOptionsConfig(this.#configSet), + // always strict, but using our own logic + strict: false, + allowPositionals: this.#allowPositionals, + tokens: true, + }); + const p = { + values: {}, + positionals: [], + }; + for (const token of result.tokens) { + if (token.kind === 'positional') { + p.positionals.push(token.value); + if (this.#options.stopAtPositional || + this.#options.stopAtPositionalTest?.(token.value)) { + p.positionals.push(...args.slice(token.index + 1)); + break; + } + } + else if (token.kind === 'option') { + let value = undefined; + if (token.name.startsWith('no-')) { + const my = this.#configSet[token.name]; + const pname = token.name.substring('no-'.length); + const pos = this.#configSet[pname]; + if (pos && + pos.type === 'boolean' && + (!my || + (my.type === 'boolean' && !!my.multiple === !!pos.multiple))) { + value = false; + token.name = pname; + } + } + const my = this.#configSet[token.name]; + if (!my) { + throw new Error(`Unknown option '${token.rawName}'. ` + + `To specify a positional argument starting with a '-', ` + + `place it at the end of the command after '--', as in ` + + `'-- ${token.rawName}'`, { + cause: { + code: 'JACKSPEAK', + found: token.rawName + (token.value ? `=${token.value}` : ''), + }, + }); + } + if (value === undefined) { + if (token.value === undefined) { + if (my.type !== 'boolean') { + throw new Error(`No value provided for ${token.rawName}, expected ${my.type}`, { + cause: { + code: 'JACKSPEAK', + name: token.rawName, + wanted: valueType(my), + }, + }); + } + value = true; + } + else { + if (my.type === 'boolean') { + throw new Error(`Flag ${token.rawName} does not take a value, received '${token.value}'`, { cause: { code: 'JACKSPEAK', found: token } }); + } + if (my.type === 'string') { + value = token.value; + } + else { + value = +token.value; + if (value !== value) { + throw new Error(`Invalid value '${token.value}' provided for ` + + `'${token.rawName}' option, expected number`, { + cause: { + code: 'JACKSPEAK', + name: token.rawName, + found: token.value, + wanted: 'number', + }, + }); + } + } + } + } + if (my.multiple) { + const pv = p.values; + const tn = pv[token.name] ?? 
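+ *
+ * For example (illustrative names):
+ *
+ *     jack().opt({ config: { short: 'c', hint: 'path' } })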
[]; + pv[token.name] = tn; + tn.push(value); + } + else { + const pv = p.values; + pv[token.name] = value; + } + } + } + for (const [field, value] of Object.entries(p.values)) { + const valid = this.#configSet[field]?.validate; + const validOptions = this.#configSet[field]?.validOptions; + const cause = validOptions && !isValidOption(value, validOptions) ? + { name: field, found: value, validOptions } + : valid && !valid(value) ? { name: field, found: value } + : undefined; + if (cause) { + throw new Error(`Invalid value provided for --${field}: ${JSON.stringify(value)}`, { cause: { ...cause, code: 'JACKSPEAK' } }); + } + } + return p; + } + /** + * do not set fields as 'no-foo' if 'foo' exists and both are bools + * just set foo. + */ + #noNoFields(f, val, s = f) { + if (!f.startsWith('no-') || typeof val !== 'boolean') + return; + const yes = f.substring('no-'.length); + // recurse so we get the core config key we care about. + this.#noNoFields(yes, val, s); + if (this.#configSet[yes]?.type === 'boolean') { + throw new Error(`do not set '${s}', instead set '${yes}' as desired.`, { cause: { code: 'JACKSPEAK', found: s, wanted: yes } }); + } + } + /** + * Validate that any arbitrary object is a valid configuration `values` + * object. Useful when loading config files or other sources. + */ + validate(o) { + if (!o || typeof o !== 'object') { + throw new Error('Invalid config: not an object', { + cause: { code: 'JACKSPEAK', found: o }, + }); + } + const opts = o; + for (const field in o) { + const value = opts[field]; + /* c8 ignore next - for TS */ + if (value === undefined) + continue; + this.#noNoFields(field, value); + const config = this.#configSet[field]; + if (!config) { + throw new Error(`Unknown config option: ${field}`, { + cause: { code: 'JACKSPEAK', found: field }, + }); + } + if (!isValidValue(value, config.type, !!config.multiple)) { + throw new Error(`Invalid value ${valueType(value)} for ${field}, expected ${valueType(config)}`, { + cause: { + code: 'JACKSPEAK', + name: field, + found: value, + wanted: valueType(config), + }, + }); + } + const cause = config.validOptions && !isValidOption(value, config.validOptions) ? + { name: field, found: value, validOptions: config.validOptions } + : config.validate && !config.validate(value) ? + { name: field, found: value } + : undefined; + if (cause) { + throw new Error(`Invalid config value for ${field}: ${value}`, { + cause: { ...cause, code: 'JACKSPEAK' }, + }); + } + } + } + writeEnv(p) { + if (!this.#env || !this.#envPrefix) + return; + for (const [field, value] of Object.entries(p.values)) { + const my = this.#configSet[field]; + this.#env[toEnvKey(this.#envPrefix, field)] = toEnvVal(value, my?.delim); + } + } + /** + * Add a heading to the usage output banner + */ + heading(text, level, { pre = false } = {}) { + if (level === undefined) { + level = this.#fields.some(r => isHeading(r)) ? 2 : 1; + } + this.#fields.push({ type: 'heading', text, level, pre }); + return this; + } + /** + * Add a long-form description to the usage output at this position. + */ + description(text, { pre } = {}) { + this.#fields.push({ type: 'description', text, pre }); + return this; + } + /** + * Add one or more number fields. + */ + num(fields) { + return this.#addFieldsWith(fields, 'number', false); + } + /** + * Add one or more multiple number fields. + */ + numList(fields) { + return this.#addFieldsWith(fields, 'number', true); + } + /** + * Add one or more string option fields. 
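+     *
+     * e.g. (illustrative): `.opt({ output: { short: 'o', hint: 'file' } })`
+     * defines a `--output=<file>` / `-o<file>` string option.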
+ */ + opt(fields) { + return this.#addFieldsWith(fields, 'string', false); + } + /** + * Add one or more multiple string option fields. + */ + optList(fields) { + return this.#addFieldsWith(fields, 'string', true); + } + /** + * Add one or more flag fields. + */ + flag(fields) { + return this.#addFieldsWith(fields, 'boolean', false); + } + /** + * Add one or more multiple flag fields. + */ + flagList(fields) { + return this.#addFieldsWith(fields, 'boolean', true); + } + /** + * Generic field definition method. Similar to flag/flagList/number/etc, + * but you must specify the `type` (and optionally `multiple` and `delim`) + * fields on each one, or Jack won't know how to define them. + */ + addFields(fields) { + return this.#addFields(this, fields); + } + #addFieldsWith(fields, type, multiple) { + return this.#addFields(this, fields, { + type, + multiple, + }); + } + #addFields(next, fields, opt) { + Object.assign(next.#configSet, Object.fromEntries(Object.entries(fields).map(([name, field]) => { + this.#validateName(name, field); + const { type, multiple } = validateFieldMeta(field, opt); + const value = { ...field, type, multiple }; + validateField(value, type, multiple); + next.#fields.push({ type: 'config', name, value }); + return [name, value]; + }))); + return next; + } + #validateName(name, field) { + if (!/^[a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?$/.test(name)) { + throw new TypeError(`Invalid option name: ${name}, ` + + `must be '-' delimited ASCII alphanumeric`); + } + if (this.#configSet[name]) { + throw new TypeError(`Cannot redefine option ${field}`); + } + if (this.#shorts[name]) { + throw new TypeError(`Cannot redefine option ${name}, already ` + + `in use for ${this.#shorts[name]}`); + } + if (field.short) { + if (!/^[a-zA-Z0-9]$/.test(field.short)) { + throw new TypeError(`Invalid ${name} short option: ${field.short}, ` + + 'must be 1 ASCII alphanumeric character'); + } + if (this.#shorts[field.short]) { + throw new TypeError(`Invalid ${name} short option: ${field.short}, ` + + `already in use for ${this.#shorts[field.short]}`); + } + this.#shorts[field.short] = name; + this.#shorts[name] = name; + } + } + /** + * Return the usage banner for the given configuration + */ + usage() { + if (this.#usage) + return this.#usage; + let headingLevel = 1; + //@ts-ignore + const ui = cliui({ width }); + const first = this.#fields[0]; + let start = first?.type === 'heading' ? 1 : 0; + if (first?.type === 'heading') { + ui.div({ + padding: [0, 0, 0, 0], + text: normalize(first.text), + }); + } + ui.div({ padding: [0, 0, 0, 0], text: 'Usage:' }); + if (this.#options.usage) { + ui.div({ + text: this.#options.usage, + padding: [0, 0, 0, 2], + }); + } + else { + const cmd = basename(String(process.argv[1])); + const shortFlags = []; + const shorts = []; + const flags = []; + const opts = []; + for (const [field, config] of Object.entries(this.#configSet)) { + if (config.short) { + if (config.type === 'boolean') + shortFlags.push(config.short); + else + shorts.push([config.short, config.hint || field]); + } + else { + if (config.type === 'boolean') + flags.push(field); + else + opts.push([field, config.hint || field]); + } + } + const sf = shortFlags.length ? 
' -' + shortFlags.join('') : ''; + const so = shorts.map(([k, v]) => ` --${k}=<${v}>`).join(''); + const lf = flags.map(k => ` --${k}`).join(''); + const lo = opts.map(([k, v]) => ` --${k}=<${v}>`).join(''); + const usage = `${cmd}${sf}${so}${lf}${lo}`.trim(); + ui.div({ + text: usage, + padding: [0, 0, 0, 2], + }); + } + ui.div({ padding: [0, 0, 0, 0], text: '' }); + const maybeDesc = this.#fields[start]; + if (maybeDesc && isDescription(maybeDesc)) { + const print = normalize(maybeDesc.text, maybeDesc.pre); + start++; + ui.div({ padding: [0, 0, 0, 0], text: print }); + ui.div({ padding: [0, 0, 0, 0], text: '' }); + } + const { rows, maxWidth } = this.#usageRows(start); + // every heading/description after the first gets indented by 2 + // extra spaces. + for (const row of rows) { + if (row.left) { + // If the row is too long, don't wrap it + // Bump the right-hand side down a line to make room + const configIndent = indent(Math.max(headingLevel, 2)); + if (row.left.length > maxWidth - 3) { + ui.div({ text: row.left, padding: [0, 0, 0, configIndent] }); + ui.div({ text: row.text, padding: [0, 0, 0, maxWidth] }); + } + else { + ui.div({ + text: row.left, + padding: [0, 1, 0, configIndent], + width: maxWidth, + }, { padding: [0, 0, 0, 0], text: row.text }); + } + if (row.skipLine) { + ui.div({ padding: [0, 0, 0, 0], text: '' }); + } + } + else { + if (isHeading(row)) { + const { level } = row; + headingLevel = level; + // only h1 and h2 have bottom padding + // h3-h6 do not + const b = level <= 2 ? 1 : 0; + ui.div({ ...row, padding: [0, 0, b, indent(level)] }); + } + else { + ui.div({ ...row, padding: [0, 0, 1, indent(headingLevel + 1)] }); + } + } + } + return (this.#usage = ui.toString()); + } + /** + * Return the usage banner markdown for the given configuration + */ + usageMarkdown() { + if (this.#usageMarkdown) + return this.#usageMarkdown; + const out = []; + let headingLevel = 1; + const first = this.#fields[0]; + let start = first?.type === 'heading' ? 1 : 0; + if (first?.type === 'heading') { + out.push(`# ${normalizeOneLine(first.text)}`); + } + out.push('Usage:'); + if (this.#options.usage) { + out.push(normalizeMarkdown(this.#options.usage, true)); + } + else { + const cmd = basename(String(process.argv[1])); + const shortFlags = []; + const shorts = []; + const flags = []; + const opts = []; + for (const [field, config] of Object.entries(this.#configSet)) { + if (config.short) { + if (config.type === 'boolean') + shortFlags.push(config.short); + else + shorts.push([config.short, config.hint || field]); + } + else { + if (config.type === 'boolean') + flags.push(field); + else + opts.push([field, config.hint || field]); + } + } + const sf = shortFlags.length ? 
' -' + shortFlags.join('') : ''; + const so = shorts.map(([k, v]) => ` --${k}=<${v}>`).join(''); + const lf = flags.map(k => ` --${k}`).join(''); + const lo = opts.map(([k, v]) => ` --${k}=<${v}>`).join(''); + const usage = `${cmd}${sf}${so}${lf}${lo}`.trim(); + out.push(normalizeMarkdown(usage, true)); + } + const maybeDesc = this.#fields[start]; + if (maybeDesc && isDescription(maybeDesc)) { + out.push(normalizeMarkdown(maybeDesc.text, maybeDesc.pre)); + start++; + } + const { rows } = this.#usageRows(start); + // heading level in markdown is number of # ahead of text + for (const row of rows) { + if (row.left) { + out.push('#'.repeat(headingLevel + 1) + + ' ' + + normalizeOneLine(row.left, true)); + if (row.text) + out.push(normalizeMarkdown(row.text)); + } + else if (isHeading(row)) { + const { level } = row; + headingLevel = level; + out.push(`${'#'.repeat(headingLevel)} ${normalizeOneLine(row.text, row.pre)}`); + } + else { + out.push(normalizeMarkdown(row.text, !!row.pre)); + } + } + return (this.#usageMarkdown = out.join('\n\n') + '\n'); + } + #usageRows(start) { + // turn each config type into a row, and figure out the width of the + // left hand indentation for the option descriptions. + let maxMax = Math.max(12, Math.min(26, Math.floor(width / 3))); + let maxWidth = 8; + let prev = undefined; + const rows = []; + for (const field of this.#fields.slice(start)) { + if (field.type !== 'config') { + if (prev?.type === 'config') + prev.skipLine = true; + prev = undefined; + field.text = normalize(field.text, !!field.pre); + rows.push(field); + continue; + } + const { value } = field; + const desc = value.description || ''; + const mult = value.multiple ? 'Can be set multiple times' : ''; + const opts = value.validOptions?.length ? + `Valid options:${value.validOptions.map(v => ` ${JSON.stringify(v)}`)}` + : ''; + const dmDelim = desc.includes('\n') ? '\n\n' : '\n'; + const extra = [opts, mult].join(dmDelim).trim(); + const text = (normalize(desc) + dmDelim + extra).trim(); + const hint = value.hint || + (value.type === 'number' ? 'n' + : value.type === 'string' ? field.name + : undefined); + const short = !value.short ? '' + : value.type === 'boolean' ? `-${value.short} ` + : `-${value.short}<${hint}> `; + const left = value.type === 'boolean' ? + `${short}--${field.name}` + : `${short}--${field.name}=<${hint}>`; + const row = { text, left, type: 'config' }; + if (text.length > width - maxMax) { + row.skipLine = true; + } + if (prev && left.length > maxMax) + prev.skipLine = true; + prev = row; + const len = left.length + 4; + if (len > maxWidth && len < maxMax) { + maxWidth = len; + } + rows.push(row); + } + return { rows, maxWidth }; + } + /** + * Return the configuration options as a plain object + */ + toJSON() { + return Object.fromEntries(Object.entries(this.#configSet).map(([field, def]) => [ + field, + { + type: def.type, + ...(def.multiple ? { multiple: true } : {}), + ...(def.delim ? { delim: def.delim } : {}), + ...(def.short ? { short: def.short } : {}), + ...(def.description ? + { description: normalize(def.description) } + : {}), + ...(def.validate ? { validate: def.validate } : {}), + ...(def.validOptions ? { validOptions: def.validOptions } : {}), + ...(def.default !== undefined ? { default: def.default } : {}), + ...(def.hint ? { hint: def.hint } : {}), + }, + ])); + } + /** + * Custom printer for `util.inspect` + */ + [inspect.custom](_, options) { + return `Jack ${inspect(this.toJSON(), options)}`; + } +} +/** + * Main entry point. 
Create and return a {@link Jack} object. + */ +export const jack = (options = {}) => new Jack(options); +// Unwrap and un-indent, so we can wrap description +// strings however makes them look nice in the code. +const normalize = (s, pre = false) => { + if (pre) + // prepend a ZWSP to each line so cliui doesn't strip it. + return s + .split('\n') + .map(l => `\u200b${l}`) + .join('\n'); + return s + .split(/^\s*```\s*$/gm) + .map((s, i) => { + if (i % 2 === 1) { + if (!s.trim()) { + return `\`\`\`\n\`\`\`\n`; + } + // outdent the ``` blocks, but preserve whitespace otherwise. + const split = s.split('\n'); + // throw out the \n at the start and end + split.pop(); + split.shift(); + const si = split.reduce((shortest, l) => { + /* c8 ignore next */ + const ind = l.match(/^\s*/)?.[0] ?? ''; + if (ind.length) + return Math.min(ind.length, shortest); + else + return shortest; + }, Infinity); + /* c8 ignore next */ + const i = isFinite(si) ? si : 0; + return ('\n```\n' + + split.map(s => `\u200b${s.substring(i)}`).join('\n') + + '\n```\n'); + } + return (s + // remove single line breaks, except for lists + .replace(/([^\n])\n[ \t]*([^\n])/g, (_, $1, $2) => !/^[-*]/.test($2) ? `${$1} ${$2}` : `${$1}\n${$2}`) + // normalize mid-line whitespace + .replace(/([^\n])[ \t]+([^\n])/g, '$1 $2') + // two line breaks are enough + .replace(/\n{3,}/g, '\n\n') + // remove any spaces at the start of a line + .replace(/\n[ \t]+/g, '\n') + .trim()); + }) + .join('\n'); +}; +// normalize for markdown printing, remove leading spaces on lines +const normalizeMarkdown = (s, pre = false) => { + const n = normalize(s, pre).replace(/\\/g, '\\\\'); + return pre ? + `\`\`\`\n${n.replace(/\u200b/g, '')}\n\`\`\`` + : n.replace(/\n +/g, '\n').trim(); +}; +const normalizeOneLine = (s, pre = false) => { + const n = normalize(s, pre) + .replace(/[\s\u200b]+/g, ' ') + .trim(); + return pre ? `\`${n}\`` : n; +}; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/jackspeak/dist/esm/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/jackspeak/dist/esm/package.json new file mode 100644 index 0000000000000000000000000000000000000000..3dbc1ca591c0557e35b6004aeba250e6a70b56e3 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/jackspeak/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/json-parse-even-better-errors/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/json-parse-even-better-errors/lib/index.js new file mode 100644 index 0000000000000000000000000000000000000000..3ffdaac96d2dc8056af709605b7b1ad8f7cca250 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/json-parse-even-better-errors/lib/index.js @@ -0,0 +1,137 @@ +'use strict' + +const INDENT = Symbol.for('indent') +const NEWLINE = Symbol.for('newline') + +const DEFAULT_NEWLINE = '\n' +const DEFAULT_INDENT = ' ' +const BOM = /^\uFEFF/ + +// only respect indentation if we got a line break, otherwise squash it +// things other than objects and arrays aren't indented, so ignore those +// Important: in both of these regexps, the $1 capture group is the newline +// or undefined, and the $2 capture group is the indent, or undefined. 
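+//
+// Illustrative examples (added, not upstream comments):
+//   '{\n  "a": 1\n}' -> newline '\n', indent '  '   (FORMAT match)
+//   '{"a":1}'        -> newline '',   indent ''     (compact, no match)
+//   '{}'             -> default newline and indent  (EMPTY match)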
+const FORMAT = /^\s*[{[]((?:\r?\n)+)([\s\t]*)/ +const EMPTY = /^(?:\{\}|\[\])((?:\r?\n)+)?$/ + +// Node 20 puts single quotes around the token and a comma after it +const UNEXPECTED_TOKEN = /^Unexpected token '?(.)'?(,)? /i + +const hexify = (char) => { + const h = char.charCodeAt(0).toString(16).toUpperCase() + return `0x${h.length % 2 ? '0' : ''}${h}` +} + +// Remove byte order marker. This catches EF BB BF (the UTF-8 BOM) +// because the buffer-to-string conversion in `fs.readFileSync()` +// translates it to FEFF, the UTF-16 BOM. +const stripBOM = (txt) => String(txt).replace(BOM, '') + +const makeParsedError = (msg, parsing, position = 0) => ({ + message: `${msg} while parsing ${parsing}`, + position, +}) + +const parseError = (e, txt, context = 20) => { + let msg = e.message + + if (!txt) { + return makeParsedError(msg, 'empty string') + } + + const badTokenMatch = msg.match(UNEXPECTED_TOKEN) + const badIndexMatch = msg.match(/ position\s+(\d+)/i) + + if (badTokenMatch) { + msg = msg.replace( + UNEXPECTED_TOKEN, + `Unexpected token ${JSON.stringify(badTokenMatch[1])} (${hexify(badTokenMatch[1])})$2 ` + ) + } + + let errIdx + if (badIndexMatch) { + errIdx = +badIndexMatch[1] + } else /* istanbul ignore next - doesnt happen in Node 22 */ if ( + msg.match(/^Unexpected end of JSON.*/i) + ) { + errIdx = txt.length - 1 + } + + if (errIdx == null) { + return makeParsedError(msg, `'${txt.slice(0, context * 2)}'`) + } + + const start = errIdx <= context ? 0 : errIdx - context + const end = errIdx + context >= txt.length ? txt.length : errIdx + context + const slice = `${start ? '...' : ''}${txt.slice(start, end)}${end === txt.length ? '' : '...'}` + + return makeParsedError( + msg, + `${txt === slice ? '' : 'near '}${JSON.stringify(slice)}`, + errIdx + ) +} + +class JSONParseError extends SyntaxError { + constructor (er, txt, context, caller) { + const metadata = parseError(er, txt, context) + super(metadata.message) + Object.assign(this, metadata) + this.code = 'EJSONPARSE' + this.systemError = er + Error.captureStackTrace(this, caller || this.constructor) + } + + get name () { + return this.constructor.name + } + + set name (n) {} + + get [Symbol.toStringTag] () { + return this.constructor.name + } +} + +const parseJson = (txt, reviver) => { + const result = JSON.parse(txt, reviver) + if (result && typeof result === 'object') { + // get the indentation so that we can save it back nicely + // if the file starts with {" then we have an indent of '', ie, none + // otherwise, pick the indentation of the next line after the first \n If the + // pattern doesn't match, then it means no indentation. JSON.stringify ignores + // symbols, so this is reasonably safe. if the string is '{}' or '[]', then + // use the default 2-space indent. + const match = txt.match(EMPTY) || txt.match(FORMAT) || [null, '', ''] + result[NEWLINE] = match[1] ?? DEFAULT_NEWLINE + result[INDENT] = match[2] ?? DEFAULT_INDENT + } + return result +} + +const parseJsonError = (raw, reviver, context) => { + const txt = stripBOM(raw) + try { + return parseJson(txt, reviver) + } catch (e) { + if (typeof raw !== 'string' && !Buffer.isBuffer(raw)) { + const msg = Array.isArray(raw) && raw.length === 0 ? 
'an empty array' : String(raw) + throw Object.assign( + new TypeError(`Cannot parse ${msg}`), + { code: 'EJSONPARSE', systemError: e } + ) + } + throw new JSONParseError(e, txt, context, parseJsonError) + } +} + +module.exports = parseJsonError +parseJsonError.JSONParseError = JSONParseError +parseJsonError.noExceptions = (raw, reviver) => { + try { + return parseJson(stripBOM(raw), reviver) + } catch { + // no exceptions + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/jsonparse/examples/twitterfeed.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/jsonparse/examples/twitterfeed.js new file mode 100644 index 0000000000000000000000000000000000000000..10210d470b6d006443a967db9960594d102bf4f7 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/jsonparse/examples/twitterfeed.js @@ -0,0 +1,30 @@ +var Parser = require('../jsonparse'); +var Http = require('http'); +require('./colors'); +var p = new Parser(); +var cred = require('./credentials'); +var client = Http.createClient(80, "stream.twitter.com"); +var request = client.request("GET", "/1/statuses/sample.json", { + "Host": "stream.twitter.com", + "Authorization": (new Buffer(cred.username + ":" + cred.password)).toString("base64") +}); +request.on('response', function (response) { + console.log(response.statusCode); + console.dir(response.headers); + response.on('data', function (chunk) { + p.write(chunk); + }); + response.on('end', function () { + console.log("END"); + }); +}); +request.end(); +var text = "", name = ""; +p.onValue = function (value) { + if (this.stack.length === 1 && this.key === 'text') { text = value; } + if (this.stack.length === 2 && this.key === 'name' && this.stack[1].key === 'user') { name = value; } + if (this.stack.length === 0) { + console.log(text.blue + " - " + name.yellow); + text = name = ""; + } +}; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/jsonparse/samplejson/basic.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/jsonparse/samplejson/basic.json new file mode 100644 index 0000000000000000000000000000000000000000..950dff9e9d93d2eeb9d3a2b36435c81d1d8d3797 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/jsonparse/samplejson/basic.json @@ -0,0 +1,167 @@ +[ + { + }, + { + "image": [ + {"shape": "rect", "fill": "#333", "stroke": "#999", "x": 0.5e+1, "y": 0.5, "z": 0.8e-0, "w": 0.5e5, "u": 2E10, "foo": 2E+1, "bar": 2E-0, "width": 47, "height": 47} + ], + "jumpable": 3, + "solid": { + "1": [2,4], + "2": [], + "3": [2,6], + "4": [], + "5": [2,8,1,3,7,9,4,6], + "6": [], + "7": [4,8], + "8": [], + "9": [6,8] + }, + "corners": {"1": true,"3": true,"7": true,"9": true} + }, + { + "image": [ + {"shape": "polygon", "fill": "#248", "stroke": "#48f", "points": [[0.5,47.5],[47.5,47.5],[47.5,0.5]]} + ], + "solid": { + "1": [2,4], + "2": [1], + "3": [2], + "4": [], + "5": [2,8,1,3,7,9,4,6], + "6": [], + "7": [4,8], + "8": [], + "9": [6,8] + }, + "corners": {"1": true,"3": true,"7": false,"9": true} + }, + { + "image": [ + {"shape": "polygon", "fill": "#248", "stroke": "#48f", "points": [[0.5,0.5],[47.5,47.5],[0.5,47.5]]} + ], + "solid": { + "1": [2], + "2": [3], + "3": [2,6], + "4": [], + "5": [2,8,1,3,7,9,4,6], + "6": [], + "7": [4,8], + "8": [], + "9": [6,8] + }, + "corners": {"1": true,"3": true,"7": true,"9": false} + }, + { + "image": [ + {"shape": "polygon", "fill": "#333", "stroke": "#999", "points": 
[[0.5,0.5],[47.5,47.5],[47.5,0.5]]} + ], + "jumpable": 3, + "solid": { + "1": [2,4], + "2": [], + "3": [2,6], + "4": [], + "5": [2,8,1,3,7,9,4,6], + "6": [3], + "7": [4,8], + "8": [7], + "9": [6,8] + }, + "corners": {"1": false,"3": true,"7": true,"9": true} + }, + { + "image": [ + {"shape": "polygon", "fill": "#333", "stroke": "#999", "points": [[0.5,0.5],[0.5,47.5],[47.5,0.5]]} + ], + "jumpable": 3, + "solid": { + "1": [2,4], + "2": [], + "3": [2,6], + "4": [1], + "5": [2,8,1,3,7,9,4,6], + "6": [], + "7": [4,8], + "8": [9], + "9": [6,8] + }, + "corners": {"1": true,"3": false,"7": true,"9": true} + }, + { + "image": [ + {"shape": "polygon", "fill": "#482", "stroke": "#8f4", "points": [[0.5,47.5],[0.5,23.5],[24.5,23.5],[24.5,0.5],[47.5,0.5],[47.5,47.5]]} + ], + "jumpable": 3, + "solid": { + "1": [2,4], + "2": [], + "3": [6,2], + "4": [], + "5": [2,8,1,3,7,9,4,6], + "6": [9], + "7": [4,8], + "8": [], + "9": [6,8] + }, + "corners": {"1": true,"3": true,"7": false,"9": true} + }, + { + "image": [ + {"shape": "polygon", "fill": "#482", "stroke": "#8f4", "points": [[0.5,0.5],[23.5,0.5],[23.5,24.5],[47.5,24.5],[47.5,47.5],[0.5,47.5]]} + ], + "jumpable": 3, + "solid": { + "1": [4,2], + "2": [], + "3": [2,6], + "4": [7], + "5": [2,8,1,3,7,9,4,6], + "6": [], + "7": [4,8], + "8": [], + "9": [6,8] + }, + "corners": {"1": true,"3": true,"7": true,"9": false} + }, + { + "image": [ + {"shape": "circle", "fill": "#ff0", "stroke": "#ff8", "cx": 24, "cy": 24, "r": 18} + ], + "item": true + }, + { + "image": [ + {"shape": "polygon", "fill": "#842", "stroke": "#f84", "points": [[4.5,0.5],[14.5,0.5],[14.5,17.5],[34,17.5],[33.5,0.5],[43.5,0.5],[43.5,47.5],[33.5,47.5],[33.5,30.5],[14.5,30.5],[14.5,47.5],[4.5,47.5]]} + ], + "jumpable": 3 + }, + { + "image": [ + {"shape": "polygon", "fill": "#333", "stroke": "#999", "points": [[0.5,0.5],[47.5,0.5],[24,47.5]]} + ], + "jumpable": 3, + "solid": { + "1": [2,4], + "2": [], + "3": [2,6], + "4": [1], + "5": [2,8,1,3,7,9,4,6], + "6": [3], + "7": [4,8], + "8": [], + "9": [6,8] + }, + "corners": {"1": false,"3": false,"7": true,"9": true} + }, + { + "image": [ + {"shape": "rect", "fill": "#114acb", "x": 0.5, "y": 0.5, "width": 47, "height": 47}, + {"shape": "polygon", "fill": "rgba(255,255,255,0.30)", "points": [[0.5,0.5],[47.5,0.5],[40,8],[8,8],[8,40],[0.5,47.5]]}, + {"shape": "polygon", "fill": "rgba(0,0,0,0.30)", "points": [[47.5,0.5],[48,48],[0.5,47.5],[8,40],[40,40],[40,8]]}, + {"shape": "polygon", "fill": "rgb(255,255,0)", "stroke": "rgba(255,255,0,0.5)", "points": [[24,9],[35,20],[26,29],[26,33],[22,33],[22,27],[29,20],[24,15],[16,23],[13,20]]}, + {"shape": "rect", "fill": "rgb(255,255,0)", "stroke": "rgba(255,255,0,0.5)", "x": 22, "y":35, "width": 4, "height": 4} + ] + } +] diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/jsonparse/samplejson/basic2.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/jsonparse/samplejson/basic2.json new file mode 100644 index 0000000000000000000000000000000000000000..3a6919b2ea274a44c4f02a88a86aded980d12d27 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/jsonparse/samplejson/basic2.json @@ -0,0 +1,180 @@ +[ + { + }, + { + "image": [ + {"shape": "rect", "fill": "#333", "stroke": "#999", "x": 0.5, "y": 0.5, "width": 47, "height": 47} + ], + "jumpable": 3, + "solid": { + "1": [2,4], + "2": [], + "3": [2,6], + "4": [], + "5": [2,8,1,3,7,9,4,6], + "6": [], + "7": [4,8], + "8": [], + "9": [6,8] + }, + "corners": {"1": true,"3": 
true,"7": true,"9": true} + }, + { + "image": [ + {"shape": "polygon", "fill": "#248", "stroke": "#48f", "points": [[0.5,47.5],[47.5,47.5],[47.5,0.5]]} + ], + "solid": { + "1": [2,4], + "2": [1], + "3": [2], + "4": [], + "5": [2,8,1,3,7,9,4,6], + "6": [], + "7": [4,8], + "8": [], + "9": [6,8] + }, + "corners": {"1": true,"3": true,"7": false,"9": true} + }, + { + "image": [ + {"shape": "polygon", "fill": "#248", "stroke": "#48f", "points": [[0.5,0.5],[47.5,47.5],[0.5,47.5]]} + ], + "solid": { + "1": [2], + "2": [3], + "3": [2,6], + "4": [], + "5": [2,8,1,3,7,9,4,6], + "6": [], + "7": [4,8], + "8": [], + "9": [6,8] + }, + "corners": {"1": true,"3": true,"7": true,"9": false} + }, + { + "image": [ + {"shape": "polygon", "fill": "#333", "stroke": "#999", "points": [[0.5,0.5],[47.5,47.5],[47.5,0.5]]} + ], + "jumpable": 3, + "solid": { + "1": [2,4], + "2": [], + "3": [2,6], + "4": [], + "5": [2,8,1,3,7,9,4,6], + "6": [3], + "7": [4,8], + "8": [7], + "9": [6,8] + }, + "corners": {"1": false,"3": true,"7": true,"9": true} + }, + { + "image": [ + {"shape": "polygon", "fill": "#333", "stroke": "#999", "points": [[0.5,0.5],[0.5,47.5],[47.5,0.5]]} + ], + "jumpable": 3, + "solid": { + "1": [2,4], + "2": [], + "3": [2,6], + "4": [1], + "5": [2,8,1,3,7,9,4,6], + "6": [], + "7": [4,8], + "8": [9], + "9": [6,8] + }, + "corners": {"1": true,"3": false,"7": true,"9": true} + }, + { + "image": [ + {"shape": "polygon", "fill": "#482", "stroke": "#8f4", "points": [[0.5,47.5],[0.5,23.5],[24.5,23.5],[24.5,0.5],[47.5,0.5],[47.5,47.5]]} + ], + "jumpable": 3, + "solid": { + "1": [2,4], + "2": [], + "3": [6,2], + "4": [], + "5": [2,8,1,3,7,9,4,6], + "6": [9], + "7": [4,8], + "8": [], + "9": [6,8] + }, + "corners": {"1": true,"3": true,"7": false,"9": true} + }, + { + "image": [ + {"shape": "polygon", "fill": "#482", "stroke": "#8f4", "points": [[0.5,0.5],[23.5,0.5],[23.5,24.5],[47.5,24.5],[47.5,47.5],[0.5,47.5]]} + ], + "jumpable": 3, + "solid": { + "1": [4,2], + "2": [], + "3": [2,6], + "4": [7], + "5": [2,8,1,3,7,9,4,6], + "6": [], + "7": [4,8], + "8": [], + "9": [6,8] + }, + "corners": {"1": true,"3": true,"7": true,"9": false} + }, + { + "image": [ + {"shape": "circle", "fill": "#ff0", "stroke": "#ff8", "cx": 24, "cy": 24, "r": 18} + ], + "item": true + }, + { + "image": [ + {"shape": "polygon", "fill": "#842", "stroke": "#f84", "points": [[4.5,0.5],[14.5,0.5],[14.5,17.5],[34,17.5],[33.5,0.5],[43.5,0.5],[43.5,47.5],[33.5,47.5],[33.5,30.5],[14.5,30.5],[14.5,47.5],[4.5,47.5]]} + ], + "jumpable": 3 + }, + { + "image": [ + {"shape": "polygon", "fill": "#333", "stroke": "#999", "points": [[0.5,0.5],[47.5,0.5],[24,47.5]]} + ], + "jumpable": 3, + "solid": { + "1": [2,4], + "2": [], + "3": [2,6], + "4": [1], + "5": [2,8,1,3,7,9,4,6], + "6": [3], + "7": [4,8], + "8": [], + "9": [6,8] + }, + "corners": {"1": false,"3": false,"7": true,"9": true} + }, + { + "image": [ + {"shape": "rect", "fill": "#114acb", "x": 0.5, "y": 0.5, "width": 47, "height": 47}, + {"shape": "polygon", "fill": "rgba(255,255,255,0.30)", "points": [[0.5,0.5],[47.5,0.5],[40,8],[8,8],[8,40],[0.5,47.5]]}, + {"shape": "polygon", "fill": "rgba(0,0,0,0.30)", "points": [[47.5,0.5],[48,48],[0.5,47.5],[8,40],[40,40],[40,8]]}, + {"shape": "polygon", "fill": "rgb(255,255,0)", "stroke": "rgba(255,255,0,0.5)", "points": [[24,9],[35,20],[26,29],[26,33],[22,33],[22,27],[29,20],[24,15],[16,23],[13,20]]}, + {"shape": "rect", "fill": "rgb(255,255,0)", "stroke": "rgba(255,255,0,0.5)", "x": 22, "y":35, "width": 4, "height": 4} + ], + "item": true + }, + { + "image": [ + 
{"shape": "circle", "fill": "#80f", "stroke": "#88f", "cx": 24, "cy": 24, "r": 18} + ], + "item": true + }, + { + "image": [ + {"shape": "circle", "fill": "#4f4", "stroke": "#8f8", "cx": 24, "cy": 24, "r": 18} + ], + "item": true + } +] diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/just-diff-apply/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/just-diff-apply/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..5d2c6e577af72cb6817e972f7a49c47fd4f1d9dd --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/just-diff-apply/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 angus croll + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/just-diff-apply/index.cjs b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/just-diff-apply/index.cjs new file mode 100644 index 0000000000000000000000000000000000000000..c5d2c3265a8520038436e32a8eab7c39f65c3236 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/just-diff-apply/index.cjs @@ -0,0 +1,161 @@ +module.exports = { + diffApply: diffApply, + jsonPatchPathConverter: jsonPatchPathConverter, +}; + +/* + const obj1 = {a: 3, b: 5}; + diffApply(obj1, + [ + { "op": "remove", "path": ['b'] }, + { "op": "replace", "path": ['a'], "value": 4 }, + { "op": "add", "path": ['c'], "value": 5 } + ] + ); + obj1; // {a: 4, c: 5} + + // using converter to apply jsPatch standard paths + // see http://jsonpatch.com + import {diff, jsonPatchPathConverter} from 'just-diff' + const obj2 = {a: 3, b: 5}; + diffApply(obj2, [ + { "op": "remove", "path": '/b' }, + { "op": "replace", "path": '/a', "value": 4 } + { "op": "add", "path": '/c', "value": 5 } + ], jsonPatchPathConverter); + obj2; // {a: 4, c: 5} + + // arrays + const obj3 = {a: 4, b: [1, 2, 3]}; + diffApply(obj3, [ + { "op": "replace", "path": ['a'], "value": 3 } + { "op": "replace", "path": ['b', 2], "value": 4 } + { "op": "add", "path": ['b', 3], "value": 9 } + ]); + obj3; // {a: 3, b: [1, 2, 4, 9]} + + // nested paths + const obj4 = {a: 4, b: {c: 3}}; + diffApply(obj4, [ + { "op": "replace", "path": ['a'], "value": 5 } + { "op": "remove", "path": ['b', 'c']} + { "op": "add", "path": ['b', 'd'], "value": 4 } + ]); + obj4; // {a: 5, b: {d: 4}} +*/ + +var REMOVE = 'remove'; +var REPLACE = 'replace'; +var ADD = 'add'; +var MOVE = 'move'; + +function diffApply(obj, diff, pathConverter) { + if (!obj || typeof obj != 'object') { + throw new Error('base object must be an object or an array'); + } + + if (!Array.isArray(diff)) { + throw new Error('diff must be an array'); + } + + var diffLength = diff.length; + for (var i = 0; i < diffLength; i++) { + var thisDiff = diff[i]; + var subObject = obj; + var thisOp = thisDiff.op; + + var thisPath = transformPath(pathConverter, thisDiff.path); + var thisFromPath = thisDiff.from && transformPath(pathConverter, thisDiff.from); + var toPath, toPathCopy, lastToProp, subToObject, valueToMove; + + if (thisFromPath) { + // MOVE only, "fromPath" is effectively path and "path" is toPath + toPath = thisPath; + thisPath = thisFromPath; + + toPathCopy = toPath.slice(); + lastToProp = toPathCopy.pop(); + prototypeCheck(lastToProp); + if (lastToProp == null) { + return false; + } + + var thisToProp; + while (((thisToProp = toPathCopy.shift())) != null) { + prototypeCheck(thisToProp); + if (!(thisToProp in subToObject)) { + subToObject[thisToProp] = {}; + } + subToObject = subToObject[thisToProp]; + } + } + + var pathCopy = thisPath.slice(); + var lastProp = pathCopy.pop(); + prototypeCheck(lastProp); + if (lastProp == null) { + return false; + } + + var thisProp; + while (((thisProp = pathCopy.shift())) != null) { + prototypeCheck(thisProp); + if (!(thisProp in subObject)) { + subObject[thisProp] = {}; + } + subObject = subObject[thisProp]; + } + if (thisOp === REMOVE || thisOp === REPLACE || thisOp === MOVE) { + var path = thisOp === MOVE ? 
thisDiff.from : thisDiff.path; + if (!subObject.hasOwnProperty(lastProp)) { + throw new Error(['expected to find property', path, 'in object', obj].join(' ')); + } + } + if (thisOp === REMOVE || thisOp === MOVE) { + if (thisOp === MOVE) { + valueToMove = subObject[lastProp]; + } + Array.isArray(subObject) ? subObject.splice(lastProp, 1) : delete subObject[lastProp]; + } + if (thisOp === REPLACE || thisOp === ADD) { + subObject[lastProp] = thisDiff.value; + } + + if (thisOp === MOVE) { + subObject[lastToProp] = valueToMove; + } + } + return subObject; +} + +function transformPath(pathConverter, thisPath) { + if(pathConverter) { + thisPath = pathConverter(thisPath); + if(!Array.isArray(thisPath)) { + throw new Error([ + 'pathConverter must return an array, returned:', + thisPath, + ].join(' ')); + } + } else { + if(!Array.isArray(thisPath)) { + throw new Error([ + 'diff path', + thisPath, + 'must be an array, consider supplying a path converter'] + .join(' ')); + } + } + return thisPath; +} + +function jsonPatchPathConverter(stringPath) { + return stringPath.split('/').slice(1); +} + +function prototypeCheck(prop) { + // coercion is intentional to catch prop values like `['__proto__']` + if (prop == '__proto__' || prop == 'constructor' || prop == 'prototype') { + throw new Error('setting of prototype values not supported'); + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/just-diff-apply/index.mjs b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/just-diff-apply/index.mjs new file mode 100644 index 0000000000000000000000000000000000000000..adc5f46ed51df7f7fc98a52f3173a61546900e15 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/just-diff-apply/index.mjs @@ -0,0 +1,158 @@ +/* + const obj1 = {a: 3, b: 5}; + diffApply(obj1, + [ + { "op": "remove", "path": ['b'] }, + { "op": "replace", "path": ['a'], "value": 4 }, + { "op": "add", "path": ['c'], "value": 5 } + ] + ); + obj1; // {a: 4, c: 5} + + // using converter to apply jsPatch standard paths + // see http://jsonpatch.com + import {diff, jsonPatchPathConverter} from 'just-diff' + const obj2 = {a: 3, b: 5}; + diffApply(obj2, [ + { "op": "remove", "path": '/b' }, + { "op": "replace", "path": '/a', "value": 4 } + { "op": "add", "path": '/c', "value": 5 } + ], jsonPatchPathConverter); + obj2; // {a: 4, c: 5} + + // arrays + const obj3 = {a: 4, b: [1, 2, 3]}; + diffApply(obj3, [ + { "op": "replace", "path": ['a'], "value": 3 } + { "op": "replace", "path": ['b', 2], "value": 4 } + { "op": "add", "path": ['b', 3], "value": 9 } + ]); + obj3; // {a: 3, b: [1, 2, 4, 9]} + + // nested paths + const obj4 = {a: 4, b: {c: 3}}; + diffApply(obj4, [ + { "op": "replace", "path": ['a'], "value": 5 } + { "op": "remove", "path": ['b', 'c']} + { "op": "add", "path": ['b', 'd'], "value": 4 } + ]); + obj4; // {a: 5, b: {d: 4}} +*/ + +var REMOVE = 'remove'; +var REPLACE = 'replace'; +var ADD = 'add'; +var MOVE = 'move'; + +function diffApply(obj, diff, pathConverter) { + if (!obj || typeof obj != 'object') { + throw new Error('base object must be an object or an array'); + } + + if (!Array.isArray(diff)) { + throw new Error('diff must be an array'); + } + + var diffLength = diff.length; + for (var i = 0; i < diffLength; i++) { + var thisDiff = diff[i]; + var subObject = obj; + var thisOp = thisDiff.op; + + var thisPath = transformPath(pathConverter, thisDiff.path); + var thisFromPath = thisDiff.from && transformPath(pathConverter, thisDiff.from); + var toPath, 
toPathCopy, lastToProp, subToObject, valueToMove; + + if (thisFromPath) { + // MOVE only, "fromPath" is effectively path and "path" is toPath + toPath = thisPath; + thisPath = thisFromPath; + + toPathCopy = toPath.slice(); + lastToProp = toPathCopy.pop(); + prototypeCheck(lastToProp); + if (lastToProp == null) { + return false; + } + + var thisToProp; + while (((thisToProp = toPathCopy.shift())) != null) { + prototypeCheck(thisToProp); + if (!(thisToProp in subToObject)) { + subToObject[thisToProp] = {}; + } + subToObject = subToObject[thisToProp]; + } + } + + var pathCopy = thisPath.slice(); + var lastProp = pathCopy.pop(); + prototypeCheck(lastProp); + if (lastProp == null) { + return false; + } + + var thisProp; + while (((thisProp = pathCopy.shift())) != null) { + prototypeCheck(thisProp); + if (!(thisProp in subObject)) { + subObject[thisProp] = {}; + } + subObject = subObject[thisProp]; + } + if (thisOp === REMOVE || thisOp === REPLACE || thisOp === MOVE) { + var path = thisOp === MOVE ? thisDiff.from : thisDiff.path; + if (!subObject.hasOwnProperty(lastProp)) { + throw new Error(['expected to find property', path, 'in object', obj].join(' ')); + } + } + if (thisOp === REMOVE || thisOp === MOVE) { + if (thisOp === MOVE) { + valueToMove = subObject[lastProp]; + } + Array.isArray(subObject) ? subObject.splice(lastProp, 1) : delete subObject[lastProp]; + } + if (thisOp === REPLACE || thisOp === ADD) { + subObject[lastProp] = thisDiff.value; + } + + if (thisOp === MOVE) { + subObject[lastToProp] = valueToMove; + } + } + return subObject; +} + +function transformPath(pathConverter, thisPath) { + if(pathConverter) { + thisPath = pathConverter(thisPath); + if(!Array.isArray(thisPath)) { + throw new Error([ + 'pathConverter must return an array, returned:', + thisPath, + ].join(' ')); + } + } else { + if(!Array.isArray(thisPath)) { + throw new Error([ + 'diff path', + thisPath, + 'must be an array, consider supplying a path converter'] + .join(' ')); + } + } + return thisPath; +} + +function jsonPatchPathConverter(stringPath) { + return stringPath.split('/').slice(1); +} + +function prototypeCheck(prop) { + // coercion is intentional to catch prop values like `['__proto__']` + if (prop == '__proto__' || prop == 'constructor' || prop == 'prototype') { + throw new Error('setting of prototype values not supported'); + } +} + +export {diffApply, jsonPatchPathConverter}; diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/just-diff-apply/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/just-diff-apply/package.json new file mode 100644 index 0000000000000000000000000000000000000000..be2879aacfadc25d0b21f7bb9c9cdeb6c22a003c --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/just-diff-apply/package.json @@ -0,0 +1,34 @@ +{ + "name": "just-diff-apply", + "version": "5.5.0", + "description": "Apply a diff to an object. 
Optionally supports jsonPatch protocol", + "type": "module", + "exports": { + ".": { + "types": "./index.d.ts", + "require": "./index.cjs", + "import": "./index.mjs" + }, + "./package.json": "./package.json" + }, + "main": "index.cjs", + "types": "index.d.ts", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1", + "build": "rollup -c" + }, + "repository": "https://github.com/angus-c/just", + "keywords": [ + "object", + "diff", + "apply", + "jsonPatch", + "no-dependencies", + "just" + ], + "author": "Angus Croll", + "license": "MIT", + "bugs": { + "url": "https://github.com/angus-c/just/issues" + } +} \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/just-diff-apply/rollup.config.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/just-diff-apply/rollup.config.js new file mode 100644 index 0000000000000000000000000000000000000000..fb9d24a3d845b1f51164da19b783f85bea75046a --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/just-diff-apply/rollup.config.js @@ -0,0 +1,3 @@ +const createRollupConfig = require('../../config/createRollupConfig'); + +module.exports = createRollupConfig(__dirname); diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmaccess/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmaccess/lib/index.js new file mode 100644 index 0000000000000000000000000000000000000000..71ba6ea352acd11a13e1ceb4a8e8c4f74df5e03c --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmaccess/lib/index.js @@ -0,0 +1,140 @@ +'use strict' + +const npa = require('npm-package-arg') +const npmFetch = require('npm-registry-fetch') + +const npar = (spec) => { + spec = npa(spec) + if (!spec.registry) { + throw new Error('must use package name only') + } + return spec +} + +const parseTeam = (scopeTeam) => { + let slice = 0 + if (scopeTeam.startsWith('@')) { + slice = 1 + } + const [scope, team] = scopeTeam.slice(slice).split(':').map(encodeURIComponent) + return { scope, team } +} + +const getPackages = async (scopeTeam, opts) => { + const { scope, team } = parseTeam(scopeTeam) + + let uri + if (team) { + uri = `/-/team/${scope}/${team}/package` + } else { + uri = `/-/org/${scope}/package` + } + try { + return await npmFetch.json(uri, opts) + } catch (err) { + if (err.code === 'E404') { + uri = `/-/user/${scope}/package` + return npmFetch.json(uri, opts) + } + throw err + } +} + +const getCollaborators = async (pkg, opts) => { + const spec = npar(pkg) + const uri = `/-/package/${spec.escapedName}/collaborators` + return npmFetch.json(uri, opts) +} + +const getVisibility = async (pkg, opts) => { + const spec = npar(pkg) + const uri = `/-/package/${spec.escapedName}/visibility` + return npmFetch.json(uri, opts) +} + +const setAccess = async (pkg, access, opts) => { + const spec = npar(pkg) + const uri = `/-/package/${spec.escapedName}/access` + await npmFetch(uri, { + ...opts, + method: 'POST', + body: { access }, + spec, + ignoreBody: true, + }) + return true +} + +const setMfa = async (pkg, level, opts) => { + const spec = npar(pkg) + const body = {} + switch (level) { + case 'none': + body.publish_requires_tfa = false + break + case 'publish': + // tfa is required, automation tokens cannot override tfa + body.publish_requires_tfa = true + body.automation_token_overrides_tfa = false + break + case 'automation': + // tfa is required, automation 
tokens can override tfa + body.publish_requires_tfa = true + body.automation_token_overrides_tfa = true + break + default: + throw new Error(`Invalid mfa setting ${level}`) + } + const uri = `/-/package/${spec.escapedName}/access` + await npmFetch(uri, { + ...opts, + method: 'POST', + body, + spec, + ignoreBody: true, + }) + return true +} + +const setPermissions = async (scopeTeam, pkg, permissions, opts) => { + const spec = npar(pkg) + const { scope, team } = parseTeam(scopeTeam) + if (!scope || !team) { + throw new Error('team must be in format `scope:team`') + } + const uri = `/-/team/${scope}/${team}/package` + await npmFetch(uri, { + ...opts, + method: 'PUT', + body: { package: spec.name, permissions }, + scope, + spec, + ignoreBody: true, + }) + return true +} + +const removePermissions = async (scopeTeam, pkg, opts) => { + const spec = npar(pkg) + const { scope, team } = parseTeam(scopeTeam) + const uri = `/-/team/${scope}/${team}/package` + await npmFetch(uri, { + ...opts, + method: 'DELETE', + body: { package: spec.name }, + scope, + spec, + ignoreBody: true, + }) + return true +} + +module.exports = { + getCollaborators, + getPackages, + getVisibility, + removePermissions, + setAccess, + setMfa, + setPermissions, +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmdiff/lib/format-diff.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmdiff/lib/format-diff.js new file mode 100644 index 0000000000000000000000000000000000000000..f50738207c854afcf8698bbfb99b1d67a051b72c --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmdiff/lib/format-diff.js @@ -0,0 +1,118 @@ +const jsDiff = require('diff') + +const shouldPrintPatch = require('./should-print-patch.js') + +const colors = { + // red + removed: { open: '\x1B[31m', close: '\x1B[39m' }, + // green + added: { open: '\x1B[32m', close: '\x1B[39m' }, + // blue + header: { open: '\x1B[34m', close: '\x1B[39m' }, + // cyan + section: { open: '\x1B[36m', close: '\x1B[39m' }, +} + +const color = (colorStr, colorId) => { + const { open, close } = colors[colorId] + // avoid highlighting the "\n" (would highlight till the end of the line) + return colorStr.replace(/[^\n\r]+/g, open + '$&' + close) +} + +const formatDiff = async ({ files, opts = {}, refs, versions }) => { + let res = '' + const srcPrefix = opts.diffNoPrefix ? '' : opts.diffSrcPrefix || 'a/' + const dstPrefix = opts.diffNoPrefix ? 
'' : opts.diffDstPrefix || 'b/' + + for (const filename of files.values()) { + const names = { + a: `${srcPrefix}${filename}`, + b: `${dstPrefix}${filename}`, + } + + let fileMode = '' + const filenames = { + a: refs.get(`a/${filename}`), + b: refs.get(`b/${filename}`), + } + const contents = { + a: filenames.a && filenames.a.content, + b: filenames.b && filenames.b.content, + } + const modes = { + a: filenames.a && filenames.a.mode, + b: filenames.b && filenames.b.mode, + } + + if (contents.a === contents.b && modes.a === modes.b) { + continue + } + + if (opts.diffNameOnly) { + res += `${filename}\n` + continue + } + + let patch = '' + let headerLength = 0 + const header = str => { + headerLength++ + patch += `${str}\n` + } + + // manually build a git diff-compatible header + header(`diff --git ${names.a} ${names.b}`) + if (modes.a === modes.b) { + fileMode = filenames.a.mode + } else { + if (modes.a && !modes.b) { + header(`deleted file mode ${modes.a}`) + } else if (!modes.a && modes.b) { + header(`new file mode ${modes.b}`) + } else { + header(`old mode ${modes.a}`) + header(`new mode ${modes.b}`) + } + } + /* eslint-disable-next-line max-len */ + header(`index ${opts.tagVersionPrefix || 'v'}${versions.a}..${opts.tagVersionPrefix || 'v'}${versions.b} ${fileMode}`) + + if (await shouldPrintPatch(filename)) { + patch += jsDiff.createTwoFilesPatch( + names.a, + names.b, + contents.a || '', + contents.b || '', + '', + '', + { + context: opts.diffUnified === 0 ? 0 : opts.diffUnified || 3, + ignoreWhitespace: opts.diffIgnoreAllSpace, + } + ).replace( + '===================================================================\n', + '' + ).replace(/\t\n/g, '\n') // strip trailing tabs + headerLength += 2 + } else { + header(`--- ${names.a}`) + header(`+++ ${names.b}`) + } + + if (opts.color) { + // this RegExp will include all the `\n` chars into the lines, easier to join + const lines = patch.split(/^/m) + res += color(lines.slice(0, headerLength).join(''), 'header') + res += lines.slice(headerLength).join('') + .replace(/^-.*/gm, color('$&', 'removed')) + .replace(/^\+.*/gm, color('$&', 'added')) + .replace(/^@@.+@@/gm, color('$&', 'section')) + } else { + res += patch + } + } + + return res.trim() +} + +module.exports = formatDiff diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmdiff/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmdiff/lib/index.js new file mode 100644 index 0000000000000000000000000000000000000000..10532c1990dc4ace9c7de612ac231d01fe39a447 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmdiff/lib/index.js @@ -0,0 +1,62 @@ +const pacote = require('pacote') + +const formatDiff = require('./format-diff.js') +const getTarball = require('./tarball.js') +const untar = require('./untar.js') + +// TODO: we test this condition in the diff command +// so this error probably doesnt need to be here. 
Or +// if it does we should figure out a standard code +// so we can catch it in the cli and display it consistently +const argsError = () => + Object.assign( + new TypeError('libnpmdiff needs two arguments to compare'), + { code: 'EDIFFARGS' } + ) +const diff = async (specs, opts = {}) => { + if (specs.length !== 2) { + throw argsError() + } + + const [ + aManifest, + bManifest, + ] = + await Promise.all(specs.map(spec => pacote.manifest(spec, opts))) + + const versions = { + a: aManifest.version, + b: bManifest.version, + } + + // fetches tarball using pacote + const [a, b] = await Promise.all([ + getTarball(aManifest, opts), + getTarball(bManifest, opts), + ]) + + // read all files + // populates `files` and `refs` + const { + files, + refs, + } = await untar([ + { + prefix: 'a/', + item: a, + }, + { + prefix: 'b/', + item: b, + }, + ], opts) + + return formatDiff({ + files, + opts, + refs, + versions, + }) +} + +module.exports = diff diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmdiff/lib/should-print-patch.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmdiff/lib/should-print-patch.js new file mode 100644 index 0000000000000000000000000000000000000000..c63cdee87c1fabe0f9d0534991c2d035ad1d7bb4 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmdiff/lib/should-print-patch.js @@ -0,0 +1,22 @@ +const { basename, extname } = require('node:path') + +// we should try to print patches as long as the +// extension is not identified as binary files +const shouldPrintPatch = async (path, opts = {}) => { + if (opts.diffText) { + return true + } + + const { default: binaryExtensions } = await import('binary-extensions') + + const filename = basename(path) + const extension = ( + filename.startsWith('.') + ? 
filename + : extname(filename) + ).slice(1) + + return !binaryExtensions.includes(extension) +} + +module.exports = shouldPrintPatch diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmdiff/lib/tarball.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmdiff/lib/tarball.js new file mode 100644 index 0000000000000000000000000000000000000000..e2738b58f11bc495175bd6e8782a125b33a1ca2b --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmdiff/lib/tarball.js @@ -0,0 +1,38 @@ +const { relative } = require('node:path') + +const Arborist = require('@npmcli/arborist') +const npa = require('npm-package-arg') +const pkgContents = require('@npmcli/installed-package-contents') +const pacote = require('pacote') +const { tarCreateOptions } = pacote.DirFetcher +const tar = require('tar') + +// returns a simplified tarball when reading files from node_modules folder, +// thus avoiding running the prepare scripts and the extra logic from packlist +const nodeModulesTarball = (manifest) => + pkgContents({ path: manifest._resolved, depth: 1 }) + .then(files => + files.map(file => relative(manifest._resolved, file)) + ) + .then(files => + tar.c(tarCreateOptions(manifest), files).concat() + ) + +const tarball = (manifest, opts) => { + const resolved = manifest._resolved + const where = opts.where || process.cwd() + + const fromNodeModules = npa(resolved).type === 'directory' + && /node_modules[\\/](@[^\\/]+\/)?[^\\/]+[\\/]?$/.test(relative(where, resolved)) + + if (fromNodeModules) { + return nodeModulesTarball(manifest, opts) + } + + return pacote.tarball(manifest._resolved, { + ...opts, + Arborist, + }) +} + +module.exports = tarball diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmdiff/lib/untar.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmdiff/lib/untar.js new file mode 100644 index 0000000000000000000000000000000000000000..6bbecd8a59ce07fe5dc1d293757c3fbddf315e18 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmdiff/lib/untar.js @@ -0,0 +1,96 @@ +const tar = require('tar') +const { minimatch } = require('minimatch') + +const normalizeMatch = str => str + .replace(/\\+/g, '/') + .replace(/^\.\/|^\./, '') + +// files and refs are mutating params +// filterFiles, item, prefix and opts are read-only options +const untar = ({ files, refs }, { filterFiles, item, prefix }) => { + tar.list({ + filter: (path, entry) => { + const fileMatch = () => + (!filterFiles.length || + filterFiles.some(f => { + const pattern = normalizeMatch(f) + return minimatch( + normalizeMatch(path), + `{package/,}${pattern}`, + { matchBase: pattern.startsWith('*') } + ) + })) + + // expands usage of simple path filters, e.g: lib or src/ + const folderMatch = () => + filterFiles.some(f => + normalizeMatch(path).startsWith(normalizeMatch(f)) || + normalizeMatch(path).startsWith(`package/${normalizeMatch(f)}`)) + + if ( + entry.type === 'File' && + (fileMatch() || folderMatch()) + ) { + const key = path.replace(/^[^/]+\/?/, '') + files.add(key) + + // should skip reading file when using --name-only option + let content + try { + content = entry.concat() + } catch (e) { + /* istanbul ignore next */ + throw Object.assign( + new Error('failed to read files'), + { code: 'EDIFFUNTAR' } + ) + } + + refs.set(`${prefix}${key}`, { + content, + mode: `100${entry.mode.toString(8)}`, + }) + return true + } + }, + }) + 
.on('error', /* istanbul ignore next */ e => { + throw e + }) + .end(item) +} + +const readTarballs = async (tarballs, opts = {}) => { + const files = new Set() + const refs = new Map() + const arr = [].concat(tarballs) + + const filterFiles = opts.diffFiles || [] + + for (const i of arr) { + untar({ + files, + refs, + }, { + item: i.item, + prefix: i.prefix, + filterFiles, + }) + } + + // await to read all content from included files + // TODO this feels like it could be one in one pass instead of three (values, map, forEach) + const allRefs = [...refs.values()] + const contents = await Promise.all(allRefs.map(async ref => ref.content)) + + contents.forEach((content, index) => { + allRefs[index].content = content.toString('utf8') + }) + + return { + files, + refs, + } +} + +module.exports = readTarballs diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmexec/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmexec/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..d3a1cdfd217b645f14677a8d49854e6fe570a0f6 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmexec/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) GitHub Inc. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmexec/README.md b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmexec/README.md new file mode 100644 index 0000000000000000000000000000000000000000..84512ac590498a10c4fb711632a85b5a9142a1e6 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmexec/README.md @@ -0,0 +1,47 @@ +# libnpmexec + +[![npm version](https://img.shields.io/npm/v/libnpmexec.svg)](https://npm.im/libnpmexec) +[![license](https://img.shields.io/npm/l/libnpmexec.svg)](https://npm.im/libnpmexec) +[![CI - libnpmexec](https://github.com/npm/cli/actions/workflows/ci-libnpmexec.yml/badge.svg)](https://github.com/npm/cli/actions/workflows/ci-libnpmexec.yml) + +The `npm exec` (`npx`) Programmatic API + +## Install + +`npm install libnpmexec` + +## Usage: + +```js +const libexec = require('libnpmexec') +await libexec({ + args: ['yosay', 'Bom dia!'], + cache: '~/.npm/_cacache', + npxCache: '~/.npm/_npx', + yes: true, +}) +``` + +## API: + +### `libexec(opts)` + +- `opts`: + - `args`: List of pkgs to execute **Array**, defaults to `[]` + - `call`: An alternative command to run when using `packages` option **String**, defaults to empty string. + - `cache`: The path location to where the npm cache folder is placed **String** + - `npxCache`: The path location to where the npx cache folder is placed **String** + - `chalk`: Chalk instance to use for colors? 
**Required** + - `localBin`: Location of the `node_modules/.bin` folder of the local project to start scanning for bin files **String**, defaults to `./node_modules/.bin`. **libexec** will walk up the directory structure looking for `node_modules/.bin` folders in parent folders that might satisfy the current `arg` and will use that bin if found. + - `locationMsg`: Overrides the "at location" message when entering interactive mode **String** + - `globalBin`: Location of the global bin folder, same as `$(npm bin -g)` **String**, defaults to empty string. + - `packages`: A list of packages to be used (possibly fetched from the registry) **Array**, defaults to `[]` + - `path`: Location from which to read local project info (`package.json`) **String**, defaults to `.` + - `runPath`: Location where the script will be executed **String**, defaults to `.` + - `scriptShell`: Default shell to be used **String**, defaults to `sh` on POSIX systems, and to `process.env.ComSpec` or `cmd` on Windows + - `yes`: Whether to skip the download confirmation prompt when fetching missing packages from the registry **Boolean** + - `registry`, `cache`, and more options that are forwarded to [@npmcli/arborist](https://github.com/npm/cli/blob/latest/workspaces/arborist/README.md) and [pacote](https://github.com/npm/pacote/#options) **Object** + +## LICENSE + +[ISC](./LICENSE) diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmexec/lib/file-exists.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmexec/lib/file-exists.js new file mode 100644 index 0000000000000000000000000000000000000000..8a1a88adee993d113d356d4f2a8427b33f6b25c9 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmexec/lib/file-exists.js @@ -0,0 +1,33 @@ +const { resolve } = require('node:path') +const { stat } = require('node:fs/promises') +const { walkUp } = require('walk-up-path') + +const fileExists = async (file) => { + try { + const res = await stat(file) + return res.isFile() + } catch { + return false + } +} + +const localFileExists = async (dir, binName, root) => { + for (const path of walkUp(dir)) { + const binDir = resolve(path, 'node_modules', '.bin') + + if (await fileExists(resolve(binDir, binName))) { + return binDir + } + + if (path.toLowerCase() === resolve(root).toLowerCase()) { + return false + } + } + + return false +} + +module.exports = { + fileExists, + localFileExists, +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmexec/lib/get-bin-from-manifest.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmexec/lib/get-bin-from-manifest.js new file mode 100644 index 0000000000000000000000000000000000000000..cede563c96a0de956935345463e3fb400fa7ca39 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmexec/lib/get-bin-from-manifest.js @@ -0,0 +1,22 @@ +const getBinFromManifest = (mani) => { + // if we have a bin matching the (unscoped portion of the) package name, use that + // otherwise if there's 1 bin or all bin values are the same (alias), use + // that; otherwise, fail + const bin = mani.bin || {} + if (new Set(Object.values(bin)).size === 1) { + return Object.keys(bin)[0] + } + + // XXX probably a util to parse this better?
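+ // Added illustrative examples of the rules above (not in the original source): + // { bin: { foo: 'cli.js' } } -> 'foo' (only one bin) + // { bin: { a: 'cli.js', b: 'cli.js' } } -> 'a' (all bin values identical) + // { name: '@scope/pkg', bin: { pkg: 'x.js', other: 'y.js' } } -> 'pkg' (unscoped name match)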
+ const name = mani.name.replace(/^@[^/]+\//, '') + if (bin[name]) { + return name + } + + // XXX need better error message + throw Object.assign(new Error('could not determine executable to run'), { + pkgid: mani._id, + }) +} + +module.exports = getBinFromManifest diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmexec/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmexec/lib/index.js new file mode 100644 index 0000000000000000000000000000000000000000..7b4c85a7510a1f252b929880069f1cbaf83791b7 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmexec/lib/index.js @@ -0,0 +1,310 @@ +'use strict' + +const { dirname, join, resolve } = require('node:path') +const crypto = require('node:crypto') +const { mkdir } = require('node:fs/promises') +const Arborist = require('@npmcli/arborist') +const ciInfo = require('ci-info') +const { log, input } = require('proc-log') +const npa = require('npm-package-arg') +const pacote = require('pacote') +const { read } = require('read') +const semver = require('semver') +const PackageJson = require('@npmcli/package-json') +const { fileExists, localFileExists } = require('./file-exists.js') +const getBinFromManifest = require('./get-bin-from-manifest.js') +const noTTY = require('./no-tty.js') +const runScript = require('./run-script.js') +const isWindows = require('./is-windows.js') +const withLock = require('./with-lock.js') + +const binPaths = [] + +// when checking the local tree we look up manifests, cache those results by +// spec.raw so we don't have to fetch again when we check npxCache +const manifests = new Map() + +const getManifest = async (spec, flatOptions) => { + if (!manifests.has(spec.raw)) { + const manifest = await pacote.manifest(spec, { ...flatOptions, preferOnline: true, Arborist }) + manifests.set(spec.raw, manifest) + } + return manifests.get(spec.raw) +} + +// Returns the required manifest if the spec is missing from the tree +// Returns the found node if it is in the tree +const missingFromTree = async ({ spec, tree, flatOptions, isNpxTree, shallow }) => { + // If asking for a spec by name only (spec.raw === spec.name): + // - In local or global mode go with anything in the tree that matches + // - If looking in the npx cache check if a newer version is available + const npxByNameOnly = isNpxTree && spec.name === spec.raw + // If they gave a range and not a tag we still need to check if it's outdated. + if (spec.registry && spec.type !== 'tag' && !npxByNameOnly) { + // registry spec that is not a specific tag. + const nodesBySpec = tree.inventory.query('packageName', spec.name) + for (const node of nodesBySpec) { + // continue if node is not a top level node + if (shallow && node.depth) { + continue + } + if (spec.rawSpec === '*') { + return { node } + } + // package requested by specific version + if (spec.type === 'version' && (node.pkgid === spec.raw)) { + return { node } + } + // package requested by version range, only remaining registry type + // the npx tree shouldn't be ok w/ an outdated version + if (!isNpxTree && semver.satisfies(node.package.version, spec.rawSpec)) { + return { node } + } + } + const manifest = await getManifest(spec, flatOptions) + return { manifest } + } else { + // non-registry spec, or a specific tag, or name only in npx tree. Look up + // manifest and check resolved to see if it's in the tree. 
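+ // Added note: in both branches the contract is the same, resolving to + // { node } when the spec is already satisfied by the given tree, or to + // { manifest } when the package still needs to be installed.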
+ const manifest = await getManifest(spec, flatOptions) + if (spec.type === 'directory') { + return { manifest } + } + const nodesByManifest = tree.inventory.query('packageName', manifest.name) + for (const node of nodesByManifest) { + if (node.package.resolved === manifest._resolved) { + // we have a package by the same name and the same resolved destination, nothing to add. + return { node } + } + } + return { manifest } + } +} + +// see if the package.json at `path` has an entry that matches `cmd` +const hasPkgBin = (path, cmd, flatOptions) => + pacote.manifest(path, flatOptions) + .then(manifest => manifest?.bin?.[cmd]).catch(() => null) + +const exec = async (opts) => { + const { + args = [], + call = '', + localBin = resolve('./node_modules/.bin'), + locationMsg = undefined, + globalBin = '', + globalPath, + // dereference values because we manipulate it later + packages: [...packages] = [], + path = '.', + runPath = '.', + scriptShell = isWindows ? process.env.ComSpec || 'cmd' : 'sh', + ...flatOptions + } = opts + + let pkgPaths = opts.pkgPath + if (typeof pkgPaths === 'string') { + pkgPaths = [pkgPaths] + } + if (!pkgPaths) { + pkgPaths = ['.'] + } + let yes = opts.yes + const run = () => runScript({ + args, + call, + flatOptions, + locationMsg, + path, + binPaths, + runPath, + scriptShell, + }) + + // interactive mode + if (!call && !args.length && !packages.length) { + return run() + } + + // Look in the local tree too + pkgPaths.push(path) + + let needPackageCommandSwap = (args.length > 0) && (packages.length === 0) + // If they asked for a command w/o specifying a package, see if there is a + // bin that directly matches that name: + // - in any local packages (pkgPaths can have workspaces in them or just the root) + // - in the local tree (path) + // - globally + if (needPackageCommandSwap) { + // Local packages and local tree + for (const p of pkgPaths) { + if (await hasPkgBin(p, args[0], flatOptions)) { + // we have to install the local package into the npx cache so that its + // bin links get set up + flatOptions.installLinks = false + // args[0] will exist when the package is installed + packages.push(p) + yes = true + needPackageCommandSwap = false + break + } + } + if (needPackageCommandSwap) { + // no bin entry in local packages or in tree, now we look for binPaths + const dir = dirname(dirname(localBin)) + const localBinPath = await localFileExists(dir, args[0], '/') + if (localBinPath) { + binPaths.push(localBinPath) + return await run() + } else if (globalPath && await fileExists(`${globalBin}/${args[0]}`)) { + binPaths.push(globalBin) + return await run() + } + // We swap out args[0] with the bin from the manifest later + packages.push(args[0]) + } + } + + // Resolve any directory specs so that the npx directory is unique to the + // resolved directory, not the potentially relative one (i.e. 
"npx .") + for (const i in packages) { + const pkg = packages[i] + const spec = npa(pkg) + if (spec.type === 'directory') { + packages[i] = spec.fetchSpec + } + } + + const localArb = new Arborist({ ...flatOptions, path }) + const localTree = await localArb.loadActual() + + // Find anything that isn't installed locally + const needInstall = [] + let commandManifest + await Promise.all(packages.map(async (pkg, i) => { + const spec = npa(pkg, path) + const { manifest, node } = await missingFromTree({ spec, tree: localTree, flatOptions }) + if (manifest) { + // Package does not exist in the local tree + needInstall.push({ spec, manifest }) + if (i === 0) { + commandManifest = manifest + } + } else if (i === 0) { + // The node.package has enough to look up the bin + commandManifest = node.package + } + })) + + if (needPackageCommandSwap) { + const spec = npa(args[0]) + + if (spec.type === 'directory') { + yes = true + } + + args[0] = getBinFromManifest(commandManifest) + + if (needInstall.length > 0 && globalPath) { + // See if the package is installed globally. If it is, run the translated bin + const globalArb = new Arborist({ ...flatOptions, path: globalPath, global: true }) + const globalTree = await globalArb.loadActual().catch(() => { + log.verbose(`Could not read global path ${globalPath}, ignoring`) + return null + }) + if (globalTree) { + const { manifest: globalManifest } = + await missingFromTree({ spec, tree: globalTree, flatOptions, shallow: true }) + if (!globalManifest && await fileExists(`${globalBin}/${args[0]}`)) { + binPaths.push(globalBin) + return await run() + } + } + } + } + + const add = [] + if (needInstall.length > 0) { + // Install things to the npx cache, if needed + const { npxCache } = flatOptions + if (!npxCache) { + throw new Error('Must provide a valid npxCache path') + } + const hash = crypto.createHash('sha512') + .update(packages.map(p => { + // Keeps the npx directory unique to the resolved directory, not the + // potentially relative one (i.e. "npx .") + const spec = npa(p) + if (spec.type === 'directory') { + return spec.fetchSpec + } + return p + }).sort((a, b) => a.localeCompare(b, 'en')).join('\n')) + .digest('hex') + .slice(0, 16) + const installDir = resolve(npxCache, hash) + await mkdir(installDir, { recursive: true }) + const npxArb = new Arborist({ + ...flatOptions, + path: installDir, + }) + const lockPath = join(installDir, 'concurrency.lock') + const npxTree = await withLock(lockPath, () => npxArb.loadActual()) + await Promise.all(needInstall.map(async ({ spec }) => { + const { manifest } = await missingFromTree({ + spec, + tree: npxTree, + flatOptions, + isNpxTree: true, + }) + if (manifest) { + // Manifest is not in npxCache, we need to install it there + if (!spec.registry) { + add.push(manifest._from) + } else { + add.push(manifest._id) + } + } + })) + + if (add.length) { + if (!yes) { + const addList = add.map(a => `${a.replace(/@$/, '')}`) + + // set -n to always say no + if (yes === false) { + // Error message lists missing package(s) when process is canceled + /* eslint-disable-next-line max-len */ + throw new Error(`npx canceled due to missing packages and no YES option: ${JSON.stringify(addList)}`) + } + + if (noTTY() || ciInfo.isCI) { + /* eslint-disable-next-line max-len */ + log.warn('exec', `The following package${add.length === 1 ? 
' was' : 's were'} not found and will be installed: ${addList.join(', ')}`) + } else { + const confirm = await input.read(() => read({ + /* eslint-disable-next-line max-len */ + prompt: `Need to install the following packages:\n${addList.join('\n')}\nOk to proceed? `, + default: 'y', + })) + if (confirm.trim().toLowerCase().charAt(0) !== 'y') { + throw new Error('canceled') + } + } + } + await withLock(lockPath, () => npxArb.reify({ + ...flatOptions, + save: true, + add, + })) + } + binPaths.push(resolve(installDir, 'node_modules/.bin')) + const pkgJson = await PackageJson.load(installDir) + pkgJson.update({ _npx: { packages } }) + await pkgJson.save() + } + + return await run() +} + +module.exports = exec diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmexec/lib/is-windows.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmexec/lib/is-windows.js new file mode 100644 index 0000000000000000000000000000000000000000..fbece90ad74964cc5fe9fe786564fd5b89643fcf --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmexec/lib/is-windows.js @@ -0,0 +1 @@ +module.exports = process.platform === 'win32' diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmexec/lib/no-tty.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmexec/lib/no-tty.js new file mode 100644 index 0000000000000000000000000000000000000000..601798d25cc77b8ac370588b3e3cc372e676ae87 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmexec/lib/no-tty.js @@ -0,0 +1 @@ +module.exports = () => !process.stdin.isTTY diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmexec/lib/run-script.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmexec/lib/run-script.js new file mode 100644 index 0000000000000000000000000000000000000000..13f16a74eb8a04c8ff378bae8469679a4085a2b8 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmexec/lib/run-script.js @@ -0,0 +1,73 @@ +const ciInfo = require('ci-info') +const runScript = require('@npmcli/run-script') +const pkgJson = require('@npmcli/package-json') +const { log, output } = require('proc-log') +const noTTY = require('./no-tty.js') +const isWindowsShell = require('./is-windows.js') + +const run = async ({ + args, + call, + flatOptions, + locationMsg, + path, + binPaths, + runPath, + scriptShell, +}) => { + // escape executable path + // necessary for preventing bash/cmd keywords from overriding + if (!isWindowsShell) { + if (args.length > 0) { + args[0] = '"' + args[0] + '"' + } + } + + // turn list of args into command string + const script = call || args.shift() || scriptShell + + // do the fakey runScript dance + // still should work if no package.json in cwd + const { content: realPkg } = await pkgJson.normalize(path, { steps: [ + 'binDir', + ...pkgJson.normalizeSteps, + ] }).catch(() => ({ content: {} })) + const pkg = { + ...realPkg, + scripts: { + ...(realPkg.scripts || {}), + npx: script, + }, + } + + if (script === scriptShell) { + if (!noTTY()) { + if (ciInfo.isCI) { + return log.warn('exec', 'Interactive mode disabled in CI environment') + } + + const { chalk } = flatOptions + + output.standard(`${ + chalk.reset('\nEntering npm script environment') + }${ + chalk.reset(locationMsg || ` at location:\n${chalk.dim(runPath)}`) + }${ + chalk.bold('\nType \'exit\' or 
^D when finished\n') + }`) + } + } + return runScript({ + ...flatOptions, + pkg, + // we always run in cwd, not --prefix + path: runPath, + binPaths, + event: 'npx', + args, + stdio: 'inherit', + scriptShell, + }) +} + +module.exports = run diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmexec/lib/with-lock.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmexec/lib/with-lock.js new file mode 100644 index 0000000000000000000000000000000000000000..897046adedb8a79f58081df8b13430e846196bbd --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmexec/lib/with-lock.js @@ -0,0 +1,175 @@ +const fs = require('node:fs/promises') +const { rmdirSync } = require('node:fs') +const promiseRetry = require('promise-retry') +const { onExit } = require('signal-exit') + +// a lockfile implementation inspired by the unmaintained proper-lockfile library +// +// similarities: +// - based on mkdir's atomicity +// - works across processes and even machines (via NFS) +// - cleans up after itself +// - detects compromised locks +// +// differences: +// - higher-level API (just a withLock function) +// - written in async/await style +// - uses mtime + inode for more reliable compromised lock detection +// - more ergonomic compromised lock handling (i.e. withLock will reject, and callbacks have access to an AbortSignal) +// - uses a more recent version of signal-exit + +const touchInterval = 1_000 +// mtime precision is platform dependent, so use a reasonably large threshold +const staleThreshold = 5_000 + +// track current locks and their cleanup functions +const currentLocks = new Map() + +function cleanupLocks () { + for (const [, cleanup] of currentLocks) { + try { + cleanup() + } catch (err) { + // + } + } +} + +// clean up any locks that were not released normally +onExit(cleanupLocks) + +/** + * Acquire an advisory lock for the given path and hold it for the duration of the callback. + * + * The lock will be released automatically when the callback resolves or rejects. + * Concurrent calls to withLock() for the same path will wait until the lock is released. 
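+ * + * Illustrative usage (added; mirrors how lib/index.js guards the npx cache dir): + * + *   await withLock(join(installDir, 'concurrency.lock'), async (signal) => { + *     // critical section; `signal` aborts if the lock is compromised + *   })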
+ */ +async function withLock (lockPath, cb) { + try { + const signal = await acquireLock(lockPath) + return await new Promise((resolve, reject) => { + signal.addEventListener('abort', () => { + reject(Object.assign(new Error('Lock compromised'), { code: 'ECOMPROMISED' })) + }); + + (async () => { + try { + resolve(await cb(signal)) + } catch (err) { + reject(err) + } + })() + }) + } finally { + releaseLock(lockPath) + } +} + +function acquireLock (lockPath) { + return promiseRetry({ + minTimeout: 100, + maxTimeout: 5_000, + // if another process legitimately holds the lock, wait for it to release; if it dies abnormally and the lock becomes stale, we'll acquire it automatically + forever: true, + }, async (retry) => { + try { + await fs.mkdir(lockPath) + } catch (err) { + if (err.code !== 'EEXIST' && err.code !== 'EBUSY' && err.code !== 'EPERM') { + throw err + } + + const status = await getLockStatus(lockPath) + + if (status === 'locked') { + // let's see if we can acquire it on the next attempt 🤞 + return retry(err) + } + if (status === 'stale') { + try { + // there is a very tiny window where another process could also release the stale lock and acquire it before we release it here; the lock compromise checker should detect this and throw an error + deleteLock(lockPath) + } catch (e) { + // on windows, EBUSY/EPERM can happen if another process is (re)creating the lock; maybe we can acquire it on a subsequent attempt 🤞 + if (e.code === 'EBUSY' || e.code === 'EPERM') { + return retry(e) + } + throw e + } + } + // immediately attempt to acquire the lock (no backoff) + return await acquireLock(lockPath) + } + try { + const signal = await maintainLock(lockPath) + return signal + } catch (err) { + throw Object.assign(new Error('Lock compromised'), { code: 'ECOMPROMISED' }) + } + }) +} + +function deleteLock (lockPath) { + try { + // synchronous, so we can call in an exit handler + rmdirSync(lockPath) + } catch (err) { + if (err.code !== 'ENOENT') { + throw err + } + } +} + +function releaseLock (lockPath) { + currentLocks.get(lockPath)?.() + currentLocks.delete(lockPath) +} + +async function getLockStatus (lockPath) { + try { + const stat = await fs.stat(lockPath) + return (Date.now() - stat.mtimeMs > staleThreshold) ? 'stale' : 'locked' + } catch (err) { + if (err.code === 'ENOENT') { + return 'unlocked' + } + throw err + } +} + +async function maintainLock (lockPath) { + const controller = new AbortController() + const stats = await fs.stat(lockPath) + // fs.utimes operates on floating points seconds (directly, or via strings/Date objects), which may not match the underlying filesystem's mtime precision, meaning that we might read a slightly different mtime than we write. 
always round to the nearest second, since all filesystems support at least second precision + let mtime = Math.round(stats.mtimeMs / 1000) + const signal = controller.signal + + async function touchLock () { + try { + const currentStats = (await fs.stat(lockPath)) + const currentMtime = Math.round(currentStats.mtimeMs / 1000) + if (currentStats.ino !== stats.ino || currentMtime !== mtime) { + throw new Error('Lock compromised') + } + mtime = Math.round(Date.now() / 1000) + // touch the lock, unless we just released it during this iteration + if (currentLocks.has(lockPath)) { + await fs.utimes(lockPath, mtime, mtime) + } + } catch (err) { + // stats mismatch or other fs error means the lock was compromised + controller.abort() + } + } + + const timeout = setInterval(touchLock, touchInterval) + timeout.unref() + function cleanup () { + clearInterval(timeout) + deleteLock(lockPath) + } + currentLocks.set(lockPath, cleanup) + return signal +} + +module.exports = withLock diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmexec/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmexec/package.json new file mode 100644 index 0000000000000000000000000000000000000000..d06081ce21a609890f30fc3c72a4d3a0a40e113f --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmexec/package.json @@ -0,0 +1,81 @@ +{ + "name": "libnpmexec", + "version": "10.1.8", + "files": [ + "bin/", + "lib/" + ], + "main": "lib/index.js", + "engines": { + "node": "^20.17.0 || >=22.9.0" + }, + "description": "npm exec (npx) programmatic API", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/cli.git", + "directory": "workspaces/libnpmexec" + }, + "keywords": [ + "npm", + "npmcli", + "libnpm", + "cli", + "workspaces", + "libnpmexec" + ], + "author": "GitHub Inc.", + "contributors": [ + { + "name": "Ruy Adorno", + "url": "https://ruyadorno.com", + "twitter": "ruyadorno" + } + ], + "license": "ISC", + "scripts": { + "lint": "npm run eslint", + "posttest": "npm run lint", + "test": "tap", + "snap": "tap", + "postlint": "template-oss-check", + "lintfix": "npm run eslint -- --fix", + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" + }, + "tap": { + "files": "test/*.js", + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "devDependencies": { + "@npmcli/eslint-config": "^5.0.1", + "@npmcli/mock-registry": "^1.0.0", + "@npmcli/template-oss": "4.25.1", + "bin-links": "^5.0.0", + "chalk": "^5.2.0", + "just-extend": "^6.2.0", + "just-safe-set": "^4.2.1", + "tap": "^16.3.8" + }, + "dependencies": { + "@npmcli/arborist": "^9.1.6", + "@npmcli/package-json": "^7.0.0", + "@npmcli/run-script": "^10.0.0", + "ci-info": "^4.0.0", + "npm-package-arg": "^13.0.0", + "pacote": "^21.0.2", + "proc-log": "^5.0.0", + "promise-retry": "^2.0.1", + "read": "^4.0.0", + "semver": "^7.3.7", + "signal-exit": "^4.1.0", + "walk-up-path": "^4.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", + "version": "4.25.1", + "content": "../../scripts/template-oss/index.js" + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmfund/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmfund/lib/index.js new file mode 100644 index 0000000000000000000000000000000000000000..1616c88b73c50ca6ee635e0870cebcac5e0da331 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmfund/lib/index.js @@ -0,0 +1,210 @@ +'use strict' + +const URL = require('node:url').URL +const Arborist = require('@npmcli/arborist') + +// supports object funding and string shorthand, or an array of these +// if original was an array, returns an array; else returns the lone item +function normalizeFunding (funding) { + const normalizeItem = item => + typeof item === 'string' ? { url: item } : item + const sources = [].concat(funding || []).map(normalizeItem) + return Array.isArray(funding) ? sources : sources[0] +} + +// Is the value of a `funding` property of a `package.json` +// a valid type+url for `npm fund` to display? +function isValidFunding (funding) { + if (!funding) { + return false + } + + if (Array.isArray(funding)) { + return funding.every(f => !Array.isArray(f) && isValidFunding(f)) + } + + try { + var parsed = new URL(funding.url || funding) + } catch (error) { + return false + } + + if ( + parsed.protocol !== 'https:' && + parsed.protocol !== 'http:' + ) { + return false + } + + return Boolean(parsed.host) +} + +const empty = () => Object.create(null) + +function readTree (tree, opts) { + let packageWithFundingCount = 0 + const seen = new Set() + const { countOnly } = opts || {} + const _trailingDependencies = Symbol('trailingDependencies') + + let filterSet + + if (opts && opts.workspaces && opts.workspaces.length) { + const arb = new Arborist(opts) + filterSet = arb.workspaceDependencySet(tree, opts.workspaces) + } + + function tracked (name, version) { + const key = String(name) + String(version) + if (seen.has(key)) { + return true + } + + seen.add(key) + } + + function retrieveDependencies (dependencies) { + const trailing = dependencies[_trailingDependencies] + + if (trailing) { + return Object.assign( + empty(), + dependencies, + trailing + ) + } + + return dependencies + } + + function hasDependencies (dependencies) { + return dependencies && ( + Object.keys(dependencies).length || + dependencies[_trailingDependencies] + ) + } + + function attachFundingInfo (target, funding) { + if (funding && isValidFunding(funding)) { + target.funding = normalizeFunding(funding) + packageWithFundingCount++ + } + } + + function getFundingDependencies (t) { + const edges = t && t.edgesOut && t.edgesOut.values() + if (!edges) { + return empty() + } + + const directDepsWithFunding = Array.from(edges).map(edge => { + if (!edge || !edge.to) { + return empty() + } + + const node = edge.to.target || edge.to + if (!node.package) { + return empty() + } + + if (filterSet && filterSet.size > 0 && !filterSet.has(node)) { + return empty() + } + + const { name, funding, version } = node.package + + // avoids duplicated items within the funding tree + if (tracked(name, version)) { + return empty() + } + + const fundingItem = {} + + if (version) { + fundingItem.version = version + } + + attachFundingInfo(fundingItem, funding) + + return { + node, + fundingItem, + } + }) + + return directDepsWithFunding.reduce( + (res, { node, fundingItem }) => { + if (!fundingItem || + 
fundingItem.length === 0 || + !node) { + return res + } + + // recurse + const transitiveDependencies = node.edgesOut && + node.edgesOut.size > 0 && + getFundingDependencies(node) + + // if we're only counting items there's no need + // to add all the data to the resulting object + if (countOnly) { + return null + } + + if (hasDependencies(transitiveDependencies)) { + fundingItem.dependencies = + retrieveDependencies(transitiveDependencies) + } + + if (isValidFunding(fundingItem.funding)) { + res[node.package.name] = fundingItem + } else if (hasDependencies(fundingItem.dependencies)) { + res[_trailingDependencies] = + Object.assign( + empty(), + res[_trailingDependencies], + fundingItem.dependencies + ) + } + + return res + }, countOnly ? null : empty()) + } + + const treeDependencies = getFundingDependencies(tree) + const result = { + length: packageWithFundingCount, + } + + if (!countOnly) { + const name = + (tree && tree.package && tree.package.name) || + (tree && tree.name) + result.name = name || (tree && tree.path) + + if (tree && tree.package && tree.package.version) { + result.version = tree.package.version + } + + if (tree && tree.package && tree.package.funding) { + result.funding = normalizeFunding(tree.package.funding) + } + + result.dependencies = retrieveDependencies(treeDependencies) + } + + return result +} + +async function read (opts) { + const arb = new Arborist(opts) + const tree = await arb.loadActual(opts) + return readTree(tree, opts) +} + +module.exports = { + read, + readTree, + normalizeFunding, + isValidFunding, +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmorg/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmorg/lib/index.js new file mode 100644 index 0000000000000000000000000000000000000000..f3d361b8be6d75f30b074651e7c3483c7b73b810 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmorg/lib/index.js @@ -0,0 +1,64 @@ +'use strict' + +const eu = encodeURIComponent +const npmFetch = require('npm-registry-fetch') +const validate = require('aproba') + +// From https://github.com/npm/registry/blob/master/docs/orgs/memberships.md +const cmd = module.exports + +class MembershipDetail {} +cmd.set = (org, user, role, opts = {}) => { + if ( + typeof role === 'object' && + Object.keys(opts).length === 0 + ) { + opts = role + role = undefined + } + validate('SSSO|SSZO', [org, user, role, opts]) + user = user.replace(/^@?/, '') + org = org.replace(/^@?/, '') + return npmFetch.json(`/-/org/${eu(org)}/user`, { + ...opts, + method: 'PUT', + body: { user, role }, + }).then(ret => Object.assign(new MembershipDetail(), ret)) +} + +cmd.rm = (org, user, opts = {}) => { + validate('SSO', [org, user, opts]) + user = user.replace(/^@?/, '') + org = org.replace(/^@?/, '') + return npmFetch(`/-/org/${eu(org)}/user`, { + ...opts, + method: 'DELETE', + body: { user }, + ignoreBody: true, + }).then(() => null) +} + +class Roster {} +cmd.ls = (org, opts = {}) => { + return cmd.ls.stream(org, opts) + .collect() + .then(data => data.reduce((acc, [key, val]) => { + if (!acc) { + acc = {} + } + acc[key] = val + return acc + }, null)) + .then(ret => Object.assign(new Roster(), ret)) +} + +cmd.ls.stream = (org, opts = {}) => { + validate('SO', [org, opts]) + org = org.replace(/^@?/, '') + return npmFetch.json.stream(`/-/org/${eu(org)}/user`, '*', { + ...opts, + mapJSON: (value, [key]) => { + return [key, value] + }, + }) +} diff --git 
a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmpack/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmpack/lib/index.js new file mode 100644 index 0000000000000000000000000000000000000000..bd3e0c7bd7232ac3ee11f150e3af496659e76331 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmpack/lib/index.js @@ -0,0 +1,62 @@ +'use strict' + +const pacote = require('pacote') +const npa = require('npm-package-arg') +const runScript = require('@npmcli/run-script') +const path = require('node:path') +const Arborist = require('@npmcli/arborist') +const { writeFile } = require('node:fs/promises') + +module.exports = pack +async function pack (spec = 'file:.', opts = {}) { + // gets spec + spec = npa(spec) + + const manifest = await pacote.manifest(spec, { ...opts, Arborist }) + + const stdio = opts.foregroundScripts ? 'inherit' : 'pipe' + + if (spec.type === 'directory' && !opts.ignoreScripts) { + // prepack + await runScript({ + ...opts, + event: 'prepack', + path: spec.fetchSpec, + stdio, + pkg: manifest, + }) + } + + // packs tarball + const tarball = await pacote.tarball(manifest._resolved, { + ...opts, + Arborist, + integrity: manifest._integrity, + }) + + // check for explicit `false` so the default behavior is to skip writing to disk + if (opts.dryRun === false) { + const filename = `${manifest.name}-${manifest.version}.tgz` + .replace(/^@/, '').replace(/\//, '-') + const destination = path.resolve(opts.packDestination, filename) + await writeFile(destination, tarball) + } + + if (spec.type === 'directory' && !opts.ignoreScripts) { + // postpack + await runScript({ + ...opts, + event: 'postpack', + path: spec.fetchSpec, + stdio, + pkg: manifest, + env: { + npm_package_from: tarball.from, + npm_package_resolved: tarball.resolved, + npm_package_integrity: tarball.integrity, + }, + }) + } + + return tarball +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmpublish/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmpublish/lib/index.js new file mode 100644 index 0000000000000000000000000000000000000000..35687e0262559ec935b366af9d47eded4b942e28 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmpublish/lib/index.js @@ -0,0 +1,4 @@ +module.exports = { + publish: require('./publish.js'), + unpublish: require('./unpublish.js'), +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmpublish/lib/provenance.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmpublish/lib/provenance.js new file mode 100644 index 0000000000000000000000000000000000000000..cead8da6be34fe3940ef7552c2b4a955efec1b1f --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmpublish/lib/provenance.js @@ -0,0 +1,245 @@ +const sigstore = require('sigstore') +const { readFile } = require('node:fs/promises') +const ci = require('ci-info') +const { env } = process + +const INTOTO_PAYLOAD_TYPE = 'application/vnd.in-toto+json' +const INTOTO_STATEMENT_V01_TYPE = 'https://in-toto.io/Statement/v0.1' +const INTOTO_STATEMENT_V1_TYPE = 'https://in-toto.io/Statement/v1' +const SLSA_PREDICATE_V02_TYPE = 'https://slsa.dev/provenance/v0.2' +const SLSA_PREDICATE_V1_TYPE = 'https://slsa.dev/provenance/v1' + +const GITHUB_BUILDER_ID_PREFIX = 'https://github.com/actions/runner' +const GITHUB_BUILD_TYPE = 
'https://slsa-framework.github.io/github-actions-buildtypes/workflow/v1' + +const GITLAB_BUILD_TYPE_PREFIX = 'https://github.com/npm/cli/gitlab' +const GITLAB_BUILD_TYPE_VERSION = 'v0alpha1' + +const generateProvenance = async (subject, opts) => { + let payload + if (ci.GITHUB_ACTIONS) { + /* istanbul ignore next - not covering missing env var case */ + const relativeRef = (env.GITHUB_WORKFLOW_REF || '').replace(env.GITHUB_REPOSITORY + '/', '') + const delimiterIndex = relativeRef.indexOf('@') + const workflowPath = relativeRef.slice(0, delimiterIndex) + const workflowRef = relativeRef.slice(delimiterIndex + 1) + + payload = { + _type: INTOTO_STATEMENT_V1_TYPE, + subject, + predicateType: SLSA_PREDICATE_V1_TYPE, + predicate: { + buildDefinition: { + buildType: GITHUB_BUILD_TYPE, + externalParameters: { + workflow: { + ref: workflowRef, + repository: `${env.GITHUB_SERVER_URL}/${env.GITHUB_REPOSITORY}`, + path: workflowPath, + }, + }, + internalParameters: { + github: { + event_name: env.GITHUB_EVENT_NAME, + repository_id: env.GITHUB_REPOSITORY_ID, + repository_owner_id: env.GITHUB_REPOSITORY_OWNER_ID, + }, + }, + resolvedDependencies: [ + { + uri: `git+${env.GITHUB_SERVER_URL}/${env.GITHUB_REPOSITORY}@${env.GITHUB_REF}`, + digest: { + gitCommit: env.GITHUB_SHA, + }, + }, + ], + }, + runDetails: { + builder: { id: `${GITHUB_BUILDER_ID_PREFIX}/${env.RUNNER_ENVIRONMENT}` }, + metadata: { + /* eslint-disable-next-line max-len */ + invocationId: `${env.GITHUB_SERVER_URL}/${env.GITHUB_REPOSITORY}/actions/runs/${env.GITHUB_RUN_ID}/attempts/${env.GITHUB_RUN_ATTEMPT}`, + }, + }, + }, + } + } + if (ci.GITLAB) { + payload = { + _type: INTOTO_STATEMENT_V01_TYPE, + subject, + predicateType: SLSA_PREDICATE_V02_TYPE, + predicate: { + buildType: `${GITLAB_BUILD_TYPE_PREFIX}/${GITLAB_BUILD_TYPE_VERSION}`, + builder: { id: `${env.CI_PROJECT_URL}/-/runners/${env.CI_RUNNER_ID}` }, + invocation: { + configSource: { + uri: `git+${env.CI_PROJECT_URL}`, + digest: { + sha1: env.CI_COMMIT_SHA, + }, + entryPoint: env.CI_JOB_NAME, + }, + parameters: { + CI: env.CI, + CI_API_GRAPHQL_URL: env.CI_API_GRAPHQL_URL, + CI_API_V4_URL: env.CI_API_V4_URL, + CI_BUILD_BEFORE_SHA: env.CI_BUILD_BEFORE_SHA, + CI_BUILD_ID: env.CI_BUILD_ID, + CI_BUILD_NAME: env.CI_BUILD_NAME, + CI_BUILD_REF: env.CI_BUILD_REF, + CI_BUILD_REF_NAME: env.CI_BUILD_REF_NAME, + CI_BUILD_REF_SLUG: env.CI_BUILD_REF_SLUG, + CI_BUILD_STAGE: env.CI_BUILD_STAGE, + CI_COMMIT_BEFORE_SHA: env.CI_COMMIT_BEFORE_SHA, + CI_COMMIT_BRANCH: env.CI_COMMIT_BRANCH, + CI_COMMIT_REF_NAME: env.CI_COMMIT_REF_NAME, + CI_COMMIT_REF_PROTECTED: env.CI_COMMIT_REF_PROTECTED, + CI_COMMIT_REF_SLUG: env.CI_COMMIT_REF_SLUG, + CI_COMMIT_SHA: env.CI_COMMIT_SHA, + CI_COMMIT_SHORT_SHA: env.CI_COMMIT_SHORT_SHA, + CI_COMMIT_TIMESTAMP: env.CI_COMMIT_TIMESTAMP, + CI_COMMIT_TITLE: env.CI_COMMIT_TITLE, + CI_CONFIG_PATH: env.CI_CONFIG_PATH, + CI_DEFAULT_BRANCH: env.CI_DEFAULT_BRANCH, + CI_DEPENDENCY_PROXY_DIRECT_GROUP_IMAGE_PREFIX: + env.CI_DEPENDENCY_PROXY_DIRECT_GROUP_IMAGE_PREFIX, + CI_DEPENDENCY_PROXY_GROUP_IMAGE_PREFIX: env.CI_DEPENDENCY_PROXY_GROUP_IMAGE_PREFIX, + CI_DEPENDENCY_PROXY_SERVER: env.CI_DEPENDENCY_PROXY_SERVER, + CI_DEPENDENCY_PROXY_USER: env.CI_DEPENDENCY_PROXY_USER, + CI_JOB_ID: env.CI_JOB_ID, + CI_JOB_NAME: env.CI_JOB_NAME, + CI_JOB_NAME_SLUG: env.CI_JOB_NAME_SLUG, + CI_JOB_STAGE: env.CI_JOB_STAGE, + CI_JOB_STARTED_AT: env.CI_JOB_STARTED_AT, + CI_JOB_URL: env.CI_JOB_URL, + CI_NODE_TOTAL: env.CI_NODE_TOTAL, + CI_PAGES_DOMAIN: env.CI_PAGES_DOMAIN, + CI_PAGES_URL: env.CI_PAGES_URL, + 
CI_PIPELINE_CREATED_AT: env.CI_PIPELINE_CREATED_AT, + CI_PIPELINE_ID: env.CI_PIPELINE_ID, + CI_PIPELINE_IID: env.CI_PIPELINE_IID, + CI_PIPELINE_SOURCE: env.CI_PIPELINE_SOURCE, + CI_PIPELINE_URL: env.CI_PIPELINE_URL, + CI_PROJECT_CLASSIFICATION_LABEL: env.CI_PROJECT_CLASSIFICATION_LABEL, + CI_PROJECT_DESCRIPTION: env.CI_PROJECT_DESCRIPTION, + CI_PROJECT_ID: env.CI_PROJECT_ID, + CI_PROJECT_NAME: env.CI_PROJECT_NAME, + CI_PROJECT_NAMESPACE: env.CI_PROJECT_NAMESPACE, + CI_PROJECT_NAMESPACE_ID: env.CI_PROJECT_NAMESPACE_ID, + CI_PROJECT_PATH: env.CI_PROJECT_PATH, + CI_PROJECT_PATH_SLUG: env.CI_PROJECT_PATH_SLUG, + CI_PROJECT_REPOSITORY_LANGUAGES: env.CI_PROJECT_REPOSITORY_LANGUAGES, + CI_PROJECT_ROOT_NAMESPACE: env.CI_PROJECT_ROOT_NAMESPACE, + CI_PROJECT_TITLE: env.CI_PROJECT_TITLE, + CI_PROJECT_URL: env.CI_PROJECT_URL, + CI_PROJECT_VISIBILITY: env.CI_PROJECT_VISIBILITY, + CI_REGISTRY: env.CI_REGISTRY, + CI_REGISTRY_IMAGE: env.CI_REGISTRY_IMAGE, + CI_REGISTRY_USER: env.CI_REGISTRY_USER, + CI_RUNNER_DESCRIPTION: env.CI_RUNNER_DESCRIPTION, + CI_RUNNER_ID: env.CI_RUNNER_ID, + CI_RUNNER_TAGS: env.CI_RUNNER_TAGS, + CI_SERVER_HOST: env.CI_SERVER_HOST, + CI_SERVER_NAME: env.CI_SERVER_NAME, + CI_SERVER_PORT: env.CI_SERVER_PORT, + CI_SERVER_PROTOCOL: env.CI_SERVER_PROTOCOL, + CI_SERVER_REVISION: env.CI_SERVER_REVISION, + CI_SERVER_SHELL_SSH_HOST: env.CI_SERVER_SHELL_SSH_HOST, + CI_SERVER_SHELL_SSH_PORT: env.CI_SERVER_SHELL_SSH_PORT, + CI_SERVER_URL: env.CI_SERVER_URL, + CI_SERVER_VERSION: env.CI_SERVER_VERSION, + CI_SERVER_VERSION_MAJOR: env.CI_SERVER_VERSION_MAJOR, + CI_SERVER_VERSION_MINOR: env.CI_SERVER_VERSION_MINOR, + CI_SERVER_VERSION_PATCH: env.CI_SERVER_VERSION_PATCH, + CI_TEMPLATE_REGISTRY_HOST: env.CI_TEMPLATE_REGISTRY_HOST, + GITLAB_CI: env.GITLAB_CI, + GITLAB_FEATURES: env.GITLAB_FEATURES, + GITLAB_USER_ID: env.GITLAB_USER_ID, + GITLAB_USER_LOGIN: env.GITLAB_USER_LOGIN, + RUNNER_GENERATE_ARTIFACTS_METADATA: env.RUNNER_GENERATE_ARTIFACTS_METADATA, + }, + environment: { + name: env.CI_RUNNER_DESCRIPTION, + architecture: env.CI_RUNNER_EXECUTABLE_ARCH, + server: env.CI_SERVER_URL, + project: env.CI_PROJECT_PATH, + job: { + id: env.CI_JOB_ID, + }, + pipeline: { + id: env.CI_PIPELINE_ID, + ref: env.CI_CONFIG_PATH, + }, + }, + }, + metadata: { + buildInvocationId: `${env.CI_JOB_URL}`, + completeness: { + parameters: true, + environment: true, + materials: false, + }, + reproducible: false, + }, + materials: [ + { + uri: `git+${env.CI_PROJECT_URL}`, + digest: { + sha1: env.CI_COMMIT_SHA, + }, + }, + ], + }, + } + } + return sigstore.attest(Buffer.from(JSON.stringify(payload)), INTOTO_PAYLOAD_TYPE, opts) +} + +const verifyProvenance = async (subject, provenancePath) => { + let provenanceBundle + try { + provenanceBundle = JSON.parse(await readFile(provenancePath)) + } catch (err) { + err.message = `Invalid provenance provided: ${err.message}` + throw err + } + + const payload = extractProvenance(provenanceBundle) + if (!payload.subject || !payload.subject.length) { + throw new Error('No subject found in sigstore bundle payload') + } + if (payload.subject.length > 1) { + throw new Error('Found more than one subject in the sigstore bundle payload') + } + + const bundleSubject = payload.subject[0] + if (subject.name !== bundleSubject.name) { + throw new Error( + `Provenance subject ${bundleSubject.name} does not match the package: ${subject.name}` + ) + } + if (subject.digest.sha512 !== bundleSubject.digest.sha512) { + throw new Error('Provenance subject digest does not match the package') + } + + await 
sigstore.verify(provenanceBundle) + return provenanceBundle +} + +const extractProvenance = (bundle) => { + if (!bundle?.dsseEnvelope?.payload) { + throw new Error('No dsseEnvelope with payload found in sigstore bundle') + } + try { + return JSON.parse(Buffer.from(bundle.dsseEnvelope.payload, 'base64').toString('utf8')) + } catch (err) { + err.message = `Failed to parse payload from dsseEnvelope: ${err.message}` + throw err + } +} + +module.exports = { + generateProvenance, + verifyProvenance, +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmpublish/lib/publish.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmpublish/lib/publish.js new file mode 100644 index 0000000000000000000000000000000000000000..933e142422b6c4f7e2f93af3b93b302d79ecafe0 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmpublish/lib/publish.js @@ -0,0 +1,226 @@ +const npmFetch = require('npm-registry-fetch') +const npa = require('npm-package-arg') +const PackageJson = require('@npmcli/package-json') +const { log } = require('proc-log') +const semver = require('semver') +const { URL } = require('node:url') +const ssri = require('ssri') +const ciInfo = require('ci-info') + +const { generateProvenance, verifyProvenance } = require('./provenance') + +const TLOG_BASE_URL = 'https://search.sigstore.dev/' + +const publish = async (manifest, tarballData, opts) => { + if (manifest.private) { + throw Object.assign( + new Error(`This package has been marked as private +Remove the 'private' field from the package.json to publish it.`), + { code: 'EPRIVATE' } + ) + } + + // spec is used to pick the appropriate registry/auth combo + const spec = npa.resolve(manifest.name, manifest.version) + opts = { + access: 'public', + algorithms: ['sha512'], + defaultTag: 'latest', + ...opts, + spec, + } + + const reg = npmFetch.pickRegistry(spec, opts) + const pubManifest = await patchManifest(manifest, opts) + + // registry-frontdoor cares about the access level, + // which is only configurable for scoped packages + if (!spec.scope && opts.access === 'restricted') { + throw Object.assign( + new Error("Can't restrict access to unscoped packages."), + { code: 'EUNSCOPED' } + ) + } + + const { metadata, transparencyLogUrl } = await buildMetadata( + reg, + pubManifest, + tarballData, + spec, + opts + ) + + const res = await npmFetch(spec.escapedName, { + ...opts, + method: 'PUT', + body: metadata, + ignoreBody: true, + }) + if (transparencyLogUrl) { + res.transparencyLogUrl = transparencyLogUrl + } + return res +} + +const patchManifest = async (_manifest, opts) => { + const { npmVersion } = opts + const steps = ['fixName'] + const manifestInput = { ..._manifest, _nodeVersion: process.versions.node } + if (npmVersion != null) { + manifestInput._npmVersion = npmVersion + } + const manifest = await new PackageJson() + .fromContent(manifestInput) + .normalize({ steps }) + .then(p => p.content) + + const version = semver.clean(manifest.version) + if (!version) { + throw Object.assign( + new Error('invalid semver: ' + manifest.version), + { code: 'EBADSEMVER' } + ) + } + manifest.version = version + return manifest +} + +const buildMetadata = async (registry, manifest, tarballData, spec, opts) => { + const { access, defaultTag, algorithms, provenance, provenanceFile } = opts + const root = { + _id: manifest.name, + name: manifest.name, + description: manifest.description, + 'dist-tags': {}, + versions: {}, + access, + } + + 
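// Added note: root is the packument document that gets PUT to the registry; + // the new version goes under versions, the dist-tag mapping under dist-tags, + // and the tarball (plus any provenance bundle) is attached base64-encoded + // under _attachments. + 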
root.versions[manifest.version] = manifest + const tag = manifest.tag || defaultTag + root['dist-tags'][tag] = manifest.version + + const tarballName = `${manifest.name}-${manifest.version}.tgz` + const provenanceBundleName = `${manifest.name}-${manifest.version}.sigstore` + const tarballURI = `${manifest.name}/-/${tarballName}` + const integrity = ssri.fromData(tarballData, { + algorithms: [...new Set(['sha1'].concat(algorithms))], + }) + + manifest._id = `${manifest.name}@${manifest.version}` + manifest.dist = { ...manifest.dist } + // Don't bother having sha1 in the actual integrity field + manifest.dist.integrity = integrity.sha512[0].toString() + // Legacy shasum support + manifest.dist.shasum = integrity.sha1[0].hexDigest() + + // NB: the CLI always fetches via HTTPS if the registry is HTTPS, + // regardless of what's here. This makes it so that installing + // from an HTTP-only mirror doesn't cause problems, though. + manifest.dist.tarball = new URL(tarballURI, registry).href + .replace(/^https:\/\//, 'http://') + + root._attachments = {} + root._attachments[tarballName] = { + content_type: 'application/octet-stream', + data: tarballData.toString('base64'), + length: tarballData.length, + } + + // Handle case where --provenance flag was set to true + let transparencyLogUrl + if (provenance === true || provenanceFile) { + let provenanceBundle + const subject = { + name: npa.toPurl(spec), + digest: { sha512: integrity.sha512[0].hexDigest() }, + } + + if (provenance === true) { + await ensureProvenanceGeneration(registry, spec, opts) + provenanceBundle = await generateProvenance([subject], opts) + + /* eslint-disable-next-line max-len */ + log.notice('publish', `Signed provenance statement with source and build information from ${ciInfo.name}`) + + const tlogEntry = provenanceBundle?.verificationMaterial?.tlogEntries[0] + /* istanbul ignore else */ + if (tlogEntry) { + transparencyLogUrl = `${TLOG_BASE_URL}?logIndex=${tlogEntry.logIndex}` + log.notice( + 'publish', + `Provenance statement published to transparency log: ${transparencyLogUrl}` + ) + } + } else { + provenanceBundle = await verifyProvenance(subject, provenanceFile) + } + + const serializedBundle = JSON.stringify(provenanceBundle) + root._attachments[provenanceBundleName] = { + content_type: provenanceBundle.mediaType, + data: serializedBundle, + length: serializedBundle.length, + } + } + + return { + metadata: root, + transparencyLogUrl, + } +} + +// Check that all the prereqs are met for provenance generation +const ensureProvenanceGeneration = async (registry, spec, opts) => { + if (ciInfo.GITHUB_ACTIONS) { + // Ensure that the GHA OIDC token is available + if (!process.env.ACTIONS_ID_TOKEN_REQUEST_URL) { + throw Object.assign( + /* eslint-disable-next-line max-len */ + new Error('Provenance generation in GitHub Actions requires "write" access to the "id-token" permission'), + { code: 'EUSAGE' } + ) + } + } else if (ciInfo.GITLAB) { + // Ensure that the Sigstore OIDC token is available + if (!process.env.SIGSTORE_ID_TOKEN) { + throw Object.assign( + /* eslint-disable-next-line max-len */ + new Error('Provenance generation in GitLab CI requires "SIGSTORE_ID_TOKEN" with "sigstore" audience to be present in "id_tokens". For more info see:\nhttps://docs.gitlab.com/ee/ci/secrets/id_token_authentication.html'), + { code: 'EUSAGE' } + ) + } + } else { + throw Object.assign( + new Error('Automatic provenance generation not supported for provider: ' + ciInfo.name), + { code: 'EUSAGE' } + ) + } + + // Some registries (e.g. 
GH packages) require auth to check visibility, + // and always return 404 when no auth is supplied. In this case we assume + // the package is always private and require `--access public` to publish + // with provenance. + let visibility = { public: false } + if (opts.access !== 'public') { + try { + const res = await npmFetch + .json(`/-/package/${spec.escapedName}/visibility`, { ...opts, registry }) + visibility = res + } catch (err) { + if (err.code !== 'E404') { + throw err + } + } + } + + if (!visibility.public && opts.provenance === true && opts.access !== 'public') { + throw Object.assign( + /* eslint-disable-next-line max-len */ + new Error("Can't generate provenance for new or private package, you must set `access` to public."), + { code: 'EUSAGE' } + ) + } +} + +module.exports = publish diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmpublish/lib/unpublish.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmpublish/lib/unpublish.js new file mode 100644 index 0000000000000000000000000000000000000000..983a5d2c39068ea3dce07cdb192614bfe390e38b --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmpublish/lib/unpublish.js @@ -0,0 +1,119 @@ +'use strict' + +const { URL } = require('node:url') +const npa = require('npm-package-arg') +const npmFetch = require('npm-registry-fetch') +const semver = require('semver') + +// given a tarball url and a registry url, returns just the +// relevant pathname portion of it, so that it can be handled +// elegantly by npm-registry-fetch which only expects pathnames +// and handles the registry hostname via opts +const getPathname = (tarball, registry) => { + const registryUrl = new URL(registry).pathname.slice(1) + let tarballUrl = new URL(tarball).pathname.slice(1) + + // test the tarball url to see if it starts with a possible + // pathname from the registry url, in that case strips that portion + // of it so that we only return the post-registry-url pathname + if (registryUrl) { + tarballUrl = tarballUrl.slice(registryUrl.length) + } + return tarballUrl +} + +const unpublish = async (spec, opts) => { + spec = npa(spec) + // spec is used to pick the appropriate registry/auth combo. 
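+ // Added illustrative examples: unpublish('foo', opts) removes the entire package, + // while unpublish('foo@1.0.2', opts) removes a single version and rewrites the + // packument (including its dist-tags) without it.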
+ opts = { + force: false, + ...opts, + spec, + } + + try { + const pkgUri = spec.escapedName + const pkg = await npmFetch.json(pkgUri, { + ...opts, + query: { write: true }, + }) + + const version = spec.rawSpec + const allVersions = pkg.versions || {} + const versionData = allVersions[version] + + const rawSpecs = (!spec.rawSpec || spec.rawSpec === '*') + const onlyVersion = Object.keys(allVersions).length === 1 + const noVersions = !Object.keys(allVersions).length + + // if missing specific version, + // assumed unpublished + if (!versionData && !rawSpecs && !noVersions) { + return true + } + + // unpublish all versions of a package: + // - no specs supplied "npm unpublish foo" + // - all specs ("*") "npm unpublish foo@*" + // - there was only one version + // - has no versions field on packument + if (rawSpecs || onlyVersion || noVersions) { + await npmFetch(`${pkgUri}/-rev/${pkg._rev}`, { + ...opts, + method: 'DELETE', + ignoreBody: true, + }) + return true + } else { + const dist = allVersions[version].dist + delete allVersions[version] + + const latestVer = pkg['dist-tags'].latest + + // deleting dist tags associated to version + Object.keys(pkg['dist-tags']).forEach(tag => { + if (pkg['dist-tags'][tag] === version) { + delete pkg['dist-tags'][tag] + } + }) + + if (latestVer === version) { + pkg['dist-tags'].latest = Object.keys( + allVersions + ).sort(semver.compareLoose).pop() + } + + delete pkg._revisions + delete pkg._attachments + + // Update packument with removed versions + await npmFetch(`${pkgUri}/-rev/${pkg._rev}`, { + ...opts, + method: 'PUT', + body: pkg, + ignoreBody: true, + }) + + // Remove the tarball itself + const { _rev } = await npmFetch.json(pkgUri, { + ...opts, + query: { write: true }, + }) + const tarballUrl = getPathname(dist.tarball, opts.registry) + await npmFetch(`${tarballUrl}/-rev/${_rev}`, { + ...opts, + method: 'DELETE', + ignoreBody: true, + }) + return true + } + } catch (err) { + if (err.code !== 'E404') { + throw err + } + + return true + } +} + +module.exports = unpublish diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmsearch/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmsearch/lib/index.js new file mode 100644 index 0000000000000000000000000000000000000000..959059176f5c7ce95f5f4fdc5478c6a465583f79 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmsearch/lib/index.js @@ -0,0 +1,71 @@ +'use strict' + +const npmFetch = require('npm-registry-fetch') + +module.exports = search +function search (query, opts) { + return search.stream(query, opts).collect() +} +search.stream = searchStream +function searchStream (query, opts = {}) { + opts = { + detailed: false, + limit: 20, + from: 0, + quality: 0.65, + popularity: 0.98, + maintenance: 0.5, + ...opts.opts, // this is to support the cli's --searchopts parameter + ...opts, + } + + switch (opts.sortBy) { + case 'optimal': { + opts.quality = 0.65 + opts.popularity = 0.98 + opts.maintenance = 0.5 + break + } + case 'quality': { + opts.quality = 1 + opts.popularity = 0 + opts.maintenance = 0 + break + } + case 'popularity': { + opts.quality = 0 + opts.popularity = 1 + opts.maintenance = 0 + break + } + case 'maintenance': { + opts.quality = 0 + opts.popularity = 0 + opts.maintenance = 1 + break + } + } + return npmFetch.json.stream('/-/v1/search', 'objects.*', + { + ...opts, + query: { + text: Array.isArray(query) ? 
query.join(' ') : query, + size: opts.limit, + from: opts.from, + quality: opts.quality, + popularity: opts.popularity, + maintenance: opts.maintenance, + }, + mapJSON: (obj) => { + if (obj.package.date) { + obj.package.date = new Date(obj.package.date) + } + if (opts.detailed) { + return obj + } else { + return obj.package + } + }, + } + ) +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmteam/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmteam/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..209e4477f39c1a137adf3c4a6cdda70aa24003d8 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmteam/LICENSE @@ -0,0 +1,13 @@ +Copyright npm, Inc + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmteam/README.md b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmteam/README.md new file mode 100644 index 0000000000000000000000000000000000000000..f61032514bf822074b9d162c7f7e2b4499d8af67 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmteam/README.md @@ -0,0 +1,172 @@ +# libnpmteam + +[![npm version](https://img.shields.io/npm/v/libnpmteam.svg)](https://npm.im/libnpmteam) +[![license](https://img.shields.io/npm/l/libnpmteam.svg)](https://npm.im/libnpmteam) +[![CI - libnpmteam](https://github.com/npm/cli/actions/workflows/ci-libnpmteam.yml/badge.svg)](https://github.com/npm/cli/actions/workflows/ci-libnpmteam.yml) + +[`libnpmteam`](https://github.com/npm/libnpmteam) is a Node.js +library that provides programmatic access to the guts of the npm CLI's `npm +team` command and its various subcommands. + +## Table of Contents + +* [Installing](#install) +* [Example](#example) +* [API](#api) + * [team opts](#opts) + * [`create()`](#create) + * [`destroy()`](#destroy) + * [`add()`](#add) + * [`rm()`](#rm) + * [`lsTeams()`](#ls-teams) + * [`lsTeams.stream()`](#ls-teams-stream) + * [`lsUsers()`](#ls-users) + * [`lsUsers.stream()`](#ls-users-stream) + +### Install + +`$ npm install libnpmteam` + +### Example + +```javascript +const team = require('libnpmteam') + +// List all teams for the @npm org. +console.log(await team.lsTeams('npm')) +``` + +### API + +#### `opts` for `libnpmteam` commands + +`libnpmteam` uses [`npm-registry-fetch`](https://npm.im/npm-registry-fetch). +All options are passed through directly to that library, so please refer to [its +own `opts` +documentation](https://www.npmjs.com/package/npm-registry-fetch#fetch-options) +for options that can be passed in. + +A couple of options of note for those in a hurry: + +* `opts.token` - can be passed in and will be used as the authentication token for the registry. 
For other ways to pass in auth details, see the n-r-f docs.
+* `opts.otp` - certain operations will require an OTP token to be passed in. If a `libnpmteam` command fails with `err.code === 'EOTP'`, please retry the request with `{otp: <2fa token>}`
+
+#### `> team.create(team, [opts]) -> Promise`
+
+Creates a team named `team`. Team names use the format `@<scope>:<name>`, with
+the `@` being optional.
+
+Additionally, `opts.description` may be passed in to include a description.
+
+##### Example
+
+```javascript
+await team.create('@npm:cli', {token: 'myregistrytoken'})
+// The @npm:cli team now exists.
+```
+
+#### `> team.destroy(team, [opts]) -> Promise`
+
+Destroys a team named `team`. Team names use the format `@<scope>:<name>`, with
+the `@` being optional.
+
+##### Example
+
+```javascript
+await team.destroy('@npm:cli', {token: 'myregistrytoken'})
+// The @npm:cli team has been destroyed.
+```
+
+#### `> team.add(user, team, [opts]) -> Promise`
+
+Adds `user` to `team`.
+
+##### Example
+
+```javascript
+await team.add('zkat', '@npm:cli', {token: 'myregistrytoken'})
+// @zkat now belongs to the @npm:cli team.
+```
+
+#### `> team.rm(user, team, [opts]) -> Promise`
+
+Removes `user` from `team`.
+
+##### Example
+
+```javascript
+await team.rm('zkat', '@npm:cli', {token: 'myregistrytoken'})
+// @zkat is no longer part of the @npm:cli team.
+```
+
+#### `> team.lsTeams(scope, [opts]) -> Promise`
+
+Resolves to an array of team names belonging to `scope`.
+
+##### Example
+
+```javascript
+await team.lsTeams('@npm', {token: 'myregistrytoken'})
+=>
+[
+  'npm:cli',
+  'npm:web',
+  'npm:registry',
+  'npm:developers'
+]
+```
+
+#### `> team.lsTeams.stream(scope, [opts]) -> Stream`
+
+Returns a stream of teams belonging to `scope`.
+
+For a Promise-based version of these results, see [`team.lsTeams()`](#ls-teams).
+
+##### Example
+
+```javascript
+for await (let team of team.lsTeams.stream('@npm', {token: 'myregistrytoken'})) {
+  console.log(team)
+}
+
+// outputs
+// npm:cli
+// npm:web
+// npm:registry
+// npm:developers
+```
+
+#### `> team.lsUsers(team, [opts]) -> Promise`
+
+Resolves to an array of usernames belonging to `team`.
+
+For a streamed version of these results, see [`team.lsUsers.stream()`](#ls-users-stream).
+
+##### Example
+
+```javascript
+await team.lsUsers('@npm:cli', {token: 'myregistrytoken'})
+=>
+[
+  'iarna',
+  'zkat'
+]
+```
+
+#### `> team.lsUsers.stream(team, [opts]) -> Stream`
+
+Returns a stream of usernames belonging to `team`.
+
+For a Promise-based version of these results, see [`team.lsUsers()`](#ls-users).
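+
+All of the `.stream()` commands above are consumed the same way. As an
+illustration, here is a small helper sketch for draining any of them into an
+array; the `collect` helper is not part of `libnpmteam`, and it assumes only
+that the returned stream is async-iterable, which is what the examples in
+this README already rely on:
+
+```javascript
+const team = require('libnpmteam')
+
+// illustrative helper: drain any of the .stream() variants into an array
+const collect = async (stream) => {
+  const results = []
+  for await (const item of stream) {
+    results.push(item)
+  }
+  return results
+}
+
+// e.g. the streaming equivalent of team.lsUsers():
+// await collect(team.lsUsers.stream('@npm:cli', {token: 'myregistrytoken'}))
+```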
+ +##### Example + +```javascript +for await (let user of team.lsUsers.stream('@npm:cli', {token: 'myregistrytoken'})) { + console.log(user) +} + +// outputs +// iarna +// zkat +``` diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmteam/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmteam/package.json new file mode 100644 index 0000000000000000000000000000000000000000..6f1f0661b3857d32e26fa0b372d55fdfb6bf77e0 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmteam/package.json @@ -0,0 +1,52 @@ +{ + "name": "libnpmteam", + "description": "npm Team management APIs", + "version": "8.0.2", + "author": "GitHub Inc.", + "license": "ISC", + "main": "lib/index.js", + "scripts": { + "lint": "npm run eslint", + "test": "tap", + "posttest": "npm run lint", + "postlint": "template-oss-check", + "lintfix": "npm run eslint -- --fix", + "snap": "tap", + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" + }, + "devDependencies": { + "@npmcli/eslint-config": "^5.0.1", + "@npmcli/template-oss": "4.25.1", + "nock": "^13.3.3", + "tap": "^16.3.8" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/cli.git", + "directory": "workspaces/libnpmteam" + }, + "files": [ + "bin/", + "lib/" + ], + "homepage": "https://npmjs.com/package/libnpmteam", + "dependencies": { + "aproba": "^2.0.0", + "npm-registry-fetch": "^19.0.0" + }, + "engines": { + "node": "^20.17.0 || >=22.9.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.25.1", + "content": "../../scripts/template-oss/index.js" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmversion/lib/commit.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmversion/lib/commit.js new file mode 100644 index 0000000000000000000000000000000000000000..dec6edbec98c3dd64dff4a9765d61c4cba1464e7 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmversion/lib/commit.js @@ -0,0 +1,17 @@ +const git = require('@npmcli/git') + +module.exports = (version, opts) => { + const { commitHooks, allowSameVersion, signGitCommit, message } = opts + const args = ['commit'] + if (commitHooks === false) { + args.push('-n') + } + if (allowSameVersion) { + args.push('--allow-empty') + } + if (signGitCommit) { + args.push('-S') + } + args.push('-m') + return git.spawn([...args, message.replace(/%s/g, version)], opts) +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmversion/lib/enforce-clean.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmversion/lib/enforce-clean.js new file mode 100644 index 0000000000000000000000000000000000000000..25ebb5590e9171b1a2c0d00e62c0f4bdfdcf41b3 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmversion/lib/enforce-clean.js @@ -0,0 +1,32 @@ +const git = require('@npmcli/git') +const { log } = require('proc-log') + +// returns true if it's cool to do git stuff +// throws if it's unclean, and not forced. 
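+// a summary of the resolution contract implemented below (version.js relies
+// on this when deciding whether to run the git steps):
+//   - resolves true:  repo is usable; commit and tag can proceed
+//   - resolves false: the git binary was missing (ENOGIT); skip git steps
+//   - throws:         working directory is dirty and opts.force is not set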
+module.exports = async opts => { + const { force } = opts + let hadError = false + const clean = await git.isClean(opts).catch(er => { + if (er.code === 'ENOGIT') { + log.warn( + 'version', + 'This is a Git checkout, but the git command was not found.', + 'npm could not create a Git tag for this release!' + ) + hadError = true + // how can merges be real if our git isn't real? + return true + } else { + throw er + } + }) + + if (!clean) { + if (!force) { + throw new Error('Git working directory not clean.') + } + log.warn('version', 'Git working directory not clean, proceeding forcefully.') + } + + return !hadError +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmversion/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmversion/lib/index.js new file mode 100644 index 0000000000000000000000000000000000000000..4d2fb45945a7b9ebce9b1af1268af7bcf18bfaf6 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmversion/lib/index.js @@ -0,0 +1,38 @@ +const readJson = require('./read-json.js') +const version = require('./version.js') + +module.exports = async (newversion, opts = {}) => { + const { + path = process.cwd(), + allowSameVersion = false, + tagVersionPrefix = 'v', + commitHooks = true, + gitTagVersion = true, + signGitCommit = false, + signGitTag = false, + force = false, + ignoreScripts = false, + scriptShell = undefined, + preid = null, + message = 'v%s', + } = opts + + const pkg = opts.pkg || await readJson(path + '/package.json') + + return version(newversion, { + path, + cwd: path, + allowSameVersion, + tagVersionPrefix, + commitHooks, + gitTagVersion, + signGitCommit, + signGitTag, + force, + ignoreScripts, + scriptShell, + preid, + pkg, + message, + }) +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmversion/lib/read-json.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmversion/lib/read-json.js new file mode 100644 index 0000000000000000000000000000000000000000..d5d47565a41ab9e8abe98a8fc868cc44db28cc70 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmversion/lib/read-json.js @@ -0,0 +1,6 @@ +// can't use read-package-json-fast, because we want to ensure +// that we make as few changes as possible, even for safety issues. 
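+// note: json-parse-even-better-errors also records the file's indent and
+// newline style on the parsed object (under Symbol.for('indent') and
+// Symbol.for('newline')); write-json.js reads those back so the file
+// round-trips with its original formatting.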
+const { readFile } = require('node:fs/promises') +const parse = require('json-parse-even-better-errors') + +module.exports = async path => parse(await readFile(path)) diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmversion/lib/retrieve-tag.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmversion/lib/retrieve-tag.js new file mode 100644 index 0000000000000000000000000000000000000000..230b631a164738d7e5db08a31191aab7e99ac7e0 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmversion/lib/retrieve-tag.js @@ -0,0 +1,13 @@ +const { spawn } = require('@npmcli/git') +const semver = require('semver') + +module.exports = async opts => { + const tag = (await spawn( + ['describe', '--tags', '--abbrev=0', '--match=*.*.*'], + opts)).stdout.trim() + const ver = semver.coerce(tag, { loose: true, includePrerelease: true }) + if (ver) { + return ver.version + } + throw new Error(`Tag is not a valid version: ${JSON.stringify(tag)}`) +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmversion/lib/tag.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmversion/lib/tag.js new file mode 100644 index 0000000000000000000000000000000000000000..095456b20301ada7e1290813d32f415eceb86f01 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmversion/lib/tag.js @@ -0,0 +1,30 @@ +const git = require('@npmcli/git') + +module.exports = async (version, opts) => { + const { + signGitTag, + allowSameVersion, + tagVersionPrefix, + message, + } = opts + + const tag = `${tagVersionPrefix}${version}` + const flags = ['-'] + + if (signGitTag) { + flags.push('s') + } + + if (allowSameVersion) { + flags.push('f') + } + + flags.push('m') + + return git.spawn([ + 'tag', + flags.join(''), + message.replace(/%s/g, version), + tag, + ], opts) +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmversion/lib/version.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmversion/lib/version.js new file mode 100644 index 0000000000000000000000000000000000000000..bfcd8a521496d5ca954c787eda86679fe3cae7ac --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmversion/lib/version.js @@ -0,0 +1,136 @@ +// called with all the options already set to their defaults + +const retrieveTag = require('./retrieve-tag.js') +const semver = require('semver') +const enforceClean = require('./enforce-clean.js') +const writeJson = require('./write-json.js') +const readJson = require('./read-json.js') +const git = require('@npmcli/git') +const commit = require('./commit.js') +const tag = require('./tag.js') +const { log } = require('proc-log') + +const runScript = require('@npmcli/run-script') + +module.exports = async (newversion, opts) => { + const { + path, + allowSameVersion, + gitTagVersion, + ignoreScripts, + preid, + pkg, + } = opts + + const { valid, clean, inc } = semver + const current = pkg.version || '0.0.0' + const currentClean = clean(current) + + let newV + if (valid(newversion, { loose: true })) { + newV = clean(newversion, { loose: true }) + } else if (newversion === 'from-git') { + newV = await retrieveTag(opts) + } else { + newV = inc(currentClean, newversion, { loose: true }, preid) + } + + if (!newV) { + throw Object.assign(new Error('Invalid version: ' + newversion), { + current, + requested: 
newversion, + }) + } + + if (newV === currentClean && !allowSameVersion) { + throw Object.assign(new Error('Version not changed'), { + current, + requested: newversion, + newVersion: newV, + }) + } + + const isGitDir = newversion === 'from-git' || await git.is(opts) + + // ok! now we know the new version, and the old version is in pkg + + // - check if git dir is clean + // returns false if we should not keep doing git stuff + const doGit = gitTagVersion && isGitDir && await enforceClean(opts) + + if (!ignoreScripts) { + await runScript({ + ...opts, + pkg, + stdio: 'inherit', + event: 'preversion', + env: { + npm_old_version: current, + npm_new_version: newV, + }, + }) + } + + // - update the files + pkg.version = newV + delete pkg._id + await writeJson(`${path}/package.json`, pkg) + + // try to update shrinkwrap, but ok if this fails + const locks = [`${path}/package-lock.json`, `${path}/npm-shrinkwrap.json`] + const haveLocks = [] + for (const lock of locks) { + try { + const sw = await readJson(lock) + sw.version = newV + if (sw.packages && sw.packages['']) { + sw.packages[''].version = newV + } + await writeJson(lock, sw) + haveLocks.push(lock) + } catch { + // ignore errors + } + } + + if (!ignoreScripts) { + await runScript({ + ...opts, + pkg, + stdio: 'inherit', + event: 'version', + env: { + npm_old_version: current, + npm_new_version: newV, + }, + }) + } + + if (doGit) { + // - git add, git commit, git tag + await git.spawn(['add', `${path}/package.json`], opts) + // sometimes people .gitignore their lockfiles + for (const lock of haveLocks) { + await git.spawn(['add', lock], opts).catch(() => {}) + } + await commit(newV, opts) + await tag(newV, opts) + } else { + log.verbose('version', 'Not tagging: not in a git repo or no git cmd') + } + + if (!ignoreScripts) { + await runScript({ + ...opts, + pkg, + stdio: 'inherit', + event: 'postversion', + env: { + npm_old_version: current, + npm_new_version: newV, + }, + }) + } + + return newV +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmversion/lib/write-json.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmversion/lib/write-json.js new file mode 100644 index 0000000000000000000000000000000000000000..2f19953d75d2816ea2c92f2168eebc69e4c9ff2b --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/libnpmversion/lib/write-json.js @@ -0,0 +1,15 @@ +// write the json back, preserving the line breaks and indent +const { writeFile } = require('node:fs/promises') +const kIndent = Symbol.for('indent') +const kNewline = Symbol.for('newline') + +module.exports = async (path, pkg) => { + const { + [kIndent]: indent = 2, + [kNewline]: newline = '\n', + } = pkg + delete pkg._id + const raw = JSON.stringify(pkg, null, indent) + '\n' + const data = newline === '\n' ? raw : raw.split('\n').join(newline) + return writeFile(path, data) +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/lru-cache/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/lru-cache/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..f785757cd63f863c42f19d4f5a417141d6c4a890 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/lru-cache/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2010-2023 Isaac Z. 
Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/lru-cache/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/lru-cache/package.json new file mode 100644 index 0000000000000000000000000000000000000000..24bb077d632ea67a476cf4826380c3e0d4987c8c --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/lru-cache/package.json @@ -0,0 +1,113 @@ +{ + "name": "lru-cache", + "description": "A cache object that deletes the least-recently-used items.", + "version": "11.2.2", + "author": "Isaac Z. Schlueter ", + "keywords": [ + "mru", + "lru", + "cache" + ], + "sideEffects": false, + "scripts": { + "build": "npm run prepare", + "prepare": "tshy && bash fixup.sh", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "tap", + "snap": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "format": "prettier --write .", + "typedoc": "typedoc --tsconfig ./.tshy/esm.json ./src/*.ts", + "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh", + "prebenchmark": "npm run prepare", + "benchmark": "make -C benchmark", + "preprofile": "npm run prepare", + "profile": "make -C benchmark profile" + }, + "main": "./dist/commonjs/index.js", + "types": "./dist/commonjs/index.d.ts", + "tshy": { + "exports": { + ".": "./src/index.ts", + "./min": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.min.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.min.js" + } + } + } + }, + "repository": { + "type": "git", + "url": "git://github.com/isaacs/node-lru-cache.git" + }, + "devDependencies": { + "@types/node": "^24.3.0", + "benchmark": "^2.1.4", + "esbuild": "^0.25.9", + "marked": "^4.2.12", + "mkdirp": "^3.0.1", + "prettier": "^3.6.2", + "tap": "^21.1.0", + "tshy": "^3.0.2", + "typedoc": "^0.28.12" + }, + "license": "ISC", + "files": [ + "dist" + ], + "engines": { + "node": "20 || >=22" + }, + "prettier": { + "experimentalTernaries": true, + "semi": false, + "printWidth": 70, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "tap": { + "node-arg": [ + "--expose-gc" + ], + "plugin": [ + "@tapjs/clock" + ] + }, + "exports": { + ".": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + }, + "./min": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.min.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": 
"./dist/commonjs/index.min.js" + } + } + }, + "type": "module", + "module": "./dist/esm/index.js" +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/make-fetch-happen/lib/cache/entry.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/make-fetch-happen/lib/cache/entry.js new file mode 100644 index 0000000000000000000000000000000000000000..bfcfacbcc95e181cb1aaa15adfa33536679a4307 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/make-fetch-happen/lib/cache/entry.js @@ -0,0 +1,471 @@ +const { Request, Response } = require('minipass-fetch') +const { Minipass } = require('minipass') +const MinipassFlush = require('minipass-flush') +const cacache = require('cacache') +const url = require('url') + +const CachingMinipassPipeline = require('../pipeline.js') +const CachePolicy = require('./policy.js') +const cacheKey = require('./key.js') +const remote = require('../remote.js') + +const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop) + +// allow list for request headers that will be written to the cache index +// note: we will also store any request headers +// that are named in a response's vary header +const KEEP_REQUEST_HEADERS = [ + 'accept-charset', + 'accept-encoding', + 'accept-language', + 'accept', + 'cache-control', +] + +// allow list for response headers that will be written to the cache index +// note: we must not store the real response's age header, or when we load +// a cache policy based on the metadata it will think the cached response +// is always stale +const KEEP_RESPONSE_HEADERS = [ + 'cache-control', + 'content-encoding', + 'content-language', + 'content-type', + 'date', + 'etag', + 'expires', + 'last-modified', + 'link', + 'location', + 'pragma', + 'vary', +] + +// return an object containing all metadata to be written to the index +const getMetadata = (request, response, options) => { + const metadata = { + time: Date.now(), + url: request.url, + reqHeaders: {}, + resHeaders: {}, + + // options on which we must match the request and vary the response + options: { + compress: options.compress != null ? options.compress : request.compress, + }, + } + + // only save the status if it's not a 200 or 304 + if (response.status !== 200 && response.status !== 304) { + metadata.status = response.status + } + + for (const name of KEEP_REQUEST_HEADERS) { + if (request.headers.has(name)) { + metadata.reqHeaders[name] = request.headers.get(name) + } + } + + // if the request's host header differs from the host in the url + // we need to keep it, otherwise it's just noise and we ignore it + const host = request.headers.get('host') + const parsedUrl = new url.URL(request.url) + if (host && parsedUrl.host !== host) { + metadata.reqHeaders.host = host + } + + // if the response has a vary header, make sure + // we store the relevant request headers too + if (response.headers.has('vary')) { + const vary = response.headers.get('vary') + // a vary of "*" means every header causes a different response. 
+ // in that scenario, we do not include any additional headers + // as the freshness check will always fail anyway and we don't + // want to bloat the cache indexes + if (vary !== '*') { + // copy any other request headers that will vary the response + const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/) + for (const name of varyHeaders) { + if (request.headers.has(name)) { + metadata.reqHeaders[name] = request.headers.get(name) + } + } + } + } + + for (const name of KEEP_RESPONSE_HEADERS) { + if (response.headers.has(name)) { + metadata.resHeaders[name] = response.headers.get(name) + } + } + + for (const name of options.cacheAdditionalHeaders) { + if (response.headers.has(name)) { + metadata.resHeaders[name] = response.headers.get(name) + } + } + + return metadata +} + +// symbols used to hide objects that may be lazily evaluated in a getter +const _request = Symbol('request') +const _response = Symbol('response') +const _policy = Symbol('policy') + +class CacheEntry { + constructor ({ entry, request, response, options }) { + if (entry) { + this.key = entry.key + this.entry = entry + // previous versions of this module didn't write an explicit timestamp in + // the metadata, so fall back to the entry's timestamp. we can't use the + // entry timestamp to determine staleness because cacache will update it + // when it verifies its data + this.entry.metadata.time = this.entry.metadata.time || this.entry.time + } else { + this.key = cacheKey(request) + } + + this.options = options + + // these properties are behind getters that lazily evaluate + this[_request] = request + this[_response] = response + this[_policy] = null + } + + // returns a CacheEntry instance that satisfies the given request + // or undefined if no existing entry satisfies + static async find (request, options) { + try { + // compacts the index and returns an array of unique entries + var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => { + const entryA = new CacheEntry({ entry: A, options }) + const entryB = new CacheEntry({ entry: B, options }) + return entryA.policy.satisfies(entryB.request) + }, { + validateEntry: (entry) => { + // clean out entries with a buggy content-encoding value + if (entry.metadata && + entry.metadata.resHeaders && + entry.metadata.resHeaders['content-encoding'] === null) { + return false + } + + // if an integrity is null, it needs to have a status specified + if (entry.integrity === null) { + return !!(entry.metadata && entry.metadata.status) + } + + return true + }, + }) + } catch (err) { + // if the compact request fails, ignore the error and return + return + } + + // a cache mode of 'reload' means to behave as though we have no cache + // on the way to the network. return undefined to allow cacheFetch to + // create a brand new request no matter what. 
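+    // (note that the cacache.index.compact call above has already run by
+    // this point, so even a 'reload' request leaves the index deduplicated)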
+ if (options.cache === 'reload') { + return + } + + // find the specific entry that satisfies the request + let match + for (const entry of matches) { + const _entry = new CacheEntry({ + entry, + options, + }) + + if (_entry.policy.satisfies(request)) { + match = _entry + break + } + } + + return match + } + + // if the user made a PUT/POST/PATCH then we invalidate our + // cache for the same url by deleting the index entirely + static async invalidate (request, options) { + const key = cacheKey(request) + try { + await cacache.rm.entry(options.cachePath, key, { removeFully: true }) + } catch (err) { + // ignore errors + } + } + + get request () { + if (!this[_request]) { + this[_request] = new Request(this.entry.metadata.url, { + method: 'GET', + headers: this.entry.metadata.reqHeaders, + ...this.entry.metadata.options, + }) + } + + return this[_request] + } + + get response () { + if (!this[_response]) { + this[_response] = new Response(null, { + url: this.entry.metadata.url, + counter: this.options.counter, + status: this.entry.metadata.status || 200, + headers: { + ...this.entry.metadata.resHeaders, + 'content-length': this.entry.size, + }, + }) + } + + return this[_response] + } + + get policy () { + if (!this[_policy]) { + this[_policy] = new CachePolicy({ + entry: this.entry, + request: this.request, + response: this.response, + options: this.options, + }) + } + + return this[_policy] + } + + // wraps the response in a pipeline that stores the data + // in the cache while the user consumes it + async store (status) { + // if we got a status other than 200, 301, or 308, + // or the CachePolicy forbid storage, append the + // cache status header and return it untouched + if ( + this.request.method !== 'GET' || + ![200, 301, 308].includes(this.response.status) || + !this.policy.storable() + ) { + this.response.headers.set('x-local-cache-status', 'skip') + return this.response + } + + const size = this.response.headers.get('content-length') + const cacheOpts = { + algorithms: this.options.algorithms, + metadata: getMetadata(this.request, this.response, this.options), + size, + integrity: this.options.integrity, + integrityEmitter: this.response.body.hasIntegrityEmitter && this.response.body, + } + + let body = null + // we only set a body if the status is a 200, redirects are + // stored as metadata only + if (this.response.status === 200) { + let cacheWriteResolve, cacheWriteReject + const cacheWritePromise = new Promise((resolve, reject) => { + cacheWriteResolve = resolve + cacheWriteReject = reject + }).catch((err) => { + body.emit('error', err) + }) + + body = new CachingMinipassPipeline({ events: ['integrity', 'size'] }, new MinipassFlush({ + flush () { + return cacheWritePromise + }, + })) + // this is always true since if we aren't reusing the one from the remote fetch, we + // are using the one from cacache + body.hasIntegrityEmitter = true + + const onResume = () => { + const tee = new Minipass() + const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts) + // re-emit the integrity and size events on our new response body so they can be reused + cacheStream.on('integrity', i => body.emit('integrity', i)) + cacheStream.on('size', s => body.emit('size', s)) + // stick a flag on here so downstream users will know if they can expect integrity events + tee.pipe(cacheStream) + // TODO if the cache write fails, log a warning but return the response anyway + // eslint-disable-next-line promise/catch-or-return + cacheStream.promise().then(cacheWriteResolve, 
cacheWriteReject) + body.unshift(tee) + body.unshift(this.response.body) + } + + body.once('resume', onResume) + body.once('end', () => body.removeListener('resume', onResume)) + } else { + await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts) + } + + // note: we do not set the x-local-cache-hash header because we do not know + // the hash value until after the write to the cache completes, which doesn't + // happen until after the response has been sent and it's too late to write + // the header anyway + this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) + this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) + this.response.headers.set('x-local-cache-mode', 'stream') + this.response.headers.set('x-local-cache-status', status) + this.response.headers.set('x-local-cache-time', new Date().toISOString()) + const newResponse = new Response(body, { + url: this.response.url, + status: this.response.status, + headers: this.response.headers, + counter: this.options.counter, + }) + return newResponse + } + + // use the cached data to create a response and return it + async respond (method, options, status) { + let response + if (method === 'HEAD' || [301, 308].includes(this.response.status)) { + // if the request is a HEAD, or the response is a redirect, + // then the metadata in the entry already includes everything + // we need to build a response + response = this.response + } else { + // we're responding with a full cached response, so create a body + // that reads from cacache and attach it to a new Response + const body = new Minipass() + const headers = { ...this.policy.responseHeaders() } + + const onResume = () => { + const cacheStream = cacache.get.stream.byDigest( + this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } + ) + cacheStream.on('error', async (err) => { + cacheStream.pause() + if (err.code === 'EINTEGRITY') { + await cacache.rm.content( + this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } + ) + } + if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') { + await CacheEntry.invalidate(this.request, this.options) + } + body.emit('error', err) + cacheStream.resume() + }) + // emit the integrity and size events based on our metadata so we're consistent + body.emit('integrity', this.entry.integrity) + body.emit('size', Number(headers['content-length'])) + cacheStream.pipe(body) + } + + body.once('resume', onResume) + body.once('end', () => body.removeListener('resume', onResume)) + response = new Response(body, { + url: this.entry.metadata.url, + counter: options.counter, + status: 200, + headers, + }) + } + + response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) + response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity)) + response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) + response.headers.set('x-local-cache-mode', 'stream') + response.headers.set('x-local-cache-status', status) + response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString()) + return response + } + + // use the provided request along with this cache entry to + // revalidate the stored response. 
returns a response, either + // from the cache or from the update + async revalidate (request, options) { + const revalidateRequest = new Request(request, { + headers: this.policy.revalidationHeaders(request), + }) + + try { + // NOTE: be sure to remove the headers property from the + // user supplied options, since we have already defined + // them on the new request object. if they're still in the + // options then those will overwrite the ones from the policy + var response = await remote(revalidateRequest, { + ...options, + headers: undefined, + }) + } catch (err) { + // if the network fetch fails, return the stale + // cached response unless it has a cache-control + // of 'must-revalidate' + if (!this.policy.mustRevalidate) { + return this.respond(request.method, options, 'stale') + } + + throw err + } + + if (this.policy.revalidated(revalidateRequest, response)) { + // we got a 304, write a new index to the cache and respond from cache + const metadata = getMetadata(request, response, options) + // 304 responses do not include headers that are specific to the response data + // since they do not include a body, so we copy values for headers that were + // in the old cache entry to the new one, if the new metadata does not already + // include that header + for (const name of KEEP_RESPONSE_HEADERS) { + if ( + !hasOwnProperty(metadata.resHeaders, name) && + hasOwnProperty(this.entry.metadata.resHeaders, name) + ) { + metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] + } + } + + for (const name of options.cacheAdditionalHeaders) { + const inMeta = hasOwnProperty(metadata.resHeaders, name) + const inEntry = hasOwnProperty(this.entry.metadata.resHeaders, name) + const inPolicy = hasOwnProperty(this.policy.response.headers, name) + + // if the header is in the existing entry, but it is not in the metadata + // then we need to write it to the metadata as this will refresh the on-disk cache + if (!inMeta && inEntry) { + metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] + } + // if the header is in the metadata, but not in the policy, then we need to set + // it in the policy so that it's included in the immediate response. 
future + // responses will load a new cache entry, so we don't need to change that + if (!inPolicy && inMeta) { + this.policy.response.headers[name] = metadata.resHeaders[name] + } + } + + try { + await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, { + size: this.entry.size, + metadata, + }) + } catch (err) { + // if updating the cache index fails, we ignore it and + // respond anyway + } + return this.respond(request.method, options, 'revalidated') + } + + // if we got a modified response, create a new entry based on it + const newEntry = new CacheEntry({ + request, + response, + options, + }) + + // respond with the new entry while writing it to the cache + return newEntry.store('updated') + } +} + +module.exports = CacheEntry diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/make-fetch-happen/lib/cache/errors.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/make-fetch-happen/lib/cache/errors.js new file mode 100644 index 0000000000000000000000000000000000000000..67a66573bebe668050d374ec969cc217bc778d18 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/make-fetch-happen/lib/cache/errors.js @@ -0,0 +1,11 @@ +class NotCachedError extends Error { + constructor (url) { + /* eslint-disable-next-line max-len */ + super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`) + this.code = 'ENOTCACHED' + } +} + +module.exports = { + NotCachedError, +} diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/make-fetch-happen/lib/cache/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/make-fetch-happen/lib/cache/index.js new file mode 100644 index 0000000000000000000000000000000000000000..0de49d23fb9336579de2cad6c25a1591938371d2 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/make-fetch-happen/lib/cache/index.js @@ -0,0 +1,49 @@ +const { NotCachedError } = require('./errors.js') +const CacheEntry = require('./entry.js') +const remote = require('../remote.js') + +// do whatever is necessary to get a Response and return it +const cacheFetch = async (request, options) => { + // try to find a cached entry that satisfies this request + const entry = await CacheEntry.find(request, options) + if (!entry) { + // no cached result, if the cache mode is 'only-if-cached' that's a failure + if (options.cache === 'only-if-cached') { + throw new NotCachedError(request.url) + } + + // otherwise, we make a request, store it and return it + const response = await remote(request, options) + const newEntry = new CacheEntry({ request, response, options }) + return newEntry.store('miss') + } + + // we have a cached response that satisfies this request, however if the cache + // mode is 'no-cache' then we send the revalidation request no matter what + if (options.cache === 'no-cache') { + return entry.revalidate(request, options) + } + + // if the cached entry is not stale, or if the cache mode is 'force-cache' or + // 'only-if-cached' we can respond with the cached entry. set the status + // based on the result of needsRevalidation and respond + const _needsRevalidation = entry.policy.needsRevalidation(request) + if (options.cache === 'force-cache' || + options.cache === 'only-if-cached' || + !_needsRevalidation) { + return entry.respond(request.method, options, _needsRevalidation ? 
'stale' : 'hit') + } + + // if we got here, the cache entry is stale so revalidate it + return entry.revalidate(request, options) +} + +cacheFetch.invalidate = async (request, options) => { + if (!options.cachePath) { + return + } + + return CacheEntry.invalidate(request, options) +} + +module.exports = cacheFetch diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/make-fetch-happen/lib/cache/key.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/make-fetch-happen/lib/cache/key.js new file mode 100644 index 0000000000000000000000000000000000000000..f7684d562b7fae326eaebd728c24a6e31ed826d6 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/make-fetch-happen/lib/cache/key.js @@ -0,0 +1,17 @@ +const { URL, format } = require('url') + +// options passed to url.format() when generating a key +const formatOptions = { + auth: false, + fragment: false, + search: true, + unicode: false, +} + +// returns a string to be used as the cache key for the Request +const cacheKey = (request) => { + const parsed = new URL(request.url) + return `make-fetch-happen:request-cache:${format(parsed, formatOptions)}` +} + +module.exports = cacheKey diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/make-fetch-happen/lib/cache/policy.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/make-fetch-happen/lib/cache/policy.js new file mode 100644 index 0000000000000000000000000000000000000000..ada3c8600dae924500f210d80332a8fd449cc9fe --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/make-fetch-happen/lib/cache/policy.js @@ -0,0 +1,161 @@ +const CacheSemantics = require('http-cache-semantics') +const Negotiator = require('negotiator') +const ssri = require('ssri') + +// options passed to http-cache-semantics constructor +const policyOptions = { + shared: false, + ignoreCargoCult: true, +} + +// a fake empty response, used when only testing the +// request for storability +const emptyResponse = { status: 200, headers: {} } + +// returns a plain object representation of the Request +const requestObject = (request) => { + const _obj = { + method: request.method, + url: request.url, + headers: {}, + compress: request.compress, + } + + request.headers.forEach((value, key) => { + _obj.headers[key] = value + }) + + return _obj +} + +// returns a plain object representation of the Response +const responseObject = (response) => { + const _obj = { + status: response.status, + headers: {}, + } + + response.headers.forEach((value, key) => { + _obj.headers[key] = value + }) + + return _obj +} + +class CachePolicy { + constructor ({ entry, request, response, options }) { + this.entry = entry + this.request = requestObject(request) + this.response = responseObject(response) + this.options = options + this.policy = new CacheSemantics(this.request, this.response, policyOptions) + + if (this.entry) { + // if we have an entry, copy the timestamp to the _responseTime + // this is necessary because the CacheSemantics constructor forces + // the value to Date.now() which means a policy created from a + // cache entry is likely to always identify itself as stale + this.policy._responseTime = this.entry.metadata.time + } + } + + // static method to quickly determine if a request alone is storable + static storable (request, options) { + // no cachePath means no caching + if (!options.cachePath) { + return false + } + + // user explicitly asked not to 
cache + if (options.cache === 'no-store') { + return false + } + + // we only cache GET and HEAD requests + if (!['GET', 'HEAD'].includes(request.method)) { + return false + } + + // otherwise, let http-cache-semantics make the decision + // based on the request's headers + const policy = new CacheSemantics(requestObject(request), emptyResponse, policyOptions) + return policy.storable() + } + + // returns true if the policy satisfies the request + satisfies (request) { + const _req = requestObject(request) + if (this.request.headers.host !== _req.headers.host) { + return false + } + + if (this.request.compress !== _req.compress) { + return false + } + + const negotiatorA = new Negotiator(this.request) + const negotiatorB = new Negotiator(_req) + + if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) { + return false + } + + if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) { + return false + } + + if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) { + return false + } + + if (this.options.integrity) { + return ssri.parse(this.options.integrity).match(this.entry.integrity) + } + + return true + } + + // returns true if the request and response allow caching + storable () { + return this.policy.storable() + } + + // NOTE: this is a hack to avoid parsing the cache-control + // header ourselves, it returns true if the response's + // cache-control contains must-revalidate + get mustRevalidate () { + return !!this.policy._rescc['must-revalidate'] + } + + // returns true if the cached response requires revalidation + // for the given request + needsRevalidation (request) { + const _req = requestObject(request) + // force method to GET because we only cache GETs + // but can serve a HEAD from a cached GET + _req.method = 'GET' + return !this.policy.satisfiesWithoutRevalidation(_req) + } + + responseHeaders () { + return this.policy.responseHeaders() + } + + // returns a new object containing the appropriate headers + // to send a revalidation request + revalidationHeaders (request) { + const _req = requestObject(request) + return this.policy.revalidationHeaders(_req) + } + + // returns true if the request/response was revalidated + // successfully. returns false if a new response was received + revalidated (request, response) { + const _req = requestObject(request) + const _res = responseObject(response) + const policy = this.policy.revalidatedPolicy(_req, _res) + return !policy.modified + } +} + +module.exports = CachePolicy diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/make-fetch-happen/lib/fetch.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/make-fetch-happen/lib/fetch.js new file mode 100644 index 0000000000000000000000000000000000000000..233ba67e1655028526d9c478530c3d92c2ed186c --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/make-fetch-happen/lib/fetch.js @@ -0,0 +1,118 @@ +'use strict' + +const { FetchError, Request, isRedirect } = require('minipass-fetch') +const url = require('url') + +const CachePolicy = require('./cache/policy.js') +const cache = require('./cache/index.js') +const remote = require('./remote.js') + +// given a Request, a Response and user options +// return true if the response is a redirect that +// can be followed. 
we throw errors that will result +// in the fetch being rejected if the redirect is +// possible but invalid for some reason +const canFollowRedirect = (request, response, options) => { + if (!isRedirect(response.status)) { + return false + } + + if (options.redirect === 'manual') { + return false + } + + if (options.redirect === 'error') { + throw new FetchError(`redirect mode is set to error: ${request.url}`, + 'no-redirect', { code: 'ENOREDIRECT' }) + } + + if (!response.headers.has('location')) { + throw new FetchError(`redirect location header missing for: ${request.url}`, + 'no-location', { code: 'EINVALIDREDIRECT' }) + } + + if (request.counter >= request.follow) { + throw new FetchError(`maximum redirect reached at: ${request.url}`, + 'max-redirect', { code: 'EMAXREDIRECT' }) + } + + return true +} + +// given a Request, a Response, and the user's options return an object +// with a new Request and a new options object that will be used for +// following the redirect +const getRedirect = (request, response, options) => { + const _opts = { ...options } + const location = response.headers.get('location') + const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url) + // Comment below is used under the following license: + /** + * @license + * Copyright (c) 2010-2012 Mikeal Rogers + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an "AS + * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language + * governing permissions and limitations under the License. + */ + + // Remove authorization if changing hostnames (but not if just + // changing ports or protocols). This matches the behavior of request: + // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138 + if (new url.URL(request.url).hostname !== redirectUrl.hostname) { + request.headers.delete('authorization') + request.headers.delete('cookie') + } + + // for POST request with 301/302 response, or any request with 303 response, + // use GET when following redirect + if ( + response.status === 303 || + (request.method === 'POST' && [301, 302].includes(response.status)) + ) { + _opts.method = 'GET' + _opts.body = null + request.headers.delete('content-length') + } + + _opts.headers = {} + request.headers.forEach((value, key) => { + _opts.headers[key] = value + }) + + _opts.counter = ++request.counter + const redirectReq = new Request(url.format(redirectUrl), _opts) + return { + request: redirectReq, + options: _opts, + } +} + +const fetch = async (request, options) => { + const response = CachePolicy.storable(request, options) + ? 
await cache(request, options) + : await remote(request, options) + + // if the request wasn't a GET or HEAD, and the response + // status is between 200 and 399 inclusive, invalidate the + // request url + if (!['GET', 'HEAD'].includes(request.method) && + response.status >= 200 && + response.status <= 399) { + await cache.invalidate(request, options) + } + + if (!canFollowRedirect(request, response, options)) { + return response + } + + const redirect = getRedirect(request, response, options) + return fetch(redirect.request, redirect.options) +} + +module.exports = fetch diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/make-fetch-happen/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/make-fetch-happen/lib/index.js new file mode 100644 index 0000000000000000000000000000000000000000..2f12e8e1b611310d16de44b7ca06d6e8296ab5f9 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/make-fetch-happen/lib/index.js @@ -0,0 +1,41 @@ +const { FetchError, Headers, Request, Response } = require('minipass-fetch') + +const configureOptions = require('./options.js') +const fetch = require('./fetch.js') + +const makeFetchHappen = (url, opts) => { + const options = configureOptions(opts) + + const request = new Request(url, options) + return fetch(request, options) +} + +makeFetchHappen.defaults = (defaultUrl, defaultOptions = {}, wrappedFetch = makeFetchHappen) => { + if (typeof defaultUrl === 'object') { + defaultOptions = defaultUrl + defaultUrl = null + } + + const defaultedFetch = (url, options = {}) => { + const finalUrl = url || defaultUrl + const finalOptions = { + ...defaultOptions, + ...options, + headers: { + ...defaultOptions.headers, + ...options.headers, + }, + } + return wrappedFetch(finalUrl, finalOptions) + } + + defaultedFetch.defaults = (defaultUrl1, defaultOptions1 = {}) => + makeFetchHappen.defaults(defaultUrl1, defaultOptions1, defaultedFetch) + return defaultedFetch +} + +module.exports = makeFetchHappen +module.exports.FetchError = FetchError +module.exports.Headers = Headers +module.exports.Request = Request +module.exports.Response = Response diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/make-fetch-happen/lib/options.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/make-fetch-happen/lib/options.js new file mode 100644 index 0000000000000000000000000000000000000000..db51cc63248176c0deb939e052e650ab268a80ac --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/make-fetch-happen/lib/options.js @@ -0,0 +1,59 @@ +const dns = require('dns') + +const conditionalHeaders = [ + 'if-modified-since', + 'if-none-match', + 'if-unmodified-since', + 'if-match', + 'if-range', +] + +const configureOptions = (opts) => { + const { strictSSL, ...options } = { ...opts } + options.method = options.method ? 
options.method.toUpperCase() : 'GET' + + if (strictSSL === undefined || strictSSL === null) { + options.rejectUnauthorized = process.env.NODE_TLS_REJECT_UNAUTHORIZED !== '0' + } else { + options.rejectUnauthorized = strictSSL !== false + } + + if (!options.retry) { + options.retry = { retries: 0 } + } else if (typeof options.retry === 'string') { + const retries = parseInt(options.retry, 10) + if (isFinite(retries)) { + options.retry = { retries } + } else { + options.retry = { retries: 0 } + } + } else if (typeof options.retry === 'number') { + options.retry = { retries: options.retry } + } else { + options.retry = { retries: 0, ...options.retry } + } + + options.dns = { ttl: 5 * 60 * 1000, lookup: dns.lookup, ...options.dns } + + options.cache = options.cache || 'default' + if (options.cache === 'default') { + const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => { + return conditionalHeaders.includes(name.toLowerCase()) + }) + if (hasConditionalHeader) { + options.cache = 'no-store' + } + } + + options.cacheAdditionalHeaders = options.cacheAdditionalHeaders || [] + + // cacheManager is deprecated, but if it's set and + // cachePath is not we should copy it to the new field + if (options.cacheManager && !options.cachePath) { + options.cachePath = options.cacheManager + } + + return options +} + +module.exports = configureOptions diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/make-fetch-happen/lib/pipeline.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/make-fetch-happen/lib/pipeline.js new file mode 100644 index 0000000000000000000000000000000000000000..b1d221b2d0ce311f12703adbab63c65b7bfec1ed --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/make-fetch-happen/lib/pipeline.js @@ -0,0 +1,41 @@ +'use strict' + +const MinipassPipeline = require('minipass-pipeline') + +class CachingMinipassPipeline extends MinipassPipeline { + #events = [] + #data = new Map() + + constructor (opts, ...streams) { + // CRITICAL: do NOT pass the streams to the call to super(), this will start + // the flow of data and potentially cause the events we need to catch to emit + // before we've finished our own setup. 
instead we call super() with no args, + // finish our setup, and then push the streams into ourselves to start the + // data flow + super() + this.#events = opts.events + + /* istanbul ignore next - coverage disabled because this is pointless to test here */ + if (streams.length) { + this.push(...streams) + } + } + + on (event, handler) { + if (this.#events.includes(event) && this.#data.has(event)) { + return handler(...this.#data.get(event)) + } + + return super.on(event, handler) + } + + emit (event, ...data) { + if (this.#events.includes(event)) { + this.#data.set(event, data) + } + + return super.emit(event, ...data) + } +} + +module.exports = CachingMinipassPipeline diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/make-fetch-happen/lib/remote.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/make-fetch-happen/lib/remote.js new file mode 100644 index 0000000000000000000000000000000000000000..1d640e5380baaf5be8f3dfdf7fa8099656ade8e4 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/make-fetch-happen/lib/remote.js @@ -0,0 +1,132 @@ +const { Minipass } = require('minipass') +const fetch = require('minipass-fetch') +const promiseRetry = require('promise-retry') +const ssri = require('ssri') +const { log } = require('proc-log') + +const CachingMinipassPipeline = require('./pipeline.js') +const { getAgent } = require('@npmcli/agent') +const pkg = require('../package.json') + +const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})` + +const RETRY_ERRORS = [ + 'ECONNRESET', // remote socket closed on us + 'ECONNREFUSED', // remote host refused to open connection + 'EADDRINUSE', // failed to bind to a local port (proxy?) + 'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW + // from @npmcli/agent + 'ECONNECTIONTIMEOUT', + 'EIDLETIMEOUT', + 'ERESPONSETIMEOUT', + 'ETRANSFERTIMEOUT', + // Known codes we do NOT retry on: + // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline) + // EINVALIDPROXY // invalid protocol from @npmcli/agent + // EINVALIDRESPONSE // invalid status code from @npmcli/agent +] + +const RETRY_TYPES = [ + 'request-timeout', +] + +// make a request directly to the remote source, +// retrying certain classes of errors as well as +// following redirects (through the cache if necessary) +// and verifying response integrity +const remoteFetch = (request, options) => { + // options.signal is intended for the fetch itself, not the agent. Attaching it to the agent will re-use that signal across multiple requests, which prevents any connections beyond the first one. + const agent = getAgent(request.url, { ...options, signal: undefined }) + if (!request.headers.has('connection')) { + request.headers.set('connection', agent ? 
'keep-alive' : 'close') + } + + if (!request.headers.has('user-agent')) { + request.headers.set('user-agent', USER_AGENT) + } + + // keep our own options since we're overriding the agent + // and the redirect mode + const _opts = { + ...options, + agent, + redirect: 'manual', + } + + return promiseRetry(async (retryHandler, attemptNum) => { + const req = new fetch.Request(request, _opts) + try { + let res = await fetch(req, _opts) + if (_opts.integrity && res.status === 200) { + // we got a 200 response and the user has specified an expected + // integrity value, so wrap the response in an ssri stream to verify it + const integrityStream = ssri.integrityStream({ + algorithms: _opts.algorithms, + integrity: _opts.integrity, + size: _opts.size, + }) + const pipeline = new CachingMinipassPipeline({ + events: ['integrity', 'size'], + }, res.body, integrityStream) + // we also propagate the integrity and size events out to the pipeline so we can use + // this new response body as an integrityEmitter for cacache + integrityStream.on('integrity', i => pipeline.emit('integrity', i)) + integrityStream.on('size', s => pipeline.emit('size', s)) + res = new fetch.Response(pipeline, res) + // set an explicit flag so we know if our response body will emit integrity and size + res.body.hasIntegrityEmitter = true + } + + res.headers.set('x-fetch-attempts', attemptNum) + + // do not retry POST requests, or requests with a streaming body + // do retry requests with a 408, 420, 429 or 500+ status in the response + const isStream = Minipass.isStream(req.body) + const isRetriable = req.method !== 'POST' && + !isStream && + ([408, 420, 429].includes(res.status) || res.status >= 500) + + if (isRetriable) { + if (typeof options.onRetry === 'function') { + options.onRetry(res) + } + + /* eslint-disable-next-line max-len */ + log.http('fetch', `${req.method} ${req.url} attempt ${attemptNum} failed with ${res.status}`) + return retryHandler(res) + } + + return res + } catch (err) { + const code = (err.code === 'EPROMISERETRY') + ? 
err.retried.code + : err.code + + // err.retried will be the thing that was thrown from above + // if it's a response, we just got a bad status code and we + // can re-throw to allow the retry + const isRetryError = err.retried instanceof fetch.Response || + (RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type)) + + if (req.method === 'POST' || isRetryError) { + throw err + } + + if (typeof options.onRetry === 'function') { + options.onRetry(err) + } + + log.http('fetch', `${req.method} ${req.url} attempt ${attemptNum} failed with ${err.code}`) + return retryHandler(err) + } + }, options.retry).catch((err) => { + // don't reject for http errors, just return them + if (err.status >= 400 && err.type !== 'system') { + return err + } + + throw err + }) +} + +module.exports = remoteFetch diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js new file mode 100644 index 0000000000000000000000000000000000000000..5fc86bbd0116c9b0e2579cb27de4ad53af938fc6 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.assertValidPattern = void 0; +const MAX_PATTERN_LENGTH = 1024 * 64; +const assertValidPattern = (pattern) => { + if (typeof pattern !== 'string') { + throw new TypeError('invalid pattern'); + } + if (pattern.length > MAX_PATTERN_LENGTH) { + throw new TypeError('pattern is too long'); + } +}; +exports.assertValidPattern = assertValidPattern; +//# sourceMappingURL=assert-valid-pattern.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/commonjs/ast.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/commonjs/ast.js new file mode 100644 index 0000000000000000000000000000000000000000..9e1f9e765c597eadf2e7d254264d9e8d634f4acb --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/commonjs/ast.js @@ -0,0 +1,592 @@ +"use strict"; +// parse a single path portion +Object.defineProperty(exports, "__esModule", { value: true }); +exports.AST = void 0; +const brace_expressions_js_1 = require("./brace-expressions.js"); +const unescape_js_1 = require("./unescape.js"); +const types = new Set(['!', '?', '+', '*', '@']); +const isExtglobType = (c) => types.has(c); +// Patterns that get prepended to bind to the start of either the +// entire string, or just a single path portion, to prevent dots +// and/or traversal patterns, when needed. +// Exts don't need the ^ or / bit, because the root binds that already. +const startNoTraversal = '(?!(?:^|/)\\.\\.?(?:$|/))'; +const startNoDot = '(?!\\.)'; +// characters that indicate a start of pattern needs the "no dots" bit, +// because a dot *might* be matched. ( is not in the list, because in +// the case of a child extglob, it will handle the prevention itself. 
+const addPatternStart = new Set(['[', '.']); +// cases where traversal is A-OK, no dot prevention needed +const justDots = new Set(['..', '.']); +const reSpecials = new Set('().*{}+?[]^$\\!'); +const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&'); +// any single thing other than / +const qmark = '[^/]'; +// * => any number of characters +const star = qmark + '*?'; +// use + when we need to ensure that *something* matches, because the * is +// the only thing in the path portion. +const starNoEmpty = qmark + '+?'; +// remove the \ chars that we added if we end up doing a nonmagic compare +// const deslash = (s: string) => s.replace(/\\(.)/g, '$1') +class AST { + type; + #root; + #hasMagic; + #uflag = false; + #parts = []; + #parent; + #parentIndex; + #negs; + #filledNegs = false; + #options; + #toString; + // set to true if it's an extglob with no children + // (which really means one child of '') + #emptyExt = false; + constructor(type, parent, options = {}) { + this.type = type; + // extglobs are inherently magical + if (type) + this.#hasMagic = true; + this.#parent = parent; + this.#root = this.#parent ? this.#parent.#root : this; + this.#options = this.#root === this ? options : this.#root.#options; + this.#negs = this.#root === this ? [] : this.#root.#negs; + if (type === '!' && !this.#root.#filledNegs) + this.#negs.push(this); + this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0; + } + get hasMagic() { + /* c8 ignore start */ + if (this.#hasMagic !== undefined) + return this.#hasMagic; + /* c8 ignore stop */ + for (const p of this.#parts) { + if (typeof p === 'string') + continue; + if (p.type || p.hasMagic) + return (this.#hasMagic = true); + } + // note: will be undefined until we generate the regexp src and find out + return this.#hasMagic; + } + // reconstructs the pattern + toString() { + if (this.#toString !== undefined) + return this.#toString; + if (!this.type) { + return (this.#toString = this.#parts.map(p => String(p)).join('')); + } + else { + return (this.#toString = + this.type + '(' + this.#parts.map(p => String(p)).join('|') + ')'); + } + } + #fillNegs() { + /* c8 ignore start */ + if (this !== this.#root) + throw new Error('should only call on root'); + if (this.#filledNegs) + return this; + /* c8 ignore stop */ + // call toString() once to fill this out + this.toString(); + this.#filledNegs = true; + let n; + while ((n = this.#negs.pop())) { + if (n.type !== '!') + continue; + // walk up the tree, appending everthing that comes AFTER parentIndex + let p = n; + let pp = p.#parent; + while (pp) { + for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) { + for (const part of n.#parts) { + /* c8 ignore start */ + if (typeof part === 'string') { + throw new Error('string part in extglob AST??'); + } + /* c8 ignore stop */ + part.copyIn(pp.#parts[i]); + } + } + p = pp; + pp = p.#parent; + } + } + return this; + } + push(...parts) { + for (const p of parts) { + if (p === '') + continue; + /* c8 ignore start */ + if (typeof p !== 'string' && !(p instanceof AST && p.#parent === this)) { + throw new Error('invalid part: ' + p); + } + /* c8 ignore stop */ + this.#parts.push(p); + } + } + toJSON() { + const ret = this.type === null + ? this.#parts.slice().map(p => (typeof p === 'string' ? 
p : p.toJSON())) + : [this.type, ...this.#parts.map(p => p.toJSON())]; + if (this.isStart() && !this.type) + ret.unshift([]); + if (this.isEnd() && + (this === this.#root || + (this.#root.#filledNegs && this.#parent?.type === '!'))) { + ret.push({}); + } + return ret; + } + isStart() { + if (this.#root === this) + return true; + // if (this.type) return !!this.#parent?.isStart() + if (!this.#parent?.isStart()) + return false; + if (this.#parentIndex === 0) + return true; + // if everything AHEAD of this is a negation, then it's still the "start" + const p = this.#parent; + for (let i = 0; i < this.#parentIndex; i++) { + const pp = p.#parts[i]; + if (!(pp instanceof AST && pp.type === '!')) { + return false; + } + } + return true; + } + isEnd() { + if (this.#root === this) + return true; + if (this.#parent?.type === '!') + return true; + if (!this.#parent?.isEnd()) + return false; + if (!this.type) + return this.#parent?.isEnd(); + // if not root, it'll always have a parent + /* c8 ignore start */ + const pl = this.#parent ? this.#parent.#parts.length : 0; + /* c8 ignore stop */ + return this.#parentIndex === pl - 1; + } + copyIn(part) { + if (typeof part === 'string') + this.push(part); + else + this.push(part.clone(this)); + } + clone(parent) { + const c = new AST(this.type, parent); + for (const p of this.#parts) { + c.copyIn(p); + } + return c; + } + static #parseAST(str, ast, pos, opt) { + let escaping = false; + let inBrace = false; + let braceStart = -1; + let braceNeg = false; + if (ast.type === null) { + // outside of a extglob, append until we find a start + let i = pos; + let acc = ''; + while (i < str.length) { + const c = str.charAt(i++); + // still accumulate escapes at this point, but we do ignore + // starts that are escaped + if (escaping || c === '\\') { + escaping = !escaping; + acc += c; + continue; + } + if (inBrace) { + if (i === braceStart + 1) { + if (c === '^' || c === '!') { + braceNeg = true; + } + } + else if (c === ']' && !(i === braceStart + 2 && braceNeg)) { + inBrace = false; + } + acc += c; + continue; + } + else if (c === '[') { + inBrace = true; + braceStart = i; + braceNeg = false; + acc += c; + continue; + } + if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') { + ast.push(acc); + acc = ''; + const ext = new AST(c, ast); + i = AST.#parseAST(str, ext, i, opt); + ast.push(ext); + continue; + } + acc += c; + } + ast.push(acc); + return i; + } + // some kind of extglob, pos is at the ( + // find the next | or ) + let i = pos + 1; + let part = new AST(null, ast); + const parts = []; + let acc = ''; + while (i < str.length) { + const c = str.charAt(i++); + // still accumulate escapes at this point, but we do ignore + // starts that are escaped + if (escaping || c === '\\') { + escaping = !escaping; + acc += c; + continue; + } + if (inBrace) { + if (i === braceStart + 1) { + if (c === '^' || c === '!') { + braceNeg = true; + } + } + else if (c === ']' && !(i === braceStart + 2 && braceNeg)) { + inBrace = false; + } + acc += c; + continue; + } + else if (c === '[') { + inBrace = true; + braceStart = i; + braceNeg = false; + acc += c; + continue; + } + if (isExtglobType(c) && str.charAt(i) === '(') { + part.push(acc); + acc = ''; + const ext = new AST(c, part); + part.push(ext); + i = AST.#parseAST(str, ext, i, opt); + continue; + } + if (c === '|') { + part.push(acc); + acc = ''; + parts.push(part); + part = new AST(null, ast); + continue; + } + if (c === ')') { + if (acc === '' && ast.#parts.length === 0) { + ast.#emptyExt = true; + } + 
part.push(acc); + acc = ''; + ast.push(...parts, part); + return i; + } + acc += c; + } + // unfinished extglob + // if we got here, it was a malformed extglob! not an extglob, but + // maybe something else in there. + ast.type = null; + ast.#hasMagic = undefined; + ast.#parts = [str.substring(pos - 1)]; + return i; + } + static fromGlob(pattern, options = {}) { + const ast = new AST(null, undefined, options); + AST.#parseAST(pattern, ast, 0, options); + return ast; + } + // returns the regular expression if there's magic, or the unescaped + // string if not. + toMMPattern() { + // should only be called on root + /* c8 ignore start */ + if (this !== this.#root) + return this.#root.toMMPattern(); + /* c8 ignore stop */ + const glob = this.toString(); + const [re, body, hasMagic, uflag] = this.toRegExpSource(); + // if we're in nocase mode, and not nocaseMagicOnly, then we do + // still need a regular expression if we have to case-insensitively + // match capital/lowercase characters. + const anyMagic = hasMagic || + this.#hasMagic || + (this.#options.nocase && + !this.#options.nocaseMagicOnly && + glob.toUpperCase() !== glob.toLowerCase()); + if (!anyMagic) { + return body; + } + const flags = (this.#options.nocase ? 'i' : '') + (uflag ? 'u' : ''); + return Object.assign(new RegExp(`^${re}$`, flags), { + _src: re, + _glob: glob, + }); + } + get options() { + return this.#options; + } + // returns the string match, the regexp source, whether there's magic + // in the regexp (so a regular expression is required) and whether or + // not the uflag is needed for the regular expression (for posix classes) + // TODO: instead of injecting the start/end at this point, just return + // the BODY of the regexp, along with the start/end portions suitable + // for binding the start/end in either a joined full-path makeRe context + // (where we bind to (^|/), or a standalone matchPart context (where + // we bind to ^, and not /). Otherwise slashes get duped! + // + // In part-matching mode, the start is: + // - if not isStart: nothing + // - if traversal possible, but not allowed: ^(?!\.\.?$) + // - if dots allowed or not possible: ^ + // - if dots possible and not allowed: ^(?!\.) + // end is: + // - if not isEnd(): nothing + // - else: $ + // + // In full-path matching mode, we put the slash at the START of the + // pattern, so start is: + // - if first pattern: same as part-matching mode + // - if not isStart(): nothing + // - if traversal possible, but not allowed: /(?!\.\.?(?:$|/)) + // - if dots allowed or not possible: / + // - if dots possible and not allowed: /(?!\.) + // end is: + // - if last pattern, same as part-matching mode + // - else nothing + // + // Always put the (?:$|/) on negated tails, though, because that has to be + // there to bind the end of the negated pattern portion, and it's easier to + // just stick it in now rather than try to inject it later in the middle of + // the pattern. + // + // We can just always return the same end, and leave it up to the caller + // to know whether it's going to be used joined or in parts. + // And, if the start is adjusted slightly, can do the same there: + // - if not isStart: nothing + // - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$) + // - if dots allowed or not possible: (?:/|^) + // - if dots possible and not allowed: (?:/|^)(?!\.) + // + // But it's better to have a simpler binding without a conditional, for + // performance, so probably better to return both start options. 
+ // + // Then the caller just ignores the end if it's not the first pattern, + // and the start always gets applied. + // + // But that's always going to be $ if it's the ending pattern, or nothing, + // so the caller can just attach $ at the end of the pattern when building. + // + // So the todo is: + // - better detect what kind of start is needed + // - return both flavors of starting pattern + // - attach $ at the end of the pattern when creating the actual RegExp + // + // Ah, but wait, no, that all only applies to the root when the first pattern + // is not an extglob. If the first pattern IS an extglob, then we need all + // that dot prevention biz to live in the extglob portions, because eg + // +(*|.x*) can match .xy but not .yx. + // + // So, return the two flavors if it's #root and the first child is not an + // AST, otherwise leave it to the child AST to handle it, and there, + // use the (?:^|/) style of start binding. + // + // Even simplified further: + // - Since the start for a join is eg /(?!\.) and the start for a part + // is ^(?!\.), we can just prepend (?!\.) to the pattern (either root + // or start or whatever) and prepend ^ or / at the Regexp construction. + toRegExpSource(allowDot) { + const dot = allowDot ?? !!this.#options.dot; + if (this.#root === this) + this.#fillNegs(); + if (!this.type) { + const noEmpty = this.isStart() && this.isEnd(); + const src = this.#parts + .map(p => { + const [re, _, hasMagic, uflag] = typeof p === 'string' + ? AST.#parseGlob(p, this.#hasMagic, noEmpty) + : p.toRegExpSource(allowDot); + this.#hasMagic = this.#hasMagic || hasMagic; + this.#uflag = this.#uflag || uflag; + return re; + }) + .join(''); + let start = ''; + if (this.isStart()) { + if (typeof this.#parts[0] === 'string') { + // this is the string that will match the start of the pattern, + // so we need to protect against dots and such. + // '.' and '..' cannot match unless the pattern is that exactly, + // even if it starts with . or dot:true is set. + const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]); + if (!dotTravAllowed) { + const aps = addPatternStart; + // check if we have a possibility of matching . or .., + // and prevent that. + const needNoTrav = + // dots are allowed, and the pattern starts with [ or . + (dot && aps.has(src.charAt(0))) || + // the pattern starts with \., and then [ or . + (src.startsWith('\\.') && aps.has(src.charAt(2))) || + // the pattern starts with \.\., and then [ or . + (src.startsWith('\\.\\.') && aps.has(src.charAt(4))); + // no need to prevent dots if it can't match a dot, or if a + // sub-pattern will be preventing it anyway. + const needNoDot = !dot && !allowDot && aps.has(src.charAt(0)); + start = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : ''; + } + } + } + // append the "end of path portion" pattern to negation tails + let end = ''; + if (this.isEnd() && + this.#root.#filledNegs && + this.#parent?.type === '!') { + end = '(?:$|\\/)'; + } + const final = start + src + end; + return [ + final, + (0, unescape_js_1.unescape)(src), + (this.#hasMagic = !!this.#hasMagic), + this.#uflag, + ]; + } + // We need to calculate the body *twice* if it's a repeat pattern + // at the start, once in nodot mode, then again in dot mode, so a + // pattern like *(?) can match 'x.y' + const repeated = this.type === '*' || this.type === '+'; + // some kind of extglob + const start = this.type === '!' ? 
'(?:(?!(?:' : '(?:'; + let body = this.#partsToRegExp(dot); + if (this.isStart() && this.isEnd() && !body && this.type !== '!') { + // invalid extglob, has to at least be *something* present, if it's + // the entire path portion. + const s = this.toString(); + this.#parts = [s]; + this.type = null; + this.#hasMagic = undefined; + return [s, (0, unescape_js_1.unescape)(this.toString()), false, false]; + } + // XXX abstract out this map method + let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot + ? '' + : this.#partsToRegExp(true); + if (bodyDotAllowed === body) { + bodyDotAllowed = ''; + } + if (bodyDotAllowed) { + body = `(?:${body})(?:${bodyDotAllowed})*?`; + } + // an empty !() is exactly equivalent to a starNoEmpty + let final = ''; + if (this.type === '!' && this.#emptyExt) { + final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty; + } + else { + const close = this.type === '!' + ? // !() must match something,but !(x) can match '' + '))' + + (this.isStart() && !dot && !allowDot ? startNoDot : '') + + star + + ')' + : this.type === '@' + ? ')' + : this.type === '?' + ? ')?' + : this.type === '+' && bodyDotAllowed + ? ')' + : this.type === '*' && bodyDotAllowed + ? `)?` + : `)${this.type}`; + final = start + body + close; + } + return [ + final, + (0, unescape_js_1.unescape)(body), + (this.#hasMagic = !!this.#hasMagic), + this.#uflag, + ]; + } + #partsToRegExp(dot) { + return this.#parts + .map(p => { + // extglob ASTs should only contain parent ASTs + /* c8 ignore start */ + if (typeof p === 'string') { + throw new Error('string type in extglob ast??'); + } + /* c8 ignore stop */ + // can ignore hasMagic, because extglobs are already always magic + const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot); + this.#uflag = this.#uflag || uflag; + return re; + }) + .filter(p => !(this.isStart() && this.isEnd()) || !!p) + .join('|'); + } + static #parseGlob(glob, hasMagic, noEmpty = false) { + let escaping = false; + let re = ''; + let uflag = false; + for (let i = 0; i < glob.length; i++) { + const c = glob.charAt(i); + if (escaping) { + escaping = false; + re += (reSpecials.has(c) ? 
'\\' : '') + c; + continue; + } + if (c === '\\') { + if (i === glob.length - 1) { + re += '\\\\'; + } + else { + escaping = true; + } + continue; + } + if (c === '[') { + const [src, needUflag, consumed, magic] = (0, brace_expressions_js_1.parseClass)(glob, i); + if (consumed) { + re += src; + uflag = uflag || needUflag; + i += consumed - 1; + hasMagic = hasMagic || magic; + continue; + } + } + if (c === '*') { + if (noEmpty && glob === '*') + re += starNoEmpty; + else + re += star; + hasMagic = true; + continue; + } + if (c === '?') { + re += qmark; + hasMagic = true; + continue; + } + re += regExpEscape(c); + } + return [re, (0, unescape_js_1.unescape)(glob), !!hasMagic, uflag]; + } +} +exports.AST = AST; +//# sourceMappingURL=ast.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/commonjs/brace-expressions.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/commonjs/brace-expressions.js new file mode 100644 index 0000000000000000000000000000000000000000..0e13eefc4cfee2b39459025da2a5f53ba1450dd2 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/commonjs/brace-expressions.js @@ -0,0 +1,152 @@ +"use strict"; +// translate the various posix character classes into unicode properties +// this works across all unicode locales +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseClass = void 0; +// { : [, /u flag required, negated] +const posixClasses = { + '[:alnum:]': ['\\p{L}\\p{Nl}\\p{Nd}', true], + '[:alpha:]': ['\\p{L}\\p{Nl}', true], + '[:ascii:]': ['\\x' + '00-\\x' + '7f', false], + '[:blank:]': ['\\p{Zs}\\t', true], + '[:cntrl:]': ['\\p{Cc}', true], + '[:digit:]': ['\\p{Nd}', true], + '[:graph:]': ['\\p{Z}\\p{C}', true, true], + '[:lower:]': ['\\p{Ll}', true], + '[:print:]': ['\\p{C}', true], + '[:punct:]': ['\\p{P}', true], + '[:space:]': ['\\p{Z}\\t\\r\\n\\v\\f', true], + '[:upper:]': ['\\p{Lu}', true], + '[:word:]': ['\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}', true], + '[:xdigit:]': ['A-Fa-f0-9', false], +}; +// only need to escape a few things inside of brace expressions +// escapes: [ \ ] - +const braceEscape = (s) => s.replace(/[[\]\\-]/g, '\\$&'); +// escape all regexp magic characters +const regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&'); +// everything has already been escaped, we just have to join +const rangesToString = (ranges) => ranges.join(''); +// takes a glob string at a posix brace expression, and returns +// an equivalent regular expression source, and boolean indicating +// whether the /u flag needs to be applied, and the number of chars +// consumed to parse the character class. +// This also removes out of order ranges, and returns ($.) if the +// entire class just no good. +const parseClass = (glob, position) => { + const pos = position; + /* c8 ignore start */ + if (glob.charAt(pos) !== '[') { + throw new Error('not in a brace expression'); + } + /* c8 ignore stop */ + const ranges = []; + const negs = []; + let i = pos + 1; + let sawStart = false; + let uflag = false; + let escaping = false; + let negate = false; + let endPos = pos; + let rangeStart = ''; + WHILE: while (i < glob.length) { + const c = glob.charAt(i); + if ((c === '!' 
|| c === '^') && i === pos + 1) { + negate = true; + i++; + continue; + } + if (c === ']' && sawStart && !escaping) { + endPos = i + 1; + break; + } + sawStart = true; + if (c === '\\') { + if (!escaping) { + escaping = true; + i++; + continue; + } + // escaped \ char, fall through and treat like normal char + } + if (c === '[' && !escaping) { + // either a posix class, a collation equivalent, or just a [ + for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) { + if (glob.startsWith(cls, i)) { + // invalid, [a-[] is fine, but not [a-[:alpha]] + if (rangeStart) { + return ['$.', false, glob.length - pos, true]; + } + i += cls.length; + if (neg) + negs.push(unip); + else + ranges.push(unip); + uflag = uflag || u; + continue WHILE; + } + } + } + // now it's just a normal character, effectively + escaping = false; + if (rangeStart) { + // throw this range away if it's not valid, but others + // can still match. + if (c > rangeStart) { + ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c)); + } + else if (c === rangeStart) { + ranges.push(braceEscape(c)); + } + rangeStart = ''; + i++; + continue; + } + // now might be the start of a range. + // can be either c-d or c-] or c] or c] at this point + if (glob.startsWith('-]', i + 1)) { + ranges.push(braceEscape(c + '-')); + i += 2; + continue; + } + if (glob.startsWith('-', i + 1)) { + rangeStart = c; + i += 2; + continue; + } + // not the start of a range, just a single character + ranges.push(braceEscape(c)); + i++; + } + if (endPos < i) { + // didn't see the end of the class, not a valid class, + // but might still be valid as a literal match. + return ['', false, 0, false]; + } + // if we got no ranges and no negates, then we have a range that + // cannot possibly match anything, and that poisons the whole glob + if (!ranges.length && !negs.length) { + return ['$.', false, glob.length - pos, true]; + } + // if we got one positive range, and it's a single character, then that's + // not actually a magic pattern, it's just that one literal character. + // we should not treat that as "magic", we should just return the literal + // character. [_] is a perfectly valid way to escape glob magic chars. + if (negs.length === 0 && + ranges.length === 1 && + /^\\?.$/.test(ranges[0]) && + !negate) { + const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0]; + return [regexpEscape(r), false, endPos - pos, false]; + } + const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']'; + const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']'; + const comb = ranges.length && negs.length + ? '(' + sranges + '|' + snegs + ')' + : ranges.length + ? sranges + : snegs; + return [comb, uflag, endPos - pos, true]; +}; +exports.parseClass = parseClass; +//# sourceMappingURL=brace-expressions.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/commonjs/escape.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/commonjs/escape.js new file mode 100644 index 0000000000000000000000000000000000000000..02a4f8a8e0a588c2b99ea16c14d4ce77786adeb7 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/commonjs/escape.js @@ -0,0 +1,22 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.escape = void 0; +/** + * Escape all magic characters in a glob pattern. 
+ * + * If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape} + * option is used, then characters are escaped by wrapping in `[]`, because + * a magic character wrapped in a character class can only be satisfied by + * that exact character. In this mode, `\` is _not_ escaped, because it is + * not interpreted as a magic character, but instead as a path separator. + */ +const escape = (s, { windowsPathsNoEscape = false, } = {}) => { + // don't need to escape +@! because we escape the parens + // that make those magic, and escaping ! as [!] isn't valid, + // because [!]] is a valid glob class meaning not ']'. + return windowsPathsNoEscape + ? s.replace(/[?*()[\]]/g, '[$&]') + : s.replace(/[?*()[\]\\]/g, '\\$&'); +}; +exports.escape = escape; +//# sourceMappingURL=escape.js.map \ No newline at end of file diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/commonjs/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/commonjs/index.js new file mode 100644 index 0000000000000000000000000000000000000000..f58fb8616aa9abd104a4fb7696d6ba01cd1006f1 --- /dev/null +++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/commonjs/index.js @@ -0,0 +1,1014 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.unescape = exports.escape = exports.AST = exports.Minimatch = exports.match = exports.makeRe = exports.braceExpand = exports.defaults = exports.filter = exports.GLOBSTAR = exports.sep = exports.minimatch = void 0; +const brace_expansion_1 = require("@isaacs/brace-expansion"); +const assert_valid_pattern_js_1 = require("./assert-valid-pattern.js"); +const ast_js_1 = require("./ast.js"); +const escape_js_1 = require("./escape.js"); +const unescape_js_1 = require("./unescape.js"); +const minimatch = (p, pattern, options = {}) => { + (0, assert_valid_pattern_js_1.assertValidPattern)(pattern); + // shortcut: comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + return false; + } + return new Minimatch(pattern, options).match(p); +}; +exports.minimatch = minimatch; +// Optimized checking for the most common glob patterns. +const starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/; +const starDotExtTest = (ext) => (f) => !f.startsWith('.') && f.endsWith(ext); +const starDotExtTestDot = (ext) => (f) => f.endsWith(ext); +const starDotExtTestNocase = (ext) => { + ext = ext.toLowerCase(); + return (f) => !f.startsWith('.') && f.toLowerCase().endsWith(ext); +}; +const starDotExtTestNocaseDot = (ext) => { + ext = ext.toLowerCase(); + return (f) => f.toLowerCase().endsWith(ext); +}; +const starDotStarRE = /^\*+\.\*+$/; +const starDotStarTest = (f) => !f.startsWith('.') && f.includes('.'); +const starDotStarTestDot = (f) => f !== '.' && f !== '..' && f.includes('.'); +const dotStarRE = /^\.\*+$/; +const dotStarTest = (f) => f !== '.' && f !== '..' && f.startsWith('.'); +const starRE = /^\*+$/; +const starTest = (f) => f.length !== 0 && !f.startsWith('.'); +const starTestDot = (f) => f.length !== 0 && f !== '.' 
&& f !== '..'; +const qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/; +const qmarksTestNocase = ([$0, ext = '']) => { + const noext = qmarksTestNoExt([$0]); + if (!ext) + return noext; + ext = ext.toLowerCase(); + return (f) => noext(f) && f.toLowerCase().endsWith(ext); +}; +const qmarksTestNocaseDot = ([$0, ext = '']) => { + const noext = qmarksTestNoExtDot([$0]); + if (!ext) + return noext; + ext = ext.toLowerCase(); + return (f) => noext(f) && f.toLowerCase().endsWith(ext); +}; +const qmarksTestDot = ([$0, ext = '']) => { + const noext = qmarksTestNoExtDot([$0]); + return !ext ? noext : (f) => noext(f) && f.endsWith(ext); +}; +const qmarksTest = ([$0, ext = '']) => { + const noext = qmarksTestNoExt([$0]); + return !ext ? noext : (f) => noext(f) && f.endsWith(ext); +}; +const qmarksTestNoExt = ([$0]) => { + const len = $0.length; + return (f) => f.length === len && !f.startsWith('.'); +}; +const qmarksTestNoExtDot = ([$0]) => { + const len = $0.length; + return (f) => f.length === len && f !== '.' && f !== '..'; +}; +/* c8 ignore start */ +const defaultPlatform = (typeof process === 'object' && process + ? (typeof process.env === 'object' && + process.env && + process.env.__MINIMATCH_TESTING_PLATFORM__) || + process.platform + : 'posix'); +const path = { + win32: { sep: '\\' }, + posix: { sep: '/' }, +}; +/* c8 ignore stop */ +exports.sep = defaultPlatform === 'win32' ? path.win32.sep : path.posix.sep; +exports.minimatch.sep = exports.sep; +exports.GLOBSTAR = Symbol('globstar **'); +exports.minimatch.GLOBSTAR = exports.GLOBSTAR; +// any single thing other than / +// don't need to escape / when using new RegExp() +const qmark = '[^/]'; +// * => any number of characters +const star = qmark + '*?'; +// ** when dots are allowed. Anything goes, except .. and . +// not (^ or / followed by one or two dots followed by $ or /), +// followed by anything, any number of times. +const twoStarDot = '(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?'; +// not a ^ or / followed by a dot, +// followed by anything, any number of times. 
+const twoStarNoDot = '(?:(?!(?:\\/|^)\\.).)*?'; +const filter = (pattern, options = {}) => (p) => (0, exports.minimatch)(p, pattern, options); +exports.filter = filter; +exports.minimatch.filter = exports.filter; +const ext = (a, b = {}) => Object.assign({}, a, b); +const defaults = (def) => { + if (!def || typeof def !== 'object' || !Object.keys(def).length) { + return exports.minimatch; + } + const orig = exports.minimatch; + const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options)); + return Object.assign(m, { + Minimatch: class Minimatch extends orig.Minimatch { + constructor(pattern, options = {}) { + super(pattern, ext(def, options)); + } + static defaults(options) { + return orig.defaults(ext(def, options)).Minimatch; + } + }, + AST: class AST extends orig.AST { + /* c8 ignore start */ + constructor(type, parent, options = {}) { + super(type, parent, ext(def, options)); + } + /* c8 ignore stop */ + static fromGlob(pattern, options = {}) { + return orig.AST.fromGlob(pattern, ext(def, options)); + } + }, + unescape: (s, options = {}) => orig.unescape(s, ext(def, options)), + escape: (s, options = {}) => orig.escape(s, ext(def, options)), + filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)), + defaults: (options) => orig.defaults(ext(def, options)), + makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)), + braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)), + match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)), + sep: orig.sep, + GLOBSTAR: exports.GLOBSTAR, + }); +}; +exports.defaults = defaults; +exports.minimatch.defaults = exports.defaults; +// Brace expansion: +// a{b,c}d -> abd acd +// a{b,}c -> abc ac +// a{0..3}d -> a0d a1d a2d a3d +// a{b,c{d,e}f}g -> abg acdfg acefg +// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg +// +// Invalid sets are not expanded. +// a{2..}b -> a{2..}b +// a{b}c -> a{b}c +const braceExpand = (pattern, options = {}) => { + (0, assert_valid_pattern_js_1.assertValidPattern)(pattern); + // Thanks to Yeting Li for + // improving this regexp to avoid a ReDOS vulnerability. + if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) { + // shortcut. no need to expand. + return [pattern]; + } + return (0, brace_expansion_1.expand)(pattern); +}; +exports.braceExpand = braceExpand; +exports.minimatch.braceExpand = exports.braceExpand; +// parse a component of the expanded set. +// At this point, no pattern may contain "/" in it +// so we're going to return a 2d array, where each entry is the full +// pattern, split on '/', and then turned into a regular expression. +// A regexp is made at the end which joins each array with an +// escaped /, and another full one which joins each regexp with |. +// +// Following the lead of Bash 4.1, note that "**" only has special meaning +// when it is the *only* thing in a path portion. Otherwise, any series +// of * is equivalent to a single *. Globstar behavior is enabled by +// default, and can be disabled by setting options.noglobstar. 
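The brace-expansion rewrites listed in the comment above can be checked against the package's public API. A minimal sketch, assuming a published copy of minimatch is installed; this example is editorial and not part of the vendored diff:

```js
// Illustrative usage of the documented brace-expansion shortcuts.
const { minimatch } = require('minimatch')

console.log(minimatch.braceExpand('a{b,c}d'))  // [ 'abd', 'acd' ]
console.log(minimatch.braceExpand('a{0..3}d')) // [ 'a0d', 'a1d', 'a2d', 'a3d' ]
console.log(minimatch.braceExpand('a{2..}b'))  // [ 'a{2..}b' ] -- invalid set, left alone
console.log(minimatch.braceExpand('a{b}c'))    // [ 'a{b}c' ]   -- invalid set, left alone
```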
+const makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe(); +exports.makeRe = makeRe; +exports.minimatch.makeRe = exports.makeRe; +const match = (list, pattern, options = {}) => { + const mm = new Minimatch(pattern, options); + list = list.filter(f => mm.match(f)); + if (mm.options.nonull && !list.length) { + list.push(pattern); + } + return list; +}; +exports.match = match; +exports.minimatch.match = exports.match; +// replace stuff like \* with * +const globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/; +const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&'); +class Minimatch { + options; + set; + pattern; + windowsPathsNoEscape; + nonegate; + negate; + comment; + empty; + preserveMultipleSlashes; + partial; + globSet; + globParts; + nocase; + isWindows; + platform; + windowsNoMagicRoot; + regexp; + constructor(pattern, options = {}) { + (0, assert_valid_pattern_js_1.assertValidPattern)(pattern); + options = options || {}; + this.options = options; + this.pattern = pattern; + this.platform = options.platform || defaultPlatform; + this.isWindows = this.platform === 'win32'; + this.windowsPathsNoEscape = + !!options.windowsPathsNoEscape || options.allowWindowsEscape === false; + if (this.windowsPathsNoEscape) { + this.pattern = this.pattern.replace(/\\/g, '/'); + } + this.preserveMultipleSlashes = !!options.preserveMultipleSlashes; + this.regexp = null; + this.negate = false; + this.nonegate = !!options.nonegate; + this.comment = false; + this.empty = false; + this.partial = !!options.partial; + this.nocase = !!this.options.nocase; + this.windowsNoMagicRoot = + options.windowsNoMagicRoot !== undefined + ? options.windowsNoMagicRoot + : !!(this.isWindows && this.nocase); + this.globSet = []; + this.globParts = []; + this.set = []; + // make the set of regexps etc. + this.make(); + } + hasMagic() { + if (this.options.magicalBraces && this.set.length > 1) { + return true; + } + for (const pattern of this.set) { + for (const part of pattern) { + if (typeof part !== 'string') + return true; + } + } + return false; + } + debug(..._) { } + make() { + const pattern = this.pattern; + const options = this.options; + // empty patterns and comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + this.comment = true; + return; + } + if (!pattern) { + this.empty = true; + return; + } + // step 1: figure out negation, etc. + this.parseNegate(); + // step 2: expand braces + this.globSet = [...new Set(this.braceExpand())]; + if (options.debug) { + this.debug = (...args) => console.error(...args); + } + this.debug(this.pattern, this.globSet); + // step 3: now we have a set, so turn each one into a series of + // path-portion matching patterns. + // These will be regexps, except in the case of "**", which is + // set to the GLOBSTAR object for globstar behavior, + // and will not contain any / characters + // + // First, we preprocess to make the glob pattern sets a bit simpler + // and deduped. There are some perf-killing patterns that can cause + // problems with a glob walk, but we can simplify them down a bit. + const rawGlobParts = this.globSet.map(s => this.slashSplit(s)); + this.globParts = this.preprocess(rawGlobParts); + this.debug(this.pattern, this.globParts); + // glob --> regexps + let set = this.globParts.map((s, _, __) => { + if (this.isWindows && this.windowsNoMagicRoot) { + // check if it's a drive or unc path. + const isUNC = s[0] === '' && + s[1] === '' && + (s[2] === '?' 
|| !globMagic.test(s[2])) && + !globMagic.test(s[3]); + const isDrive = /^[a-z]:/i.test(s[0]); + if (isUNC) { + return [...s.slice(0, 4), ...s.slice(4).map(ss => this.parse(ss))]; + } + else if (isDrive) { + return [s[0], ...s.slice(1).map(ss => this.parse(ss))]; + } + } + return s.map(ss => this.parse(ss)); + }); + this.debug(this.pattern, set); + // filter out everything that didn't compile properly. + this.set = set.filter(s => s.indexOf(false) === -1); + // do not treat the ? in UNC paths as magic + if (this.isWindows) { + for (let i = 0; i < this.set.length; i++) { + const p = this.set[i]; + if (p[0] === '' && + p[1] === '' && + this.globParts[i][2] === '?' && + typeof p[3] === 'string' && + /^[a-z]:$/i.test(p[3])) { + p[2] = '?'; + } + } + } + this.debug(this.pattern, this.set); + } + // various transforms to equivalent pattern sets that are + // faster to process in a filesystem walk. The goal is to + // eliminate what we can, and push all ** patterns as far + // to the right as possible, even if it increases the number + // of patterns that we have to process. + preprocess(globParts) { + // if we're not in globstar mode, then turn all ** into * + if (this.options.noglobstar) { + for (let i = 0; i < globParts.length; i++) { + for (let j = 0; j < globParts[i].length; j++) { + if (globParts[i][j] === '**') { + globParts[i][j] = '*'; + } + } + } + } + const { optimizationLevel = 1 } = this.options; + if (optimizationLevel >= 2) { + // aggressive optimization for the purpose of fs walking + globParts = this.firstPhasePreProcess(globParts); + globParts = this.secondPhasePreProcess(globParts); + } + else if (optimizationLevel >= 1) { + // just basic optimizations to remove some .. parts + globParts = this.levelOneOptimize(globParts); + } + else { + // just collapse multiple ** portions into one + globParts = this.adjascentGlobstarOptimize(globParts); + } + return globParts; + } + // just get rid of adjascent ** portions + adjascentGlobstarOptimize(globParts) { + return globParts.map(parts => { + let gs = -1; + while (-1 !== (gs = parts.indexOf('**', gs + 1))) { + let i = gs; + while (parts[i + 1] === '**') { + i++; + } + if (i !== gs) { + parts.splice(gs, i - gs); + } + } + return parts; + }); + } + // get rid of adjascent ** and resolve .. portions + levelOneOptimize(globParts) { + return globParts.map(parts => { + parts = parts.reduce((set, part) => { + const prev = set[set.length - 1]; + if (part === '**' && prev === '**') { + return set; + } + if (part === '..') { + if (prev && prev !== '..' && prev !== '.' && prev !== '**') { + set.pop(); + return set; + } + } + set.push(part); + return set; + }, []); + return parts.length === 0 ? [''] : parts; + }); + } + levelTwoFileOptimize(parts) { + if (!Array.isArray(parts)) { + parts = this.slashSplit(parts); + } + let didSomething = false; + do { + didSomething = false; + //
<pre>/<e>/<rest> -> <pre>/<rest>
+            if (!this.preserveMultipleSlashes) {
+                for (let i = 1; i < parts.length - 1; i++) {
+                    const p = parts[i];
+                    // don't squeeze out UNC patterns
+                    if (i === 1 && p === '' && parts[0] === '')
+                        continue;
+                    if (p === '.' || p === '') {
+                        didSomething = true;
+                        parts.splice(i, 1);
+                        i--;
+                    }
+                }
+                if (parts[0] === '.' &&
+                    parts.length === 2 &&
+                    (parts[1] === '.' || parts[1] === '')) {
+                    didSomething = true;
+                    parts.pop();
+                }
+            }
+            // <pre>/<p>/../<rest> -> <pre>/<rest>
+            let dd = 0;
+            while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                const p = parts[dd - 1];
+                if (p && p !== '.' && p !== '..' && p !== '**') {
+                    didSomething = true;
+                    parts.splice(dd - 1, 2);
+                    dd -= 2;
+                }
+            }
+        } while (didSomething);
+        return parts.length === 0 ? [''] : parts;
+    }
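To make the two rewrites in levelTwoFileOptimize concrete, here is a simplified standalone re-implementation of the same collapsing rules. This is a sketch for illustration, not the vendored code; it omits the UNC guard and the leading-dot edge cases:

```js
// Sketch: the two rewrites levelTwoFileOptimize applies to a slash-split
// path at optimizationLevel >= 2 (UNC and leading-dot edge cases omitted).
const collapse = (parts) => {
  let didSomething
  do {
    didSomething = false
    // squeeze out '.' and '' portions: <pre>/<e>/<rest> -> <pre>/<rest>
    for (let i = 1; i < parts.length - 1; i++) {
      if (parts[i] === '.' || parts[i] === '') {
        parts.splice(i--, 1)
        didSomething = true
      }
    }
    // resolve '..' against a literal parent: <pre>/<p>/../<rest> -> <pre>/<rest>
    let dd = 0
    while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
      const p = parts[dd - 1]
      if (p && p !== '.' && p !== '..' && p !== '**') {
        parts.splice(dd - 1, 2)
        dd -= 2
        didSomething = true
      }
    }
  } while (didSomething)
  return parts.length === 0 ? [''] : parts
}

console.log(collapse('a/./b//c/../d'.split('/'))) // [ 'a', 'b', 'd' ]
```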
+    // First phase: single-pattern processing
+    // <pre> is 1 or more portions
+    // <rest> is 1 or more portions
+    // <p> is any portion other than ., .., '', or **
+    // <e> is . or ''
+    //
+    // **/.. is *brutal* for filesystem walking performance, because
+    // it effectively resets the recursive walk each time it occurs,
+    // and ** cannot be reduced out by a .. pattern part like a regexp
+    // or most strings (other than .., ., and '') can be.
+    //
+    // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+    // <pre>/<e>/ -> <pre>/
+    // <pre>/<p>/../<rest> -> <pre>/<rest>
+    // **/**/<rest> -> **/<rest>
+    //
+    // **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow
+    // this WOULD be allowed if ** did follow symlinks, or * didn't
+    firstPhasePreProcess(globParts) {
+        let didSomething = false;
+        do {
+            didSomething = false;
+            // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+            for (let parts of globParts) {
+                let gs = -1;
+                while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
+                    let gss = gs;
+                    while (parts[gss + 1] === '**') {
+                        // <pre>/**/**/<rest> -> <pre>/**/<rest>
+                        gss++;
+                    }
+                    // eg, if gs is 2 and gss is 4, that means we have 3 **
+                    // parts, and can remove 2 of them.
+                    if (gss > gs) {
+                        parts.splice(gs + 1, gss - gs);
+                    }
+                    let next = parts[gs + 1];
+                    const p = parts[gs + 2];
+                    const p2 = parts[gs + 3];
+                    if (next !== '..')
+                        continue;
+                    if (!p ||
+                        p === '.' ||
+                        p === '..' ||
+                        !p2 ||
+                        p2 === '.' ||
+                        p2 === '..') {
+                        continue;
+                    }
+                    didSomething = true;
+                    // edit parts in place, and push the new one
+                    parts.splice(gs, 1);
+                    const other = parts.slice(0);
+                    other[gs] = '**';
+                    globParts.push(other);
+                    gs--;
+                }
+                // <pre>/<e>/<rest> -> <pre>/<rest>
+                if (!this.preserveMultipleSlashes) {
+                    for (let i = 1; i < parts.length - 1; i++) {
+                        const p = parts[i];
+                        // don't squeeze out UNC patterns
+                        if (i === 1 && p === '' && parts[0] === '')
+                            continue;
+                        if (p === '.' || p === '') {
+                            didSomething = true;
+                            parts.splice(i, 1);
+                            i--;
+                        }
+                    }
+                    if (parts[0] === '.' &&
+                        parts.length === 2 &&
+                        (parts[1] === '.' || parts[1] === '')) {
+                        didSomething = true;
+                        parts.pop();
+                    }
+                }
+                // <pre>/<p>/../<rest> -> <pre>/<rest>
+                let dd = 0;
+                while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                    const p = parts[dd - 1];
+                    if (p && p !== '.' && p !== '..' && p !== '**') {
+                        didSomething = true;
+                        const needDot = dd === 1 && parts[dd + 1] === '**';
+                        const splin = needDot ? ['.'] : [];
+                        parts.splice(dd - 1, 2, ...splin);
+                        if (parts.length === 0)
+                            parts.push('');
+                        dd -= 2;
+                    }
+                }
+            }
+        } while (didSomething);
+        return globParts;
+    }
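The **/.. fork performed above can be observed on a Minimatch instance through its globParts property. A sketch, assuming an installed minimatch and optimizationLevel 2; the expected output is reasoned from the code above rather than quoted from the package's documentation:

```js
const { Minimatch } = require('minimatch')
const mm = new Minimatch('a/**/../c/d/e', { optimizationLevel: 2 })
// The single input pattern should fork into two:
//   a/../c/d/e (which then collapses to c/d/e)  and  a/**/c/d/e
console.log(mm.globParts)
// e.g. [ [ 'c', 'd', 'e' ], [ 'a', '**', 'c', 'd', 'e' ] ]
```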
+    // second phase: multi-pattern dedupes
+    // {<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>
+    // {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>
+    // {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>
+    //
+    // {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>
+    // ^-- not valid because ** doesn't follow symlinks
+    secondPhasePreProcess(globParts) {
+        for (let i = 0; i < globParts.length - 1; i++) {
+            for (let j = i + 1; j < globParts.length; j++) {
+                const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
+                if (matched) {
+                    globParts[i] = [];
+                    globParts[j] = matched;
+                    break;
+                }
+            }
+        }
+        return globParts.filter(gs => gs.length);
+    }
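A quick way to see this dedupe in action is a brace pattern whose expansion produces one branch that another branch already covers. A sketch under the same assumptions as the previous example:

```js
const { Minimatch } = require('minimatch')
const mm = new Minimatch('a/{*,b}/c', { optimizationLevel: 2 })
// braces expand to a/*/c and a/b/c; the second is subsumed by the first,
// so only one pattern should survive the second phase:
console.log(mm.globParts) // e.g. [ [ 'a', '*', 'c' ] ]
```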
+    partsMatch(a, b, emptyGSMatch = false) {
+        let ai = 0;
+        let bi = 0;
+        let result = [];
+        let which = '';
+        while (ai < a.length && bi < b.length) {
+            if (a[ai] === b[bi]) {
+                result.push(which === 'b' ? b[bi] : a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {
+                result.push(a[ai]);
+                ai++;
+            }
+            else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {
+                result.push(b[bi]);
+                bi++;
+            }
+            else if (a[ai] === '*' &&
+                b[bi] &&
+                (this.options.dot || !b[bi].startsWith('.')) &&
+                b[bi] !== '**') {
+                if (which === 'b')
+                    return false;
+                which = 'a';
+                result.push(a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (b[bi] === '*' &&
+                a[ai] &&
+                (this.options.dot || !a[ai].startsWith('.')) &&
+                a[ai] !== '**') {
+                if (which === 'a')
+                    return false;
+                which = 'b';
+                result.push(b[bi]);
+                ai++;
+                bi++;
+            }
+            else {
+                return false;
+            }
+        }
+        // if we fall out of the loop, it means the two are identical
+        // as long as their lengths match
+        return a.length === b.length && result;
+    }
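partsMatch is a plain method on the instance, so the one-sided '*' rule (once one side's '*' has absorbed a portion, the other side may not do the same) can be probed directly. Illustrative only; this leans on an undocumented internal:

```js
const { Minimatch } = require('minimatch')
const mm = new Minimatch('x') // any instance; partsMatch only consults options
console.log(mm.partsMatch(['a', '*', 'c'], ['a', 'b', 'c'], true)) // [ 'a', '*', 'c' ]
console.log(mm.partsMatch(['a', '*', 'c'], ['a', 'b', '*'], true)) // false: '*' on both sides
```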
+    parseNegate() {
+        if (this.nonegate)
+            return;
+        const pattern = this.pattern;
+        let negate = false;
+        let negateOffset = 0;
+        for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
+            negate = !negate;
+            negateOffset++;
+        }
+        if (negateOffset)
+            this.pattern = pattern.slice(negateOffset);
+        this.negate = negate;
+    }
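The effect of the negate parsing: each leading '!' toggles the flag, and the consumed prefix is sliced off the pattern. A small check, again assuming the published package:

```js
const { Minimatch } = require('minimatch')
console.log(new Minimatch('!!!a').negate)  // true  (odd number of leading !)
console.log(new Minimatch('!!!a').pattern) // 'a'   (the ! prefix is consumed)
console.log(new Minimatch('!!a').negate)   // false (even number toggles back)
console.log(new Minimatch('!a', { nonegate: true }).negate) // false: '!' stays literal
```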
+    // set partial to true to test if, for example,
+    // "/a/b" matches the start of "/*/b/*/d"
+    // Partial means, if you run out of file before you run
+    // out of pattern, then that's fine, as long as all
+    // the parts match.
+    matchOne(file, pattern, partial = false) {
+        const options = this.options;
+        // UNC paths like //?/X:/... can match X:/... and vice versa
+        // Drive letters in absolute drive or unc paths are always compared
+        // case-insensitively.
+        if (this.isWindows) {
+            const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]);
+            const fileUNC = !fileDrive &&
+                file[0] === '' &&
+                file[1] === '' &&
+                file[2] === '?' &&
+                /^[a-z]:$/i.test(file[3]);
+            const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]);
+            const patternUNC = !patternDrive &&
+                pattern[0] === '' &&
+                pattern[1] === '' &&
+                pattern[2] === '?' &&
+                typeof pattern[3] === 'string' &&
+                /^[a-z]:$/i.test(pattern[3]);
+            const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;
+            const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;
+            if (typeof fdi === 'number' && typeof pdi === 'number') {
+                const [fd, pd] = [file[fdi], pattern[pdi]];
+                if (fd.toLowerCase() === pd.toLowerCase()) {
+                    pattern[pdi] = fd;
+                    if (pdi > fdi) {
+                        pattern = pattern.slice(pdi);
+                    }
+                    else if (fdi > pdi) {
+                        file = file.slice(fdi);
+                    }
+                }
+            }
+        }
+        // resolve and reduce . and .. portions in the file as well.
+        // don't need to do the second phase, because it's only one string[]
+        const { optimizationLevel = 1 } = this.options;
+        if (optimizationLevel >= 2) {
+            file = this.levelTwoFileOptimize(file);
+        }
+        this.debug('matchOne', this, { file, pattern });
+        this.debug('matchOne', file.length, pattern.length);
+        for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
+            this.debug('matchOne loop');
+            var p = pattern[pi];
+            var f = file[fi];
+            this.debug(pattern, p, f);
+            // should be impossible.
+            // some invalid regexp stuff in the set.
+            /* c8 ignore start */
+            if (p === false) {
+                return false;
+            }
+            /* c8 ignore stop */
+            if (p === exports.GLOBSTAR) {
+                this.debug('GLOBSTAR', [pattern, p, f]);
+                // "**"
+                // a/**/b/**/c would match the following:
+                // a/b/x/y/z/c
+                // a/x/y/z/b/c
+                // a/b/x/b/x/c
+                // a/b/c
+                // To do this, take the rest of the pattern after
+                // the **, and see if it would match the file remainder.
+                // If so, return success.
+                // If not, the ** "swallows" a segment, and try again.
+                // This is recursively awful.
+                //
+                // a/**/b/**/c matching a/b/x/y/z/c
+                // - a matches a
+                // - doublestar
+                //   - matchOne(b/x/y/z/c, b/**/c)
+                //     - b matches b
+                //     - doublestar
+                //       - matchOne(x/y/z/c, c) -> no
+                //       - matchOne(y/z/c, c) -> no
+                //       - matchOne(z/c, c) -> no
+                //       - matchOne(c, c) yes, hit
+                var fr = fi;
+                var pr = pi + 1;
+                if (pr === pl) {
+                    this.debug('** at the end');
+                    // a ** at the end will just swallow the rest.
+                    // We have found a match.
+                    // however, it will not swallow /.x, unless
+                    // options.dot is set.
+                    // . and .. are *never* matched by **, for explosively
+                    // exponential reasons.
+                    for (; fi < fl; fi++) {
+                        if (file[fi] === '.' ||
+                            file[fi] === '..' ||
+                            (!options.dot && file[fi].charAt(0) === '.'))
+                            return false;
+                    }
+                    return true;
+                }
+                // ok, let's see if we can swallow whatever we can.
+                while (fr < fl) {
+                    var swallowee = file[fr];
+                    this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);
+                    // XXX remove this slice.  Just pass the start index.
+                    if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
+                        this.debug('globstar found match!', fr, fl, swallowee);
+                        // found a match.
+                        return true;
+                    }
+                    else {
+                        // can't swallow "." or ".." ever.
+                        // can only swallow ".foo" when explicitly asked.
+                        if (swallowee === '.' ||
+                            swallowee === '..' ||
+                            (!options.dot && swallowee.charAt(0) === '.')) {
+                            this.debug('dot detected!', file, fr, pattern, pr);
+                            break;
+                        }
+                        // ** swallows a segment, and continue.
+                        this.debug('globstar swallow a segment, and continue');
+                        fr++;
+                    }
+                }
+                // no match was found.
+                // However, in partial mode, we can't say this is necessarily over.
+                /* c8 ignore start */
+                if (partial) {
+                    // ran out of file
+                    this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
+                    if (fr === fl) {
+                        return true;
+                    }
+                }
+                /* c8 ignore stop */
+                return false;
+            }
+            // something other than **
+            // non-magic patterns just have to match exactly
+            // patterns with magic have been turned into regexps.
+            let hit;
+            if (typeof p === 'string') {
+                hit = f === p;
+                this.debug('string match', p, f, hit);
+            }
+            else {
+                hit = p.test(f);
+                this.debug('pattern match', p, f, hit);
+            }
+            if (!hit)
+                return false;
+        }
+        // Note: ending in / means that we'll get a final ""
+        // at the end of the pattern.  This can only match a
+        // corresponding "" at the end of the file.
+        // If the file ends in /, then it can only match
+        // a pattern that ends in /, unless the pattern just
+        // doesn't have any more for it. But, a/b/ should *not*
+        // match "a/b/*", even though "" matches against the
+        // [^/]*? pattern, except in partial mode, where it might
+        // simply not be reached yet.
+        // However, a/b/ should still satisfy a/*
+        // now either we fell off the end of the pattern, or we're done.
+        if (fi === fl && pi === pl) {
+            // ran out of pattern and filename at the same time.
+            // an exact hit!
+            return true;
+        }
+        else if (fi === fl) {
+            // ran out of file, but still had pattern left.
+            // this is ok if we're doing the match as part of
+            // a glob fs traversal.
+            return partial;
+        }
+        else if (pi === pl) {
+            // ran out of pattern, still have file left.
+            // this is only acceptable if we're on the very last
+            // empty segment of a file with a trailing slash.
+            // a/* should match a/b/
+            return fi === fl - 1 && file[fi] === '';
+            /* c8 ignore start */
+        }
+        else {
+            // should be unreachable.
+            throw new Error('wtf?');
+        }
+        /* c8 ignore stop */
+    }
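+    // A sketch of matchOne's contract (illustrative, not upstream docs):
+    // it walks the file and pattern segment arrays in parallel, e.g.
+    //   matchOne(['a', 'b', 'c'], ['a', GLOBSTAR, 'c'], false) // => true
+    // with the GLOBSTAR entry swallowing the middle 'b' segment.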
+    braceExpand() {
+        return (0, exports.braceExpand)(this.pattern, this.options);
+    }
+    parse(pattern) {
+        (0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
+        const options = this.options;
+        // shortcuts
+        if (pattern === '**')
+            return exports.GLOBSTAR;
+        if (pattern === '')
+            return '';
+        // far and away, the most common glob pattern parts are
+        // *, *.*, and *.  Add a fast check method for those.
+        let m;
+        let fastTest = null;
+        if ((m = pattern.match(starRE))) {
+            fastTest = options.dot ? starTestDot : starTest;
+        }
+        else if ((m = pattern.match(starDotExtRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? starDotExtTestNocaseDot
+                    : starDotExtTestNocase
+                : options.dot
+                    ? starDotExtTestDot
+                    : starDotExtTest)(m[1]);
+        }
+        else if ((m = pattern.match(qmarksRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? qmarksTestNocaseDot
+                    : qmarksTestNocase
+                : options.dot
+                    ? qmarksTestDot
+                    : qmarksTest)(m);
+        }
+        else if ((m = pattern.match(starDotStarRE))) {
+            fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
+        }
+        else if ((m = pattern.match(dotStarRE))) {
+            fastTest = dotStarTest;
+        }
+        const re = ast_js_1.AST.fromGlob(pattern, this.options).toMMPattern();
+        if (fastTest && typeof re === 'object') {
+            // Avoids overriding in frozen environments
+            Reflect.defineProperty(re, 'test', { value: fastTest });
+        }
+        return re;
+    }
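+    // Usage sketch (illustrative): for a common pattern like '*.js' the
+    // fast path above swaps in a plain string check roughly equivalent to
+    //   f => !f.startsWith('.') && f.endsWith('.js')
+    // so RegExp.test is avoided on the hot path (with default options).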
+    makeRe() {
+        if (this.regexp || this.regexp === false)
+            return this.regexp;
+        // at this point, this.set is a 2d array of partial
+        // pattern strings, or "**".
+        //
+        // It's better to use .match().  This function shouldn't
+        // be used, really, but it's pretty convenient sometimes,
+        // when you just want to work with a regex.
+        const set = this.set;
+        if (!set.length) {
+            this.regexp = false;
+            return this.regexp;
+        }
+        const options = this.options;
+        const twoStar = options.noglobstar
+            ? star
+            : options.dot
+                ? twoStarDot
+                : twoStarNoDot;
+        const flags = new Set(options.nocase ? ['i'] : []);
+        // regexpify non-globstar patterns
+        // if ** is only item, then we just do one twoStar
+        // if ** is first, and there are more, prepend (\/|twoStar\/)? to next
+        // if ** is last, append (\/twoStar|) to previous
+        // if ** is in the middle, append (\/|\/twoStar\/) to previous
+        // then filter out GLOBSTAR symbols
+        let re = set
+            .map(pattern => {
+            const pp = pattern.map(p => {
+                if (p instanceof RegExp) {
+                    for (const f of p.flags.split(''))
+                        flags.add(f);
+                }
+                return typeof p === 'string'
+                    ? regExpEscape(p)
+                    : p === exports.GLOBSTAR
+                        ? exports.GLOBSTAR
+                        : p._src;
+            });
+            pp.forEach((p, i) => {
+                const next = pp[i + 1];
+                const prev = pp[i - 1];
+                if (p !== exports.GLOBSTAR || prev === exports.GLOBSTAR) {
+                    return;
+                }
+                if (prev === undefined) {
+                    if (next !== undefined && next !== exports.GLOBSTAR) {
+                        pp[i + 1] = '(?:\\/|' + twoStar + '\\/)?' + next;
+                    }
+                    else {
+                        pp[i] = twoStar;
+                    }
+                }
+                else if (next === undefined) {
+                    pp[i - 1] = prev + '(?:\\/|' + twoStar + ')?';
+                }
+                else if (next !== exports.GLOBSTAR) {
+                    pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + '\\/)' + next;
+                    pp[i + 1] = exports.GLOBSTAR;
+                }
+            });
+            return pp.filter(p => p !== exports.GLOBSTAR).join('/');
+        })
+            .join('|');
+        // need to wrap in parens if we had more than one thing with |,
+        // otherwise only the first will be anchored to ^ and the last to $
+        const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', ''];
+        // must match entire pattern
+        // ending in a * or ** will make it less strict.
+        re = '^' + open + re + close + '$';
+        // can match anything, as long as it's not this.
+        if (this.negate)
+            re = '^(?!' + re + ').+$';
+        try {
+            this.regexp = new RegExp(re, [...flags].join(''));
+            /* c8 ignore start */
+        }
+        catch (ex) {
+            // should be impossible
+            this.regexp = false;
+        }
+        /* c8 ignore stop */
+        return this.regexp;
+    }
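+    // Usage sketch (illustrative): .match() is usually preferable, but when
+    // a single RegExp is wanted:
+    //   new Minimatch('a/*.js').makeRe().test('a/x.js') // => true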
+    slashSplit(p) {
+        // if p starts with // on windows, we preserve that
+        // so that UNC paths aren't broken.  Otherwise, any number of
+        // / characters are coalesced into one, unless
+        // preserveMultipleSlashes is set to true.
+        if (this.preserveMultipleSlashes) {
+            return p.split('/');
+        }
+        else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
+            // add an extra '' for the one we lose
+            return ['', ...p.split(/\/+/)];
+        }
+        else {
+            return p.split(/\/+/);
+        }
+    }
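+    // Illustrative examples (assuming an instance with platform 'win32'):
+    //   slashSplit('//host/share//x') // => ['', '', 'host', 'share', 'x']
+    //   slashSplit('a//b')            // => ['a', 'b'] (slashes coalesced)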
+    match(f, partial = this.partial) {
+        this.debug('match', f, this.pattern);
+        // short-circuit in the case of busted things.
+        // comments, etc.
+        if (this.comment) {
+            return false;
+        }
+        if (this.empty) {
+            return f === '';
+        }
+        if (f === '/' && partial) {
+            return true;
+        }
+        const options = this.options;
+        // windows: need to use /, not \
+        if (this.isWindows) {
+            f = f.split('\\').join('/');
+        }
+        // treat the test path as a set of pathparts.
+        const ff = this.slashSplit(f);
+        this.debug(this.pattern, 'split', ff);
+        // just ONE of the pattern sets in this.set needs to match
+        // in order for it to be valid.  If negating, then just one
+        // match means that we have failed.
+        // Either way, return on the first hit.
+        const set = this.set;
+        this.debug(this.pattern, 'set', set);
+        // Find the basename of the path by looking for the last non-empty segment
+        let filename = ff[ff.length - 1];
+        if (!filename) {
+            for (let i = ff.length - 2; !filename && i >= 0; i--) {
+                filename = ff[i];
+            }
+        }
+        for (let i = 0; i < set.length; i++) {
+            const pattern = set[i];
+            let file = ff;
+            if (options.matchBase && pattern.length === 1) {
+                file = [filename];
+            }
+            const hit = this.matchOne(file, pattern, partial);
+            if (hit) {
+                if (options.flipNegate) {
+                    return true;
+                }
+                return !this.negate;
+            }
+        }
+        // didn't get any hits.  this is success if it's a negative
+        // pattern, failure otherwise.
+        if (options.flipNegate) {
+            return false;
+        }
+        return this.negate;
+    }
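+    // Usage sketch (illustrative):
+    //   new Minimatch('*.js').match('foo.js')  // => true
+    //   new Minimatch('!*.js').match('foo.js') // => false (negated pattern)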
+    static defaults(def) {
+        return exports.minimatch.defaults(def).Minimatch;
+    }
+}
+exports.Minimatch = Minimatch;
+/* c8 ignore start */
+var ast_js_2 = require("./ast.js");
+Object.defineProperty(exports, "AST", { enumerable: true, get: function () { return ast_js_2.AST; } });
+var escape_js_2 = require("./escape.js");
+Object.defineProperty(exports, "escape", { enumerable: true, get: function () { return escape_js_2.escape; } });
+var unescape_js_2 = require("./unescape.js");
+Object.defineProperty(exports, "unescape", { enumerable: true, get: function () { return unescape_js_2.unescape; } });
+/* c8 ignore stop */
+exports.minimatch.AST = ast_js_1.AST;
+exports.minimatch.Minimatch = Minimatch;
+exports.minimatch.escape = escape_js_1.escape;
+exports.minimatch.unescape = unescape_js_1.unescape;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/commonjs/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/commonjs/package.json
new file mode 100644
index 0000000000000000000000000000000000000000..5bbefffbabee392d1855491b84dc0a716b6a3bf2
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/commonjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/commonjs/unescape.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/commonjs/unescape.js
new file mode 100644
index 0000000000000000000000000000000000000000..47c36bcee5a02af522c067a10c3b2167492d6811
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/commonjs/unescape.js
@@ -0,0 +1,24 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.unescape = void 0;
+/**
+ * Un-escape a string that has been escaped with {@link escape}.
+ *
+ * If the {@link windowsPathsNoEscape} option is used, then square-brace
+ * escapes are removed, but not backslash escapes.  For example, it will turn
+ * the string `'[*]'` into `*`, but it will not turn `'\\*'` into `'*'`,
+ * because `\` is a path separator in `windowsPathsNoEscape` mode.
+ *
+ * When `windowsPathsNoEscape` is not set, then both brace escapes and
+ * backslash escapes are removed.
+ *
+ * Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped
+ * or unescaped.
+ */
+const unescape = (s, { windowsPathsNoEscape = false, } = {}) => {
+    return windowsPathsNoEscape
+        ? s.replace(/\[([^\/\\])\]/g, '$1')
+        : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2').replace(/\\([^\/])/g, '$1');
+};
+exports.unescape = unescape;
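+// Usage sketch (illustrative):
+//   unescape('\\*')                                 // => '*'
+//   unescape('[*]', { windowsPathsNoEscape: true }) // => '*'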
+//# sourceMappingURL=unescape.js.map
\ No newline at end of file
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/esm/assert-valid-pattern.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/esm/assert-valid-pattern.js
new file mode 100644
index 0000000000000000000000000000000000000000..7b534fc30200bb8e36fc6f85aebb49a8738f711d
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/esm/assert-valid-pattern.js
@@ -0,0 +1,10 @@
+const MAX_PATTERN_LENGTH = 1024 * 64;
+export const assertValidPattern = (pattern) => {
+    if (typeof pattern !== 'string') {
+        throw new TypeError('invalid pattern');
+    }
+    if (pattern.length > MAX_PATTERN_LENGTH) {
+        throw new TypeError('pattern is too long');
+    }
+};
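+// Usage sketch (illustrative): assertValidPattern('src/**') passes silently;
+// assertValidPattern(42) throws TypeError('invalid pattern'); a string
+// longer than 64 KiB throws TypeError('pattern is too long').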
+//# sourceMappingURL=assert-valid-pattern.js.map
\ No newline at end of file
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/esm/ast.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/esm/ast.js
new file mode 100644
index 0000000000000000000000000000000000000000..02c6bda68427fcd80caaf07a43e1dc4b12c54ab1
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/esm/ast.js
@@ -0,0 +1,588 @@
+// parse a single path portion
+import { parseClass } from './brace-expressions.js';
+import { unescape } from './unescape.js';
+const types = new Set(['!', '?', '+', '*', '@']);
+const isExtglobType = (c) => types.has(c);
+// Patterns that get prepended to bind to the start of either the
+// entire string, or just a single path portion, to prevent dots
+// and/or traversal patterns, when needed.
+// Exts don't need the ^ or / bit, because the root binds that already.
+const startNoTraversal = '(?!(?:^|/)\\.\\.?(?:$|/))';
+const startNoDot = '(?!\\.)';
+// characters that indicate a start of pattern needs the "no dots" bit,
+// because a dot *might* be matched. ( is not in the list, because in
+// the case of a child extglob, it will handle the prevention itself.
+const addPatternStart = new Set(['[', '.']);
+// cases where traversal is A-OK, no dot prevention needed
+const justDots = new Set(['..', '.']);
+const reSpecials = new Set('().*{}+?[]^$\\!');
+const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+// any single thing other than /
+const qmark = '[^/]';
+// * => any number of characters
+const star = qmark + '*?';
+// use + when we need to ensure that *something* matches, because the * is
+// the only thing in the path portion.
+const starNoEmpty = qmark + '+?';
+// remove the \ chars that we added if we end up doing a nonmagic compare
+// const deslash = (s: string) => s.replace(/\\(.)/g, '$1')
+export class AST {
+    type;
+    #root;
+    #hasMagic;
+    #uflag = false;
+    #parts = [];
+    #parent;
+    #parentIndex;
+    #negs;
+    #filledNegs = false;
+    #options;
+    #toString;
+    // set to true if it's an extglob with no children
+    // (which really means one child of '')
+    #emptyExt = false;
+    constructor(type, parent, options = {}) {
+        this.type = type;
+        // extglobs are inherently magical
+        if (type)
+            this.#hasMagic = true;
+        this.#parent = parent;
+        this.#root = this.#parent ? this.#parent.#root : this;
+        this.#options = this.#root === this ? options : this.#root.#options;
+        this.#negs = this.#root === this ? [] : this.#root.#negs;
+        if (type === '!' && !this.#root.#filledNegs)
+            this.#negs.push(this);
+        this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0;
+    }
+    get hasMagic() {
+        /* c8 ignore start */
+        if (this.#hasMagic !== undefined)
+            return this.#hasMagic;
+        /* c8 ignore stop */
+        for (const p of this.#parts) {
+            if (typeof p === 'string')
+                continue;
+            if (p.type || p.hasMagic)
+                return (this.#hasMagic = true);
+        }
+        // note: will be undefined until we generate the regexp src and find out
+        return this.#hasMagic;
+    }
+    // reconstructs the pattern
+    toString() {
+        if (this.#toString !== undefined)
+            return this.#toString;
+        if (!this.type) {
+            return (this.#toString = this.#parts.map(p => String(p)).join(''));
+        }
+        else {
+            return (this.#toString =
+                this.type + '(' + this.#parts.map(p => String(p)).join('|') + ')');
+        }
+    }
+    #fillNegs() {
+        /* c8 ignore start */
+        if (this !== this.#root)
+            throw new Error('should only call on root');
+        if (this.#filledNegs)
+            return this;
+        /* c8 ignore stop */
+        // call toString() once to fill this out
+        this.toString();
+        this.#filledNegs = true;
+        let n;
+        while ((n = this.#negs.pop())) {
+            if (n.type !== '!')
+                continue;
+            // walk up the tree, appending everything that comes AFTER parentIndex
+            let p = n;
+            let pp = p.#parent;
+            while (pp) {
+                for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) {
+                    for (const part of n.#parts) {
+                        /* c8 ignore start */
+                        if (typeof part === 'string') {
+                            throw new Error('string part in extglob AST??');
+                        }
+                        /* c8 ignore stop */
+                        part.copyIn(pp.#parts[i]);
+                    }
+                }
+                p = pp;
+                pp = p.#parent;
+            }
+        }
+        return this;
+    }
+    push(...parts) {
+        for (const p of parts) {
+            if (p === '')
+                continue;
+            /* c8 ignore start */
+            if (typeof p !== 'string' && !(p instanceof AST && p.#parent === this)) {
+                throw new Error('invalid part: ' + p);
+            }
+            /* c8 ignore stop */
+            this.#parts.push(p);
+        }
+    }
+    toJSON() {
+        const ret = this.type === null
+            ? this.#parts.slice().map(p => (typeof p === 'string' ? p : p.toJSON()))
+            : [this.type, ...this.#parts.map(p => p.toJSON())];
+        if (this.isStart() && !this.type)
+            ret.unshift([]);
+        if (this.isEnd() &&
+            (this === this.#root ||
+                (this.#root.#filledNegs && this.#parent?.type === '!'))) {
+            ret.push({});
+        }
+        return ret;
+    }
+    isStart() {
+        if (this.#root === this)
+            return true;
+        // if (this.type) return !!this.#parent?.isStart()
+        if (!this.#parent?.isStart())
+            return false;
+        if (this.#parentIndex === 0)
+            return true;
+        // if everything AHEAD of this is a negation, then it's still the "start"
+        const p = this.#parent;
+        for (let i = 0; i < this.#parentIndex; i++) {
+            const pp = p.#parts[i];
+            if (!(pp instanceof AST && pp.type === '!')) {
+                return false;
+            }
+        }
+        return true;
+    }
+    isEnd() {
+        if (this.#root === this)
+            return true;
+        if (this.#parent?.type === '!')
+            return true;
+        if (!this.#parent?.isEnd())
+            return false;
+        if (!this.type)
+            return this.#parent?.isEnd();
+        // if not root, it'll always have a parent
+        /* c8 ignore start */
+        const pl = this.#parent ? this.#parent.#parts.length : 0;
+        /* c8 ignore stop */
+        return this.#parentIndex === pl - 1;
+    }
+    copyIn(part) {
+        if (typeof part === 'string')
+            this.push(part);
+        else
+            this.push(part.clone(this));
+    }
+    clone(parent) {
+        const c = new AST(this.type, parent);
+        for (const p of this.#parts) {
+            c.copyIn(p);
+        }
+        return c;
+    }
+    static #parseAST(str, ast, pos, opt) {
+        let escaping = false;
+        let inBrace = false;
+        let braceStart = -1;
+        let braceNeg = false;
+        if (ast.type === null) {
+            // outside of a extglob, append until we find a start
+            let i = pos;
+            let acc = '';
+            while (i < str.length) {
+                const c = str.charAt(i++);
+                // still accumulate escapes at this point, but we do ignore
+                // starts that are escaped
+                if (escaping || c === '\\') {
+                    escaping = !escaping;
+                    acc += c;
+                    continue;
+                }
+                if (inBrace) {
+                    if (i === braceStart + 1) {
+                        if (c === '^' || c === '!') {
+                            braceNeg = true;
+                        }
+                    }
+                    else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
+                        inBrace = false;
+                    }
+                    acc += c;
+                    continue;
+                }
+                else if (c === '[') {
+                    inBrace = true;
+                    braceStart = i;
+                    braceNeg = false;
+                    acc += c;
+                    continue;
+                }
+                if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') {
+                    ast.push(acc);
+                    acc = '';
+                    const ext = new AST(c, ast);
+                    i = AST.#parseAST(str, ext, i, opt);
+                    ast.push(ext);
+                    continue;
+                }
+                acc += c;
+            }
+            ast.push(acc);
+            return i;
+        }
+        // some kind of extglob, pos is at the (
+        // find the next | or )
+        let i = pos + 1;
+        let part = new AST(null, ast);
+        const parts = [];
+        let acc = '';
+        while (i < str.length) {
+            const c = str.charAt(i++);
+            // still accumulate escapes at this point, but we do ignore
+            // starts that are escaped
+            if (escaping || c === '\\') {
+                escaping = !escaping;
+                acc += c;
+                continue;
+            }
+            if (inBrace) {
+                if (i === braceStart + 1) {
+                    if (c === '^' || c === '!') {
+                        braceNeg = true;
+                    }
+                }
+                else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
+                    inBrace = false;
+                }
+                acc += c;
+                continue;
+            }
+            else if (c === '[') {
+                inBrace = true;
+                braceStart = i;
+                braceNeg = false;
+                acc += c;
+                continue;
+            }
+            if (isExtglobType(c) && str.charAt(i) === '(') {
+                part.push(acc);
+                acc = '';
+                const ext = new AST(c, part);
+                part.push(ext);
+                i = AST.#parseAST(str, ext, i, opt);
+                continue;
+            }
+            if (c === '|') {
+                part.push(acc);
+                acc = '';
+                parts.push(part);
+                part = new AST(null, ast);
+                continue;
+            }
+            if (c === ')') {
+                if (acc === '' && ast.#parts.length === 0) {
+                    ast.#emptyExt = true;
+                }
+                part.push(acc);
+                acc = '';
+                ast.push(...parts, part);
+                return i;
+            }
+            acc += c;
+        }
+        // unfinished extglob
+        // if we got here, it was a malformed extglob! not an extglob, but
+        // maybe something else in there.
+        ast.type = null;
+        ast.#hasMagic = undefined;
+        ast.#parts = [str.substring(pos - 1)];
+        return i;
+    }
+    static fromGlob(pattern, options = {}) {
+        const ast = new AST(null, undefined, options);
+        AST.#parseAST(pattern, ast, 0, options);
+        return ast;
+    }
+    // returns the regular expression if there's magic, or the unescaped
+    // string if not.
+    toMMPattern() {
+        // should only be called on root
+        /* c8 ignore start */
+        if (this !== this.#root)
+            return this.#root.toMMPattern();
+        /* c8 ignore stop */
+        const glob = this.toString();
+        const [re, body, hasMagic, uflag] = this.toRegExpSource();
+        // if we're in nocase mode, and not nocaseMagicOnly, then we do
+        // still need a regular expression if we have to case-insensitively
+        // match capital/lowercase characters.
+        const anyMagic = hasMagic ||
+            this.#hasMagic ||
+            (this.#options.nocase &&
+                !this.#options.nocaseMagicOnly &&
+                glob.toUpperCase() !== glob.toLowerCase());
+        if (!anyMagic) {
+            return body;
+        }
+        const flags = (this.#options.nocase ? 'i' : '') + (uflag ? 'u' : '');
+        return Object.assign(new RegExp(`^${re}$`, flags), {
+            _src: re,
+            _glob: glob,
+        });
+    }
+    get options() {
+        return this.#options;
+    }
+    // returns the string match, the regexp source, whether there's magic
+    // in the regexp (so a regular expression is required) and whether or
+    // not the uflag is needed for the regular expression (for posix classes)
+    // TODO: instead of injecting the start/end at this point, just return
+    // the BODY of the regexp, along with the start/end portions suitable
+    // for binding the start/end in either a joined full-path makeRe context
+    // (where we bind to (^|/), or a standalone matchPart context (where
+    // we bind to ^, and not /).  Otherwise slashes get duped!
+    //
+    // In part-matching mode, the start is:
+    // - if not isStart: nothing
+    // - if traversal possible, but not allowed: ^(?!\.\.?$)
+    // - if dots allowed or not possible: ^
+    // - if dots possible and not allowed: ^(?!\.)
+    // end is:
+    // - if not isEnd(): nothing
+    // - else: $
+    //
+    // In full-path matching mode, we put the slash at the START of the
+    // pattern, so start is:
+    // - if first pattern: same as part-matching mode
+    // - if not isStart(): nothing
+    // - if traversal possible, but not allowed: /(?!\.\.?(?:$|/))
+    // - if dots allowed or not possible: /
+    // - if dots possible and not allowed: /(?!\.)
+    // end is:
+    // - if last pattern, same as part-matching mode
+    // - else nothing
+    //
+    // Always put the (?:$|/) on negated tails, though, because that has to be
+    // there to bind the end of the negated pattern portion, and it's easier to
+    // just stick it in now rather than try to inject it later in the middle of
+    // the pattern.
+    //
+    // We can just always return the same end, and leave it up to the caller
+    // to know whether it's going to be used joined or in parts.
+    // And, if the start is adjusted slightly, can do the same there:
+    // - if not isStart: nothing
+    // - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$)
+    // - if dots allowed or not possible: (?:/|^)
+    // - if dots possible and not allowed: (?:/|^)(?!\.)
+    //
+    // But it's better to have a simpler binding without a conditional, for
+    // performance, so probably better to return both start options.
+    //
+    // Then the caller just ignores the end if it's not the first pattern,
+    // and the start always gets applied.
+    //
+    // But that's always going to be $ if it's the ending pattern, or nothing,
+    // so the caller can just attach $ at the end of the pattern when building.
+    //
+    // So the todo is:
+    // - better detect what kind of start is needed
+    // - return both flavors of starting pattern
+    // - attach $ at the end of the pattern when creating the actual RegExp
+    //
+    // Ah, but wait, no, that all only applies to the root when the first pattern
+    // is not an extglob. If the first pattern IS an extglob, then we need all
+    // that dot prevention biz to live in the extglob portions, because eg
+    // +(*|.x*) can match .xy but not .yx.
+    //
+    // So, return the two flavors if it's #root and the first child is not an
+    // AST, otherwise leave it to the child AST to handle it, and there,
+    // use the (?:^|/) style of start binding.
+    //
+    // Even simplified further:
+    // - Since the start for a join is eg /(?!\.) and the start for a part
+    // is ^(?!\.), we can just prepend (?!\.) to the pattern (either root
+    // or start or whatever) and prepend ^ or / at the Regexp construction.
+    toRegExpSource(allowDot) {
+        const dot = allowDot ?? !!this.#options.dot;
+        if (this.#root === this)
+            this.#fillNegs();
+        if (!this.type) {
+            const noEmpty = this.isStart() && this.isEnd();
+            const src = this.#parts
+                .map(p => {
+                const [re, _, hasMagic, uflag] = typeof p === 'string'
+                    ? AST.#parseGlob(p, this.#hasMagic, noEmpty)
+                    : p.toRegExpSource(allowDot);
+                this.#hasMagic = this.#hasMagic || hasMagic;
+                this.#uflag = this.#uflag || uflag;
+                return re;
+            })
+                .join('');
+            let start = '';
+            if (this.isStart()) {
+                if (typeof this.#parts[0] === 'string') {
+                    // this is the string that will match the start of the pattern,
+                    // so we need to protect against dots and such.
+                    // '.' and '..' cannot match unless the pattern is that exactly,
+                    // even if it starts with . or dot:true is set.
+                    const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]);
+                    if (!dotTravAllowed) {
+                        const aps = addPatternStart;
+                        // check if we have a possibility of matching . or ..,
+                        // and prevent that.
+                        const needNoTrav =
+                        // dots are allowed, and the pattern starts with [ or .
+                        (dot && aps.has(src.charAt(0))) ||
+                            // the pattern starts with \., and then [ or .
+                            (src.startsWith('\\.') && aps.has(src.charAt(2))) ||
+                            // the pattern starts with \.\., and then [ or .
+                            (src.startsWith('\\.\\.') && aps.has(src.charAt(4)));
+                        // no need to prevent dots if it can't match a dot, or if a
+                        // sub-pattern will be preventing it anyway.
+                        const needNoDot = !dot && !allowDot && aps.has(src.charAt(0));
+                        start = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : '';
+                    }
+                }
+            }
+            // append the "end of path portion" pattern to negation tails
+            let end = '';
+            if (this.isEnd() &&
+                this.#root.#filledNegs &&
+                this.#parent?.type === '!') {
+                end = '(?:$|\\/)';
+            }
+            const final = start + src + end;
+            return [
+                final,
+                unescape(src),
+                (this.#hasMagic = !!this.#hasMagic),
+                this.#uflag,
+            ];
+        }
+        // We need to calculate the body *twice* if it's a repeat pattern
+        // at the start, once in nodot mode, then again in dot mode, so a
+        // pattern like *(?) can match 'x.y'
+        const repeated = this.type === '*' || this.type === '+';
+        // some kind of extglob
+        const start = this.type === '!' ? '(?:(?!(?:' : '(?:';
+        let body = this.#partsToRegExp(dot);
+        if (this.isStart() && this.isEnd() && !body && this.type !== '!') {
+            // invalid extglob, has to at least be *something* present, if it's
+            // the entire path portion.
+            const s = this.toString();
+            this.#parts = [s];
+            this.type = null;
+            this.#hasMagic = undefined;
+            return [s, unescape(this.toString()), false, false];
+        }
+        // XXX abstract out this map method
+        let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot
+            ? ''
+            : this.#partsToRegExp(true);
+        if (bodyDotAllowed === body) {
+            bodyDotAllowed = '';
+        }
+        if (bodyDotAllowed) {
+            body = `(?:${body})(?:${bodyDotAllowed})*?`;
+        }
+        // an empty !() is exactly equivalent to a starNoEmpty
+        let final = '';
+        if (this.type === '!' && this.#emptyExt) {
+            final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty;
+        }
+        else {
+            const close = this.type === '!'
+                ? // !() must match something, but !(x) can match ''
+                    '))' +
+                        (this.isStart() && !dot && !allowDot ? startNoDot : '') +
+                        star +
+                        ')'
+                : this.type === '@'
+                    ? ')'
+                    : this.type === '?'
+                        ? ')?'
+                        : this.type === '+' && bodyDotAllowed
+                            ? ')'
+                            : this.type === '*' && bodyDotAllowed
+                                ? `)?`
+                                : `)${this.type}`;
+            final = start + body + close;
+        }
+        return [
+            final,
+            unescape(body),
+            (this.#hasMagic = !!this.#hasMagic),
+            this.#uflag,
+        ];
+    }
+    #partsToRegExp(dot) {
+        return this.#parts
+            .map(p => {
+            // extglob ASTs should only contain parent ASTs
+            /* c8 ignore start */
+            if (typeof p === 'string') {
+                throw new Error('string type in extglob ast??');
+            }
+            /* c8 ignore stop */
+            // can ignore hasMagic, because extglobs are already always magic
+            const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot);
+            this.#uflag = this.#uflag || uflag;
+            return re;
+        })
+            .filter(p => !(this.isStart() && this.isEnd()) || !!p)
+            .join('|');
+    }
+    static #parseGlob(glob, hasMagic, noEmpty = false) {
+        let escaping = false;
+        let re = '';
+        let uflag = false;
+        for (let i = 0; i < glob.length; i++) {
+            const c = glob.charAt(i);
+            if (escaping) {
+                escaping = false;
+                re += (reSpecials.has(c) ? '\\' : '') + c;
+                continue;
+            }
+            if (c === '\\') {
+                if (i === glob.length - 1) {
+                    re += '\\\\';
+                }
+                else {
+                    escaping = true;
+                }
+                continue;
+            }
+            if (c === '[') {
+                const [src, needUflag, consumed, magic] = parseClass(glob, i);
+                if (consumed) {
+                    re += src;
+                    uflag = uflag || needUflag;
+                    i += consumed - 1;
+                    hasMagic = hasMagic || magic;
+                    continue;
+                }
+            }
+            if (c === '*') {
+                if (noEmpty && glob === '*')
+                    re += starNoEmpty;
+                else
+                    re += star;
+                hasMagic = true;
+                continue;
+            }
+            if (c === '?') {
+                re += qmark;
+                hasMagic = true;
+                continue;
+            }
+            re += regExpEscape(c);
+        }
+        return [re, unescape(glob), !!hasMagic, uflag];
+    }
+}
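+// Usage sketch (illustrative, default options assumed):
+//   AST.fromGlob('@(a|b)').toMMPattern()    // => RegExp (extglob is magic)
+//   AST.fromGlob('plain.txt').toMMPattern() // => 'plain.txt' (no magic)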
+//# sourceMappingURL=ast.js.map
\ No newline at end of file
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/esm/brace-expressions.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/esm/brace-expressions.js
new file mode 100644
index 0000000000000000000000000000000000000000..c629d6ae816e27fdcbbaaef272ac352bfd77a27b
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/esm/brace-expressions.js
@@ -0,0 +1,148 @@
+// translate the various posix character classes into unicode properties
+// this works across all unicode locales
+// { <posix class>: [<translation>, /u flag required, negated] }
+const posixClasses = {
+    '[:alnum:]': ['\\p{L}\\p{Nl}\\p{Nd}', true],
+    '[:alpha:]': ['\\p{L}\\p{Nl}', true],
+    '[:ascii:]': ['\\x' + '00-\\x' + '7f', false],
+    '[:blank:]': ['\\p{Zs}\\t', true],
+    '[:cntrl:]': ['\\p{Cc}', true],
+    '[:digit:]': ['\\p{Nd}', true],
+    '[:graph:]': ['\\p{Z}\\p{C}', true, true],
+    '[:lower:]': ['\\p{Ll}', true],
+    '[:print:]': ['\\p{C}', true],
+    '[:punct:]': ['\\p{P}', true],
+    '[:space:]': ['\\p{Z}\\t\\r\\n\\v\\f', true],
+    '[:upper:]': ['\\p{Lu}', true],
+    '[:word:]': ['\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}', true],
+    '[:xdigit:]': ['A-Fa-f0-9', false],
+};
+// only need to escape a few things inside of brace expressions
+// escapes: [ \ ] -
+const braceEscape = (s) => s.replace(/[[\]\\-]/g, '\\$&');
+// escape all regexp magic characters
+const regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+// everything has already been escaped, we just have to join
+const rangesToString = (ranges) => ranges.join('');
+// takes a glob string at a posix brace expression, and returns
+// an equivalent regular expression source, a boolean indicating
+// whether the /u flag needs to be applied, and the number of chars
+// consumed to parse the character class.
+// This also removes out-of-order ranges, and returns ($.) if the
+// entire class is just no good.
+export const parseClass = (glob, position) => {
+    const pos = position;
+    /* c8 ignore start */
+    if (glob.charAt(pos) !== '[') {
+        throw new Error('not in a brace expression');
+    }
+    /* c8 ignore stop */
+    const ranges = [];
+    const negs = [];
+    let i = pos + 1;
+    let sawStart = false;
+    let uflag = false;
+    let escaping = false;
+    let negate = false;
+    let endPos = pos;
+    let rangeStart = '';
+    WHILE: while (i < glob.length) {
+        const c = glob.charAt(i);
+        if ((c === '!' || c === '^') && i === pos + 1) {
+            negate = true;
+            i++;
+            continue;
+        }
+        if (c === ']' && sawStart && !escaping) {
+            endPos = i + 1;
+            break;
+        }
+        sawStart = true;
+        if (c === '\\') {
+            if (!escaping) {
+                escaping = true;
+                i++;
+                continue;
+            }
+            // escaped \ char, fall through and treat like normal char
+        }
+        if (c === '[' && !escaping) {
+            // either a posix class, a collation equivalent, or just a [
+            for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) {
+                if (glob.startsWith(cls, i)) {
+                    // invalid, [a-[] is fine, but not [a-[:alpha]]
+                    if (rangeStart) {
+                        return ['$.', false, glob.length - pos, true];
+                    }
+                    i += cls.length;
+                    if (neg)
+                        negs.push(unip);
+                    else
+                        ranges.push(unip);
+                    uflag = uflag || u;
+                    continue WHILE;
+                }
+            }
+        }
+        // now it's just a normal character, effectively
+        escaping = false;
+        if (rangeStart) {
+            // throw this range away if it's not valid, but others
+            // can still match.
+            if (c > rangeStart) {
+                ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c));
+            }
+            else if (c === rangeStart) {
+                ranges.push(braceEscape(c));
+            }
+            rangeStart = '';
+            i++;
+            continue;
+        }
+        // now might be the start of a range.
+        // can be either c-d or c-] or c<more...>] or c] at this point
+        if (glob.startsWith('-]', i + 1)) {
+            ranges.push(braceEscape(c + '-'));
+            i += 2;
+            continue;
+        }
+        if (glob.startsWith('-', i + 1)) {
+            rangeStart = c;
+            i += 2;
+            continue;
+        }
+        // not the start of a range, just a single character
+        ranges.push(braceEscape(c));
+        i++;
+    }
+    if (endPos < i) {
+        // didn't see the end of the class, not a valid class,
+        // but might still be valid as a literal match.
+        return ['', false, 0, false];
+    }
+    // if we got no ranges and no negates, then we have a range that
+    // cannot possibly match anything, and that poisons the whole glob
+    if (!ranges.length && !negs.length) {
+        return ['$.', false, glob.length - pos, true];
+    }
+    // if we got one positive range, and it's a single character, then that's
+    // not actually a magic pattern, it's just that one literal character.
+    // we should not treat that as "magic", we should just return the literal
+    // character. [_] is a perfectly valid way to escape glob magic chars.
+    if (negs.length === 0 &&
+        ranges.length === 1 &&
+        /^\\?.$/.test(ranges[0]) &&
+        !negate) {
+        const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0];
+        return [regexpEscape(r), false, endPos - pos, false];
+    }
+    const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']';
+    const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']';
+    const comb = ranges.length && negs.length
+        ? '(' + sranges + '|' + snegs + ')'
+        : ranges.length
+            ? sranges
+            : snegs;
+    return [comb, uflag, endPos - pos, true];
+};
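+// Usage sketch (illustrative):
+//   parseClass('[a-c]', 0)       // => ['[a-c]', false, 5, true]
+//   parseClass('[[:digit:]]', 0) // => ['[\\p{Nd}]', true, 11, true]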
+//# sourceMappingURL=brace-expressions.js.map
\ No newline at end of file
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/esm/escape.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/esm/escape.js
new file mode 100644
index 0000000000000000000000000000000000000000..16f7c8c7bdc64645a201065cb264cb561eac851c
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/esm/escape.js
@@ -0,0 +1,18 @@
+/**
+ * Escape all magic characters in a glob pattern.
+ *
+ * If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape}
+ * option is used, then characters are escaped by wrapping in `[]`, because
+ * a magic character wrapped in a character class can only be satisfied by
+ * that exact character.  In this mode, `\` is _not_ escaped, because it is
+ * not interpreted as a magic character, but instead as a path separator.
+ */
+export const escape = (s, { windowsPathsNoEscape = false, } = {}) => {
+    // don't need to escape +@! because we escape the parens
+    // that make those magic, and escaping ! as [!] isn't valid,
+    // because [!]] is a valid glob class meaning not ']'.
+    return windowsPathsNoEscape
+        ? s.replace(/[?*()[\]]/g, '[$&]')
+        : s.replace(/[?*()[\]\\]/g, '\\$&');
+};
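+// Usage sketch (illustrative):
+//   escape('a*b?')                                 // => 'a\\*b\\?'
+//   escape('a*b?', { windowsPathsNoEscape: true }) // => 'a[*]b[?]'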
+//# sourceMappingURL=escape.js.map
\ No newline at end of file
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/esm/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/esm/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..790d6c02a2f22e96cf10aeacc346c716de2f6644
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/esm/index.js
@@ -0,0 +1,1001 @@
+import { expand } from '@isaacs/brace-expansion';
+import { assertValidPattern } from './assert-valid-pattern.js';
+import { AST } from './ast.js';
+import { escape } from './escape.js';
+import { unescape } from './unescape.js';
+export const minimatch = (p, pattern, options = {}) => {
+    assertValidPattern(pattern);
+    // shortcut: comments match nothing.
+    if (!options.nocomment && pattern.charAt(0) === '#') {
+        return false;
+    }
+    return new Minimatch(pattern, options).match(p);
+};
+// Optimized checking for the most common glob patterns.
+const starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/;
+const starDotExtTest = (ext) => (f) => !f.startsWith('.') && f.endsWith(ext);
+const starDotExtTestDot = (ext) => (f) => f.endsWith(ext);
+const starDotExtTestNocase = (ext) => {
+    ext = ext.toLowerCase();
+    return (f) => !f.startsWith('.') && f.toLowerCase().endsWith(ext);
+};
+const starDotExtTestNocaseDot = (ext) => {
+    ext = ext.toLowerCase();
+    return (f) => f.toLowerCase().endsWith(ext);
+};
+const starDotStarRE = /^\*+\.\*+$/;
+const starDotStarTest = (f) => !f.startsWith('.') && f.includes('.');
+const starDotStarTestDot = (f) => f !== '.' && f !== '..' && f.includes('.');
+const dotStarRE = /^\.\*+$/;
+const dotStarTest = (f) => f !== '.' && f !== '..' && f.startsWith('.');
+const starRE = /^\*+$/;
+const starTest = (f) => f.length !== 0 && !f.startsWith('.');
+const starTestDot = (f) => f.length !== 0 && f !== '.' && f !== '..';
+const qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/;
+const qmarksTestNocase = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExt([$0]);
+    if (!ext)
+        return noext;
+    ext = ext.toLowerCase();
+    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
+};
+const qmarksTestNocaseDot = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExtDot([$0]);
+    if (!ext)
+        return noext;
+    ext = ext.toLowerCase();
+    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
+};
+const qmarksTestDot = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExtDot([$0]);
+    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
+};
+const qmarksTest = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExt([$0]);
+    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
+};
+const qmarksTestNoExt = ([$0]) => {
+    const len = $0.length;
+    return (f) => f.length === len && !f.startsWith('.');
+};
+const qmarksTestNoExtDot = ([$0]) => {
+    const len = $0.length;
+    return (f) => f.length === len && f !== '.' && f !== '..';
+};
+/* c8 ignore start */
+const defaultPlatform = (typeof process === 'object' && process
+    ? (typeof process.env === 'object' &&
+        process.env &&
+        process.env.__MINIMATCH_TESTING_PLATFORM__) ||
+        process.platform
+    : 'posix');
+const path = {
+    win32: { sep: '\\' },
+    posix: { sep: '/' },
+};
+/* c8 ignore stop */
+export const sep = defaultPlatform === 'win32' ? path.win32.sep : path.posix.sep;
+minimatch.sep = sep;
+export const GLOBSTAR = Symbol('globstar **');
+minimatch.GLOBSTAR = GLOBSTAR;
+// any single thing other than /
+// don't need to escape / when using new RegExp()
+const qmark = '[^/]';
+// * => any number of characters
+const star = qmark + '*?';
+// ** when dots are allowed.  Anything goes, except .. and .
+// not (^ or / followed by one or two dots followed by $ or /),
+// followed by anything, any number of times.
+const twoStarDot = '(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?';
+// not a ^ or / followed by a dot,
+// followed by anything, any number of times.
+const twoStarNoDot = '(?:(?!(?:\\/|^)\\.).)*?';
+export const filter = (pattern, options = {}) => (p) => minimatch(p, pattern, options);
+minimatch.filter = filter;
+const ext = (a, b = {}) => Object.assign({}, a, b);
+export const defaults = (def) => {
+    if (!def || typeof def !== 'object' || !Object.keys(def).length) {
+        return minimatch;
+    }
+    const orig = minimatch;
+    const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options));
+    return Object.assign(m, {
+        Minimatch: class Minimatch extends orig.Minimatch {
+            constructor(pattern, options = {}) {
+                super(pattern, ext(def, options));
+            }
+            static defaults(options) {
+                return orig.defaults(ext(def, options)).Minimatch;
+            }
+        },
+        AST: class AST extends orig.AST {
+            /* c8 ignore start */
+            constructor(type, parent, options = {}) {
+                super(type, parent, ext(def, options));
+            }
+            /* c8 ignore stop */
+            static fromGlob(pattern, options = {}) {
+                return orig.AST.fromGlob(pattern, ext(def, options));
+            }
+        },
+        unescape: (s, options = {}) => orig.unescape(s, ext(def, options)),
+        escape: (s, options = {}) => orig.escape(s, ext(def, options)),
+        filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)),
+        defaults: (options) => orig.defaults(ext(def, options)),
+        makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)),
+        braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)),
+        match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)),
+        sep: orig.sep,
+        GLOBSTAR: GLOBSTAR,
+    });
+};
+minimatch.defaults = defaults;
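+// Usage sketch (illustrative):
+//   const mmNocase = defaults({ nocase: true })
+//   mmNocase('FOO.JS', '*.js') // => true (the defaults merge into each call)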
+// Brace expansion:
+// a{b,c}d -> abd acd
+// a{b,}c -> abc ac
+// a{0..3}d -> a0d a1d a2d a3d
+// a{b,c{d,e}f}g -> abg acdfg acefg
+// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg
+//
+// Invalid sets are not expanded.
+// a{2..}b -> a{2..}b
+// a{b}c -> a{b}c
+export const braceExpand = (pattern, options = {}) => {
+    assertValidPattern(pattern);
+    // Thanks to Yeting Li <https://github.com/yetingli> for
+    // improving this regexp to avoid a ReDOS vulnerability.
+    if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
+        // shortcut. no need to expand.
+        return [pattern];
+    }
+    return expand(pattern);
+};
+minimatch.braceExpand = braceExpand;
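+// Usage sketch (illustrative):
+//   braceExpand('a{b,c}d') // => ['abd', 'acd']
+//   braceExpand('a{b}c')   // => ['a{b}c'] (invalid sets are not expanded)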
+// parse a component of the expanded set.
+// At this point, no pattern may contain "/" in it
+// so we're going to return a 2d array, where each entry is the full
+// pattern, split on '/', and then turned into a regular expression.
+// A regexp is made at the end which joins each array with an
+// escaped /, and another full one which joins each regexp with |.
+//
+// Following the lead of Bash 4.1, note that "**" only has special meaning
+// when it is the *only* thing in a path portion.  Otherwise, any series
+// of * is equivalent to a single *.  Globstar behavior is enabled by
+// default, and can be disabled by setting options.noglobstar.
+export const makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe();
+minimatch.makeRe = makeRe;
+export const match = (list, pattern, options = {}) => {
+    const mm = new Minimatch(pattern, options);
+    list = list.filter(f => mm.match(f));
+    if (mm.options.nonull && !list.length) {
+        list.push(pattern);
+    }
+    return list;
+};
+minimatch.match = match;
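+// Usage sketch (illustrative):
+//   match(['a.js', 'b.ts'], '*.js')     // => ['a.js']
+//   match([], 'x.js', { nonull: true }) // => ['x.js']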
+// replace stuff like \* with *
+const globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/;
+const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+export class Minimatch {
+    options;
+    set;
+    pattern;
+    windowsPathsNoEscape;
+    nonegate;
+    negate;
+    comment;
+    empty;
+    preserveMultipleSlashes;
+    partial;
+    globSet;
+    globParts;
+    nocase;
+    isWindows;
+    platform;
+    windowsNoMagicRoot;
+    regexp;
+    constructor(pattern, options = {}) {
+        assertValidPattern(pattern);
+        options = options || {};
+        this.options = options;
+        this.pattern = pattern;
+        this.platform = options.platform || defaultPlatform;
+        this.isWindows = this.platform === 'win32';
+        this.windowsPathsNoEscape =
+            !!options.windowsPathsNoEscape || options.allowWindowsEscape === false;
+        if (this.windowsPathsNoEscape) {
+            this.pattern = this.pattern.replace(/\\/g, '/');
+        }
+        this.preserveMultipleSlashes = !!options.preserveMultipleSlashes;
+        this.regexp = null;
+        this.negate = false;
+        this.nonegate = !!options.nonegate;
+        this.comment = false;
+        this.empty = false;
+        this.partial = !!options.partial;
+        this.nocase = !!this.options.nocase;
+        this.windowsNoMagicRoot =
+            options.windowsNoMagicRoot !== undefined
+                ? options.windowsNoMagicRoot
+                : !!(this.isWindows && this.nocase);
+        this.globSet = [];
+        this.globParts = [];
+        this.set = [];
+        // make the set of regexps etc.
+        this.make();
+    }
+    hasMagic() {
+        if (this.options.magicalBraces && this.set.length > 1) {
+            return true;
+        }
+        for (const pattern of this.set) {
+            for (const part of pattern) {
+                if (typeof part !== 'string')
+                    return true;
+            }
+        }
+        return false;
+    }
+    debug(..._) { }
+    make() {
+        const pattern = this.pattern;
+        const options = this.options;
+        // empty patterns and comments match nothing.
+        if (!options.nocomment && pattern.charAt(0) === '#') {
+            this.comment = true;
+            return;
+        }
+        if (!pattern) {
+            this.empty = true;
+            return;
+        }
+        // step 1: figure out negation, etc.
+        this.parseNegate();
+        // step 2: expand braces
+        this.globSet = [...new Set(this.braceExpand())];
+        if (options.debug) {
+            this.debug = (...args) => console.error(...args);
+        }
+        this.debug(this.pattern, this.globSet);
+        // step 3: now we have a set, so turn each one into a series of
+        // path-portion matching patterns.
+        // These will be regexps, except in the case of "**", which is
+        // set to the GLOBSTAR object for globstar behavior,
+        // and will not contain any / characters
+        //
+        // First, we preprocess to make the glob pattern sets a bit simpler
+        // and deduped.  There are some perf-killing patterns that can cause
+        // problems with a glob walk, but we can simplify them down a bit.
+        const rawGlobParts = this.globSet.map(s => this.slashSplit(s));
+        this.globParts = this.preprocess(rawGlobParts);
+        this.debug(this.pattern, this.globParts);
+        // glob --> regexps
+        let set = this.globParts.map((s, _, __) => {
+            if (this.isWindows && this.windowsNoMagicRoot) {
+                // check if it's a drive or unc path.
+                const isUNC = s[0] === '' &&
+                    s[1] === '' &&
+                    (s[2] === '?' || !globMagic.test(s[2])) &&
+                    !globMagic.test(s[3]);
+                const isDrive = /^[a-z]:/i.test(s[0]);
+                if (isUNC) {
+                    return [...s.slice(0, 4), ...s.slice(4).map(ss => this.parse(ss))];
+                }
+                else if (isDrive) {
+                    return [s[0], ...s.slice(1).map(ss => this.parse(ss))];
+                }
+            }
+            return s.map(ss => this.parse(ss));
+        });
+        this.debug(this.pattern, set);
+        // filter out everything that didn't compile properly.
+        this.set = set.filter(s => s.indexOf(false) === -1);
+        // do not treat the ? in UNC paths as magic
+        if (this.isWindows) {
+            for (let i = 0; i < this.set.length; i++) {
+                const p = this.set[i];
+                if (p[0] === '' &&
+                    p[1] === '' &&
+                    this.globParts[i][2] === '?' &&
+                    typeof p[3] === 'string' &&
+                    /^[a-z]:$/i.test(p[3])) {
+                    p[2] = '?';
+                }
+            }
+        }
+        this.debug(this.pattern, this.set);
+    }
+    // various transforms to equivalent pattern sets that are
+    // faster to process in a filesystem walk.  The goal is to
+    // eliminate what we can, and push all ** patterns as far
+    // to the right as possible, even if it increases the number
+    // of patterns that we have to process.
+    preprocess(globParts) {
+        // if we're not in globstar mode, then turn all ** into *
+        if (this.options.noglobstar) {
+            for (let i = 0; i < globParts.length; i++) {
+                for (let j = 0; j < globParts[i].length; j++) {
+                    if (globParts[i][j] === '**') {
+                        globParts[i][j] = '*';
+                    }
+                }
+            }
+        }
+        const { optimizationLevel = 1 } = this.options;
+        if (optimizationLevel >= 2) {
+            // aggressive optimization for the purpose of fs walking
+            globParts = this.firstPhasePreProcess(globParts);
+            globParts = this.secondPhasePreProcess(globParts);
+        }
+        else if (optimizationLevel >= 1) {
+            // just basic optimizations to remove some .. parts
+            globParts = this.levelOneOptimize(globParts);
+        }
+        else {
+            // just collapse multiple ** portions into one
+            globParts = this.adjascentGlobstarOptimize(globParts);
+        }
+        return globParts;
+    }
+    // just get rid of adjacent ** portions
+    adjascentGlobstarOptimize(globParts) {
+        return globParts.map(parts => {
+            let gs = -1;
+            while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
+                let i = gs;
+                while (parts[i + 1] === '**') {
+                    i++;
+                }
+                if (i !== gs) {
+                    parts.splice(gs, i - gs);
+                }
+            }
+            return parts;
+        });
+    }
+    // get rid of adjacent ** and resolve .. portions
+    levelOneOptimize(globParts) {
+        return globParts.map(parts => {
+            parts = parts.reduce((set, part) => {
+                const prev = set[set.length - 1];
+                if (part === '**' && prev === '**') {
+                    return set;
+                }
+                if (part === '..') {
+                    if (prev && prev !== '..' && prev !== '.' && prev !== '**') {
+                        set.pop();
+                        return set;
+                    }
+                }
+                set.push(part);
+                return set;
+            }, []);
+            return parts.length === 0 ? [''] : parts;
+        });
+    }
+    levelTwoFileOptimize(parts) {
+        if (!Array.isArray(parts)) {
+            parts = this.slashSplit(parts);
+        }
+        let didSomething = false;
+        do {
+            didSomething = false;
+            // <pre>/<e>/<rest> -> <pre>/<rest>
+            if (!this.preserveMultipleSlashes) {
+                for (let i = 1; i < parts.length - 1; i++) {
+                    const p = parts[i];
+                    // don't squeeze out UNC patterns
+                    if (i === 1 && p === '' && parts[0] === '')
+                        continue;
+                    if (p === '.' || p === '') {
+                        didSomething = true;
+                        parts.splice(i, 1);
+                        i--;
+                    }
+                }
+                if (parts[0] === '.' &&
+                    parts.length === 2 &&
+                    (parts[1] === '.' || parts[1] === '')) {
+                    didSomething = true;
+                    parts.pop();
+                }
+            }
+            // <pre>/<p>/../<rest> -> <pre>/<rest>
+            let dd = 0;
+            while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                const p = parts[dd - 1];
+                if (p && p !== '.' && p !== '..' && p !== '**') {
+                    didSomething = true;
+                    parts.splice(dd - 1, 2);
+                    dd -= 2;
+                }
+            }
+        } while (didSomething);
+        return parts.length === 0 ? [''] : parts;
+    }
+    // First phase: single-pattern processing
+    // <pre> is 1 or more portions
+    // <rest> is 1 or more portions
+    // <p> is any portion other than ., .., '', or **
+    // <e> is . or ''
+    //
+    // **/.. is *brutal* for filesystem walking performance, because
+    // it effectively resets the recursive walk each time it occurs,
+    // and ** cannot be reduced out by a .. pattern part like a regexp
+    // or most strings (other than .., ., and '') can be.
+    //
+    // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+    // <pre>/<e>/<rest> -> <pre>/<rest>
+    // <pre>/<p>/../<rest> -> <pre>/<rest>
+    // **/**/<rest> -> **/<rest>
+    //
+    // **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow
+    // this WOULD be allowed if ** did follow symlinks, or * didn't
+    firstPhasePreProcess(globParts) {
+        let didSomething = false;
+        do {
+            didSomething = false;
+            // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+            for (let parts of globParts) {
+                let gs = -1;
+                while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
+                    let gss = gs;
+                    while (parts[gss + 1] === '**') {
+                        // <pre>/**/**/<rest> -> <pre>/**/<rest>
+                        gss++;
+                    }
+                    // eg, if gs is 2 and gss is 4, that means we have 3 **
+                    // parts, and can remove 2 of them.
+                    if (gss > gs) {
+                        parts.splice(gs + 1, gss - gs);
+                    }
+                    let next = parts[gs + 1];
+                    const p = parts[gs + 2];
+                    const p2 = parts[gs + 3];
+                    if (next !== '..')
+                        continue;
+                    if (!p ||
+                        p === '.' ||
+                        p === '..' ||
+                        !p2 ||
+                        p2 === '.' ||
+                        p2 === '..') {
+                        continue;
+                    }
+                    didSomething = true;
+                    // edit parts in place, and push the new one
+                    parts.splice(gs, 1);
+                    const other = parts.slice(0);
+                    other[gs] = '**';
+                    globParts.push(other);
+                    gs--;
+                }
+                // <pre>/<e>/<rest> -> <pre>/<rest>
+                if (!this.preserveMultipleSlashes) {
+                    for (let i = 1; i < parts.length - 1; i++) {
+                        const p = parts[i];
+                        // don't squeeze out UNC patterns
+                        if (i === 1 && p === '' && parts[0] === '')
+                            continue;
+                        if (p === '.' || p === '') {
+                            didSomething = true;
+                            parts.splice(i, 1);
+                            i--;
+                        }
+                    }
+                    if (parts[0] === '.' &&
+                        parts.length === 2 &&
+                        (parts[1] === '.' || parts[1] === '')) {
+                        didSomething = true;
+                        parts.pop();
+                    }
+                }
+                // <pre>/<p>/../<rest> -> <pre>/<rest>
+                let dd = 0;
+                while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                    const p = parts[dd - 1];
+                    if (p && p !== '.' && p !== '..' && p !== '**') {
+                        didSomething = true;
+                        const needDot = dd === 1 && parts[dd + 1] === '**';
+                        const splin = needDot ? ['.'] : [];
+                        parts.splice(dd - 1, 2, ...splin);
+                        if (parts.length === 0)
+                            parts.push('');
+                        dd -= 2;
+                    }
+                }
+            }
+        } while (didSomething);
+        return globParts;
+    }
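+    // Illustration (not from the upstream file): the **/.. fork in action:
+    //
+    //   new Minimatch('a/**/../b/c', { optimizationLevel: 2 }).globParts
+    //   // => [ [ 'b', 'c' ], [ 'a', '**', 'b', 'c' ] ]
+    //
+    // the walker never has to expand ** only to climb back out with ..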
+    // second phase: multi-pattern dedupes
+    // {<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>
+    // {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>
+    // {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>
+    //
+    // {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>
+    // ^-- not valid because ** doesn't follow symlinks
+    secondPhasePreProcess(globParts) {
+        for (let i = 0; i < globParts.length - 1; i++) {
+            for (let j = i + 1; j < globParts.length; j++) {
+                const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
+                if (matched) {
+                    globParts[i] = [];
+                    globParts[j] = matched;
+                    break;
+                }
+            }
+        }
+        return globParts.filter(gs => gs.length);
+    }
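+    // Illustration (not from the upstream file): two patterns produced by
+    // one brace expansion collapse into a single pattern here:
+    //
+    //   new Minimatch('{a/b,a/*}/c', { optimizationLevel: 2 }).globParts
+    //   // => [ [ 'a', '*', 'c' ] ]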
+    partsMatch(a, b, emptyGSMatch = false) {
+        let ai = 0;
+        let bi = 0;
+        let result = [];
+        let which = '';
+        while (ai < a.length && bi < b.length) {
+            if (a[ai] === b[bi]) {
+                result.push(which === 'b' ? b[bi] : a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {
+                result.push(a[ai]);
+                ai++;
+            }
+            else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {
+                result.push(b[bi]);
+                bi++;
+            }
+            else if (a[ai] === '*' &&
+                b[bi] &&
+                (this.options.dot || !b[bi].startsWith('.')) &&
+                b[bi] !== '**') {
+                if (which === 'b')
+                    return false;
+                which = 'a';
+                result.push(a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (b[bi] === '*' &&
+                a[ai] &&
+                (this.options.dot || !a[ai].startsWith('.')) &&
+                a[ai] !== '**') {
+                if (which === 'a')
+                    return false;
+                which = 'b';
+                result.push(b[bi]);
+                ai++;
+                bi++;
+            }
+            else {
+                return false;
+            }
+        }
+        // if we fall out of the loop, it means the two are identical
+        // as long as their lengths match
+        return a.length === b.length && result;
+    }
+    parseNegate() {
+        if (this.nonegate)
+            return;
+        const pattern = this.pattern;
+        let negate = false;
+        let negateOffset = 0;
+        for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
+            negate = !negate;
+            negateOffset++;
+        }
+        if (negateOffset)
+            this.pattern = pattern.slice(negateOffset);
+        this.negate = negate;
+    }
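+    // Illustration (not from the upstream file): each leading ! flips
+    // negation, so '!!!a/*' matches everything EXCEPT a/*:
+    //
+    //   minimatch('a/b', '!!!a/*')   // => false
+    //   minimatch('c/d', '!!!a/*')   // => true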
+    // set partial to true to test if, for example,
+    // "/a/b" matches the start of "/*/b/*/d"
+    // Partial means, if you run out of file before you run
+    // out of pattern, then that's fine, as long as all
+    // the parts match.
+    matchOne(file, pattern, partial = false) {
+        const options = this.options;
+        // UNC paths like //?/X:/... can match X:/... and vice versa
+        // Drive letters in absolute drive or unc paths are always compared
+        // case-insensitively.
+        if (this.isWindows) {
+            const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]);
+            const fileUNC = !fileDrive &&
+                file[0] === '' &&
+                file[1] === '' &&
+                file[2] === '?' &&
+                /^[a-z]:$/i.test(file[3]);
+            const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]);
+            const patternUNC = !patternDrive &&
+                pattern[0] === '' &&
+                pattern[1] === '' &&
+                pattern[2] === '?' &&
+                typeof pattern[3] === 'string' &&
+                /^[a-z]:$/i.test(pattern[3]);
+            const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;
+            const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;
+            if (typeof fdi === 'number' && typeof pdi === 'number') {
+                const [fd, pd] = [file[fdi], pattern[pdi]];
+                if (fd.toLowerCase() === pd.toLowerCase()) {
+                    pattern[pdi] = fd;
+                    if (pdi > fdi) {
+                        pattern = pattern.slice(pdi);
+                    }
+                    else if (fdi > pdi) {
+                        file = file.slice(fdi);
+                    }
+                }
+            }
+        }
+        // resolve and reduce . and .. portions in the file as well.
+        // don't need to do the second phase, because it's only one string[]
+        const { optimizationLevel = 1 } = this.options;
+        if (optimizationLevel >= 2) {
+            file = this.levelTwoFileOptimize(file);
+        }
+        this.debug('matchOne', this, { file, pattern });
+        this.debug('matchOne', file.length, pattern.length);
+        for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
+            this.debug('matchOne loop');
+            var p = pattern[pi];
+            var f = file[fi];
+            this.debug(pattern, p, f);
+            // should be impossible.
+            // some invalid regexp stuff in the set.
+            /* c8 ignore start */
+            if (p === false) {
+                return false;
+            }
+            /* c8 ignore stop */
+            if (p === GLOBSTAR) {
+                this.debug('GLOBSTAR', [pattern, p, f]);
+                // "**"
+                // a/**/b/**/c would match the following:
+                // a/b/x/y/z/c
+                // a/x/y/z/b/c
+                // a/b/x/b/x/c
+                // a/b/c
+                // To do this, take the rest of the pattern after
+                // the **, and see if it would match the file remainder.
+                // If so, return success.
+                // If not, the ** "swallows" a segment, and try again.
+                // This is recursively awful.
+                //
+                // a/**/b/**/c matching a/b/x/y/z/c
+                // - a matches a
+                // - doublestar
+                //   - matchOne(b/x/y/z/c, b/**/c)
+                //     - b matches b
+                //     - doublestar
+                //       - matchOne(x/y/z/c, c) -> no
+                //       - matchOne(y/z/c, c) -> no
+                //       - matchOne(z/c, c) -> no
+                //       - matchOne(c, c) yes, hit
+                var fr = fi;
+                var pr = pi + 1;
+                if (pr === pl) {
+                    this.debug('** at the end');
+                    // a ** at the end will just swallow the rest.
+                    // We have found a match.
+                    // however, it will not swallow /.x, unless
+                    // options.dot is set.
+                    // . and .. are *never* matched by **, for explosively
+                    // exponential reasons.
+                    for (; fi < fl; fi++) {
+                        if (file[fi] === '.' ||
+                            file[fi] === '..' ||
+                            (!options.dot && file[fi].charAt(0) === '.'))
+                            return false;
+                    }
+                    return true;
+                }
+                // ok, let's see if we can swallow whatever we can.
+                while (fr < fl) {
+                    var swallowee = file[fr];
+                    this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);
+                    // XXX remove this slice.  Just pass the start index.
+                    if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
+                        this.debug('globstar found match!', fr, fl, swallowee);
+                        // found a match.
+                        return true;
+                    }
+                    else {
+                        // can't swallow "." or ".." ever.
+                        // can only swallow ".foo" when explicitly asked.
+                        if (swallowee === '.' ||
+                            swallowee === '..' ||
+                            (!options.dot && swallowee.charAt(0) === '.')) {
+                            this.debug('dot detected!', file, fr, pattern, pr);
+                            break;
+                        }
+                        // ** swallows a segment, and continue.
+                        this.debug('globstar swallow a segment, and continue');
+                        fr++;
+                    }
+                }
+                // no match was found.
+                // However, in partial mode, we can't say this is necessarily over.
+                /* c8 ignore start */
+                if (partial) {
+                    // ran out of file
+                    this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
+                    if (fr === fl) {
+                        return true;
+                    }
+                }
+                /* c8 ignore stop */
+                return false;
+            }
+            // something other than **
+            // non-magic patterns just have to match exactly
+            // patterns with magic have been turned into regexps.
+            let hit;
+            if (typeof p === 'string') {
+                hit = f === p;
+                this.debug('string match', p, f, hit);
+            }
+            else {
+                hit = p.test(f);
+                this.debug('pattern match', p, f, hit);
+            }
+            if (!hit)
+                return false;
+        }
+        // Note: ending in / means that we'll get a final ""
+        // at the end of the pattern.  This can only match a
+        // corresponding "" at the end of the file.
+        // If the file ends in /, then it can only match a
+        // pattern that ends in /, unless the pattern just
+        // doesn't have any more for it. But, a/b/ should *not*
+        // match "a/b/*", even though "" matches against the
+        // [^/]*? pattern, except in partial mode, where it might
+        // simply not be reached yet.
+        // However, a/b/ should still satisfy a/*
+        // now either we fell off the end of the pattern, or we're done.
+        if (fi === fl && pi === pl) {
+            // ran out of pattern and filename at the same time.
+            // an exact hit!
+            return true;
+        }
+        else if (fi === fl) {
+            // ran out of file, but still had pattern left.
+            // this is ok if we're doing the match as part of
+            // a glob fs traversal.
+            return partial;
+        }
+        else if (pi === pl) {
+            // ran out of pattern, still have file left.
+            // this is only acceptable if we're on the very last
+            // empty segment of a file with a trailing slash.
+            // a/* should match a/b/
+            return fi === fl - 1 && file[fi] === '';
+            /* c8 ignore start */
+        }
+        else {
+            // should be unreachable.
+            throw new Error('wtf?');
+        }
+        /* c8 ignore stop */
+    }
+    braceExpand() {
+        return braceExpand(this.pattern, this.options);
+    }
+    parse(pattern) {
+        assertValidPattern(pattern);
+        const options = this.options;
+        // shortcuts
+        if (pattern === '**')
+            return GLOBSTAR;
+        if (pattern === '')
+            return '';
+        // far and away, the most common glob pattern parts are
+        // *, *.*, and *.<ext>  Add a fast check method for those.
+        let m;
+        let fastTest = null;
+        if ((m = pattern.match(starRE))) {
+            fastTest = options.dot ? starTestDot : starTest;
+        }
+        else if ((m = pattern.match(starDotExtRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? starDotExtTestNocaseDot
+                    : starDotExtTestNocase
+                : options.dot
+                    ? starDotExtTestDot
+                    : starDotExtTest)(m[1]);
+        }
+        else if ((m = pattern.match(qmarksRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? qmarksTestNocaseDot
+                    : qmarksTestNocase
+                : options.dot
+                    ? qmarksTestDot
+                    : qmarksTest)(m);
+        }
+        else if ((m = pattern.match(starDotStarRE))) {
+            fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
+        }
+        else if ((m = pattern.match(dotStarRE))) {
+            fastTest = dotStarTest;
+        }
+        const re = AST.fromGlob(pattern, this.options).toMMPattern();
+        if (fastTest && typeof re === 'object') {
+            // Avoids overriding in frozen environments
+            Reflect.defineProperty(re, 'test', { value: fastTest });
+        }
+        return re;
+    }
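+    // Illustration (not from the upstream file), assuming default options:
+    // '*.js' takes the starDotExt fast path, but the compiled entry still
+    // behaves like the equivalent regexp:
+    //
+    //   new Minimatch('*.js').set[0][0].test('foo.js')    // => true
+    //   new Minimatch('*.js').set[0][0].test('.foo.js')   // => false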
+    makeRe() {
+        if (this.regexp || this.regexp === false)
+            return this.regexp;
+        // at this point, this.set is a 2d array of partial
+        // pattern strings, or "**".
+        //
+        // It's better to use .match().  This function shouldn't
+        // be used, really, but it's pretty convenient sometimes,
+        // when you just want to work with a regex.
+        const set = this.set;
+        if (!set.length) {
+            this.regexp = false;
+            return this.regexp;
+        }
+        const options = this.options;
+        const twoStar = options.noglobstar
+            ? star
+            : options.dot
+                ? twoStarDot
+                : twoStarNoDot;
+        const flags = new Set(options.nocase ? ['i'] : []);
+        // regexpify non-globstar patterns
+        // if ** is only item, then we just do one twoStar
+        // if ** is first, and there are more, prepend (\/|twoStar\/)? to next
+        // if ** is last, append (\/twoStar|) to previous
+        // if ** is in the middle, append (\/|\/twoStar\/) to previous
+        // then filter out GLOBSTAR symbols
+        let re = set
+            .map(pattern => {
+            const pp = pattern.map(p => {
+                if (p instanceof RegExp) {
+                    for (const f of p.flags.split(''))
+                        flags.add(f);
+                }
+                return typeof p === 'string'
+                    ? regExpEscape(p)
+                    : p === GLOBSTAR
+                        ? GLOBSTAR
+                        : p._src;
+            });
+            pp.forEach((p, i) => {
+                const next = pp[i + 1];
+                const prev = pp[i - 1];
+                if (p !== GLOBSTAR || prev === GLOBSTAR) {
+                    return;
+                }
+                if (prev === undefined) {
+                    if (next !== undefined && next !== GLOBSTAR) {
+                        pp[i + 1] = '(?:\\/|' + twoStar + '\\/)?' + next;
+                    }
+                    else {
+                        pp[i] = twoStar;
+                    }
+                }
+                else if (next === undefined) {
+                    pp[i - 1] = prev + '(?:\\/|' + twoStar + ')?';
+                }
+                else if (next !== GLOBSTAR) {
+                    pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + '\\/)' + next;
+                    pp[i + 1] = GLOBSTAR;
+                }
+            });
+            return pp.filter(p => p !== GLOBSTAR).join('/');
+        })
+            .join('|');
+        // need to wrap in parens if we had more than one thing with |,
+        // otherwise only the first will be anchored to ^ and the last to $
+        const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', ''];
+        // must match entire pattern
+        // ending in a * or ** will make it less strict.
+        re = '^' + open + re + close + '$';
+        // can match anything, as long as it's not this.
+        if (this.negate)
+            re = '^(?!' + re + ').+$';
+        try {
+            this.regexp = new RegExp(re, [...flags].join(''));
+            /* c8 ignore start */
+        }
+        catch (ex) {
+            // should be impossible
+            this.regexp = false;
+        }
+        /* c8 ignore stop */
+        return this.regexp;
+    }
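+    // Illustration (not from the upstream file): the regexp form matches
+    // whole paths the same way match() does:
+    //
+    //   new Minimatch('a/**/b').makeRe().test('a/x/y/b')   // => true
+    //   new Minimatch('a/**/b').makeRe().test('c/b')       // => false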
+    slashSplit(p) {
+        // if p starts with // on windows, we preserve that
+        // so that UNC paths aren't broken.  Otherwise, any number of
+        // / characters are coalesced into one, unless
+        // preserveMultipleSlashes is set to true.
+        if (this.preserveMultipleSlashes) {
+            return p.split('/');
+        }
+        else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
+            // add an extra '' for the one we lose
+            return ['', ...p.split(/\/+/)];
+        }
+        else {
+            return p.split(/\/+/);
+        }
+    }
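+    // Illustration (not from the upstream file): on Windows,
+    //
+    //   slashSplit('//host/share//x')
+    //   // => [ '', '', 'host', 'share', 'x' ]
+    //
+    // the two leading empties preserve the UNC root while the doubled
+    // interior slash is coalesced away.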
+    match(f, partial = this.partial) {
+        this.debug('match', f, this.pattern);
+        // short-circuit in the case of busted things.
+        // comments, etc.
+        if (this.comment) {
+            return false;
+        }
+        if (this.empty) {
+            return f === '';
+        }
+        if (f === '/' && partial) {
+            return true;
+        }
+        const options = this.options;
+        // windows: need to use /, not \
+        if (this.isWindows) {
+            f = f.split('\\').join('/');
+        }
+        // treat the test path as a set of pathparts.
+        const ff = this.slashSplit(f);
+        this.debug(this.pattern, 'split', ff);
+        // just ONE of the pattern sets in this.set needs to match
+        // in order for it to be valid.  If negating, then just one
+        // match means that we have failed.
+        // Either way, return on the first hit.
+        const set = this.set;
+        this.debug(this.pattern, 'set', set);
+        // Find the basename of the path by looking for the last non-empty segment
+        let filename = ff[ff.length - 1];
+        if (!filename) {
+            for (let i = ff.length - 2; !filename && i >= 0; i--) {
+                filename = ff[i];
+            }
+        }
+        for (let i = 0; i < set.length; i++) {
+            const pattern = set[i];
+            let file = ff;
+            if (options.matchBase && pattern.length === 1) {
+                file = [filename];
+            }
+            const hit = this.matchOne(file, pattern, partial);
+            if (hit) {
+                if (options.flipNegate) {
+                    return true;
+                }
+                return !this.negate;
+            }
+        }
+        // didn't get any hits.  this is success if it's a negative
+        // pattern, failure otherwise.
+        if (options.flipNegate) {
+            return false;
+        }
+        return this.negate;
+    }
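+    // Illustration (not from the upstream file): with matchBase, a
+    // single-segment pattern is tested against the basename only:
+    //
+    //   minimatch('src/lib/util.js', '*.js', { matchBase: true })  // => true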
+    static defaults(def) {
+        return minimatch.defaults(def).Minimatch;
+    }
+}
+/* c8 ignore start */
+export { AST } from './ast.js';
+export { escape } from './escape.js';
+export { unescape } from './unescape.js';
+/* c8 ignore stop */
+minimatch.AST = AST;
+minimatch.Minimatch = Minimatch;
+minimatch.escape = escape;
+minimatch.unescape = unescape;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/esm/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/esm/package.json
new file mode 100644
index 0000000000000000000000000000000000000000..3dbc1ca591c0557e35b6004aeba250e6a70b56e3
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/esm/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/esm/unescape.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/esm/unescape.js
new file mode 100644
index 0000000000000000000000000000000000000000..0faf9a2b7306f7ad4ea2ce560d6806999cbba546
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minimatch/dist/esm/unescape.js
@@ -0,0 +1,20 @@
+/**
+ * Un-escape a string that has been escaped with {@link escape}.
+ *
+ * If the {@link windowsPathsNoEscape} option is used, then square-brace
+ * escapes are removed, but not backslash escapes.  For example, it will turn
+ * the string `'[*]'` into `*`, but it will not turn `'\\*'` into `'*'`,
+ * because `\` is a path separator in `windowsPathsNoEscape` mode.
+ *
+ * When `windowsPathsNoEscape` is not set, then both brace escapes and
+ * backslash escapes are removed.
+ *
+ * Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped
+ * or unescaped.
+ */
+export const unescape = (s, { windowsPathsNoEscape = false, } = {}) => {
+    return windowsPathsNoEscape
+        ? s.replace(/\[([^\/\\])\]/g, '$1')
+        : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2').replace(/\\([^\/])/g, '$1');
+};
+//# sourceMappingURL=unescape.js.map
\ No newline at end of file
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-fetch/lib/abort-error.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-fetch/lib/abort-error.js
new file mode 100644
index 0000000000000000000000000000000000000000..b18f643269e3751737e43ebc19dbb59e0cec6de4
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-fetch/lib/abort-error.js
@@ -0,0 +1,17 @@
+'use strict'
+class AbortError extends Error {
+  constructor (message) {
+    super(message)
+    this.code = 'FETCH_ABORTED'
+    this.type = 'aborted'
+    Error.captureStackTrace(this, this.constructor)
+  }
+
+  get name () {
+    return 'AbortError'
+  }
+
+  // don't allow name to be overridden, but don't throw either
+  set name (s) {}
+}
+module.exports = AbortError
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-fetch/lib/blob.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-fetch/lib/blob.js
new file mode 100644
index 0000000000000000000000000000000000000000..121b1730102e72ed1005b57d9a726de57782beb1
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-fetch/lib/blob.js
@@ -0,0 +1,97 @@
+'use strict'
+const { Minipass } = require('minipass')
+const TYPE = Symbol('type')
+const BUFFER = Symbol('buffer')
+
+class Blob {
+  constructor (blobParts, options) {
+    this[TYPE] = ''
+
+    const buffers = []
+    let size = 0
+
+    if (blobParts) {
+      const a = blobParts
+      const length = Number(a.length)
+      for (let i = 0; i < length; i++) {
+        const element = a[i]
+        const buffer = element instanceof Buffer ? element
+          : ArrayBuffer.isView(element)
+            ? Buffer.from(element.buffer, element.byteOffset, element.byteLength)
+            : element instanceof ArrayBuffer ? Buffer.from(element)
+            : element instanceof Blob ? element[BUFFER]
+            : typeof element === 'string' ? Buffer.from(element)
+            : Buffer.from(String(element))
+        size += buffer.length
+        buffers.push(buffer)
+      }
+    }
+
+    this[BUFFER] = Buffer.concat(buffers, size)
+
+    const type = options && options.type !== undefined
+      && String(options.type).toLowerCase()
+    if (type && !/[^\u0020-\u007E]/.test(type)) {
+      this[TYPE] = type
+    }
+  }
+
+  get size () {
+    return this[BUFFER].length
+  }
+
+  get type () {
+    return this[TYPE]
+  }
+
+  text () {
+    return Promise.resolve(this[BUFFER].toString())
+  }
+
+  arrayBuffer () {
+    const buf = this[BUFFER]
+    const off = buf.byteOffset
+    const len = buf.byteLength
+    const ab = buf.buffer.slice(off, off + len)
+    return Promise.resolve(ab)
+  }
+
+  stream () {
+    return new Minipass().end(this[BUFFER])
+  }
+
+  slice (start, end, type) {
+    const size = this.size
+    const relativeStart = start === undefined ? 0
+      : start < 0 ? Math.max(size + start, 0)
+      : Math.min(start, size)
+    const relativeEnd = end === undefined ? size
+      : end < 0 ? Math.max(size + end, 0)
+      : Math.min(end, size)
+    const span = Math.max(relativeEnd - relativeStart, 0)
+
+    const buffer = this[BUFFER]
+    const slicedBuffer = buffer.slice(
+      relativeStart,
+      relativeStart + span
+    )
+    const blob = new Blob([], { type })
+    blob[BUFFER] = slicedBuffer
+    return blob
+  }
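+  // Illustration (not from the upstream file): negative indices count from
+  // the end, as with Array.prototype.slice:
+  //
+  //   new Blob(['hello world']).slice(-5).text()
+  //   // => Promise resolving to 'world'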
+
+  get [Symbol.toStringTag] () {
+    return 'Blob'
+  }
+
+  static get BUFFER () {
+    return BUFFER
+  }
+}
+
+Object.defineProperties(Blob.prototype, {
+  size: { enumerable: true },
+  type: { enumerable: true },
+})
+
+module.exports = Blob
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-fetch/lib/body.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-fetch/lib/body.js
new file mode 100644
index 0000000000000000000000000000000000000000..62286bd1de0d9148c267652808b86fbb0d6b1799
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-fetch/lib/body.js
@@ -0,0 +1,350 @@
+'use strict'
+const { Minipass } = require('minipass')
+const MinipassSized = require('minipass-sized')
+
+const Blob = require('./blob.js')
+const { BUFFER } = Blob
+const FetchError = require('./fetch-error.js')
+
+// optional dependency on 'encoding'
+let convert
+try {
+  convert = require('encoding').convert
+} catch (e) {
+  // defer error until textConverted is called
+}
+
+const INTERNALS = Symbol('Body internals')
+const CONSUME_BODY = Symbol('consumeBody')
+
+class Body {
+  constructor (bodyArg, options = {}) {
+    const { size = 0, timeout = 0 } = options
+    const body = bodyArg === undefined || bodyArg === null ? null
+      : isURLSearchParams(bodyArg) ? Buffer.from(bodyArg.toString())
+      : isBlob(bodyArg) ? bodyArg
+      : Buffer.isBuffer(bodyArg) ? bodyArg
+      : Object.prototype.toString.call(bodyArg) === '[object ArrayBuffer]'
+        ? Buffer.from(bodyArg)
+        : ArrayBuffer.isView(bodyArg)
+          ? Buffer.from(bodyArg.buffer, bodyArg.byteOffset, bodyArg.byteLength)
+          : Minipass.isStream(bodyArg) ? bodyArg
+          : Buffer.from(String(bodyArg))
+
+    this[INTERNALS] = {
+      body,
+      disturbed: false,
+      error: null,
+    }
+
+    this.size = size
+    this.timeout = timeout
+
+    if (Minipass.isStream(body)) {
+      body.on('error', er => {
+        const error = er.name === 'AbortError' ? er
+          : new FetchError(`Invalid response while trying to fetch ${
+            this.url}: ${er.message}`, 'system', er)
+        this[INTERNALS].error = error
+      })
+    }
+  }
+
+  get body () {
+    return this[INTERNALS].body
+  }
+
+  get bodyUsed () {
+    return this[INTERNALS].disturbed
+  }
+
+  arrayBuffer () {
+    return this[CONSUME_BODY]().then(buf =>
+      buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength))
+  }
+
+  blob () {
+    const ct = this.headers && this.headers.get('content-type') || ''
+    return this[CONSUME_BODY]().then(buf => Object.assign(
+      new Blob([], { type: ct.toLowerCase() }),
+      { [BUFFER]: buf }
+    ))
+  }
+
+  async json () {
+    const buf = await this[CONSUME_BODY]()
+    try {
+      return JSON.parse(buf.toString())
+    } catch (er) {
+      throw new FetchError(
+        `invalid json response body at ${this.url} reason: ${er.message}`,
+        'invalid-json'
+      )
+    }
+  }
+
+  text () {
+    return this[CONSUME_BODY]().then(buf => buf.toString())
+  }
+
+  buffer () {
+    return this[CONSUME_BODY]()
+  }
+
+  textConverted () {
+    return this[CONSUME_BODY]().then(buf => convertBody(buf, this.headers))
+  }
+
+  [CONSUME_BODY] () {
+    if (this[INTERNALS].disturbed) {
+      return Promise.reject(new TypeError(`body used already for: ${
+        this.url}`))
+    }
+
+    this[INTERNALS].disturbed = true
+
+    if (this[INTERNALS].error) {
+      return Promise.reject(this[INTERNALS].error)
+    }
+
+    // body is null
+    if (this.body === null) {
+      return Promise.resolve(Buffer.alloc(0))
+    }
+
+    if (Buffer.isBuffer(this.body)) {
+      return Promise.resolve(this.body)
+    }
+
+    const upstream = isBlob(this.body) ? this.body.stream() : this.body
+
+    /* istanbul ignore if: should never happen */
+    if (!Minipass.isStream(upstream)) {
+      return Promise.resolve(Buffer.alloc(0))
+    }
+
+    const stream = this.size && upstream instanceof MinipassSized ? upstream
+      : !this.size && upstream instanceof Minipass &&
+        !(upstream instanceof MinipassSized) ? upstream
+      : this.size ? new MinipassSized({ size: this.size })
+      : new Minipass()
+
+    // allow timeout on slow response body, but only if the stream is still writable. this
+    // makes the timeout center on the socket stream from lib/index.js rather than the
+    // intermediary minipass stream we create to receive the data
+    const resTimeout = this.timeout && stream.writable ? setTimeout(() => {
+      stream.emit('error', new FetchError(
+        `Response timeout while trying to fetch ${
+          this.url} (over ${this.timeout}ms)`, 'body-timeout'))
+    }, this.timeout) : null
+
+    // do not keep the process open just for this timeout, even
+    // though we expect it'll get cleared eventually.
+    if (resTimeout && resTimeout.unref) {
+      resTimeout.unref()
+    }
+
+    // do the pipe in the promise, because the pipe() can send too much
+    // data through right away and upset the MP Sized object
+    return new Promise((resolve) => {
+      // if the stream is some other kind of stream, then pipe through a MP
+      // so we can collect it more easily.
+      if (stream !== upstream) {
+        upstream.on('error', er => stream.emit('error', er))
+        upstream.pipe(stream)
+      }
+      resolve()
+    }).then(() => stream.concat()).then(buf => {
+      clearTimeout(resTimeout)
+      return buf
+    }).catch(er => {
+      clearTimeout(resTimeout)
+      // request was aborted, reject with this Error
+      if (er.name === 'AbortError' || er.name === 'FetchError') {
+        throw er
+      } else if (er.name === 'RangeError') {
+        throw new FetchError(`Could not create Buffer from response body for ${
+          this.url}: ${er.message}`, 'system', er)
+      } else {
+        // other errors, such as incorrect content-encoding or content-length
+        throw new FetchError(`Invalid response body while trying to fetch ${
+          this.url}: ${er.message}`, 'system', er)
+      }
+    })
+  }
+
+  static clone (instance) {
+    if (instance.bodyUsed) {
+      throw new Error('cannot clone body after it is used')
+    }
+
+    const body = instance.body
+
+    // check that body is a stream and not form-data object
+    // NB: can't clone the form-data object without having it as a dependency
+    if (Minipass.isStream(body) && typeof body.getBoundary !== 'function') {
+      // create a dedicated tee stream so that we don't lose data
+      // potentially sitting in the body stream's buffer by writing it
+      // immediately to p1 and not having it for p2.
+      const tee = new Minipass()
+      const p1 = new Minipass()
+      const p2 = new Minipass()
+      tee.on('error', er => {
+        p1.emit('error', er)
+        p2.emit('error', er)
+      })
+      body.on('error', er => tee.emit('error', er))
+      tee.pipe(p1)
+      tee.pipe(p2)
+      body.pipe(tee)
+      // set instance body to one fork, return the other
+      instance[INTERNALS].body = p1
+      return p2
+    } else {
+      return instance.body
+    }
+  }
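+  // Illustration (not from the upstream file): after
+  //   const fork = Body.clone(res)
+  // res.body has been swapped for one tee output and `fork` is the other,
+  // so both copies can be consumed independently without losing any data
+  // already buffered in the original stream.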
+
+  static extractContentType (body) {
+    return body === null || body === undefined ? null
+      : typeof body === 'string' ? 'text/plain;charset=UTF-8'
+      : isURLSearchParams(body)
+        ? 'application/x-www-form-urlencoded;charset=UTF-8'
+        : isBlob(body) ? body.type || null
+        : Buffer.isBuffer(body) ? null
+        : Object.prototype.toString.call(body) === '[object ArrayBuffer]' ? null
+        : ArrayBuffer.isView(body) ? null
+        : typeof body.getBoundary === 'function'
+          ? `multipart/form-data;boundary=${body.getBoundary()}`
+          : Minipass.isStream(body) ? null
+          : 'text/plain;charset=UTF-8'
+  }
+
+  static getTotalBytes (instance) {
+    const { body } = instance
+    return (body === null || body === undefined) ? 0
+      : isBlob(body) ? body.size
+      : Buffer.isBuffer(body) ? body.length
+      : body && typeof body.getLengthSync === 'function' && (
+        // detect form data input from form-data module
+        body._lengthRetrievers &&
+        /* istanbul ignore next */ body._lengthRetrievers.length === 0 || // 1.x
+        body.hasKnownLength && body.hasKnownLength()) // 2.x
+        ? body.getLengthSync()
+        : null
+  }
+
+  static writeToStream (dest, instance) {
+    const { body } = instance
+
+    if (body === null || body === undefined) {
+      dest.end()
+    } else if (Buffer.isBuffer(body) || typeof body === 'string') {
+      dest.end(body)
+    } else {
+      // body is stream or blob
+      const stream = isBlob(body) ? body.stream() : body
+      stream.on('error', er => dest.emit('error', er)).pipe(dest)
+    }
+
+    return dest
+  }
+}
+
+Object.defineProperties(Body.prototype, {
+  body: { enumerable: true },
+  bodyUsed: { enumerable: true },
+  arrayBuffer: { enumerable: true },
+  blob: { enumerable: true },
+  json: { enumerable: true },
+  text: { enumerable: true },
+})
+
+const isURLSearchParams = obj =>
+  // Duck-typing as a necessary condition.
+  (typeof obj !== 'object' ||
+    typeof obj.append !== 'function' ||
+    typeof obj.delete !== 'function' ||
+    typeof obj.get !== 'function' ||
+    typeof obj.getAll !== 'function' ||
+    typeof obj.has !== 'function' ||
+    typeof obj.set !== 'function') ? false
+  // Brand-checking and more duck-typing as optional condition.
+  : obj.constructor.name === 'URLSearchParams' ||
+    Object.prototype.toString.call(obj) === '[object URLSearchParams]' ||
+    typeof obj.sort === 'function'
+
+const isBlob = obj =>
+  typeof obj === 'object' &&
+  typeof obj.arrayBuffer === 'function' &&
+  typeof obj.type === 'string' &&
+  typeof obj.stream === 'function' &&
+  typeof obj.constructor === 'function' &&
+  typeof obj.constructor.name === 'string' &&
+  /^(Blob|File)$/.test(obj.constructor.name) &&
+  /^(Blob|File)$/.test(obj[Symbol.toStringTag])
+
+const convertBody = (buffer, headers) => {
+  /* istanbul ignore if */
+  if (typeof convert !== 'function') {
+    throw new Error('The package `encoding` must be installed to use the textConverted() function')
+  }
+
+  const ct = headers && headers.get('content-type')
+  let charset = 'utf-8'
+  let res
+
+  // header
+  if (ct) {
+    res = /charset=([^;]*)/i.exec(ct)
+  }
+
+  // no charset in content type, peek at response body for at most 1024 bytes
+  const str = buffer.slice(0, 1024).toString()
+
+  // html5
+  if (!res && str) {
+    res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str)
+  }
+
+  // html4
+  if (!res && str) {
+    res = /<meta[\s]+?http-equiv=(['"])content-type\1[\s]+?content=(['"])(.+?)\2/i.exec(str)
+    if (res) {
+      res = /charset=(.*)/i.exec(res.pop())
+    }
+  }
+
+  // xml
+  if (!res && str) {
+    res = /<\?xml.+?encoding=(['"])(.+?)\1/.exec(str)
+  }
+
+  // found charset
+  if (res) {
+    charset = res.pop()
+
+    // prevent decode issues when sites use incorrect encoding
+    if (charset === 'gb2312' || charset === 'gbk') {
+      charset = 'gb18030'
+    }
+  }
+
+  // turn raw buffers into a single utf-8 buffer
+  return convert(
+    buffer,
+    'UTF-8',
+    charset
+  ).toString()
+}
+
+module.exports = Body
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-fetch/lib/fetch-error.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-fetch/lib/fetch-error.js
new file mode 100644
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-fetch/lib/fetch-error.js
@@ -0,0 +1,34 @@
+'use strict'
+class FetchError extends Error {
+  constructor (message, type, systemError) {
+    super(message)
+    this.code = 'FETCH_ERROR'
+
+    // pick up code, expected, path, ... from the system error
+    if (systemError) {
+      this.code = this.errno = systemError.code
+      this.erroredSysCall = systemError.syscall
+    }
+
+    // an over-limit error from minipass-sized (code EBADSIZE) carries
+    // the expect/found byte counts; report it as a 'max-size' error
+    this.expect = systemError && systemError.expect
+    this.found = systemError && systemError.found
+    this.type = this.code === 'EBADSIZE' && this.found > this.expect
+      ? 'max-size' : type
+    this.message = message
+    Error.captureStackTrace(this, this.constructor)
+  }
+
+  get name () {
+    return 'FetchError'
+  }
+
+  // don't allow name to be overwritten
+  set name (n) {}
+
+  get [Symbol.toStringTag] () {
+    return 'FetchError'
+  }
+}
+module.exports = FetchError
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-fetch/lib/headers.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-fetch/lib/headers.js
new file mode 100644
index 0000000000000000000000000000000000000000..dd6e854d5ba39900124e726eb5710640e77f898b
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-fetch/lib/headers.js
@@ -0,0 +1,267 @@
+'use strict'
+const invalidTokenRegex = /[^^_`a-zA-Z\-0-9!#$%&'*+.|~]/
+const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/
+
+const validateName = name => {
+  name = `${name}`
+  if (invalidTokenRegex.test(name) || name === '') {
+    throw new TypeError(`${name} is not a legal HTTP header name`)
+  }
+}
+
+const validateValue = value => {
+  value = `${value}`
+  if (invalidHeaderCharRegex.test(value)) {
+    throw new TypeError(`${value} is not a legal HTTP header value`)
+  }
+}
+
+const find = (map, name) => {
+  name = name.toLowerCase()
+  for (const key in map) {
+    if (key.toLowerCase() === name) {
+      return key
+    }
+  }
+  return undefined
+}
+
+const MAP = Symbol('map')
+class Headers {
+  constructor (init = undefined) {
+    this[MAP] = Object.create(null)
+    if (init instanceof Headers) {
+      const rawHeaders = init.raw()
+      const headerNames = Object.keys(rawHeaders)
+      for (const headerName of headerNames) {
+        for (const value of rawHeaders[headerName]) {
+          this.append(headerName, value)
+        }
+      }
+      return
+    }
+
+    // no-op
+    if (init === undefined || init === null) {
+      return
+    }
+
+    if (typeof init === 'object') {
+      const method = init[Symbol.iterator]
+      if (method !== null && method !== undefined) {
+        if (typeof method !== 'function') {
+          throw new TypeError('Header pairs must be iterable')
+        }
+
+        // sequence<sequence<ByteString>>
+        // Note: per spec we have to first exhaust the lists then process them
+        const pairs = []
+        for (const pair of init) {
+          if (typeof pair !== 'object' ||
+              typeof pair[Symbol.iterator] !== 'function') {
+            throw new TypeError('Each header pair must be iterable')
+          }
+          const arrPair = Array.from(pair)
+          if (arrPair.length !== 2) {
+            throw new TypeError('Each header pair must be a name/value tuple')
+          }
+          pairs.push(arrPair)
+        }
+
+        for (const pair of pairs) {
+          this.append(pair[0], pair[1])
+        }
+      } else {
+        // record
+        for (const key of Object.keys(init)) {
+          this.append(key, init[key])
+        }
+      }
+    } else {
+      throw new TypeError('Provided initializer must be an object')
+    }
+  }
+
+  get (name) {
+    name = `${name}`
+    validateName(name)
+    const key = find(this[MAP], name)
+    if (key === undefined) {
+      return null
+    }
+
+    return this[MAP][key].join(', ')
+  }
+
+  forEach (callback, thisArg = undefined) {
+    let pairs = getHeaders(this)
+    for (let i = 0; i < pairs.length; i++) {
+      const [name, value] = pairs[i]
+      callback.call(thisArg, value, name, this)
+      // refresh in case the callback added more headers
+      pairs = getHeaders(this)
+    }
+  }
+
+  set (name, value) {
+    name = `${name}`
+    value = `${value}`
+    validateName(name)
+    validateValue(value)
+    const key = find(this[MAP], name)
+    this[MAP][key !== undefined ? key : name] = [value]
+  }
+
+  append (name, value) {
+    name = `${name}`
+    value = `${value}`
+    validateName(name)
+    validateValue(value)
+    const key = find(this[MAP], name)
+    if (key !== undefined) {
+      this[MAP][key].push(value)
+    } else {
+      this[MAP][name] = [value]
+    }
+  }
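+  // Illustration (not from the upstream file): append() accumulates
+  // case-insensitively, and get() joins the stored values:
+  //
+  //   const h = new Headers()
+  //   h.append('Accept', 'text/html')
+  //   h.append('accept', 'application/json')
+  //   h.get('Accept')   // => 'text/html, application/json'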
+
+  has (name) {
+    name = `${name}`
+    validateName(name)
+    return find(this[MAP], name) !== undefined
+  }
+
+  delete (name) {
+    name = `${name}`
+    validateName(name)
+    const key = find(this[MAP], name)
+    if (key !== undefined) {
+      delete this[MAP][key]
+    }
+  }
+
+  raw () {
+    return this[MAP]
+  }
+
+  keys () {
+    return new HeadersIterator(this, 'key')
+  }
+
+  values () {
+    return new HeadersIterator(this, 'value')
+  }
+
+  [Symbol.iterator] () {
+    return new HeadersIterator(this, 'key+value')
+  }
+
+  entries () {
+    return new HeadersIterator(this, 'key+value')
+  }
+
+  get [Symbol.toStringTag] () {
+    return 'Headers'
+  }
+
+  static exportNodeCompatibleHeaders (headers) {
+    const obj = Object.assign(Object.create(null), headers[MAP])
+
+    // http.request() only supports string as Host header. This hack makes
+    // specifying custom Host header possible.
+    const hostHeaderKey = find(headers[MAP], 'Host')
+    if (hostHeaderKey !== undefined) {
+      obj[hostHeaderKey] = obj[hostHeaderKey][0]
+    }
+
+    return obj
+  }
+
+  static createHeadersLenient (obj) {
+    const headers = new Headers()
+    for (const name of Object.keys(obj)) {
+      if (invalidTokenRegex.test(name)) {
+        continue
+      }
+
+      if (Array.isArray(obj[name])) {
+        for (const val of obj[name]) {
+          if (invalidHeaderCharRegex.test(val)) {
+            continue
+          }
+
+          if (headers[MAP][name] === undefined) {
+            headers[MAP][name] = [val]
+          } else {
+            headers[MAP][name].push(val)
+          }
+        }
+      } else if (!invalidHeaderCharRegex.test(obj[name])) {
+        headers[MAP][name] = [obj[name]]
+      }
+    }
+    return headers
+  }
+}
+
+Object.defineProperties(Headers.prototype, {
+  get: { enumerable: true },
+  forEach: { enumerable: true },
+  set: { enumerable: true },
+  append: { enumerable: true },
+  has: { enumerable: true },
+  delete: { enumerable: true },
+  keys: { enumerable: true },
+  values: { enumerable: true },
+  entries: { enumerable: true },
+})
+
+const getHeaders = (headers, kind = 'key+value') =>
+  Object.keys(headers[MAP]).sort().map(
+    kind === 'key' ? k => k.toLowerCase()
+    : kind === 'value' ? k => headers[MAP][k].join(', ')
+    : k => [k.toLowerCase(), headers[MAP][k].join(', ')]
+  )
+
+const INTERNAL = Symbol('internal')
+
+class HeadersIterator {
+  constructor (target, kind) {
+    this[INTERNAL] = {
+      target,
+      kind,
+      index: 0,
+    }
+  }
+
+  get [Symbol.toStringTag] () {
+    return 'HeadersIterator'
+  }
+
+  next () {
+    /* istanbul ignore if: should be impossible */
+    if (!this || Object.getPrototypeOf(this) !== HeadersIterator.prototype) {
+      throw new TypeError('Value of `this` is not a HeadersIterator')
+    }
+
+    const { target, kind, index } = this[INTERNAL]
+    const values = getHeaders(target, kind)
+    const len = values.length
+    if (index >= len) {
+      return {
+        value: undefined,
+        done: true,
+      }
+    }
+
+    this[INTERNAL].index++
+
+    return { value: values[index], done: false }
+  }
+}
+
+// manually extend because 'extends' requires a ctor
+Object.setPrototypeOf(HeadersIterator.prototype,
+  Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())))
+
+module.exports = Headers
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-fetch/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-fetch/lib/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..f0f4bb66dbb6735609ecb02b696f07ff9929f214
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-fetch/lib/index.js
@@ -0,0 +1,376 @@
+'use strict'
+const { URL } = require('url')
+const http = require('http')
+const https = require('https')
+const zlib = require('minizlib')
+const { Minipass } = require('minipass')
+
+const Body = require('./body.js')
+const { writeToStream, getTotalBytes } = Body
+const Response = require('./response.js')
+const Headers = require('./headers.js')
+const { createHeadersLenient } = Headers
+const Request = require('./request.js')
+const { getNodeRequestOptions } = Request
+const FetchError = require('./fetch-error.js')
+const AbortError = require('./abort-error.js')
+
+// XXX this should really be split up and unit-ized for easier testing
+// and better DRY implementation of data/http request aborting
+const fetch = async (url, opts) => {
+  if (/^data:/.test(url)) {
+    const request = new Request(url, opts)
+    // delay 1 promise tick so that the consumer can abort right away
+    return Promise.resolve().then(() => new Promise((resolve, reject) => {
+      let type, data
+      try {
+        const { pathname, search } = new URL(url)
+        const split = pathname.split(',')
+        if (split.length < 2) {
+          throw new Error('invalid data: URI')
+        }
+        const mime = split.shift()
+        const base64 = /;base64$/.test(mime)
+        type = base64 ? mime.slice(0, -1 * ';base64'.length) : mime
+        const rawData = decodeURIComponent(split.join(',') + search)
+        data = base64 ? Buffer.from(rawData, 'base64') : Buffer.from(rawData)
+      } catch (er) {
+        return reject(new FetchError(`[${request.method}] ${
+          request.url} invalid URL, ${er.message}`, 'system', er))
+      }
+
+      const { signal } = request
+      if (signal && signal.aborted) {
+        return reject(new AbortError('The user aborted a request.'))
+      }
+
+      const headers = { 'Content-Length': data.length }
+      if (type) {
+        headers['Content-Type'] = type
+      }
+      return resolve(new Response(data, { headers }))
+    }))
+  }
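+  // Illustration (not from the upstream file): data: URIs resolve with no
+  // network I/O at all:
+  //
+  //   const res = await fetch('data:text/plain;base64,aGk=')
+  //   await res.text()   // => 'hi'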
+
+  return new Promise((resolve, reject) => {
+    // build request object
+    const request = new Request(url, opts)
+    let options
+    try {
+      options = getNodeRequestOptions(request)
+    } catch (er) {
+      return reject(er)
+    }
+
+    const send = (options.protocol === 'https:' ? https : http).request
+    const { signal } = request
+    let response = null
+    const abort = () => {
+      const error = new AbortError('The user aborted a request.')
+      reject(error)
+      if (Minipass.isStream(request.body) &&
+          typeof request.body.destroy === 'function') {
+        request.body.destroy(error)
+      }
+      if (response && response.body) {
+        response.body.emit('error', error)
+      }
+    }
+
+    if (signal && signal.aborted) {
+      return abort()
+    }
+
+    const abortAndFinalize = () => {
+      abort()
+      finalize()
+    }
+
+    const finalize = () => {
+      req.abort()
+      if (signal) {
+        signal.removeEventListener('abort', abortAndFinalize)
+      }
+      clearTimeout(reqTimeout)
+    }
+
+    // send request
+    const req = send(options)
+
+    if (signal) {
+      signal.addEventListener('abort', abortAndFinalize)
+    }
+
+    let reqTimeout = null
+    if (request.timeout) {
+      req.once('socket', () => {
+        reqTimeout = setTimeout(() => {
+          reject(new FetchError(`network timeout at: ${
+            request.url}`, 'request-timeout'))
+          finalize()
+        }, request.timeout)
+      })
+    }
+
+    req.on('error', er => {
+      // if a 'response' event is emitted before the 'error' event, then by the
+      // time this handler is run it's too late to reject the Promise for the
+      // response. instead, we forward the error event to the response stream
+      // so that the error will surface to the user when they try to consume
+      // the body. this is done as a side effect of aborting the request except
+      // for in windows, where we must forward the event manually, otherwise
+      // there is no longer a ref'd socket attached to the request and the
+      // stream never ends so the event loop runs out of work and the process
+      // exits without warning.
+      // coverage skipped here due to the difficulty in testing
+      // istanbul ignore next
+      if (req.res) {
+        req.res.emit('error', er)
+      }
+      reject(new FetchError(`request to ${request.url} failed, reason: ${
+        er.message}`, 'system', er))
+      finalize()
+    })
+
+    req.on('response', res => {
+      clearTimeout(reqTimeout)
+
+      const headers = createHeadersLenient(res.headers)
+
+      // HTTP fetch step 5
+      if (fetch.isRedirect(res.statusCode)) {
+        // HTTP fetch step 5.2
+        const location = headers.get('Location')
+
+        // HTTP fetch step 5.3
+        let locationURL = null
+        try {
+          locationURL = location === null ? null : new URL(location, request.url).toString()
+        } catch {
+          // error here can only be invalid URL in Location: header
+          // do not throw when options.redirect == manual
+          // let the user extract the erroneous redirect URL
+          if (request.redirect !== 'manual') {
+            /* eslint-disable-next-line max-len */
+            reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect'))
+            finalize()
+            return
+          }
+        }
+
+        // HTTP fetch step 5.5
+        if (request.redirect === 'error') {
+          reject(new FetchError('uri requested responds with a redirect, ' +
+            `redirect mode is set to error: ${request.url}`, 'no-redirect'))
+          finalize()
+          return
+        } else if (request.redirect === 'manual') {
+          // node-fetch-specific step: make manual redirect a bit easier to
+          // use by setting the Location header value to the resolved URL.
+          if (locationURL !== null) {
+            // handle corrupted header
+            try {
+              headers.set('Location', locationURL)
+            } catch (err) {
+              /* istanbul ignore next: nodejs server prevent invalid
+                 response headers, we can't test this through normal
+                 request */
+              reject(err)
+            }
+          }
+        } else if (request.redirect === 'follow' && locationURL !== null) {
+          // HTTP-redirect fetch step 5
+          if (request.counter >= request.follow) {
+            reject(new FetchError(`maximum redirect reached at: ${
+              request.url}`, 'max-redirect'))
+            finalize()
+            return
+          }
+
+          // HTTP-redirect fetch step 9
+          if (res.statusCode !== 303 &&
+              request.body &&
+              getTotalBytes(request) === null) {
+            reject(new FetchError(
+              'Cannot follow redirect with body being a readable stream',
+              'unsupported-redirect'
+            ))
+            finalize()
+            return
+          }
+
+          // Update host due to redirection
+          request.headers.set('host', (new URL(locationURL)).host)
+
+          // HTTP-redirect fetch step 6 (counter increment)
+          // Create a new Request object.
+          const requestOpts = {
+            headers: new Headers(request.headers),
+            follow: request.follow,
+            counter: request.counter + 1,
+            agent: request.agent,
+            compress: request.compress,
+            method: request.method,
+            body: request.body,
+            signal: request.signal,
+            timeout: request.timeout,
+          }
+
+          // if the redirect is to a new hostname, strip the authorization and cookie headers
+          const parsedOriginal = new URL(request.url)
+          const parsedRedirect = new URL(locationURL)
+          if (parsedOriginal.hostname !== parsedRedirect.hostname) {
+            requestOpts.headers.delete('authorization')
+            requestOpts.headers.delete('cookie')
+          }
+
+          // HTTP-redirect fetch step 11
+          if (res.statusCode === 303 || (
+            (res.statusCode === 301 || res.statusCode === 302) &&
+              request.method === 'POST'
+          )) {
+            requestOpts.method = 'GET'
+            requestOpts.body = undefined
+            requestOpts.headers.delete('content-length')
+          }
+
+          // HTTP-redirect fetch step 15
+          resolve(fetch(new Request(locationURL, requestOpts)))
+          finalize()
+          return
+        }
+      } // end if(isRedirect)
+
+      // prepare response
+      res.once('end', () =>
+        signal && signal.removeEventListener('abort', abortAndFinalize))
+
+      const body = new Minipass()
+      // if an error occurs, either on the response stream itself, on one of the
+      // decoder streams, or a response length timeout from the Body class, we
+      // forward the error through to our internal body stream. If we see an
+      // error event on that, we call finalize to abort the request and ensure
+      // we don't leave a socket believing a request is in flight.
+      // this is difficult to test, so lacks specific coverage.
+      body.on('error', finalize)
+      // exceedingly rare that the stream would have an error,
+      // but just in case we proxy it to the stream in use.
+      res.on('error', /* istanbul ignore next */ er => body.emit('error', er))
+      res.on('data', (chunk) => body.write(chunk))
+      res.on('end', () => body.end())
+
+      const responseOptions = {
+        url: request.url,
+        status: res.statusCode,
+        statusText: res.statusMessage,
+        headers: headers,
+        size: request.size,
+        timeout: request.timeout,
+        counter: request.counter,
+        trailer: new Promise(resolveTrailer =>
+          res.on('end', () => resolveTrailer(createHeadersLenient(res.trailers)))),
+      }
+
+      // HTTP-network fetch step 12.1.1.3
+      const codings = headers.get('Content-Encoding')
+
+      // HTTP-network fetch step 12.1.1.4: handle content codings
+
+      // in the following scenarios we ignore compression support
+      // 1. compression support is disabled
+      // 2. HEAD request
+      // 3. no Content-Encoding header
+      // 4. no content response (204)
+      // 5. content not modified response (304)
+      if (!request.compress ||
+          request.method === 'HEAD' ||
+          codings === null ||
+          res.statusCode === 204 ||
+          res.statusCode === 304) {
+        response = new Response(body, responseOptions)
+        resolve(response)
+        return
+      }
+
+      // Be less strict when decoding compressed responses, since sometimes
+      // servers send slightly invalid responses that are still accepted
+      // by common browsers.
+      // Always using Z_SYNC_FLUSH is what cURL does.
+      const zlibOptions = {
+        flush: zlib.constants.Z_SYNC_FLUSH,
+        finishFlush: zlib.constants.Z_SYNC_FLUSH,
+      }
+
+      // for gzip
+      if (codings === 'gzip' || codings === 'x-gzip') {
+        const unzip = new zlib.Gunzip(zlibOptions)
+        response = new Response(
+          // exceedingly rare that the stream would have an error,
+          // but just in case we proxy it to the stream in use.
+          body.on('error', /* istanbul ignore next */ er => unzip.emit('error', er)).pipe(unzip),
+          responseOptions
+        )
+        resolve(response)
+        return
+      }
+
+      // for deflate
+      if (codings === 'deflate' || codings === 'x-deflate') {
+        // handle the infamous raw deflate response from old servers
+        // a hack for old IIS and Apache servers
+        res.once('data', chunk => {
+          // see http://stackoverflow.com/questions/37519828
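+          // (editorial aside) the low nibble of the first byte is the zlib
+          // CMF compression-method field: 8 means deflate with a zlib
+          // wrapper (e.g. 0x78 0x9c), so anything else is treated as a raw
+          // deflate stream and routed to InflateRaw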
+          const decoder = (chunk[0] & 0x0F) === 0x08
+            ? new zlib.Inflate()
+            : new zlib.InflateRaw()
+          // exceedingly rare that the stream would have an error,
+          // but just in case we proxy it to the stream in use.
+          body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder)
+          response = new Response(decoder, responseOptions)
+          resolve(response)
+        })
+        return
+      }
+
+      // for br
+      if (codings === 'br') {
+        // ignoring coverage so tests don't have to fake support (or lack thereof) for brotli
+        // istanbul ignore next
+        try {
+          var decoder = new zlib.BrotliDecompress()
+        } catch (err) {
+          reject(err)
+          finalize()
+          return
+        }
+        // exceedingly rare that the stream would have an error,
+        // but just in case we proxy it to the stream in use.
+        body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder)
+        response = new Response(decoder, responseOptions)
+        resolve(response)
+        return
+      }
+
+      // otherwise, use response as-is
+      response = new Response(body, responseOptions)
+      resolve(response)
+    })
+
+    writeToStream(req, request)
+  })
+}
+
+module.exports = fetch
+
+fetch.isRedirect = code =>
+  code === 301 ||
+  code === 302 ||
+  code === 303 ||
+  code === 307 ||
+  code === 308
+
+fetch.Headers = Headers
+fetch.Request = Request
+fetch.Response = Response
+fetch.FetchError = FetchError
+fetch.AbortError = AbortError
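
As a usage illustration (not part of the vendored file), here is a minimal sketch of driving the fetch implementation above, assuming minipass-fetch is installed as a regular dependency; the URL, redirect cap, and timeout are illustrative:

```js
// Minimal sketch: fetch a URL with a redirect cap and an abort deadline.
const fetch = require('minipass-fetch')

async function main () {
  const ac = new AbortController()
  const timer = setTimeout(() => ac.abort(), 5000) // illustrative 5s deadline

  try {
    const res = await fetch('https://registry.npmjs.org/', {
      follow: 5,         // node-fetch-only option: max redirects (default 20)
      compress: true,    // ask for gzip/deflate, decoded by the code above
      signal: ac.signal, // wired to abortAndFinalize by the listener above
    })
    console.log(res.status, res.headers.get('content-type'))
  } finally {
    clearTimeout(timer)
  }
}

main().catch(console.error)
```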
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-fetch/lib/request.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-fetch/lib/request.js
new file mode 100644
index 0000000000000000000000000000000000000000..054439e66991072e4358c1f4cc19af70580778a6
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-fetch/lib/request.js
@@ -0,0 +1,282 @@
+'use strict'
+const { URL } = require('url')
+const { Minipass } = require('minipass')
+const Headers = require('./headers.js')
+const { exportNodeCompatibleHeaders } = Headers
+const Body = require('./body.js')
+const { clone, extractContentType, getTotalBytes } = Body
+
+const version = require('../package.json').version
+const defaultUserAgent =
+  `minipass-fetch/${version} (+https://github.com/isaacs/minipass-fetch)`
+
+const INTERNALS = Symbol('Request internals')
+
+const isRequest = input =>
+  typeof input === 'object' && typeof input[INTERNALS] === 'object'
+
+const isAbortSignal = signal => {
+  const proto = (
+    signal
+    && typeof signal === 'object'
+    && Object.getPrototypeOf(signal)
+  )
+  return !!(proto && proto.constructor.name === 'AbortSignal')
+}
+
+class Request extends Body {
+  constructor (input, init = {}) {
+    const parsedURL = isRequest(input) ? new URL(input.url)
+      : input && input.href ? new URL(input.href)
+      : new URL(`${input}`)
+
+    if (isRequest(input)) {
+      init = { ...input[INTERNALS], ...init }
+    } else if (!input || typeof input === 'string') {
+      input = {}
+    }
+
+    const method = (init.method || input.method || 'GET').toUpperCase()
+    const isGETHEAD = method === 'GET' || method === 'HEAD'
+
+    if ((init.body !== null && init.body !== undefined ||
+        isRequest(input) && input.body !== null) && isGETHEAD) {
+      throw new TypeError('Request with GET/HEAD method cannot have body')
+    }
+
+    const inputBody = init.body !== null && init.body !== undefined ? init.body
+      : isRequest(input) && input.body !== null ? clone(input)
+      : null
+
+    super(inputBody, {
+      timeout: init.timeout || input.timeout || 0,
+      size: init.size || input.size || 0,
+    })
+
+    const headers = new Headers(init.headers || input.headers || {})
+
+    if (inputBody !== null && inputBody !== undefined &&
+        !headers.has('Content-Type')) {
+      const contentType = extractContentType(inputBody)
+      if (contentType) {
+        headers.append('Content-Type', contentType)
+      }
+    }
+
+    const signal = 'signal' in init ? init.signal
+      : null
+
+    if (signal !== null && signal !== undefined && !isAbortSignal(signal)) {
+      throw new TypeError('Expected signal to be an instanceof AbortSignal')
+    }
+
+    // TLS specific options that are handled by node
+    const {
+      ca,
+      cert,
+      ciphers,
+      clientCertEngine,
+      crl,
+      dhparam,
+      ecdhCurve,
+      family,
+      honorCipherOrder,
+      key,
+      passphrase,
+      pfx,
+      rejectUnauthorized = process.env.NODE_TLS_REJECT_UNAUTHORIZED !== '0',
+      secureOptions,
+      secureProtocol,
+      servername,
+      sessionIdContext,
+    } = init
+
+    this[INTERNALS] = {
+      method,
+      redirect: init.redirect || input.redirect || 'follow',
+      headers,
+      parsedURL,
+      signal,
+      ca,
+      cert,
+      ciphers,
+      clientCertEngine,
+      crl,
+      dhparam,
+      ecdhCurve,
+      family,
+      honorCipherOrder,
+      key,
+      passphrase,
+      pfx,
+      rejectUnauthorized,
+      secureOptions,
+      secureProtocol,
+      servername,
+      sessionIdContext,
+    }
+
+    // node-fetch-only options
+    this.follow = init.follow !== undefined ? init.follow
+      : input.follow !== undefined ? input.follow
+      : 20
+    this.compress = init.compress !== undefined ? init.compress
+      : input.compress !== undefined ? input.compress
+      : true
+    this.counter = init.counter || input.counter || 0
+    this.agent = init.agent || input.agent
+  }
+
+  get method () {
+    return this[INTERNALS].method
+  }
+
+  get url () {
+    return this[INTERNALS].parsedURL.toString()
+  }
+
+  get headers () {
+    return this[INTERNALS].headers
+  }
+
+  get redirect () {
+    return this[INTERNALS].redirect
+  }
+
+  get signal () {
+    return this[INTERNALS].signal
+  }
+
+  clone () {
+    return new Request(this)
+  }
+
+  get [Symbol.toStringTag] () {
+    return 'Request'
+  }
+
+  static getNodeRequestOptions (request) {
+    const parsedURL = request[INTERNALS].parsedURL
+    const headers = new Headers(request[INTERNALS].headers)
+
+    // fetch step 1.3
+    if (!headers.has('Accept')) {
+      headers.set('Accept', '*/*')
+    }
+
+    // Basic fetch
+    if (!/^https?:$/.test(parsedURL.protocol)) {
+      throw new TypeError('Only HTTP(S) protocols are supported')
+    }
+
+    if (request.signal &&
+        Minipass.isStream(request.body) &&
+        typeof request.body.destroy !== 'function') {
+      throw new Error(
+        'Cancellation of streamed requests with AbortSignal is not supported')
+    }
+
+    // HTTP-network-or-cache fetch steps 2.4-2.7
+    const contentLengthValue =
+      (request.body === null || request.body === undefined) &&
+        /^(POST|PUT)$/i.test(request.method) ? '0'
+      : request.body !== null && request.body !== undefined
+        ? getTotalBytes(request)
+        : null
+
+    if (contentLengthValue) {
+      headers.set('Content-Length', contentLengthValue + '')
+    }
+
+    // HTTP-network-or-cache fetch step 2.11
+    if (!headers.has('User-Agent')) {
+      headers.set('User-Agent', defaultUserAgent)
+    }
+
+    // HTTP-network-or-cache fetch step 2.15
+    if (request.compress && !headers.has('Accept-Encoding')) {
+      headers.set('Accept-Encoding', 'gzip,deflate')
+    }
+
+    const agent = typeof request.agent === 'function'
+      ? request.agent(parsedURL)
+      : request.agent
+
+    if (!headers.has('Connection') && !agent) {
+      headers.set('Connection', 'close')
+    }
+
+    // TLS specific options that are handled by node
+    const {
+      ca,
+      cert,
+      ciphers,
+      clientCertEngine,
+      crl,
+      dhparam,
+      ecdhCurve,
+      family,
+      honorCipherOrder,
+      key,
+      passphrase,
+      pfx,
+      rejectUnauthorized,
+      secureOptions,
+      secureProtocol,
+      servername,
+      sessionIdContext,
+    } = request[INTERNALS]
+
+    // HTTP-network fetch step 4.2
+    // chunked encoding is handled by Node.js
+
+    // we cannot spread parsedURL directly, so we have to read each property one-by-one
+    // and map them to the equivalent https?.request() method options
+    const urlProps = {
+      auth: parsedURL.username || parsedURL.password
+        ? `${parsedURL.username}:${parsedURL.password}`
+        : '',
+      host: parsedURL.host,
+      hostname: parsedURL.hostname,
+      path: `${parsedURL.pathname}${parsedURL.search}`,
+      port: parsedURL.port,
+      protocol: parsedURL.protocol,
+    }
+
+    return {
+      ...urlProps,
+      method: request.method,
+      headers: exportNodeCompatibleHeaders(headers),
+      agent,
+      ca,
+      cert,
+      ciphers,
+      clientCertEngine,
+      crl,
+      dhparam,
+      ecdhCurve,
+      family,
+      honorCipherOrder,
+      key,
+      passphrase,
+      pfx,
+      rejectUnauthorized,
+      secureOptions,
+      secureProtocol,
+      servername,
+      sessionIdContext,
+      timeout: request.timeout,
+    }
+  }
+}
+
+module.exports = Request
+
+Object.defineProperties(Request.prototype, {
+  method: { enumerable: true },
+  url: { enumerable: true },
+  headers: { enumerable: true },
+  redirect: { enumerable: true },
+  clone: { enumerable: true },
+  signal: { enumerable: true },
+})
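
To make the normalization above concrete, a small sketch (assuming a deep require into the package; the URL and body are illustrative) of how a Request is built and what getNodeRequestOptions() hands to http/https.request():

```js
// Sketch: build a Request and inspect the node-level request options.
const Request = require('minipass-fetch/lib/request.js') // assumed path

const req = new Request('https://example.com/pkg?x=1', {
  method: 'POST',
  body: '{"hello":"world"}', // a Content-Type is inferred for string bodies
})

console.log(req.method, req.url) // 'POST https://example.com/pkg?x=1'

const opts = Request.getNodeRequestOptions(req)
// urlProps mapped one-by-one, plus defaulted Accept/User-Agent/etc. headers
console.log(opts.protocol, opts.host, opts.path) // 'https:' 'example.com' '/pkg?x=1'
```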
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-fetch/lib/response.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-fetch/lib/response.js
new file mode 100644
index 0000000000000000000000000000000000000000..54cb52db3594a7b9bb1e291fd1ccbc632a3a06a4
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-fetch/lib/response.js
@@ -0,0 +1,90 @@
+'use strict'
+const http = require('http')
+const { STATUS_CODES } = http
+
+const Headers = require('./headers.js')
+const Body = require('./body.js')
+const { clone, extractContentType } = Body
+
+const INTERNALS = Symbol('Response internals')
+
+class Response extends Body {
+  constructor (body = null, opts = {}) {
+    super(body, opts)
+
+    const status = opts.status || 200
+    const headers = new Headers(opts.headers)
+
+    if (body !== null && body !== undefined && !headers.has('Content-Type')) {
+      const contentType = extractContentType(body)
+      if (contentType) {
+        headers.append('Content-Type', contentType)
+      }
+    }
+
+    this[INTERNALS] = {
+      url: opts.url,
+      status,
+      statusText: opts.statusText || STATUS_CODES[status],
+      headers,
+      counter: opts.counter,
+      trailer: Promise.resolve(opts.trailer || new Headers()),
+    }
+  }
+
+  get trailer () {
+    return this[INTERNALS].trailer
+  }
+
+  get url () {
+    return this[INTERNALS].url || ''
+  }
+
+  get status () {
+    return this[INTERNALS].status
+  }
+
+  get ok () {
+    return this[INTERNALS].status >= 200 && this[INTERNALS].status < 300
+  }
+
+  get redirected () {
+    return this[INTERNALS].counter > 0
+  }
+
+  get statusText () {
+    return this[INTERNALS].statusText
+  }
+
+  get headers () {
+    return this[INTERNALS].headers
+  }
+
+  clone () {
+    return new Response(clone(this), {
+      url: this.url,
+      status: this.status,
+      statusText: this.statusText,
+      headers: this.headers,
+      ok: this.ok,
+      redirected: this.redirected,
+      trailer: this.trailer,
+    })
+  }
+
+  get [Symbol.toStringTag] () {
+    return 'Response'
+  }
+}
+
+module.exports = Response
+
+Object.defineProperties(Response.prototype, {
+  url: { enumerable: true },
+  status: { enumerable: true },
+  ok: { enumerable: true },
+  redirected: { enumerable: true },
+  statusText: { enumerable: true },
+  headers: { enumerable: true },
+  clone: { enumerable: true },
+})
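
A short sketch of the Response class in isolation, constructed the way fetch() does after decoding; body helpers like json() come from the Body base class in body.js, and the values here are illustrative:

```js
// Sketch: a Response built directly, as fetch() does internally.
const Response = require('minipass-fetch/lib/response.js') // assumed path

const res = new Response('{"ok":true}', {
  status: 200,
  url: 'https://example.com/data',
  counter: 0,
})

console.log(res.ok, res.status, res.statusText) // true 200 'OK' (STATUS_CODES)
console.log(res.redirected)                     // false (counter is 0)
res.json().then(data => console.log(data.ok))   // true, via the Body base class
```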
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-flush/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-flush/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..19129e315fe593965a2fdd50ec0d1253bcbd2ece
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-flush/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-flush/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-flush/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..cb2537fa6b2b57b790cf6e8704a90ea52351e638
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-flush/index.js
@@ -0,0 +1,39 @@
+const Minipass = require('minipass')
+const _flush = Symbol('_flush')
+const _flushed = Symbol('_flushed')
+const _flushing = Symbol('_flushing')
+class Flush extends Minipass {
+  constructor (opt = {}) {
+    if (typeof opt === 'function')
+      opt = { flush: opt }
+
+    super(opt)
+
+    // or extend this class and provide a 'flush' method in your subclass
+    if (typeof opt.flush !== 'function' && typeof this.flush !== 'function')
+      throw new TypeError('must provide flush function in options')
+
+    this[_flush] = opt.flush || this.flush
+  }
+
+  emit (ev, ...data) {
+    if ((ev !== 'end' && ev !== 'finish') || this[_flushed])
+      return super.emit(ev, ...data)
+
+    if (this[_flushing])
+      return
+
+    this[_flushing] = true
+
+    const afterFlush = er => {
+      this[_flushed] = true
+      er ? super.emit('error', er) : super.emit('end')
+    }
+
+    const ret = this[_flush](afterFlush)
+    if (ret && ret.then)
+      ret.then(() => afterFlush(), er => afterFlush(er))
+  }
+}
+
+module.exports = Flush
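
A sketch of the pattern this class enables: 'end' is withheld until an asynchronous flush completes, so consumers never observe end-of-stream before cleanup finishes (the timeout stands in for real async work):

```js
// Sketch: defer 'end' until an async flush callback has run.
const Flush = require('minipass-flush')

const f = new Flush({
  flush (cb) {
    // pretend async finalization (fsync, trailer write, ...)
    setTimeout(() => {
      console.log('flushed first')
      cb() // cb(err) would make the stream emit 'error' instead of 'end'
    }, 100)
  },
})

f.on('end', () => console.log('end emitted after flush'))
f.end('some data')
f.resume() // start flowing so the buffered chunk drains and 'end' can fire
```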
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-flush/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-flush/package.json
new file mode 100644
index 0000000000000000000000000000000000000000..09127d0ec2015cd969307c23ea7c8a1d7414a21c
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-flush/package.json
@@ -0,0 +1,39 @@
+{
+  "name": "minipass-flush",
+  "version": "1.0.5",
+  "description": "A Minipass stream that calls a flush function before emitting 'end'",
+  "author": "Isaac Z. Schlueter  (https://izs.me)",
+  "license": "ISC",
+  "scripts": {
+    "test": "tap",
+    "snap": "tap",
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "postpublish": "git push origin --follow-tags"
+  },
+  "tap": {
+    "check-coverage": true
+  },
+  "devDependencies": {
+    "tap": "^14.6.9"
+  },
+  "dependencies": {
+    "minipass": "^3.0.0"
+  },
+  "files": [
+    "index.js"
+  ],
+  "main": "index.js",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/isaacs/minipass-flush.git"
+  },
+  "keywords": [
+    "minipass",
+    "flush",
+    "stream"
+  ],
+  "engines": {
+    "node": ">= 8"
+  }
+}
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-pipeline/node_modules/minipass/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-pipeline/node_modules/minipass/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..bf1dece2e1f122d1aa1f851dc9e57381665051ba
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-pipeline/node_modules/minipass/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) 2017-2022 npm, Inc., Isaac Z. Schlueter, and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-pipeline/node_modules/minipass/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-pipeline/node_modules/minipass/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..e8797aab6cc27695ad6edcbb4eb31356b6f35c24
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-pipeline/node_modules/minipass/index.js
@@ -0,0 +1,649 @@
+'use strict'
+const proc = typeof process === 'object' && process ? process : {
+  stdout: null,
+  stderr: null,
+}
+const EE = require('events')
+const Stream = require('stream')
+const SD = require('string_decoder').StringDecoder
+
+const EOF = Symbol('EOF')
+const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
+const EMITTED_END = Symbol('emittedEnd')
+const EMITTING_END = Symbol('emittingEnd')
+const EMITTED_ERROR = Symbol('emittedError')
+const CLOSED = Symbol('closed')
+const READ = Symbol('read')
+const FLUSH = Symbol('flush')
+const FLUSHCHUNK = Symbol('flushChunk')
+const ENCODING = Symbol('encoding')
+const DECODER = Symbol('decoder')
+const FLOWING = Symbol('flowing')
+const PAUSED = Symbol('paused')
+const RESUME = Symbol('resume')
+const BUFFERLENGTH = Symbol('bufferLength')
+const BUFFERPUSH = Symbol('bufferPush')
+const BUFFERSHIFT = Symbol('bufferShift')
+const OBJECTMODE = Symbol('objectMode')
+const DESTROYED = Symbol('destroyed')
+const EMITDATA = Symbol('emitData')
+const EMITEND = Symbol('emitEnd')
+const EMITEND2 = Symbol('emitEnd2')
+const ASYNC = Symbol('async')
+
+const defer = fn => Promise.resolve().then(fn)
+
+// TODO remove when Node v8 support drops
+const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1'
+const ASYNCITERATOR = doIter && Symbol.asyncIterator
+  || Symbol('asyncIterator not implemented')
+const ITERATOR = doIter && Symbol.iterator
+  || Symbol('iterator not implemented')
+
+// events that mean 'the stream is over'
+// these are treated specially, and re-emitted
+// if they are listened for after emitting.
+const isEndish = ev =>
+  ev === 'end' ||
+  ev === 'finish' ||
+  ev === 'prefinish'
+
+const isArrayBuffer = b => b instanceof ArrayBuffer ||
+  typeof b === 'object' &&
+  b.constructor &&
+  b.constructor.name === 'ArrayBuffer' &&
+  b.byteLength >= 0
+
+const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b)
+
+class Pipe {
+  constructor (src, dest, opts) {
+    this.src = src
+    this.dest = dest
+    this.opts = opts
+    this.ondrain = () => src[RESUME]()
+    dest.on('drain', this.ondrain)
+  }
+  unpipe () {
+    this.dest.removeListener('drain', this.ondrain)
+  }
+  // istanbul ignore next - only here for the prototype
+  proxyErrors () {}
+  end () {
+    this.unpipe()
+    if (this.opts.end)
+      this.dest.end()
+  }
+}
+
+class PipeProxyErrors extends Pipe {
+  unpipe () {
+    this.src.removeListener('error', this.proxyErrors)
+    super.unpipe()
+  }
+  constructor (src, dest, opts) {
+    super(src, dest, opts)
+    this.proxyErrors = er => dest.emit('error', er)
+    src.on('error', this.proxyErrors)
+  }
+}
+
+module.exports = class Minipass extends Stream {
+  constructor (options) {
+    super()
+    this[FLOWING] = false
+    // whether we're explicitly paused
+    this[PAUSED] = false
+    this.pipes = []
+    this.buffer = []
+    this[OBJECTMODE] = options && options.objectMode || false
+    if (this[OBJECTMODE])
+      this[ENCODING] = null
+    else
+      this[ENCODING] = options && options.encoding || null
+    if (this[ENCODING] === 'buffer')
+      this[ENCODING] = null
+    this[ASYNC] = options && !!options.async || false
+    this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
+    this[EOF] = false
+    this[EMITTED_END] = false
+    this[EMITTING_END] = false
+    this[CLOSED] = false
+    this[EMITTED_ERROR] = null
+    this.writable = true
+    this.readable = true
+    this[BUFFERLENGTH] = 0
+    this[DESTROYED] = false
+  }
+
+  get bufferLength () { return this[BUFFERLENGTH] }
+
+  get encoding () { return this[ENCODING] }
+  set encoding (enc) {
+    if (this[OBJECTMODE])
+      throw new Error('cannot set encoding in objectMode')
+
+    if (this[ENCODING] && enc !== this[ENCODING] &&
+        (this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH]))
+      throw new Error('cannot change encoding')
+
+    if (this[ENCODING] !== enc) {
+      this[DECODER] = enc ? new SD(enc) : null
+      if (this.buffer.length)
+        this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk))
+    }
+
+    this[ENCODING] = enc
+  }
+
+  setEncoding (enc) {
+    this.encoding = enc
+  }
+
+  get objectMode () { return this[OBJECTMODE] }
+  set objectMode (om) { this[OBJECTMODE] = this[OBJECTMODE] || !!om }
+
+  get ['async'] () { return this[ASYNC] }
+  set ['async'] (a) { this[ASYNC] = this[ASYNC] || !!a }
+
+  write (chunk, encoding, cb) {
+    if (this[EOF])
+      throw new Error('write after end')
+
+    if (this[DESTROYED]) {
+      this.emit('error', Object.assign(
+        new Error('Cannot call write after a stream was destroyed'),
+        { code: 'ERR_STREAM_DESTROYED' }
+      ))
+      return true
+    }
+
+    if (typeof encoding === 'function')
+      cb = encoding, encoding = 'utf8'
+
+    if (!encoding)
+      encoding = 'utf8'
+
+    const fn = this[ASYNC] ? defer : f => f()
+
+    // convert array buffers and typed array views into buffers
+    // at some point in the future, we may want to do the opposite!
+    // leave strings and buffers as-is
+    // anything else switches us into object mode
+    if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
+      if (isArrayBufferView(chunk))
+        chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
+      else if (isArrayBuffer(chunk))
+        chunk = Buffer.from(chunk)
+      else if (typeof chunk !== 'string')
+        // use the setter so we throw if we have encoding set
+        this.objectMode = true
+    }
+
+    // handle object mode up front, since it's simpler
+    // this yields better performance, fewer checks later.
+    if (this[OBJECTMODE]) {
+      /* istanbul ignore if - maybe impossible? */
+      if (this.flowing && this[BUFFERLENGTH] !== 0)
+        this[FLUSH](true)
+
+      if (this.flowing)
+        this.emit('data', chunk)
+      else
+        this[BUFFERPUSH](chunk)
+
+      if (this[BUFFERLENGTH] !== 0)
+        this.emit('readable')
+
+      if (cb)
+        fn(cb)
+
+      return this.flowing
+    }
+
+    // at this point the chunk is a buffer or string
+    // don't buffer it up or send it to the decoder
+    if (!chunk.length) {
+      if (this[BUFFERLENGTH] !== 0)
+        this.emit('readable')
+      if (cb)
+        fn(cb)
+      return this.flowing
+    }
+
+    // fast-path writing strings of same encoding to a stream with
+    // an empty buffer, skipping the buffer/decoder dance
+    if (typeof chunk === 'string' &&
+        // unless it is a string already ready for us to use
+        !(encoding === this[ENCODING] && !this[DECODER].lastNeed)) {
+      chunk = Buffer.from(chunk, encoding)
+    }
+
+    if (Buffer.isBuffer(chunk) && this[ENCODING])
+      chunk = this[DECODER].write(chunk)
+
+    // Note: flushing CAN potentially switch us into not-flowing mode
+    if (this.flowing && this[BUFFERLENGTH] !== 0)
+      this[FLUSH](true)
+
+    if (this.flowing)
+      this.emit('data', chunk)
+    else
+      this[BUFFERPUSH](chunk)
+
+    if (this[BUFFERLENGTH] !== 0)
+      this.emit('readable')
+
+    if (cb)
+      fn(cb)
+
+    return this.flowing
+  }
+
+  read (n) {
+    if (this[DESTROYED])
+      return null
+
+    if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) {
+      this[MAYBE_EMIT_END]()
+      return null
+    }
+
+    if (this[OBJECTMODE])
+      n = null
+
+    if (this.buffer.length > 1 && !this[OBJECTMODE]) {
+      if (this.encoding)
+        this.buffer = [this.buffer.join('')]
+      else
+        this.buffer = [Buffer.concat(this.buffer, this[BUFFERLENGTH])]
+    }
+
+    const ret = this[READ](n || null, this.buffer[0])
+    this[MAYBE_EMIT_END]()
+    return ret
+  }
+
+  [READ] (n, chunk) {
+    if (n === chunk.length || n === null)
+      this[BUFFERSHIFT]()
+    else {
+      this.buffer[0] = chunk.slice(n)
+      chunk = chunk.slice(0, n)
+      this[BUFFERLENGTH] -= n
+    }
+
+    this.emit('data', chunk)
+
+    if (!this.buffer.length && !this[EOF])
+      this.emit('drain')
+
+    return chunk
+  }
+
+  end (chunk, encoding, cb) {
+    if (typeof chunk === 'function')
+      cb = chunk, chunk = null
+    if (typeof encoding === 'function')
+      cb = encoding, encoding = 'utf8'
+    if (chunk)
+      this.write(chunk, encoding)
+    if (cb)
+      this.once('end', cb)
+    this[EOF] = true
+    this.writable = false
+
+    // if we haven't written anything, then go ahead and emit,
+    // even if we're not reading.
+    // we'll re-emit if a new 'end' listener is added anyway.
+    // This makes MP more suitable to write-only use cases.
+    if (this.flowing || !this[PAUSED])
+      this[MAYBE_EMIT_END]()
+    return this
+  }
+
+  // don't let the internal resume be overwritten
+  [RESUME] () {
+    if (this[DESTROYED])
+      return
+
+    this[PAUSED] = false
+    this[FLOWING] = true
+    this.emit('resume')
+    if (this.buffer.length)
+      this[FLUSH]()
+    else if (this[EOF])
+      this[MAYBE_EMIT_END]()
+    else
+      this.emit('drain')
+  }
+
+  resume () {
+    return this[RESUME]()
+  }
+
+  pause () {
+    this[FLOWING] = false
+    this[PAUSED] = true
+  }
+
+  get destroyed () {
+    return this[DESTROYED]
+  }
+
+  get flowing () {
+    return this[FLOWING]
+  }
+
+  get paused () {
+    return this[PAUSED]
+  }
+
+  [BUFFERPUSH] (chunk) {
+    if (this[OBJECTMODE])
+      this[BUFFERLENGTH] += 1
+    else
+      this[BUFFERLENGTH] += chunk.length
+    this.buffer.push(chunk)
+  }
+
+  [BUFFERSHIFT] () {
+    if (this.buffer.length) {
+      if (this[OBJECTMODE])
+        this[BUFFERLENGTH] -= 1
+      else
+        this[BUFFERLENGTH] -= this.buffer[0].length
+    }
+    return this.buffer.shift()
+  }
+
+  [FLUSH] (noDrain) {
+    do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()))
+
+    if (!noDrain && !this.buffer.length && !this[EOF])
+      this.emit('drain')
+  }
+
+  [FLUSHCHUNK] (chunk) {
+    return chunk ? (this.emit('data', chunk), this.flowing) : false
+  }
+
+  pipe (dest, opts) {
+    if (this[DESTROYED])
+      return
+
+    const ended = this[EMITTED_END]
+    opts = opts || {}
+    if (dest === proc.stdout || dest === proc.stderr)
+      opts.end = false
+    else
+      opts.end = opts.end !== false
+    opts.proxyErrors = !!opts.proxyErrors
+
+    // piping an ended stream ends immediately
+    if (ended) {
+      if (opts.end)
+        dest.end()
+    } else {
+      this.pipes.push(!opts.proxyErrors ? new Pipe(this, dest, opts)
+        : new PipeProxyErrors(this, dest, opts))
+      if (this[ASYNC])
+        defer(() => this[RESUME]())
+      else
+        this[RESUME]()
+    }
+
+    return dest
+  }
+
+  unpipe (dest) {
+    const p = this.pipes.find(p => p.dest === dest)
+    if (p) {
+      this.pipes.splice(this.pipes.indexOf(p), 1)
+      p.unpipe()
+    }
+  }
+
+  addListener (ev, fn) {
+    return this.on(ev, fn)
+  }
+
+  on (ev, fn) {
+    const ret = super.on(ev, fn)
+    if (ev === 'data' && !this.pipes.length && !this.flowing)
+      this[RESUME]()
+    else if (ev === 'readable' && this[BUFFERLENGTH] !== 0)
+      super.emit('readable')
+    else if (isEndish(ev) && this[EMITTED_END]) {
+      super.emit(ev)
+      this.removeAllListeners(ev)
+    } else if (ev === 'error' && this[EMITTED_ERROR]) {
+      if (this[ASYNC])
+        defer(() => fn.call(this, this[EMITTED_ERROR]))
+      else
+        fn.call(this, this[EMITTED_ERROR])
+    }
+    return ret
+  }
+
+  get emittedEnd () {
+    return this[EMITTED_END]
+  }
+
+  [MAYBE_EMIT_END] () {
+    if (!this[EMITTING_END] &&
+        !this[EMITTED_END] &&
+        !this[DESTROYED] &&
+        this.buffer.length === 0 &&
+        this[EOF]) {
+      this[EMITTING_END] = true
+      this.emit('end')
+      this.emit('prefinish')
+      this.emit('finish')
+      if (this[CLOSED])
+        this.emit('close')
+      this[EMITTING_END] = false
+    }
+  }
+
+  emit (ev, data, ...extra) {
+    // error and close are only events allowed after calling destroy()
+    if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED])
+      return
+    else if (ev === 'data') {
+      return !data ? false
+        : this[ASYNC] ? defer(() => this[EMITDATA](data))
+        : this[EMITDATA](data)
+    } else if (ev === 'end') {
+      return this[EMITEND]()
+    } else if (ev === 'close') {
+      this[CLOSED] = true
+      // don't emit close before 'end' and 'finish'
+      if (!this[EMITTED_END] && !this[DESTROYED])
+        return
+      const ret = super.emit('close')
+      this.removeAllListeners('close')
+      return ret
+    } else if (ev === 'error') {
+      this[EMITTED_ERROR] = data
+      const ret = super.emit('error', data)
+      this[MAYBE_EMIT_END]()
+      return ret
+    } else if (ev === 'resume') {
+      const ret = super.emit('resume')
+      this[MAYBE_EMIT_END]()
+      return ret
+    } else if (ev === 'finish' || ev === 'prefinish') {
+      const ret = super.emit(ev)
+      this.removeAllListeners(ev)
+      return ret
+    }
+
+    // Some other unknown event
+    const ret = super.emit(ev, data, ...extra)
+    this[MAYBE_EMIT_END]()
+    return ret
+  }
+
+  [EMITDATA] (data) {
+    for (const p of this.pipes) {
+      if (p.dest.write(data) === false)
+        this.pause()
+    }
+    const ret = super.emit('data', data)
+    this[MAYBE_EMIT_END]()
+    return ret
+  }
+
+  [EMITEND] () {
+    if (this[EMITTED_END])
+      return
+
+    this[EMITTED_END] = true
+    this.readable = false
+    if (this[ASYNC])
+      defer(() => this[EMITEND2]())
+    else
+      this[EMITEND2]()
+  }
+
+  [EMITEND2] () {
+    if (this[DECODER]) {
+      const data = this[DECODER].end()
+      if (data) {
+        for (const p of this.pipes) {
+          p.dest.write(data)
+        }
+        super.emit('data', data)
+      }
+    }
+
+    for (const p of this.pipes) {
+      p.end()
+    }
+    const ret = super.emit('end')
+    this.removeAllListeners('end')
+    return ret
+  }
+
+  // const all = await stream.collect()
+  collect () {
+    const buf = []
+    if (!this[OBJECTMODE])
+      buf.dataLength = 0
+    // set the promise first, in case an error is raised
+    // by triggering the flow here.
+    const p = this.promise()
+    this.on('data', c => {
+      buf.push(c)
+      if (!this[OBJECTMODE])
+        buf.dataLength += c.length
+    })
+    return p.then(() => buf)
+  }
+
+  // const data = await stream.concat()
+  concat () {
+    return this[OBJECTMODE]
+      ? Promise.reject(new Error('cannot concat in objectMode'))
+      : this.collect().then(buf =>
+          this[OBJECTMODE]
+            ? Promise.reject(new Error('cannot concat in objectMode'))
+            : this[ENCODING] ? buf.join('') : Buffer.concat(buf, buf.dataLength))
+  }
+
+  // stream.promise().then(() => done, er => emitted error)
+  promise () {
+    return new Promise((resolve, reject) => {
+      this.on(DESTROYED, () => reject(new Error('stream destroyed')))
+      this.on('error', er => reject(er))
+      this.on('end', () => resolve())
+    })
+  }
+
+  // for await (let chunk of stream)
+  [ASYNCITERATOR] () {
+    const next = () => {
+      const res = this.read()
+      if (res !== null)
+        return Promise.resolve({ done: false, value: res })
+
+      if (this[EOF])
+        return Promise.resolve({ done: true })
+
+      let resolve = null
+      let reject = null
+      const onerr = er => {
+        this.removeListener('data', ondata)
+        this.removeListener('end', onend)
+        reject(er)
+      }
+      const ondata = value => {
+        this.removeListener('error', onerr)
+        this.removeListener('end', onend)
+        this.pause()
+        resolve({ value: value, done: !!this[EOF] })
+      }
+      const onend = () => {
+        this.removeListener('error', onerr)
+        this.removeListener('data', ondata)
+        resolve({ done: true })
+      }
+      const ondestroy = () => onerr(new Error('stream destroyed'))
+      return new Promise((res, rej) => {
+        reject = rej
+        resolve = res
+        this.once(DESTROYED, ondestroy)
+        this.once('error', onerr)
+        this.once('end', onend)
+        this.once('data', ondata)
+      })
+    }
+
+    return { next }
+  }
+
+  // for (let chunk of stream)
+  [ITERATOR] () {
+    const next = () => {
+      const value = this.read()
+      const done = value === null
+      return { value, done }
+    }
+    return { next }
+  }
+
+  destroy (er) {
+    if (this[DESTROYED]) {
+      if (er)
+        this.emit('error', er)
+      else
+        this.emit(DESTROYED)
+      return this
+    }
+
+    this[DESTROYED] = true
+
+    // throw away all buffered data, it's never coming out
+    this.buffer.length = 0
+    this[BUFFERLENGTH] = 0
+
+    if (typeof this.close === 'function' && !this[CLOSED])
+      this.close()
+
+    if (er)
+      this.emit('error', er)
+    else // if no error to emit, still reject pending promises
+      this.emit(DESTROYED)
+
+    return this
+  }
+
+  static isStream (s) {
+    return !!s && (s instanceof Minipass || s instanceof Stream ||
+      s instanceof EE && (
+        typeof s.pipe === 'function' || // readable
+        (typeof s.write === 'function' && typeof s.end === 'function') // writable
+      ))
+  }
+}
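
To round out the class above, a sketch of its core surface: synchronous writes, the collect/concat promise helpers, and async iteration (values illustrative):

```js
// Sketch: Minipass string mode and object mode, end to end.
const Minipass = require('minipass')

async function demo () {
  const mp = new Minipass({ encoding: 'utf8' })
  mp.write('hello, ')
  mp.end('world')
  console.log(await mp.concat()) // 'hello, world' (joined via ENCODING)

  const mp2 = new Minipass({ objectMode: true })
  mp2.write({ n: 1 })
  mp2.end({ n: 2 })
  for await (const obj of mp2) { // uses the [ASYNCITERATOR] above
    console.log(obj.n)           // 1, then 2
  }
}

demo().catch(console.error)
```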
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-pipeline/node_modules/minipass/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-pipeline/node_modules/minipass/package.json
new file mode 100644
index 0000000000000000000000000000000000000000..548d03fa6d5d4b1eb4f2e6299f151f131ee79492
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-pipeline/node_modules/minipass/package.json
@@ -0,0 +1,56 @@
+{
+  "name": "minipass",
+  "version": "3.3.6",
+  "description": "minimal implementation of a PassThrough stream",
+  "main": "index.js",
+  "types": "index.d.ts",
+  "dependencies": {
+    "yallist": "^4.0.0"
+  },
+  "devDependencies": {
+    "@types/node": "^17.0.41",
+    "end-of-stream": "^1.4.0",
+    "prettier": "^2.6.2",
+    "tap": "^16.2.0",
+    "through2": "^2.0.3",
+    "ts-node": "^10.8.1",
+    "typescript": "^4.7.3"
+  },
+  "scripts": {
+    "test": "tap",
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "postpublish": "git push origin --follow-tags"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/isaacs/minipass.git"
+  },
+  "keywords": [
+    "passthrough",
+    "stream"
+  ],
+  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
+  "license": "ISC",
+  "files": [
+    "index.d.ts",
+    "index.js"
+  ],
+  "tap": {
+    "check-coverage": true
+  },
+  "engines": {
+    "node": ">=8"
+  },
+  "prettier": {
+    "semi": false,
+    "printWidth": 80,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  }
+}
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-sized/node_modules/minipass/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-sized/node_modules/minipass/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..bf1dece2e1f122d1aa1f851dc9e57381665051ba
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-sized/node_modules/minipass/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) 2017-2022 npm, Inc., Isaac Z. Schlueter, and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-sized/node_modules/minipass/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-sized/node_modules/minipass/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..e8797aab6cc27695ad6edcbb4eb31356b6f35c24
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-sized/node_modules/minipass/index.js
@@ -0,0 +1,649 @@
+'use strict'
+const proc = typeof process === 'object' && process ? process : {
+  stdout: null,
+  stderr: null,
+}
+const EE = require('events')
+const Stream = require('stream')
+const SD = require('string_decoder').StringDecoder
+
+const EOF = Symbol('EOF')
+const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
+const EMITTED_END = Symbol('emittedEnd')
+const EMITTING_END = Symbol('emittingEnd')
+const EMITTED_ERROR = Symbol('emittedError')
+const CLOSED = Symbol('closed')
+const READ = Symbol('read')
+const FLUSH = Symbol('flush')
+const FLUSHCHUNK = Symbol('flushChunk')
+const ENCODING = Symbol('encoding')
+const DECODER = Symbol('decoder')
+const FLOWING = Symbol('flowing')
+const PAUSED = Symbol('paused')
+const RESUME = Symbol('resume')
+const BUFFERLENGTH = Symbol('bufferLength')
+const BUFFERPUSH = Symbol('bufferPush')
+const BUFFERSHIFT = Symbol('bufferShift')
+const OBJECTMODE = Symbol('objectMode')
+const DESTROYED = Symbol('destroyed')
+const EMITDATA = Symbol('emitData')
+const EMITEND = Symbol('emitEnd')
+const EMITEND2 = Symbol('emitEnd2')
+const ASYNC = Symbol('async')
+
+const defer = fn => Promise.resolve().then(fn)
+
+// TODO remove when Node v8 support drops
+const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1'
+const ASYNCITERATOR = doIter && Symbol.asyncIterator
+  || Symbol('asyncIterator not implemented')
+const ITERATOR = doIter && Symbol.iterator
+  || Symbol('iterator not implemented')
+
+// events that mean 'the stream is over'
+// these are treated specially, and re-emitted
+// if they are listened for after emitting.
+const isEndish = ev =>
+  ev === 'end' ||
+  ev === 'finish' ||
+  ev === 'prefinish'
+
+const isArrayBuffer = b => b instanceof ArrayBuffer ||
+  typeof b === 'object' &&
+  b.constructor &&
+  b.constructor.name === 'ArrayBuffer' &&
+  b.byteLength >= 0
+
+const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b)
+
+class Pipe {
+  constructor (src, dest, opts) {
+    this.src = src
+    this.dest = dest
+    this.opts = opts
+    this.ondrain = () => src[RESUME]()
+    dest.on('drain', this.ondrain)
+  }
+  unpipe () {
+    this.dest.removeListener('drain', this.ondrain)
+  }
+  // istanbul ignore next - only here for the prototype
+  proxyErrors () {}
+  end () {
+    this.unpipe()
+    if (this.opts.end)
+      this.dest.end()
+  }
+}
+
+class PipeProxyErrors extends Pipe {
+  unpipe () {
+    this.src.removeListener('error', this.proxyErrors)
+    super.unpipe()
+  }
+  constructor (src, dest, opts) {
+    super(src, dest, opts)
+    this.proxyErrors = er => dest.emit('error', er)
+    src.on('error', this.proxyErrors)
+  }
+}
+
+module.exports = class Minipass extends Stream {
+  constructor (options) {
+    super()
+    this[FLOWING] = false
+    // whether we're explicitly paused
+    this[PAUSED] = false
+    this.pipes = []
+    this.buffer = []
+    this[OBJECTMODE] = options && options.objectMode || false
+    if (this[OBJECTMODE])
+      this[ENCODING] = null
+    else
+      this[ENCODING] = options && options.encoding || null
+    if (this[ENCODING] === 'buffer')
+      this[ENCODING] = null
+    this[ASYNC] = options && !!options.async || false
+    this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
+    this[EOF] = false
+    this[EMITTED_END] = false
+    this[EMITTING_END] = false
+    this[CLOSED] = false
+    this[EMITTED_ERROR] = null
+    this.writable = true
+    this.readable = true
+    this[BUFFERLENGTH] = 0
+    this[DESTROYED] = false
+  }
+
+  get bufferLength () { return this[BUFFERLENGTH] }
+
+  get encoding () { return this[ENCODING] }
+  set encoding (enc) {
+    if (this[OBJECTMODE])
+      throw new Error('cannot set encoding in objectMode')
+
+    if (this[ENCODING] && enc !== this[ENCODING] &&
+        (this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH]))
+      throw new Error('cannot change encoding')
+
+    if (this[ENCODING] !== enc) {
+      this[DECODER] = enc ? new SD(enc) : null
+      if (this.buffer.length)
+        this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk))
+    }
+
+    this[ENCODING] = enc
+  }
+
+  setEncoding (enc) {
+    this.encoding = enc
+  }
+
+  get objectMode () { return this[OBJECTMODE] }
+  set objectMode (om) { this[OBJECTMODE] = this[OBJECTMODE] || !!om }
+
+  get ['async'] () { return this[ASYNC] }
+  set ['async'] (a) { this[ASYNC] = this[ASYNC] || !!a }
+
+  write (chunk, encoding, cb) {
+    if (this[EOF])
+      throw new Error('write after end')
+
+    if (this[DESTROYED]) {
+      this.emit('error', Object.assign(
+        new Error('Cannot call write after a stream was destroyed'),
+        { code: 'ERR_STREAM_DESTROYED' }
+      ))
+      return true
+    }
+
+    if (typeof encoding === 'function')
+      cb = encoding, encoding = 'utf8'
+
+    if (!encoding)
+      encoding = 'utf8'
+
+    const fn = this[ASYNC] ? defer : f => f()
+
+    // convert array buffers and typed array views into buffers
+    // at some point in the future, we may want to do the opposite!
+    // leave strings and buffers as-is
+    // anything else switches us into object mode
+    if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
+      if (isArrayBufferView(chunk))
+        chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
+      else if (isArrayBuffer(chunk))
+        chunk = Buffer.from(chunk)
+      else if (typeof chunk !== 'string')
+        // use the setter so we throw if we have encoding set
+        this.objectMode = true
+    }
+
+    // handle object mode up front, since it's simpler
+    // this yields better performance, fewer checks later.
+    if (this[OBJECTMODE]) {
+      /* istanbul ignore if - maybe impossible? */
+      if (this.flowing && this[BUFFERLENGTH] !== 0)
+        this[FLUSH](true)
+
+      if (this.flowing)
+        this.emit('data', chunk)
+      else
+        this[BUFFERPUSH](chunk)
+
+      if (this[BUFFERLENGTH] !== 0)
+        this.emit('readable')
+
+      if (cb)
+        fn(cb)
+
+      return this.flowing
+    }
+
+    // at this point the chunk is a buffer or string
+    // don't buffer it up or send it to the decoder
+    if (!chunk.length) {
+      if (this[BUFFERLENGTH] !== 0)
+        this.emit('readable')
+      if (cb)
+        fn(cb)
+      return this.flowing
+    }
+
+    // fast-path writing strings of same encoding to a stream with
+    // an empty buffer, skipping the buffer/decoder dance
+    if (typeof chunk === 'string' &&
+        // unless it is a string already ready for us to use
+        !(encoding === this[ENCODING] && !this[DECODER].lastNeed)) {
+      chunk = Buffer.from(chunk, encoding)
+    }
+
+    if (Buffer.isBuffer(chunk) && this[ENCODING])
+      chunk = this[DECODER].write(chunk)
+
+    // Note: flushing CAN potentially switch us into not-flowing mode
+    if (this.flowing && this[BUFFERLENGTH] !== 0)
+      this[FLUSH](true)
+
+    if (this.flowing)
+      this.emit('data', chunk)
+    else
+      this[BUFFERPUSH](chunk)
+
+    if (this[BUFFERLENGTH] !== 0)
+      this.emit('readable')
+
+    if (cb)
+      fn(cb)
+
+    return this.flowing
+  }
+
+  read (n) {
+    if (this[DESTROYED])
+      return null
+
+    if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) {
+      this[MAYBE_EMIT_END]()
+      return null
+    }
+
+    if (this[OBJECTMODE])
+      n = null
+
+    if (this.buffer.length > 1 && !this[OBJECTMODE]) {
+      if (this.encoding)
+        this.buffer = [this.buffer.join('')]
+      else
+        this.buffer = [Buffer.concat(this.buffer, this[BUFFERLENGTH])]
+    }
+
+    const ret = this[READ](n || null, this.buffer[0])
+    this[MAYBE_EMIT_END]()
+    return ret
+  }
+
+  [READ] (n, chunk) {
+    if (n === chunk.length || n === null)
+      this[BUFFERSHIFT]()
+    else {
+      this.buffer[0] = chunk.slice(n)
+      chunk = chunk.slice(0, n)
+      this[BUFFERLENGTH] -= n
+    }
+
+    this.emit('data', chunk)
+
+    if (!this.buffer.length && !this[EOF])
+      this.emit('drain')
+
+    return chunk
+  }
+
+  end (chunk, encoding, cb) {
+    if (typeof chunk === 'function')
+      cb = chunk, chunk = null
+    if (typeof encoding === 'function')
+      cb = encoding, encoding = 'utf8'
+    if (chunk)
+      this.write(chunk, encoding)
+    if (cb)
+      this.once('end', cb)
+    this[EOF] = true
+    this.writable = false
+
+    // if we haven't written anything, then go ahead and emit,
+    // even if we're not reading.
+    // we'll re-emit if a new 'end' listener is added anyway.
+    // This makes MP more suitable to write-only use cases.
+    if (this.flowing || !this[PAUSED])
+      this[MAYBE_EMIT_END]()
+    return this
+  }
+
+  // don't let the internal resume be overwritten
+  [RESUME] () {
+    if (this[DESTROYED])
+      return
+
+    this[PAUSED] = false
+    this[FLOWING] = true
+    this.emit('resume')
+    if (this.buffer.length)
+      this[FLUSH]()
+    else if (this[EOF])
+      this[MAYBE_EMIT_END]()
+    else
+      this.emit('drain')
+  }
+
+  resume () {
+    return this[RESUME]()
+  }
+
+  pause () {
+    this[FLOWING] = false
+    this[PAUSED] = true
+  }
+
+  get destroyed () {
+    return this[DESTROYED]
+  }
+
+  get flowing () {
+    return this[FLOWING]
+  }
+
+  get paused () {
+    return this[PAUSED]
+  }
+
+  [BUFFERPUSH] (chunk) {
+    if (this[OBJECTMODE])
+      this[BUFFERLENGTH] += 1
+    else
+      this[BUFFERLENGTH] += chunk.length
+    this.buffer.push(chunk)
+  }
+
+  [BUFFERSHIFT] () {
+    if (this.buffer.length) {
+      if (this[OBJECTMODE])
+        this[BUFFERLENGTH] -= 1
+      else
+        this[BUFFERLENGTH] -= this.buffer[0].length
+    }
+    return this.buffer.shift()
+  }
+
+  [FLUSH] (noDrain) {
+    do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()))
+
+    if (!noDrain && !this.buffer.length && !this[EOF])
+      this.emit('drain')
+  }
+
+  [FLUSHCHUNK] (chunk) {
+    return chunk ? (this.emit('data', chunk), this.flowing) : false
+  }
+
+  pipe (dest, opts) {
+    if (this[DESTROYED])
+      return
+
+    const ended = this[EMITTED_END]
+    opts = opts || {}
+    if (dest === proc.stdout || dest === proc.stderr)
+      opts.end = false
+    else
+      opts.end = opts.end !== false
+    opts.proxyErrors = !!opts.proxyErrors
+
+    // piping an ended stream ends immediately
+    if (ended) {
+      if (opts.end)
+        dest.end()
+    } else {
+      this.pipes.push(!opts.proxyErrors ? new Pipe(this, dest, opts)
+        : new PipeProxyErrors(this, dest, opts))
+      if (this[ASYNC])
+        defer(() => this[RESUME]())
+      else
+        this[RESUME]()
+    }
+
+    return dest
+  }
+
+  unpipe (dest) {
+    const p = this.pipes.find(p => p.dest === dest)
+    if (p) {
+      this.pipes.splice(this.pipes.indexOf(p), 1)
+      p.unpipe()
+    }
+  }
+
+  addListener (ev, fn) {
+    return this.on(ev, fn)
+  }
+
+  on (ev, fn) {
+    const ret = super.on(ev, fn)
+    if (ev === 'data' && !this.pipes.length && !this.flowing)
+      this[RESUME]()
+    else if (ev === 'readable' && this[BUFFERLENGTH] !== 0)
+      super.emit('readable')
+    else if (isEndish(ev) && this[EMITTED_END]) {
+      super.emit(ev)
+      this.removeAllListeners(ev)
+    } else if (ev === 'error' && this[EMITTED_ERROR]) {
+      if (this[ASYNC])
+        defer(() => fn.call(this, this[EMITTED_ERROR]))
+      else
+        fn.call(this, this[EMITTED_ERROR])
+    }
+    return ret
+  }
+
+  get emittedEnd () {
+    return this[EMITTED_END]
+  }
+
+  [MAYBE_EMIT_END] () {
+    if (!this[EMITTING_END] &&
+        !this[EMITTED_END] &&
+        !this[DESTROYED] &&
+        this.buffer.length === 0 &&
+        this[EOF]) {
+      this[EMITTING_END] = true
+      this.emit('end')
+      this.emit('prefinish')
+      this.emit('finish')
+      if (this[CLOSED])
+        this.emit('close')
+      this[EMITTING_END] = false
+    }
+  }
+
+  emit (ev, data, ...extra) {
+    // error and close are only events allowed after calling destroy()
+    if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED])
+      return
+    else if (ev === 'data') {
+      return !data ? false
+        : this[ASYNC] ? defer(() => this[EMITDATA](data))
+        : this[EMITDATA](data)
+    } else if (ev === 'end') {
+      return this[EMITEND]()
+    } else if (ev === 'close') {
+      this[CLOSED] = true
+      // don't emit close before 'end' and 'finish'
+      if (!this[EMITTED_END] && !this[DESTROYED])
+        return
+      const ret = super.emit('close')
+      this.removeAllListeners('close')
+      return ret
+    } else if (ev === 'error') {
+      this[EMITTED_ERROR] = data
+      const ret = super.emit('error', data)
+      this[MAYBE_EMIT_END]()
+      return ret
+    } else if (ev === 'resume') {
+      const ret = super.emit('resume')
+      this[MAYBE_EMIT_END]()
+      return ret
+    } else if (ev === 'finish' || ev === 'prefinish') {
+      const ret = super.emit(ev)
+      this.removeAllListeners(ev)
+      return ret
+    }
+
+    // Some other unknown event
+    const ret = super.emit(ev, data, ...extra)
+    this[MAYBE_EMIT_END]()
+    return ret
+  }
+
+  [EMITDATA] (data) {
+    for (const p of this.pipes) {
+      if (p.dest.write(data) === false)
+        this.pause()
+    }
+    const ret = super.emit('data', data)
+    this[MAYBE_EMIT_END]()
+    return ret
+  }
+
+  [EMITEND] () {
+    if (this[EMITTED_END])
+      return
+
+    this[EMITTED_END] = true
+    this.readable = false
+    if (this[ASYNC])
+      defer(() => this[EMITEND2]())
+    else
+      this[EMITEND2]()
+  }
+
+  [EMITEND2] () {
+    if (this[DECODER]) {
+      const data = this[DECODER].end()
+      if (data) {
+        for (const p of this.pipes) {
+          p.dest.write(data)
+        }
+        super.emit('data', data)
+      }
+    }
+
+    for (const p of this.pipes) {
+      p.end()
+    }
+    const ret = super.emit('end')
+    this.removeAllListeners('end')
+    return ret
+  }
+
+  // const all = await stream.collect()
+  collect () {
+    const buf = []
+    if (!this[OBJECTMODE])
+      buf.dataLength = 0
+    // set the promise first, in case an error is raised
+    // by triggering the flow here.
+    const p = this.promise()
+    this.on('data', c => {
+      buf.push(c)
+      if (!this[OBJECTMODE])
+        buf.dataLength += c.length
+    })
+    return p.then(() => buf)
+  }
+
+  // const data = await stream.concat()
+  concat () {
+    return this[OBJECTMODE]
+      ? Promise.reject(new Error('cannot concat in objectMode'))
+      : this.collect().then(buf =>
+          this[OBJECTMODE]
+            ? Promise.reject(new Error('cannot concat in objectMode'))
+            : this[ENCODING] ? buf.join('') : Buffer.concat(buf, buf.dataLength))
+  }
+
+  // stream.promise().then(() => done, er => emitted error)
+  promise () {
+    return new Promise((resolve, reject) => {
+      this.on(DESTROYED, () => reject(new Error('stream destroyed')))
+      this.on('error', er => reject(er))
+      this.on('end', () => resolve())
+    })
+  }
+
+  // for await (let chunk of stream)
+  [ASYNCITERATOR] () {
+    const next = () => {
+      const res = this.read()
+      if (res !== null)
+        return Promise.resolve({ done: false, value: res })
+
+      if (this[EOF])
+        return Promise.resolve({ done: true })
+
+      let resolve = null
+      let reject = null
+      const onerr = er => {
+        this.removeListener('data', ondata)
+        this.removeListener('end', onend)
+        reject(er)
+      }
+      const ondata = value => {
+        this.removeListener('error', onerr)
+        this.removeListener('end', onend)
+        this.pause()
+        resolve({ value: value, done: !!this[EOF] })
+      }
+      const onend = () => {
+        this.removeListener('error', onerr)
+        this.removeListener('data', ondata)
+        resolve({ done: true })
+      }
+      const ondestroy = () => onerr(new Error('stream destroyed'))
+      return new Promise((res, rej) => {
+        reject = rej
+        resolve = res
+        this.once(DESTROYED, ondestroy)
+        this.once('error', onerr)
+        this.once('end', onend)
+        this.once('data', ondata)
+      })
+    }
+
+    return { next }
+  }
+
+  // for (let chunk of stream)
+  [ITERATOR] () {
+    const next = () => {
+      const value = this.read()
+      const done = value === null
+      return { value, done }
+    }
+    return { next }
+  }
+
+  destroy (er) {
+    if (this[DESTROYED]) {
+      if (er)
+        this.emit('error', er)
+      else
+        this.emit(DESTROYED)
+      return this
+    }
+
+    this[DESTROYED] = true
+
+    // throw away all buffered data, it's never coming out
+    this.buffer.length = 0
+    this[BUFFERLENGTH] = 0
+
+    if (typeof this.close === 'function' && !this[CLOSED])
+      this.close()
+
+    if (er)
+      this.emit('error', er)
+    else // if no error to emit, still reject pending promises
+      this.emit(DESTROYED)
+
+    return this
+  }
+
+  static isStream (s) {
+    return !!s && (s instanceof Minipass || s instanceof Stream ||
+      s instanceof EE && (
+        typeof s.pipe === 'function' || // readable
+        (typeof s.write === 'function' && typeof s.end === 'function') // writable
+      ))
+  }
+}
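+
+// A minimal consumption sketch (editor's illustration, not part of the
+// upstream file). Assumes the class above is exported as `Minipass`:
+//
+//   const mp = new Minipass({ encoding: 'utf8' })
+//   mp.end('foo')
+//   mp.concat().then(str => console.log(str)) // 'foo'
+//   // or: const chunks = await mp.collect()
+//   // or: for await (const chunk of mp) console.log(chunk)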
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-sized/node_modules/minipass/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-sized/node_modules/minipass/package.json
new file mode 100644
index 0000000000000000000000000000000000000000..548d03fa6d5d4b1eb4f2e6299f151f131ee79492
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass-sized/node_modules/minipass/package.json
@@ -0,0 +1,56 @@
+{
+  "name": "minipass",
+  "version": "3.3.6",
+  "description": "minimal implementation of a PassThrough stream",
+  "main": "index.js",
+  "types": "index.d.ts",
+  "dependencies": {
+    "yallist": "^4.0.0"
+  },
+  "devDependencies": {
+    "@types/node": "^17.0.41",
+    "end-of-stream": "^1.4.0",
+    "prettier": "^2.6.2",
+    "tap": "^16.2.0",
+    "through2": "^2.0.3",
+    "ts-node": "^10.8.1",
+    "typescript": "^4.7.3"
+  },
+  "scripts": {
+    "test": "tap",
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "postpublish": "git push origin --follow-tags"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/isaacs/minipass.git"
+  },
+  "keywords": [
+    "passthrough",
+    "stream"
+  ],
+  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
+  "license": "ISC",
+  "files": [
+    "index.d.ts",
+    "index.js"
+  ],
+  "tap": {
+    "check-coverage": true
+  },
+  "engines": {
+    "node": ">=8"
+  },
+  "prettier": {
+    "semi": false,
+    "printWidth": 80,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  }
+}
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..97f8e32ed82e4c5254e55c9d1855ea9d922ff761
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) 2017-2023 npm, Inc., Isaac Z. Schlueter, and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass/package.json
new file mode 100644
index 0000000000000000000000000000000000000000..771969b0285469744ca0acdbd276ba239383318a
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/minipass/package.json
@@ -0,0 +1,82 @@
+{
+  "name": "minipass",
+  "version": "7.1.2",
+  "description": "minimal implementation of a PassThrough stream",
+  "main": "./dist/commonjs/index.js",
+  "types": "./dist/commonjs/index.d.ts",
+  "type": "module",
+  "tshy": {
+    "selfLink": false,
+    "main": true,
+    "exports": {
+      "./package.json": "./package.json",
+      ".": "./src/index.ts"
+    }
+  },
+  "exports": {
+    "./package.json": "./package.json",
+    ".": {
+      "import": {
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.js"
+      }
+    }
+  },
+  "files": [
+    "dist"
+  ],
+  "scripts": {
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "prepare": "tshy",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "tap",
+    "snap": "tap",
+    "format": "prettier --write . --loglevel warn",
+    "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
+  },
+  "prettier": {
+    "semi": false,
+    "printWidth": 75,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "devDependencies": {
+    "@types/end-of-stream": "^1.4.2",
+    "@types/node": "^20.1.2",
+    "end-of-stream": "^1.4.0",
+    "node-abort-controller": "^3.1.1",
+    "prettier": "^2.6.2",
+    "tap": "^19.0.0",
+    "through2": "^2.0.3",
+    "tshy": "^1.14.0",
+    "typedoc": "^0.25.1"
+  },
+  "repository": "https://github.com/isaacs/minipass",
+  "keywords": [
+    "passthrough",
+    "stream"
+  ],
+  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
+  "license": "ISC",
+  "engines": {
+    "node": ">=16 || 14 >=14.17"
+  },
+  "tap": {
+    "typecheck": true,
+    "include": [
+      "test/*.ts"
+    ]
+  }
+}
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/mute-stream/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/mute-stream/lib/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..368f727e2c3ed8354e824915a119346442e09486
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/mute-stream/lib/index.js
@@ -0,0 +1,142 @@
+const Stream = require('stream')
+
+class MuteStream extends Stream {
+  #isTTY = null
+
+  constructor (opts = {}) {
+    super(opts)
+    this.writable = this.readable = true
+    this.muted = false
+    this.on('pipe', this._onpipe)
+    this.replace = opts.replace
+
+    // For readline-type situations
+    // If this prompt appears at the start of a line being redrawn after a
+    // control character (such as backspace) is seen, it won't be redrawn
+    // as the replacement
+    this._prompt = opts.prompt || null
+    this._hadControl = false
+  }
+
+  #destSrc (key, def) {
+    if (this._dest) {
+      return this._dest[key]
+    }
+    if (this._src) {
+      return this._src[key]
+    }
+    return def
+  }
+
+  #proxy (method, ...args) {
+    if (typeof this._dest?.[method] === 'function') {
+      this._dest[method](...args)
+    }
+    if (typeof this._src?.[method] === 'function') {
+      this._src[method](...args)
+    }
+  }
+
+  get isTTY () {
+    if (this.#isTTY !== null) {
+      return this.#isTTY
+    }
+    return this.#destSrc('isTTY', false)
+  }
+
+  // basically just replace the getter/setter with a regular value
+  set isTTY (val) {
+    this.#isTTY = val
+  }
+
+  get rows () {
+    return this.#destSrc('rows')
+  }
+
+  get columns () {
+    return this.#destSrc('columns')
+  }
+
+  mute () {
+    this.muted = true
+  }
+
+  unmute () {
+    this.muted = false
+  }
+
+  _onpipe (src) {
+    this._src = src
+  }
+
+  pipe (dest, options) {
+    this._dest = dest
+    return super.pipe(dest, options)
+  }
+
+  pause () {
+    if (this._src) {
+      return this._src.pause()
+    }
+  }
+
+  resume () {
+    if (this._src) {
+      return this._src.resume()
+    }
+  }
+
+  write (c) {
+    if (this.muted) {
+      if (!this.replace) {
+        return true
+      }
+      // eslint-disable-next-line no-control-regex
+      if (c.match(/^\u001b/)) {
+        if (c.indexOf(this._prompt) === 0) {
+          c = c.slice(this._prompt.length)
+          c = c.replace(/./g, this.replace)
+          c = this._prompt + c
+        }
+        this._hadControl = true
+        return this.emit('data', c)
+      } else {
+        if (this._prompt && this._hadControl &&
+          c.indexOf(this._prompt) === 0) {
+          this._hadControl = false
+          this.emit('data', this._prompt)
+          c = c.slice(this._prompt.length)
+        }
+        c = c.toString().replace(/./g, this.replace)
+      }
+    }
+    this.emit('data', c)
+  }
+
+  end (c) {
+    if (this.muted) {
+      if (c && this.replace) {
+        c = c.toString().replace(/./g, this.replace)
+      } else {
+        c = null
+      }
+    }
+    if (c) {
+      this.emit('data', c)
+    }
+    this.emit('end')
+  }
+
+  destroy (...args) {
+    return this.#proxy('destroy', ...args)
+  }
+
+  destroySoon (...args) {
+    return this.#proxy('destroySoon', ...args)
+  }
+
+  close (...args) {
+    return this.#proxy('close', ...args)
+  }
+}
+
+module.exports = MuteStream
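+
+// A minimal usage sketch (editor's illustration, not part of the upstream
+// file); typical for password prompts where echo must be hidden:
+//
+//   const ms = new MuteStream({ replace: '*' })
+//   ms.pipe(process.stdout)
+//   ms.write('visible\n') // printed as-is
+//   ms.mute()
+//   ms.write('secret\n')  // printed as '******'
+//   ms.unmute()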
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/negotiator/lib/charset.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/negotiator/lib/charset.js
new file mode 100644
index 0000000000000000000000000000000000000000..cdd014803474a4b76b981c475a32ebcaa81a36e5
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/negotiator/lib/charset.js
@@ -0,0 +1,169 @@
+/**
+ * negotiator
+ * Copyright(c) 2012 Isaac Z. Schlueter
+ * Copyright(c) 2014 Federico Romero
+ * Copyright(c) 2014-2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
+'use strict';
+
+/**
+ * Module exports.
+ * @public
+ */
+
+module.exports = preferredCharsets;
+module.exports.preferredCharsets = preferredCharsets;
+
+/**
+ * Module variables.
+ * @private
+ */
+
+var simpleCharsetRegExp = /^\s*([^\s;]+)\s*(?:;(.*))?$/;
+
+/**
+ * Parse the Accept-Charset header.
+ * @private
+ */
+
+function parseAcceptCharset(accept) {
+  var accepts = accept.split(',');
+
+  for (var i = 0, j = 0; i < accepts.length; i++) {
+    var charset = parseCharset(accepts[i].trim(), i);
+
+    if (charset) {
+      accepts[j++] = charset;
+    }
+  }
+
+  // trim accepts
+  accepts.length = j;
+
+  return accepts;
+}
+
+/**
+ * Parse a charset from the Accept-Charset header.
+ * @private
+ */
+
+function parseCharset(str, i) {
+  var match = simpleCharsetRegExp.exec(str);
+  if (!match) return null;
+
+  var charset = match[1];
+  var q = 1;
+  if (match[2]) {
+    var params = match[2].split(';');
+    for (var j = 0; j < params.length; j++) {
+      var p = params[j].trim().split('=');
+      if (p[0] === 'q') {
+        q = parseFloat(p[1]);
+        break;
+      }
+    }
+  }
+
+  return {
+    charset: charset,
+    q: q,
+    i: i
+  };
+}
+
+/**
+ * Get the priority of a charset.
+ * @private
+ */
+
+function getCharsetPriority(charset, accepted, index) {
+  var priority = {o: -1, q: 0, s: 0};
+
+  for (var i = 0; i < accepted.length; i++) {
+    var spec = specify(charset, accepted[i], index);
+
+    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
+      priority = spec;
+    }
+  }
+
+  return priority;
+}
+
+/**
+ * Get the specificity of the charset.
+ * @private
+ */
+
+function specify(charset, spec, index) {
+  var s = 0;
+  if (spec.charset.toLowerCase() === charset.toLowerCase()) {
+    s |= 1;
+  } else if (spec.charset !== '*') {
+    return null;
+  }
+
+  return {
+    i: index,
+    o: spec.i,
+    q: spec.q,
+    s: s
+  };
+}
+
+/**
+ * Get the preferred charsets from an Accept-Charset header.
+ * @public
+ */
+
+function preferredCharsets(accept, provided) {
+  // RFC 2616 sec 14.2: no header = *
+  var accepts = parseAcceptCharset(accept === undefined ? '*' : accept || '');
+
+  if (!provided) {
+    // sorted list of all charsets
+    return accepts
+      .filter(isQuality)
+      .sort(compareSpecs)
+      .map(getFullCharset);
+  }
+
+  var priorities = provided.map(function getPriority(type, index) {
+    return getCharsetPriority(type, accepts, index);
+  });
+
+  // sorted list of accepted charsets
+  return priorities.filter(isQuality).sort(compareSpecs).map(function getCharset(priority) {
+    return provided[priorities.indexOf(priority)];
+  });
+}
+
+/**
+ * Compare two specs.
+ * @private
+ */
+
+function compareSpecs(a, b) {
+  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0;
+}
+
+/**
+ * Get full charset string.
+ * @private
+ */
+
+function getFullCharset(spec) {
+  return spec.charset;
+}
+
+/**
+ * Check if a spec has any quality.
+ * @private
+ */
+
+function isQuality(spec) {
+  return spec.q > 0;
+}
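+
+// Brief usage sketch (editor's illustration; header and list values are
+// hypothetical):
+//
+//   preferredCharsets('utf-8;q=0.9, iso-8859-1')
+//   // => ['iso-8859-1', 'utf-8'] (higher q sorts first)
+//   preferredCharsets('utf-8;q=0.9, *;q=0.1', ['utf-8', 'koi8-r'])
+//   // => ['utf-8', 'koi8-r'] ('koi8-r' only matches the '*' entry)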
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/negotiator/lib/encoding.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/negotiator/lib/encoding.js
new file mode 100644
index 0000000000000000000000000000000000000000..9ebb633d67743316863baf3504bfe1d47d53b033
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/negotiator/lib/encoding.js
@@ -0,0 +1,205 @@
+/**
+ * negotiator
+ * Copyright(c) 2012 Isaac Z. Schlueter
+ * Copyright(c) 2014 Federico Romero
+ * Copyright(c) 2014-2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
+'use strict';
+
+/**
+ * Module exports.
+ * @public
+ */
+
+module.exports = preferredEncodings;
+module.exports.preferredEncodings = preferredEncodings;
+
+/**
+ * Module variables.
+ * @private
+ */
+
+var simpleEncodingRegExp = /^\s*([^\s;]+)\s*(?:;(.*))?$/;
+
+/**
+ * Parse the Accept-Encoding header.
+ * @private
+ */
+
+function parseAcceptEncoding(accept) {
+  var accepts = accept.split(',');
+  var hasIdentity = false;
+  var minQuality = 1;
+
+  for (var i = 0, j = 0; i < accepts.length; i++) {
+    var encoding = parseEncoding(accepts[i].trim(), i);
+
+    if (encoding) {
+      accepts[j++] = encoding;
+      hasIdentity = hasIdentity || specify('identity', encoding);
+      minQuality = Math.min(minQuality, encoding.q || 1);
+    }
+  }
+
+  if (!hasIdentity) {
+    /*
+     * If identity doesn't explicitly appear in the accept-encoding header,
+     * it's added to the list of acceptable encoding with the lowest q
+     */
+    accepts[j++] = {
+      encoding: 'identity',
+      q: minQuality,
+      i: i
+    };
+  }
+
+  // trim accepts
+  accepts.length = j;
+
+  return accepts;
+}
+
+/**
+ * Parse an encoding from the Accept-Encoding header.
+ * @private
+ */
+
+function parseEncoding(str, i) {
+  var match = simpleEncodingRegExp.exec(str);
+  if (!match) return null;
+
+  var encoding = match[1];
+  var q = 1;
+  if (match[2]) {
+    var params = match[2].split(';');
+    for (var j = 0; j < params.length; j++) {
+      var p = params[j].trim().split('=');
+      if (p[0] === 'q') {
+        q = parseFloat(p[1]);
+        break;
+      }
+    }
+  }
+
+  return {
+    encoding: encoding,
+    q: q,
+    i: i
+  };
+}
+
+/**
+ * Get the priority of an encoding.
+ * @private
+ */
+
+function getEncodingPriority(encoding, accepted, index) {
+  var priority = {encoding: encoding, o: -1, q: 0, s: 0};
+
+  for (var i = 0; i < accepted.length; i++) {
+    var spec = specify(encoding, accepted[i], index);
+
+    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
+      priority = spec;
+    }
+  }
+
+  return priority;
+}
+
+/**
+ * Get the specificity of the encoding.
+ * @private
+ */
+
+function specify(encoding, spec, index) {
+  var s = 0;
+  if (spec.encoding.toLowerCase() === encoding.toLowerCase()) {
+    s |= 1;
+  } else if (spec.encoding !== '*') {
+    return null;
+  }
+
+  return {
+    encoding: encoding,
+    i: index,
+    o: spec.i,
+    q: spec.q,
+    s: s
+  };
+}
+
+/**
+ * Get the preferred encodings from an Accept-Encoding header.
+ * @public
+ */
+
+function preferredEncodings(accept, provided, preferred) {
+  var accepts = parseAcceptEncoding(accept || '');
+
+  var comparator = preferred ? function comparator (a, b) {
+    if (a.q !== b.q) {
+      return b.q - a.q // higher quality first
+    }
+
+    var aPreferred = preferred.indexOf(a.encoding)
+    var bPreferred = preferred.indexOf(b.encoding)
+
+    if (aPreferred === -1 && bPreferred === -1) {
+      // consider the original specificity/order
+      return (b.s - a.s) || (a.o - b.o) || (a.i - b.i)
+    }
+
+    if (aPreferred !== -1 && bPreferred !== -1) {
+      return aPreferred - bPreferred // consider the preferred order
+    }
+
+    return aPreferred === -1 ? 1 : -1 // preferred first
+  } : compareSpecs;
+
+  if (!provided) {
+    // sorted list of all encodings
+    return accepts
+      .filter(isQuality)
+      .sort(comparator)
+      .map(getFullEncoding);
+  }
+
+  var priorities = provided.map(function getPriority(type, index) {
+    return getEncodingPriority(type, accepts, index);
+  });
+
+  // sorted list of accepted encodings
+  return priorities.filter(isQuality).sort(comparator).map(function getEncoding(priority) {
+    return provided[priorities.indexOf(priority)];
+  });
+}
+
+/**
+ * Compare two specs.
+ * @private
+ */
+
+function compareSpecs(a, b) {
+  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i);
+}
+
+/**
+ * Get full encoding string.
+ * @private
+ */
+
+function getFullEncoding(spec) {
+  return spec.encoding;
+}
+
+/**
+ * Check if a spec has any quality.
+ * @private
+ */
+
+function isQuality(spec) {
+  return spec.q > 0;
+}
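+
+// Brief usage sketch (editor's illustration; header and list values are
+// hypothetical):
+//
+//   preferredEncodings('gzip;q=0.8, br')
+//   // => ['br', 'gzip', 'identity'] (identity is implied at the lowest q)
+//   preferredEncodings('gzip, br', ['br', 'gzip'], ['gzip'])
+//   // => ['gzip', 'br'] (equal q, so the preferred list breaks the tie)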
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/negotiator/lib/language.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/negotiator/lib/language.js
new file mode 100644
index 0000000000000000000000000000000000000000..a23167252719be841ad570eb655a703a4ae8fe9e
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/negotiator/lib/language.js
@@ -0,0 +1,179 @@
+/**
+ * negotiator
+ * Copyright(c) 2012 Isaac Z. Schlueter
+ * Copyright(c) 2014 Federico Romero
+ * Copyright(c) 2014-2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
+'use strict';
+
+/**
+ * Module exports.
+ * @public
+ */
+
+module.exports = preferredLanguages;
+module.exports.preferredLanguages = preferredLanguages;
+
+/**
+ * Module variables.
+ * @private
+ */
+
+var simpleLanguageRegExp = /^\s*([^\s\-;]+)(?:-([^\s;]+))?\s*(?:;(.*))?$/;
+
+/**
+ * Parse the Accept-Language header.
+ * @private
+ */
+
+function parseAcceptLanguage(accept) {
+  var accepts = accept.split(',');
+
+  for (var i = 0, j = 0; i < accepts.length; i++) {
+    var language = parseLanguage(accepts[i].trim(), i);
+
+    if (language) {
+      accepts[j++] = language;
+    }
+  }
+
+  // trim accepts
+  accepts.length = j;
+
+  return accepts;
+}
+
+/**
+ * Parse a language from the Accept-Language header.
+ * @private
+ */
+
+function parseLanguage(str, i) {
+  var match = simpleLanguageRegExp.exec(str);
+  if (!match) return null;
+
+  var prefix = match[1];
+  var suffix = match[2];
+  var full = prefix;
+
+  if (suffix) full += '-' + suffix;
+
+  var q = 1;
+  if (match[3]) {
+    var params = match[3].split(';');
+    for (var j = 0; j < params.length; j++) {
+      var p = params[j].split('=');
+      if (p[0] === 'q') q = parseFloat(p[1]);
+    }
+  }
+
+  return {
+    prefix: prefix,
+    suffix: suffix,
+    q: q,
+    i: i,
+    full: full
+  };
+}
+
+/**
+ * Get the priority of a language.
+ * @private
+ */
+
+function getLanguagePriority(language, accepted, index) {
+  var priority = {o: -1, q: 0, s: 0};
+
+  for (var i = 0; i < accepted.length; i++) {
+    var spec = specify(language, accepted[i], index);
+
+    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
+      priority = spec;
+    }
+  }
+
+  return priority;
+}
+
+/**
+ * Get the specificity of the language.
+ * @private
+ */
+
+function specify(language, spec, index) {
+  var p = parseLanguage(language);
+  if (!p) return null;
+  var s = 0;
+  if (spec.full.toLowerCase() === p.full.toLowerCase()) {
+    s |= 4;
+  } else if (spec.prefix.toLowerCase() === p.full.toLowerCase()) {
+    s |= 2;
+  } else if (spec.full.toLowerCase() === p.prefix.toLowerCase()) {
+    s |= 1;
+  } else if (spec.full !== '*') {
+    return null;
+  }
+
+  return {
+    i: index,
+    o: spec.i,
+    q: spec.q,
+    s: s
+  };
+}
+
+/**
+ * Get the preferred languages from an Accept-Language header.
+ * @public
+ */
+
+function preferredLanguages(accept, provided) {
+  // RFC 2616 sec 14.4: no header = *
+  var accepts = parseAcceptLanguage(accept === undefined ? '*' : accept || '');
+
+  if (!provided) {
+    // sorted list of all languages
+    return accepts
+      .filter(isQuality)
+      .sort(compareSpecs)
+      .map(getFullLanguage);
+  }
+
+  var priorities = provided.map(function getPriority(type, index) {
+    return getLanguagePriority(type, accepts, index);
+  });
+
+  // sorted list of accepted languages
+  return priorities.filter(isQuality).sort(compareSpecs).map(function getLanguage(priority) {
+    return provided[priorities.indexOf(priority)];
+  });
+}
+
+/**
+ * Compare two specs.
+ * @private
+ */
+
+function compareSpecs(a, b) {
+  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0;
+}
+
+/**
+ * Get full language string.
+ * @private
+ */
+
+function getFullLanguage(spec) {
+  return spec.full;
+}
+
+/**
+ * Check if a spec has any quality.
+ * @private
+ */
+
+function isQuality(spec) {
+  return spec.q > 0;
+}
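+
+// Brief usage sketch (editor's illustration; header and list values are
+// hypothetical):
+//
+//   preferredLanguages('en;q=0.8, en-GB', ['en-GB', 'en', 'fr'])
+//   // => ['en-GB', 'en'] ('fr' matches nothing and is dropped)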
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/negotiator/lib/mediaType.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/negotiator/lib/mediaType.js
new file mode 100644
index 0000000000000000000000000000000000000000..8e402ea88394c040bf5964f65b8eba33b50bd4a1
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/negotiator/lib/mediaType.js
@@ -0,0 +1,294 @@
+/**
+ * negotiator
+ * Copyright(c) 2012 Isaac Z. Schlueter
+ * Copyright(c) 2014 Federico Romero
+ * Copyright(c) 2014-2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
+'use strict';
+
+/**
+ * Module exports.
+ * @public
+ */
+
+module.exports = preferredMediaTypes;
+module.exports.preferredMediaTypes = preferredMediaTypes;
+
+/**
+ * Module variables.
+ * @private
+ */
+
+var simpleMediaTypeRegExp = /^\s*([^\s\/;]+)\/([^;\s]+)\s*(?:;(.*))?$/;
+
+/**
+ * Parse the Accept header.
+ * @private
+ */
+
+function parseAccept(accept) {
+  var accepts = splitMediaTypes(accept);
+
+  for (var i = 0, j = 0; i < accepts.length; i++) {
+    var mediaType = parseMediaType(accepts[i].trim(), i);
+
+    if (mediaType) {
+      accepts[j++] = mediaType;
+    }
+  }
+
+  // trim accepts
+  accepts.length = j;
+
+  return accepts;
+}
+
+/**
+ * Parse a media type from the Accept header.
+ * @private
+ */
+
+function parseMediaType(str, i) {
+  var match = simpleMediaTypeRegExp.exec(str);
+  if (!match) return null;
+
+  var params = Object.create(null);
+  var q = 1;
+  var subtype = match[2];
+  var type = match[1];
+
+  if (match[3]) {
+    var kvps = splitParameters(match[3]).map(splitKeyValuePair);
+
+    for (var j = 0; j < kvps.length; j++) {
+      var pair = kvps[j];
+      var key = pair[0].toLowerCase();
+      var val = pair[1];
+
+      // get the value, unwrapping quotes
+      var value = val && val[0] === '"' && val[val.length - 1] === '"'
+        ? val.slice(1, -1)
+        : val;
+
+      if (key === 'q') {
+        q = parseFloat(value);
+        break;
+      }
+
+      // store parameter
+      params[key] = value;
+    }
+  }
+
+  return {
+    type: type,
+    subtype: subtype,
+    params: params,
+    q: q,
+    i: i
+  };
+}
+
+/**
+ * Get the priority of a media type.
+ * @private
+ */
+
+function getMediaTypePriority(type, accepted, index) {
+  var priority = {o: -1, q: 0, s: 0};
+
+  for (var i = 0; i < accepted.length; i++) {
+    var spec = specify(type, accepted[i], index);
+
+    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
+      priority = spec;
+    }
+  }
+
+  return priority;
+}
+
+/**
+ * Get the specificity of the media type.
+ * @private
+ */
+
+function specify(type, spec, index) {
+  var p = parseMediaType(type);
+  var s = 0;
+
+  if (!p) {
+    return null;
+  }
+
+  if (spec.type.toLowerCase() === p.type.toLowerCase()) {
+    s |= 4;
+  } else if (spec.type !== '*') {
+    return null;
+  }
+
+  if (spec.subtype.toLowerCase() === p.subtype.toLowerCase()) {
+    s |= 2;
+  } else if (spec.subtype !== '*') {
+    return null;
+  }
+
+  var keys = Object.keys(spec.params);
+  if (keys.length > 0) {
+    if (keys.every(function (k) {
+      return spec.params[k] === '*' || (spec.params[k] || '').toLowerCase() === (p.params[k] || '').toLowerCase();
+    })) {
+      s |= 1;
+    } else {
+      return null;
+    }
+  }
+
+  return {
+    i: index,
+    o: spec.i,
+    q: spec.q,
+    s: s
+  };
+}
+
+/**
+ * Get the preferred media types from an Accept header.
+ * @public
+ */
+
+function preferredMediaTypes(accept, provided) {
+  // RFC 2616 sec 14.1: no header = */*
+  var accepts = parseAccept(accept === undefined ? '*/*' : accept || '');
+
+  if (!provided) {
+    // sorted list of all types
+    return accepts
+      .filter(isQuality)
+      .sort(compareSpecs)
+      .map(getFullType);
+  }
+
+  var priorities = provided.map(function getPriority(type, index) {
+    return getMediaTypePriority(type, accepts, index);
+  });
+
+  // sorted list of accepted types
+  return priorities.filter(isQuality).sort(compareSpecs).map(function getType(priority) {
+    return provided[priorities.indexOf(priority)];
+  });
+}
+
+/**
+ * Compare two specs.
+ * @private
+ */
+
+function compareSpecs(a, b) {
+  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0;
+}
+
+/**
+ * Get full type string.
+ * @private
+ */
+
+function getFullType(spec) {
+  return spec.type + '/' + spec.subtype;
+}
+
+/**
+ * Check if a spec has any quality.
+ * @private
+ */
+
+function isQuality(spec) {
+  return spec.q > 0;
+}
+
+/**
+ * Count the number of quotes in a string.
+ * @private
+ */
+
+function quoteCount(string) {
+  var count = 0;
+  var index = 0;
+
+  while ((index = string.indexOf('"', index)) !== -1) {
+    count++;
+    index++;
+  }
+
+  return count;
+}
+
+/**
+ * Split a key value pair.
+ * @private
+ */
+
+function splitKeyValuePair(str) {
+  var index = str.indexOf('=');
+  var key;
+  var val;
+
+  if (index === -1) {
+    key = str;
+  } else {
+    key = str.slice(0, index);
+    val = str.slice(index + 1);
+  }
+
+  return [key, val];
+}
+
+/**
+ * Split an Accept header into media types.
+ * @private
+ */
+
+function splitMediaTypes(accept) {
+  var accepts = accept.split(',');
+
+  for (var i = 1, j = 0; i < accepts.length; i++) {
+    if (quoteCount(accepts[j]) % 2 === 0) {
+      accepts[++j] = accepts[i];
+    } else {
+      accepts[j] += ',' + accepts[i];
+    }
+  }
+
+  // trim accepts
+  accepts.length = j + 1;
+
+  return accepts;
+}
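+
+// e.g. (editor's note): splitMediaTypes('text/plain;x="a,b", text/html')
+// returns ['text/plain;x="a,b"', ' text/html']; the quoted comma is not
+// treated as a separator.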
+
+/**
+ * Split a string of parameters.
+ * @private
+ */
+
+function splitParameters(str) {
+  var parameters = str.split(';');
+
+  for (var i = 1, j = 0; i < parameters.length; i++) {
+    if (quoteCount(parameters[j]) % 2 === 0) {
+      parameters[++j] = parameters[i];
+    } else {
+      parameters[j] += ';' + parameters[i];
+    }
+  }
+
+  // trim parameters
+  parameters.length = j + 1;
+
+  for (var i = 0; i < parameters.length; i++) {
+    parameters[i] = parameters[i].trim();
+  }
+
+  return parameters;
+}
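+
+// Brief usage sketch (editor's illustration; header and list values are
+// hypothetical):
+//
+//   preferredMediaTypes('text/html;q=0.5, */*;q=0.1', ['application/json', 'text/html'])
+//   // => ['text/html', 'application/json']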
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/node-gyp/node_modules/jackspeak/dist/commonjs/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/node-gyp/node_modules/jackspeak/dist/commonjs/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..f7fc9cb69a2af0fad420b7a8a07ec11d6202bf1b
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/node-gyp/node_modules/jackspeak/dist/commonjs/index.js
@@ -0,0 +1,1010 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.jack = exports.Jack = exports.isConfigOption = exports.isConfigType = void 0;
+const node_util_1 = require("node:util");
+const parse_args_js_1 = require("./parse-args.js");
+// it's a tiny API, just cast it inline, it's fine
+//@ts-ignore
+const cliui_1 = __importDefault(require("@isaacs/cliui"));
+const node_path_1 = require("node:path");
+const width = Math.min((process && process.stdout && process.stdout.columns) || 80, 80);
+// indentation spaces from heading level
+const indent = (n) => (n - 1) * 2;
+const toEnvKey = (pref, key) => {
+    return [pref, key.replace(/[^a-zA-Z0-9]+/g, ' ')]
+        .join(' ')
+        .trim()
+        .toUpperCase()
+        .replace(/ /g, '_');
+};
+const toEnvVal = (value, delim = '\n') => {
+    const str = typeof value === 'string' ? value
+        : typeof value === 'boolean' ?
+            value ? '1'
+                : '0'
+            : typeof value === 'number' ? String(value)
+                : Array.isArray(value) ?
+                    value.map((v) => toEnvVal(v)).join(delim)
+                    : /* c8 ignore start */ undefined;
+    if (typeof str !== 'string') {
+        throw new Error(`could not serialize value to environment: ${JSON.stringify(value)}`);
+    }
+    /* c8 ignore stop */
+    return str;
+};
+const fromEnvVal = (env, type, multiple, delim = '\n') => (multiple ?
+    env ? env.split(delim).map(v => fromEnvVal(v, type, false))
+        : []
+    : type === 'string' ? env
+        : type === 'boolean' ? env === '1'
+            : +env.trim());
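+// e.g. (editor's note): toEnvKey('tap', 'no-cov') === 'TAP_NO_COV';
+// toEnvVal(['a', 'b']) === 'a\nb', and fromEnvVal('a\nb', 'string', true)
+// returns ['a', 'b'].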
+const isConfigType = (t) => typeof t === 'string' &&
+    (t === 'string' || t === 'number' || t === 'boolean');
+exports.isConfigType = isConfigType;
+const undefOrType = (v, t) => v === undefined || typeof v === t;
+const undefOrTypeArray = (v, t) => v === undefined || (Array.isArray(v) && v.every(x => typeof x === t));
+const isValidOption = (v, vo) => Array.isArray(v) ? v.every(x => isValidOption(x, vo)) : vo.includes(v);
+// print the value type, for error message reporting
+const valueType = (v) => typeof v === 'string' ? 'string'
+    : typeof v === 'boolean' ? 'boolean'
+        : typeof v === 'number' ? 'number'
+            : Array.isArray(v) ?
+                joinTypes([...new Set(v.map(v => valueType(v)))]) + '[]'
+                : `${v.type}${v.multiple ? '[]' : ''}`;
+const joinTypes = (types) => types.length === 1 && typeof types[0] === 'string' ?
+    types[0]
+    : `(${types.join('|')})`;
+const isValidValue = (v, type, multi) => {
+    if (multi) {
+        if (!Array.isArray(v))
+            return false;
+        return !v.some((v) => !isValidValue(v, type, false));
+    }
+    if (Array.isArray(v))
+        return false;
+    return typeof v === type;
+};
+const isConfigOption = (o, type, multi) => !!o &&
+    typeof o === 'object' &&
+    (0, exports.isConfigType)(o.type) &&
+    o.type === type &&
+    undefOrType(o.short, 'string') &&
+    undefOrType(o.description, 'string') &&
+    undefOrType(o.hint, 'string') &&
+    undefOrType(o.validate, 'function') &&
+    (o.type === 'boolean' ?
+        o.validOptions === undefined
+        : undefOrTypeArray(o.validOptions, o.type)) &&
+    (o.default === undefined || isValidValue(o.default, type, multi)) &&
+    !!o.multiple === multi;
+exports.isConfigOption = isConfigOption;
+function num(o = {}) {
+    const { default: def, validate: val, validOptions, ...rest } = o;
+    if (def !== undefined && !isValidValue(def, 'number', false)) {
+        throw new TypeError('invalid default value', {
+            cause: {
+                found: def,
+                wanted: 'number',
+            },
+        });
+    }
+    if (!undefOrTypeArray(validOptions, 'number')) {
+        throw new TypeError('invalid validOptions', {
+            cause: {
+                found: validOptions,
+                wanted: 'number[]',
+            },
+        });
+    }
+    const validate = val ?
+        val
+        : undefined;
+    return {
+        ...rest,
+        default: def,
+        validate,
+        validOptions,
+        type: 'number',
+        multiple: false,
+    };
+}
+function numList(o = {}) {
+    const { default: def, validate: val, validOptions, ...rest } = o;
+    if (def !== undefined && !isValidValue(def, 'number', true)) {
+        throw new TypeError('invalid default value', {
+            cause: {
+                found: def,
+                wanted: 'number[]',
+            },
+        });
+    }
+    if (!undefOrTypeArray(validOptions, 'number')) {
+        throw new TypeError('invalid validOptions', {
+            cause: {
+                found: validOptions,
+                wanted: 'number[]',
+            },
+        });
+    }
+    const validate = val ?
+        val
+        : undefined;
+    return {
+        ...rest,
+        default: def,
+        validate,
+        validOptions,
+        type: 'number',
+        multiple: true,
+    };
+}
+function opt(o = {}) {
+    const { default: def, validate: val, validOptions, ...rest } = o;
+    if (def !== undefined && !isValidValue(def, 'string', false)) {
+        throw new TypeError('invalid default value', {
+            cause: {
+                found: def,
+                wanted: 'string',
+            },
+        });
+    }
+    if (!undefOrTypeArray(validOptions, 'string')) {
+        throw new TypeError('invalid validOptions', {
+            cause: {
+                found: validOptions,
+                wanted: 'string[]',
+            },
+        });
+    }
+    const validate = val ?
+        val
+        : undefined;
+    return {
+        ...rest,
+        default: def,
+        validate,
+        validOptions,
+        type: 'string',
+        multiple: false,
+    };
+}
+function optList(o = {}) {
+    const { default: def, validate: val, validOptions, ...rest } = o;
+    if (def !== undefined && !isValidValue(def, 'string', true)) {
+        throw new TypeError('invalid default value', {
+            cause: {
+                found: def,
+                wanted: 'string[]',
+            },
+        });
+    }
+    if (!undefOrTypeArray(validOptions, 'string')) {
+        throw new TypeError('invalid validOptions', {
+            cause: {
+                found: validOptions,
+                wanted: 'string[]',
+            },
+        });
+    }
+    const validate = val ?
+        val
+        : undefined;
+    return {
+        ...rest,
+        default: def,
+        validate,
+        validOptions,
+        type: 'string',
+        multiple: true,
+    };
+}
+function flag(o = {}) {
+    const { hint, default: def, validate: val, ...rest } = o;
+    delete rest.validOptions;
+    if (def !== undefined && !isValidValue(def, 'boolean', false)) {
+        throw new TypeError('invalid default value');
+    }
+    const validate = val ?
+        val
+        : undefined;
+    if (hint !== undefined) {
+        throw new TypeError('cannot provide hint for flag');
+    }
+    return {
+        ...rest,
+        default: def,
+        validate,
+        type: 'boolean',
+        multiple: false,
+    };
+}
+function flagList(o = {}) {
+    const { hint, default: def, validate: val, ...rest } = o;
+    delete rest.validOptions;
+    if (def !== undefined && !isValidValue(def, 'boolean', true)) {
+        throw new TypeError('invalid default value');
+    }
+    const validate = val ?
+        val
+        : undefined;
+    if (hint !== undefined) {
+        throw new TypeError('cannot provide hint for flag list');
+    }
+    return {
+        ...rest,
+        default: def,
+        validate,
+        type: 'boolean',
+        multiple: true,
+    };
+}
+const toParseArgsOptionsConfig = (options) => {
+    const c = {};
+    for (const longOption in options) {
+        const config = options[longOption];
+        /* c8 ignore start */
+        if (!config) {
+            throw new Error('config must be an object: ' + longOption);
+        }
+        /* c8 ignore stop */
+        if ((0, exports.isConfigOption)(config, 'number', true)) {
+            c[longOption] = {
+                type: 'string',
+                multiple: true,
+                default: config.default?.map(c => String(c)),
+            };
+        }
+        else if ((0, exports.isConfigOption)(config, 'number', false)) {
+            c[longOption] = {
+                type: 'string',
+                multiple: false,
+                default: config.default === undefined ?
+                    undefined
+                    : String(config.default),
+            };
+        }
+        else {
+            const conf = config;
+            c[longOption] = {
+                type: conf.type,
+                multiple: !!conf.multiple,
+                default: conf.default,
+            };
+        }
+        const clo = c[longOption];
+        if (typeof config.short === 'string') {
+            clo.short = config.short;
+        }
+        if (config.type === 'boolean' &&
+            !longOption.startsWith('no-') &&
+            !options[`no-${longOption}`]) {
+            c[`no-${longOption}`] = {
+                type: 'boolean',
+                multiple: config.multiple,
+            };
+        }
+    }
+    return c;
+};
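+// e.g. (editor's note): a config like { color: { type: 'boolean' } } also
+// registers a synthetic 'no-color' boolean here, so `--no-color` parses;
+// parseRaw() later folds a 'no-color' token back into `color: false`.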
+const isHeading = (r) => r.type === 'heading';
+const isDescription = (r) => r.type === 'description';
+/**
+ * Class returned by the {@link jack} function and all configuration
+ * definition methods.  This is what gets chained together.
+ */
+class Jack {
+    #configSet;
+    #shorts;
+    #options;
+    #fields = [];
+    #env;
+    #envPrefix;
+    #allowPositionals;
+    #usage;
+    #usageMarkdown;
+    constructor(options = {}) {
+        this.#options = options;
+        this.#allowPositionals = options.allowPositionals !== false;
+        this.#env =
+            this.#options.env === undefined ? process.env : this.#options.env;
+        this.#envPrefix = options.envPrefix;
+        // We need to fib a little, because it's always the same object, but it
+        // starts out as having an empty config set.  Then each method that adds
+        // fields returns `this as Jack`
+        this.#configSet = Object.create(null);
+        this.#shorts = Object.create(null);
+    }
+    /**
+     * Set the default value (which will still be overridden by env or cli)
+     * as if from a parsed config file. The optional `source` param, if
+     * provided, will be included in error messages if a value is invalid or
+     * unknown.
+     */
+    setConfigValues(values, source = '') {
+        try {
+            this.validate(values);
+        }
+        catch (er) {
+            const e = er;
+            if (source && e && typeof e === 'object') {
+                if (e.cause && typeof e.cause === 'object') {
+                    Object.assign(e.cause, { path: source });
+                }
+                else {
+                    e.cause = { path: source };
+                }
+            }
+            throw e;
+        }
+        for (const [field, value] of Object.entries(values)) {
+            const my = this.#configSet[field];
+            // already validated, just for TS's benefit
+            /* c8 ignore start */
+            if (!my) {
+                throw new Error('unexpected field in config set: ' + field, {
+                    cause: { found: field },
+                });
+            }
+            /* c8 ignore stop */
+            my.default = value;
+        }
+        return this;
+    }
+    /**
+     * Parse a string of arguments, and return the resulting
+     * `{ values, positionals }` object.
+     *
+     * If an {@link JackOptions#envPrefix} is set, then it will read default
+     * values from the environment, and write the resulting values back
+     * to the environment as well.
+     *
+     * Environment values always take precedence over any other value, except
+     * an explicit CLI setting.
+     */
+    parse(args = process.argv) {
+        this.loadEnvDefaults();
+        const p = this.parseRaw(args);
+        this.applyDefaults(p);
+        this.writeEnv(p);
+        return p;
+    }
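+    // A minimal end-to-end sketch (editor's illustration; the option names
+    // are hypothetical):
+    //
+    //   const { jack } = require('jackspeak')
+    //   const j = jack({ envPrefix: 'FOO' })
+    //     .flag({ verbose: { short: 'v' } })
+    //     .num({ port: { default: 8080 } })
+    //   const { values, positionals } = j.parse(['-v', '--port=3000', 'file.txt'])
+    //   // values => { verbose: true, port: 3000 }, positionals => ['file.txt']
+    //   // and FOO_VERBOSE=1, FOO_PORT=3000 are written back to the env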
+    loadEnvDefaults() {
+        if (this.#envPrefix) {
+            for (const [field, my] of Object.entries(this.#configSet)) {
+                const ek = toEnvKey(this.#envPrefix, field);
+                const env = this.#env[ek];
+                if (env !== undefined) {
+                    my.default = fromEnvVal(env, my.type, !!my.multiple, my.delim);
+                }
+            }
+        }
+    }
+    applyDefaults(p) {
+        for (const [field, c] of Object.entries(this.#configSet)) {
+            if (c.default !== undefined && !(field in p.values)) {
+                //@ts-ignore
+                p.values[field] = c.default;
+            }
+        }
+    }
+    /**
+     * Only parse the command line arguments passed in.
+     * Does not strip off the `node script.js` bits, so it must be just the
+     * arguments you wish to have parsed.
+     * Does not read from or write to the environment, or set defaults.
+     */
+    parseRaw(args) {
+        if (args === process.argv) {
+            args = args.slice(process._eval !== undefined ? 1 : 2);
+        }
+        const options = toParseArgsOptionsConfig(this.#configSet);
+        const result = (0, parse_args_js_1.parseArgs)({
+            args,
+            options,
+            // always strict, but using our own logic
+            strict: false,
+            allowPositionals: this.#allowPositionals,
+            tokens: true,
+        });
+        const p = {
+            values: {},
+            positionals: [],
+        };
+        for (const token of result.tokens) {
+            if (token.kind === 'positional') {
+                p.positionals.push(token.value);
+                if (this.#options.stopAtPositional ||
+                    this.#options.stopAtPositionalTest?.(token.value)) {
+                    p.positionals.push(...args.slice(token.index + 1));
+                    break;
+                }
+            }
+            else if (token.kind === 'option') {
+                let value = undefined;
+                if (token.name.startsWith('no-')) {
+                    const my = this.#configSet[token.name];
+                    const pname = token.name.substring('no-'.length);
+                    const pos = this.#configSet[pname];
+                    if (pos &&
+                        pos.type === 'boolean' &&
+                        (!my ||
+                            (my.type === 'boolean' && !!my.multiple === !!pos.multiple))) {
+                        value = false;
+                        token.name = pname;
+                    }
+                }
+                const my = this.#configSet[token.name];
+                if (!my) {
+                    throw new Error(`Unknown option '${token.rawName}'. ` +
+                        `To specify a positional argument starting with a '-', ` +
+                        `place it at the end of the command after '--', as in ` +
+                        `'-- ${token.rawName}'`, {
+                        cause: {
+                            found: token.rawName + (token.value ? `=${token.value}` : ''),
+                        },
+                    });
+                }
+                if (value === undefined) {
+                    if (token.value === undefined) {
+                        if (my.type !== 'boolean') {
+                            throw new Error(`No value provided for ${token.rawName}, expected ${my.type}`, {
+                                cause: {
+                                    name: token.rawName,
+                                    wanted: valueType(my),
+                                },
+                            });
+                        }
+                        value = true;
+                    }
+                    else {
+                        if (my.type === 'boolean') {
+                            throw new Error(`Flag ${token.rawName} does not take a value, received '${token.value}'`, { cause: { found: token } });
+                        }
+                        if (my.type === 'string') {
+                            value = token.value;
+                        }
+                        else {
+                            value = +token.value;
+                            if (value !== value) {
+                                throw new Error(`Invalid value '${token.value}' provided for ` +
+                                    `'${token.rawName}' option, expected number`, {
+                                    cause: {
+                                        name: token.rawName,
+                                        found: token.value,
+                                        wanted: 'number',
+                                    },
+                                });
+                            }
+                        }
+                    }
+                }
+                if (my.multiple) {
+                    const pv = p.values;
+                    const tn = pv[token.name] ?? [];
+                    pv[token.name] = tn;
+                    tn.push(value);
+                }
+                else {
+                    const pv = p.values;
+                    pv[token.name] = value;
+                }
+            }
+        }
+        for (const [field, value] of Object.entries(p.values)) {
+            const valid = this.#configSet[field]?.validate;
+            const validOptions = this.#configSet[field]?.validOptions;
+            let cause;
+            if (validOptions && !isValidOption(value, validOptions)) {
+                cause = { name: field, found: value, validOptions: validOptions };
+            }
+            if (valid && !valid(value)) {
+                cause = cause || { name: field, found: value };
+            }
+            if (cause) {
+                throw new Error(`Invalid value provided for --${field}: ${JSON.stringify(value)}`, { cause });
+            }
+        }
+        return p;
+    }
+    /**
+     * Do not allow setting a field as 'no-foo' when 'foo' exists and both
+     * are booleans; just set 'foo' directly instead.
+     */
+    #noNoFields(f, val, s = f) {
+        if (!f.startsWith('no-') || typeof val !== 'boolean')
+            return;
+        const yes = f.substring('no-'.length);
+        // recurse so we get the core config key we care about.
+        this.#noNoFields(yes, val, s);
+        if (this.#configSet[yes]?.type === 'boolean') {
+            throw new Error(`do not set '${s}', instead set '${yes}' as desired.`, { cause: { found: s, wanted: yes } });
+        }
+    }
+    /**
+     * Validate that any arbitrary object is a valid configuration `values`
+     * object.  Useful when loading config files or other sources.
+     */
+    validate(o) {
+        if (!o || typeof o !== 'object') {
+            throw new Error('Invalid config: not an object', {
+                cause: { found: o },
+            });
+        }
+        const opts = o;
+        for (const field in o) {
+            const value = opts[field];
+            /* c8 ignore next - for TS */
+            if (value === undefined)
+                continue;
+            this.#noNoFields(field, value);
+            const config = this.#configSet[field];
+            if (!config) {
+                throw new Error(`Unknown config option: ${field}`, {
+                    cause: { found: field },
+                });
+            }
+            if (!isValidValue(value, config.type, !!config.multiple)) {
+                throw new Error(`Invalid value ${valueType(value)} for ${field}, expected ${valueType(config)}`, {
+                    cause: {
+                        name: field,
+                        found: value,
+                        wanted: valueType(config),
+                    },
+                });
+            }
+            let cause;
+            if (config.validOptions &&
+                !isValidOption(value, config.validOptions)) {
+                cause = {
+                    name: field,
+                    found: value,
+                    validOptions: config.validOptions,
+                };
+            }
+            if (config.validate && !config.validate(value)) {
+                cause = cause || { name: field, found: value };
+            }
+            if (cause) {
+                throw new Error(`Invalid config value for ${field}: ${value}`, {
+                    cause,
+                });
+            }
+        }
+    }
+    writeEnv(p) {
+        if (!this.#env || !this.#envPrefix)
+            return;
+        for (const [field, value] of Object.entries(p.values)) {
+            const my = this.#configSet[field];
+            this.#env[toEnvKey(this.#envPrefix, field)] = toEnvVal(value, my?.delim);
+        }
+    }
+    /**
+     * Add a heading to the usage output banner
+     */
+    heading(text, level, { pre = false } = {}) {
+        if (level === undefined) {
+            level = this.#fields.some(r => isHeading(r)) ? 2 : 1;
+        }
+        this.#fields.push({ type: 'heading', text, level, pre });
+        return this;
+    }
+    /**
+     * Add a long-form description to the usage output at this position.
+     */
+    description(text, { pre } = {}) {
+        this.#fields.push({ type: 'description', text, pre });
+        return this;
+    }
+    /**
+     * Add one or more number fields.
+     */
+    num(fields) {
+        return this.#addFields(fields, num);
+    }
+    /**
+     * Add one or more multiple number fields.
+     */
+    numList(fields) {
+        return this.#addFields(fields, numList);
+    }
+    /**
+     * Add one or more string option fields.
+     */
+    opt(fields) {
+        return this.#addFields(fields, opt);
+    }
+    /**
+     * Add one or more multiple string option fields.
+     */
+    optList(fields) {
+        return this.#addFields(fields, optList);
+    }
+    /**
+     * Add one or more flag fields.
+     */
+    flag(fields) {
+        return this.#addFields(fields, flag);
+    }
+    /**
+     * Add one or more multiple flag fields.
+     */
+    flagList(fields) {
+        return this.#addFields(fields, flagList);
+    }
+    /**
+     * Generic field definition method. Similar to flag/flagList/number/etc,
+     * but you must specify the `type` (and optionally `multiple` and `delim`)
+     * fields on each one, or Jack won't know how to define them.
+     */
+    addFields(fields) {
+        const next = this;
+        for (const [name, field] of Object.entries(fields)) {
+            this.#validateName(name, field);
+            next.#fields.push({
+                type: 'config',
+                name,
+                value: field,
+            });
+        }
+        Object.assign(next.#configSet, fields);
+        return next;
+    }
+    #addFields(fields, fn) {
+        const next = this;
+        Object.assign(next.#configSet, Object.fromEntries(Object.entries(fields).map(([name, field]) => {
+            this.#validateName(name, field);
+            const option = fn(field);
+            next.#fields.push({
+                type: 'config',
+                name,
+                value: option,
+            });
+            return [name, option];
+        })));
+        return next;
+    }
+    #validateName(name, field) {
+        if (!/^[a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?$/.test(name)) {
+            throw new TypeError(`Invalid option name: ${name}, ` +
+                `must be '-' delimited ASCII alphanumeric`);
+        }
+        if (this.#configSet[name]) {
+            throw new TypeError(`Cannot redefine option ${name}`);
+        }
+        if (this.#shorts[name]) {
+            throw new TypeError(`Cannot redefine option ${name}, already ` +
+                `in use for ${this.#shorts[name]}`);
+        }
+        if (field.short) {
+            if (!/^[a-zA-Z0-9]$/.test(field.short)) {
+                throw new TypeError(`Invalid ${name} short option: ${field.short}, ` +
+                    'must be 1 ASCII alphanumeric character');
+            }
+            if (this.#shorts[field.short]) {
+                throw new TypeError(`Invalid ${name} short option: ${field.short}, ` +
+                    `already in use for ${this.#shorts[field.short]}`);
+            }
+            this.#shorts[field.short] = name;
+            this.#shorts[name] = name;
+        }
+    }
+    /**
+     * Return the usage banner for the given configuration
+     */
+    usage() {
+        if (this.#usage)
+            return this.#usage;
+        let headingLevel = 1;
+        const ui = (0, cliui_1.default)({ width });
+        const first = this.#fields[0];
+        let start = first?.type === 'heading' ? 1 : 0;
+        if (first?.type === 'heading') {
+            ui.div({
+                padding: [0, 0, 0, 0],
+                text: normalize(first.text),
+            });
+        }
+        ui.div({ padding: [0, 0, 0, 0], text: 'Usage:' });
+        if (this.#options.usage) {
+            ui.div({
+                text: this.#options.usage,
+                padding: [0, 0, 0, 2],
+            });
+        }
+        else {
+            const cmd = (0, node_path_1.basename)(String(process.argv[1]));
+            const shortFlags = [];
+            const shorts = [];
+            const flags = [];
+            const opts = [];
+            for (const [field, config] of Object.entries(this.#configSet)) {
+                if (config.short) {
+                    if (config.type === 'boolean')
+                        shortFlags.push(config.short);
+                    else
+                        shorts.push([config.short, config.hint || field]);
+                }
+                else {
+                    if (config.type === 'boolean')
+                        flags.push(field);
+                    else
+                        opts.push([field, config.hint || field]);
+                }
+            }
+            const sf = shortFlags.length ? ' -' + shortFlags.join('') : '';
+            const so = shorts.map(([k, v]) => ` --${k}=<${v}>`).join('');
+            const lf = flags.map(k => ` --${k}`).join('');
+            const lo = opts.map(([k, v]) => ` --${k}=<${v}>`).join('');
+            const usage = `${cmd}${sf}${so}${lf}${lo}`.trim();
+            ui.div({
+                text: usage,
+                padding: [0, 0, 0, 2],
+            });
+        }
+        ui.div({ padding: [0, 0, 0, 0], text: '' });
+        const maybeDesc = this.#fields[start];
+        if (maybeDesc && isDescription(maybeDesc)) {
+            const print = normalize(maybeDesc.text, maybeDesc.pre);
+            start++;
+            ui.div({ padding: [0, 0, 0, 0], text: print });
+            ui.div({ padding: [0, 0, 0, 0], text: '' });
+        }
+        const { rows, maxWidth } = this.#usageRows(start);
+        // every heading/description after the first gets indented by 2
+        // extra spaces.
+        for (const row of rows) {
+            if (row.left) {
+                // If the row is too long, don't wrap it
+                // Bump the right-hand side down a line to make room
+                const configIndent = indent(Math.max(headingLevel, 2));
+                if (row.left.length > maxWidth - 3) {
+                    ui.div({ text: row.left, padding: [0, 0, 0, configIndent] });
+                    ui.div({ text: row.text, padding: [0, 0, 0, maxWidth] });
+                }
+                else {
+                    ui.div({
+                        text: row.left,
+                        padding: [0, 1, 0, configIndent],
+                        width: maxWidth,
+                    }, { padding: [0, 0, 0, 0], text: row.text });
+                }
+                if (row.skipLine) {
+                    ui.div({ padding: [0, 0, 0, 0], text: '' });
+                }
+            }
+            else {
+                if (isHeading(row)) {
+                    const { level } = row;
+                    headingLevel = level;
+                    // only h1 and h2 have bottom padding
+                    // h3-h6 do not
+                    const b = level <= 2 ? 1 : 0;
+                    ui.div({ ...row, padding: [0, 0, b, indent(level)] });
+                }
+                else {
+                    ui.div({ ...row, padding: [0, 0, 1, indent(headingLevel + 1)] });
+                }
+            }
+        }
+        return (this.#usage = ui.toString());
+    }
+    /**
+     * Return the usage banner markdown for the given configuration
+     */
+    usageMarkdown() {
+        if (this.#usageMarkdown)
+            return this.#usageMarkdown;
+        const out = [];
+        let headingLevel = 1;
+        const first = this.#fields[0];
+        let start = first?.type === 'heading' ? 1 : 0;
+        if (first?.type === 'heading') {
+            out.push(`# ${normalizeOneLine(first.text)}`);
+        }
+        out.push('Usage:');
+        if (this.#options.usage) {
+            out.push(normalizeMarkdown(this.#options.usage, true));
+        }
+        else {
+            const cmd = (0, node_path_1.basename)(String(process.argv[1]));
+            const shortFlags = [];
+            const shorts = [];
+            const flags = [];
+            const opts = [];
+            for (const [field, config] of Object.entries(this.#configSet)) {
+                if (config.short) {
+                    if (config.type === 'boolean')
+                        shortFlags.push(config.short);
+                    else
+                        shorts.push([config.short, config.hint || field]);
+                }
+                else {
+                    if (config.type === 'boolean')
+                        flags.push(field);
+                    else
+                        opts.push([field, config.hint || field]);
+                }
+            }
+            const sf = shortFlags.length ? ' -' + shortFlags.join('') : '';
+            const so = shorts.map(([k, v]) => ` --${k}=<${v}>`).join('');
+            const lf = flags.map(k => ` --${k}`).join('');
+            const lo = opts.map(([k, v]) => ` --${k}=<${v}>`).join('');
+            const usage = `${cmd}${sf}${so}${lf}${lo}`.trim();
+            out.push(normalizeMarkdown(usage, true));
+        }
+        const maybeDesc = this.#fields[start];
+        if (maybeDesc && isDescription(maybeDesc)) {
+            out.push(normalizeMarkdown(maybeDesc.text, maybeDesc.pre));
+            start++;
+        }
+        const { rows } = this.#usageRows(start);
+        // heading level in markdown is number of # ahead of text
+        for (const row of rows) {
+            if (row.left) {
+                out.push('#'.repeat(headingLevel + 1) +
+                    ' ' +
+                    normalizeOneLine(row.left, true));
+                if (row.text)
+                    out.push(normalizeMarkdown(row.text));
+            }
+            else if (isHeading(row)) {
+                const { level } = row;
+                headingLevel = level;
+                out.push(`${'#'.repeat(headingLevel)} ${normalizeOneLine(row.text, row.pre)}`);
+            }
+            else {
+                out.push(normalizeMarkdown(row.text, !!row.pre));
+            }
+        }
+        return (this.#usageMarkdown = out.join('\n\n') + '\n');
+    }
+    #usageRows(start) {
+        // turn each config type into a row, and figure out the width of the
+        // left hand indentation for the option descriptions.
+        let maxMax = Math.max(12, Math.min(26, Math.floor(width / 3)));
+        let maxWidth = 8;
+        let prev = undefined;
+        const rows = [];
+        for (const field of this.#fields.slice(start)) {
+            if (field.type !== 'config') {
+                if (prev?.type === 'config')
+                    prev.skipLine = true;
+                prev = undefined;
+                field.text = normalize(field.text, !!field.pre);
+                rows.push(field);
+                continue;
+            }
+            const { value } = field;
+            const desc = value.description || '';
+            const mult = value.multiple ? 'Can be set multiple times' : '';
+            const opts = value.validOptions?.length ?
+                // join explicitly; interpolating the array relied on its
+                // implicit comma join
+                `Valid options:${value.validOptions.map(v => ` ${JSON.stringify(v)}`).join(',')}`
+                : '';
+            const dmDelim = desc.includes('\n') ? '\n\n' : '\n';
+            const extra = [opts, mult].join(dmDelim).trim();
+            const text = (normalize(desc) + dmDelim + extra).trim();
+            const hint = value.hint ||
+                (value.type === 'number' ? 'n'
+                    : value.type === 'string' ? field.name
+                        : undefined);
+            const short = !value.short ? ''
+                : value.type === 'boolean' ? `-${value.short} `
+                    : `-${value.short}<${hint}> `;
+            const left = value.type === 'boolean' ?
+                `${short}--${field.name}`
+                : `${short}--${field.name}=<${hint}>`;
+            const row = { text, left, type: 'config' };
+            if (text.length > width - maxMax) {
+                row.skipLine = true;
+            }
+            if (prev && left.length > maxMax)
+                prev.skipLine = true;
+            prev = row;
+            const len = left.length + 4;
+            if (len > maxWidth && len < maxMax) {
+                maxWidth = len;
+            }
+            rows.push(row);
+        }
+        return { rows, maxWidth };
+    }
+    /**
+     * Return the configuration options as a plain object
+     */
+    toJSON() {
+        return Object.fromEntries(Object.entries(this.#configSet).map(([field, def]) => [
+            field,
+            {
+                type: def.type,
+                ...(def.multiple ? { multiple: true } : {}),
+                ...(def.delim ? { delim: def.delim } : {}),
+                ...(def.short ? { short: def.short } : {}),
+                ...(def.description ?
+                    { description: normalize(def.description) }
+                    : {}),
+                ...(def.validate ? { validate: def.validate } : {}),
+                ...(def.validOptions ? { validOptions: def.validOptions } : {}),
+                ...(def.default !== undefined ? { default: def.default } : {}),
+                ...(def.hint ? { hint: def.hint } : {}),
+            },
+        ]));
+    }
+    /**
+     * Custom printer for `util.inspect`
+     */
+    [node_util_1.inspect.custom](_, options) {
+        return `Jack ${(0, node_util_1.inspect)(this.toJSON(), options)}`;
+    }
+}
+exports.Jack = Jack;
+// Unwrap and un-indent, so we can wrap description
+// strings however makes them look nice in the code.
+const normalize = (s, pre = false) => {
+    if (pre)
+        // prepend a ZWSP to each line so cliui doesn't strip it.
+        return s
+            .split('\n')
+            .map(l => `\u200b${l}`)
+            .join('\n');
+    return s
+        .split(/^\s*```\s*$/gm)
+        .map((s, i) => {
+        if (i % 2 === 1) {
+            if (!s.trim()) {
+                return `\`\`\`\n\`\`\`\n`;
+            }
+            // outdent the ``` blocks, but preserve whitespace otherwise.
+            const split = s.split('\n');
+            // throw out the \n at the start and end
+            split.pop();
+            split.shift();
+            const si = split.reduce((shortest, l) => {
+                /* c8 ignore next */
+                const ind = l.match(/^\s*/)?.[0] ?? '';
+                if (ind.length)
+                    return Math.min(ind.length, shortest);
+                else
+                    return shortest;
+            }, Infinity);
+            /* c8 ignore next */
+            const i = isFinite(si) ? si : 0;
+            return ('\n```\n' +
+                split.map(s => `\u200b${s.substring(i)}`).join('\n') +
+                '\n```\n');
+        }
+        return (s
+            // remove single line breaks, except for lists
+            .replace(/([^\n])\n[ \t]*([^\n])/g, (_, $1, $2) => !/^[-*]/.test($2) ? `${$1} ${$2}` : `${$1}\n${$2}`)
+            // normalize mid-line whitespace
+            .replace(/([^\n])[ \t]+([^\n])/g, '$1 $2')
+            // two line breaks are enough
+            .replace(/\n{3,}/g, '\n\n')
+            // remove any spaces at the start of a line
+            .replace(/\n[ \t]+/g, '\n')
+            .trim());
+    })
+        .join('\n');
+};
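+// e.g. normalize('foo\n  bar') === 'foo bar': single line breaks and
+// leading indentation collapse, so long descriptions can be wrapped
+// however reads best in the source.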
+// normalize for markdown printing, remove leading spaces on lines
+const normalizeMarkdown = (s, pre = false) => {
+    const n = normalize(s, pre).replace(/\\/g, '\\\\');
+    return pre ?
+        `\`\`\`\n${n.replace(/\u200b/g, '')}\n\`\`\``
+        : n.replace(/\n +/g, '\n').trim();
+};
+const normalizeOneLine = (s, pre = false) => {
+    const n = normalize(s, pre)
+        .replace(/[\s\u200b]+/g, ' ')
+        .trim();
+    return pre ? `\`${n}\`` : n;
+};
+/**
+ * Main entry point. Create and return a {@link Jack} object.
+ */
+const jack = (options = {}) => new Jack(options);
+exports.jack = jack;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/node-gyp/node_modules/jackspeak/dist/commonjs/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/node-gyp/node_modules/jackspeak/dist/commonjs/package.json
new file mode 100644
index 0000000000000000000000000000000000000000..5bbefffbabee392d1855491b84dc0a716b6a3bf2
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/node-gyp/node_modules/jackspeak/dist/commonjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/node-gyp/node_modules/jackspeak/dist/commonjs/parse-args.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/node-gyp/node_modules/jackspeak/dist/commonjs/parse-args.js
new file mode 100644
index 0000000000000000000000000000000000000000..fc918a41fe603d3a0223290ffb0a84b532e522b4
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/node-gyp/node_modules/jackspeak/dist/commonjs/parse-args.js
@@ -0,0 +1,50 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.parseArgs = void 0;
+const util = __importStar(require("util"));
+const pv = (typeof process === 'object' &&
+    !!process &&
+    typeof process.version === 'string') ?
+    process.version
+    : 'v0.0.0';
+const pvs = pv
+    .replace(/^v/, '')
+    .split('.')
+    .map(s => parseInt(s, 10));
+/* c8 ignore start */
+const [major = 0, minor = 0] = pvs;
+/* c8 ignore stop */
+let { parseArgs: pa } = util;
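+// Fall back to the @pkgjs/parseargs polyfill when util.parseArgs is
+// missing or predates the `tokens` option (per the version gate below).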
+/* c8 ignore start */
+if (!pa ||
+    major < 16 ||
+    (major === 18 && minor < 11) ||
+    (major === 16 && minor < 19)) {
+    /* c8 ignore stop */
+    pa = require('@pkgjs/parseargs').parseArgs;
+}
+exports.parseArgs = pa;
+//# sourceMappingURL=parse-args-cjs.cjs.map
\ No newline at end of file
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/node-gyp/node_modules/jackspeak/dist/esm/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/node-gyp/node_modules/jackspeak/dist/esm/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..78fdfa8155472a63b878b6460610c6168332882e
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/node-gyp/node_modules/jackspeak/dist/esm/index.js
@@ -0,0 +1,1000 @@
+import { inspect } from 'node:util';
+import { parseArgs } from './parse-args.js';
+// it's a tiny API, just cast it inline, it's fine
+//@ts-ignore
+import cliui from '@isaacs/cliui';
+import { basename } from 'node:path';
+const width = Math.min((process && process.stdout && process.stdout.columns) || 80, 80);
+// indentation spaces from heading level
+const indent = (n) => (n - 1) * 2;
+const toEnvKey = (pref, key) => {
+    return [pref, key.replace(/[^a-zA-Z0-9]+/g, ' ')]
+        .join(' ')
+        .trim()
+        .toUpperCase()
+        .replace(/ /g, '_');
+};
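+// e.g. toEnvKey('myapp', 'dry-run') === 'MYAPP_DRY_RUN' (illustrative)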
+const toEnvVal = (value, delim = '\n') => {
+    const str = typeof value === 'string' ? value
+        : typeof value === 'boolean' ?
+            value ? '1'
+                : '0'
+            : typeof value === 'number' ? String(value)
+                : Array.isArray(value) ?
+                    value.map((v) => toEnvVal(v)).join(delim)
+                    : /* c8 ignore start */ undefined;
+    if (typeof str !== 'string') {
+        throw new Error(`could not serialize value to environment: ${JSON.stringify(value)}`);
+    }
+    /* c8 ignore stop */
+    return str;
+};
+const fromEnvVal = (env, type, multiple, delim = '\n') => (multiple ?
+    env ? env.split(delim).map(v => fromEnvVal(v, type, false))
+        : []
+    : type === 'string' ? env
+        : type === 'boolean' ? env === '1'
+            : +env.trim());
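+// Illustrative round trip: toEnvVal(['a', 'b']) === 'a\nb', and
+// fromEnvVal('a\nb', 'string', true) gives back ['a', 'b'].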
+export const isConfigType = (t) => typeof t === 'string' &&
+    (t === 'string' || t === 'number' || t === 'boolean');
+const undefOrType = (v, t) => v === undefined || typeof v === t;
+const undefOrTypeArray = (v, t) => v === undefined || (Array.isArray(v) && v.every(x => typeof x === t));
+const isValidOption = (v, vo) => Array.isArray(v) ? v.every(x => isValidOption(x, vo)) : vo.includes(v);
+// print the value type, for error message reporting
+const valueType = (v) => typeof v === 'string' ? 'string'
+    : typeof v === 'boolean' ? 'boolean'
+        : typeof v === 'number' ? 'number'
+            : Array.isArray(v) ?
+                joinTypes([...new Set(v.map(v => valueType(v)))]) + '[]'
+                : `${v.type}${v.multiple ? '[]' : ''}`;
+const joinTypes = (types) => types.length === 1 && typeof types[0] === 'string' ?
+    types[0]
+    : `(${types.join('|')})`;
+const isValidValue = (v, type, multi) => {
+    if (multi) {
+        if (!Array.isArray(v))
+            return false;
+        return !v.some((v) => !isValidValue(v, type, false));
+    }
+    if (Array.isArray(v))
+        return false;
+    return typeof v === type;
+};
+export const isConfigOption = (o, type, multi) => !!o &&
+    typeof o === 'object' &&
+    isConfigType(o.type) &&
+    o.type === type &&
+    undefOrType(o.short, 'string') &&
+    undefOrType(o.description, 'string') &&
+    undefOrType(o.hint, 'string') &&
+    undefOrType(o.validate, 'function') &&
+    (o.type === 'boolean' ?
+        o.validOptions === undefined
+        : undefOrTypeArray(o.validOptions, o.type)) &&
+    (o.default === undefined || isValidValue(o.default, type, multi)) &&
+    !!o.multiple === multi;
+function num(o = {}) {
+    const { default: def, validate: val, validOptions, ...rest } = o;
+    if (def !== undefined && !isValidValue(def, 'number', false)) {
+        throw new TypeError('invalid default value', {
+            cause: {
+                found: def,
+                wanted: 'number',
+            },
+        });
+    }
+    if (!undefOrTypeArray(validOptions, 'number')) {
+        throw new TypeError('invalid validOptions', {
+            cause: {
+                found: validOptions,
+                wanted: 'number[]',
+            },
+        });
+    }
+    const validate = val ?
+        val
+        : undefined;
+    return {
+        ...rest,
+        default: def,
+        validate,
+        validOptions,
+        type: 'number',
+        multiple: false,
+    };
+}
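+// Illustrative: num({ default: 2 }) normalizes to
+// { default: 2, validate: undefined, validOptions: undefined,
+//   type: 'number', multiple: false }; the factories below follow the
+// same pattern for their type/multiple combinations.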
+function numList(o = {}) {
+    const { default: def, validate: val, validOptions, ...rest } = o;
+    if (def !== undefined && !isValidValue(def, 'number', true)) {
+        throw new TypeError('invalid default value', {
+            cause: {
+                found: def,
+                wanted: 'number[]',
+            },
+        });
+    }
+    if (!undefOrTypeArray(validOptions, 'number')) {
+        throw new TypeError('invalid validOptions', {
+            cause: {
+                found: validOptions,
+                wanted: 'number[]',
+            },
+        });
+    }
+    const validate = val ?
+        val
+        : undefined;
+    return {
+        ...rest,
+        default: def,
+        validate,
+        validOptions,
+        type: 'number',
+        multiple: true,
+    };
+}
+function opt(o = {}) {
+    const { default: def, validate: val, validOptions, ...rest } = o;
+    if (def !== undefined && !isValidValue(def, 'string', false)) {
+        throw new TypeError('invalid default value', {
+            cause: {
+                found: def,
+                wanted: 'string',
+            },
+        });
+    }
+    if (!undefOrTypeArray(validOptions, 'string')) {
+        throw new TypeError('invalid validOptions', {
+            cause: {
+                found: validOptions,
+                wanted: 'string[]',
+            },
+        });
+    }
+    const validate = val ?
+        val
+        : undefined;
+    return {
+        ...rest,
+        default: def,
+        validate,
+        validOptions,
+        type: 'string',
+        multiple: false,
+    };
+}
+function optList(o = {}) {
+    const { default: def, validate: val, validOptions, ...rest } = o;
+    if (def !== undefined && !isValidValue(def, 'string', true)) {
+        throw new TypeError('invalid default value', {
+            cause: {
+                found: def,
+                wanted: 'string[]',
+            },
+        });
+    }
+    if (!undefOrTypeArray(validOptions, 'string')) {
+        throw new TypeError('invalid validOptions', {
+            cause: {
+                found: validOptions,
+                wanted: 'string[]',
+            },
+        });
+    }
+    const validate = val ?
+        val
+        : undefined;
+    return {
+        ...rest,
+        default: def,
+        validate,
+        validOptions,
+        type: 'string',
+        multiple: true,
+    };
+}
+function flag(o = {}) {
+    const { hint, default: def, validate: val, ...rest } = o;
+    delete rest.validOptions;
+    if (def !== undefined && !isValidValue(def, 'boolean', false)) {
+        throw new TypeError('invalid default value');
+    }
+    const validate = val ?
+        val
+        : undefined;
+    if (hint !== undefined) {
+        throw new TypeError('cannot provide hint for flag');
+    }
+    return {
+        ...rest,
+        default: def,
+        validate,
+        type: 'boolean',
+        multiple: false,
+    };
+}
+function flagList(o = {}) {
+    const { hint, default: def, validate: val, ...rest } = o;
+    delete rest.validOptions;
+    if (def !== undefined && !isValidValue(def, 'boolean', true)) {
+        throw new TypeError('invalid default value');
+    }
+    const validate = val ?
+        val
+        : undefined;
+    if (hint !== undefined) {
+        throw new TypeError('cannot provide hint for flag list');
+    }
+    return {
+        ...rest,
+        default: def,
+        validate,
+        type: 'boolean',
+        multiple: true,
+    };
+}
+const toParseArgsOptionsConfig = (options) => {
+    const c = {};
+    for (const longOption in options) {
+        const config = options[longOption];
+        /* c8 ignore start */
+        if (!config) {
+            throw new Error('config must be an object: ' + longOption);
+        }
+        /* c8 ignore stop */
+        if (isConfigOption(config, 'number', true)) {
+            c[longOption] = {
+                type: 'string',
+                multiple: true,
+                default: config.default?.map(c => String(c)),
+            };
+        }
+        else if (isConfigOption(config, 'number', false)) {
+            c[longOption] = {
+                type: 'string',
+                multiple: false,
+                default: config.default === undefined ?
+                    undefined
+                    : String(config.default),
+            };
+        }
+        else {
+            const conf = config;
+            c[longOption] = {
+                type: conf.type,
+                multiple: !!conf.multiple,
+                default: conf.default,
+            };
+        }
+        const clo = c[longOption];
+        if (typeof config.short === 'string') {
+            clo.short = config.short;
+        }
+        if (config.type === 'boolean' &&
+            !longOption.startsWith('no-') &&
+            !options[`no-${longOption}`]) {
+            c[`no-${longOption}`] = {
+                type: 'boolean',
+                multiple: config.multiple,
+            };
+        }
+    }
+    return c;
+};
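+// Illustrative: { verbose: { type: 'boolean' } } also yields a synthetic
+// 'no-verbose' boolean entry, so `--no-verbose` parses and is folded back
+// into `verbose: false` by parseRaw().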
+const isHeading = (r) => r.type === 'heading';
+const isDescription = (r) => r.type === 'description';
+/**
+ * Class returned by the {@link jack} function and all configuration
+ * definition methods.  This is what gets chained together.
+ */
+export class Jack {
+    #configSet;
+    #shorts;
+    #options;
+    #fields = [];
+    #env;
+    #envPrefix;
+    #allowPositionals;
+    #usage;
+    #usageMarkdown;
+    constructor(options = {}) {
+        this.#options = options;
+        this.#allowPositionals = options.allowPositionals !== false;
+        this.#env =
+            this.#options.env === undefined ? process.env : this.#options.env;
+        this.#envPrefix = options.envPrefix;
+        // We need to fib a little, because it's always the same object, but it
+        // starts out as having an empty config set.  Then each method that adds
+        // fields returns `this as Jack`
+        this.#configSet = Object.create(null);
+        this.#shorts = Object.create(null);
+    }
+    /**
+     * Set the default value (which will still be overridden by env or cli)
+     * as if from a parsed config file. The optional `source` param, if
+     * provided, will be included in error messages if a value is invalid or
+     * unknown.
+     */
+    setConfigValues(values, source = '') {
+        try {
+            this.validate(values);
+        }
+        catch (er) {
+            const e = er;
+            if (source && e && typeof e === 'object') {
+                if (e.cause && typeof e.cause === 'object') {
+                    Object.assign(e.cause, { path: source });
+                }
+                else {
+                    e.cause = { path: source };
+                }
+            }
+            throw e;
+        }
+        for (const [field, value] of Object.entries(values)) {
+            const my = this.#configSet[field];
+            // already validated, just for TS's benefit
+            /* c8 ignore start */
+            if (!my) {
+                throw new Error('unexpected field in config set: ' + field, {
+                    cause: { found: field },
+                });
+            }
+            /* c8 ignore stop */
+            my.default = value;
+        }
+        return this;
+    }
+    /**
+     * Parse a string of arguments, and return the resulting
+     * `{ values, positionals }` object.
+     *
+     * If an {@link JackOptions#envPrefix} is set, then it will read default
+     * values from the environment, and write the resulting values back
+     * to the environment as well.
+     *
+     * Environment values always take precedence over any other value, except
+     * an explicit CLI setting.
+     */
+    parse(args = process.argv) {
+        this.loadEnvDefaults();
+        const p = this.parseRaw(args);
+        this.applyDefaults(p);
+        this.writeEnv(p);
+        return p;
+    }
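+    // Illustrative (assumed option names): with envPrefix 'APP' and a
+    // boolean `verbose` plus numeric `port` defined,
+    //   APP_VERBOSE=1 node cli.js --port 8080
+    // resolves to values { verbose: true, port: 8080 } and writes
+    // APP_PORT=8080 back into the environment.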
+    loadEnvDefaults() {
+        if (this.#envPrefix) {
+            for (const [field, my] of Object.entries(this.#configSet)) {
+                const ek = toEnvKey(this.#envPrefix, field);
+                const env = this.#env[ek];
+                if (env !== undefined) {
+                    my.default = fromEnvVal(env, my.type, !!my.multiple, my.delim);
+                }
+            }
+        }
+    }
+    applyDefaults(p) {
+        for (const [field, c] of Object.entries(this.#configSet)) {
+            if (c.default !== undefined && !(field in p.values)) {
+                //@ts-ignore
+                p.values[field] = c.default;
+            }
+        }
+    }
+    /**
+     * Only parse the command line arguments passed in.
+     * Does not strip off the `node script.js` bits, so it must be just the
+     * arguments you wish to have parsed.
+     * Does not read from or write to the environment, or set defaults.
+     */
+    parseRaw(args) {
+        if (args === process.argv) {
+            args = args.slice(process._eval !== undefined ? 1 : 2);
+        }
+        const options = toParseArgsOptionsConfig(this.#configSet);
+        const result = parseArgs({
+            args,
+            options,
+            // always strict, but using our own logic
+            strict: false,
+            allowPositionals: this.#allowPositionals,
+            tokens: true,
+        });
+        const p = {
+            values: {},
+            positionals: [],
+        };
+        for (const token of result.tokens) {
+            if (token.kind === 'positional') {
+                p.positionals.push(token.value);
+                if (this.#options.stopAtPositional ||
+                    this.#options.stopAtPositionalTest?.(token.value)) {
+                    p.positionals.push(...args.slice(token.index + 1));
+                    break;
+                }
+            }
+            else if (token.kind === 'option') {
+                let value = undefined;
+                if (token.name.startsWith('no-')) {
+                    const my = this.#configSet[token.name];
+                    const pname = token.name.substring('no-'.length);
+                    const pos = this.#configSet[pname];
+                    if (pos &&
+                        pos.type === 'boolean' &&
+                        (!my ||
+                            (my.type === 'boolean' && !!my.multiple === !!pos.multiple))) {
+                        value = false;
+                        token.name = pname;
+                    }
+                }
+                const my = this.#configSet[token.name];
+                if (!my) {
+                    throw new Error(`Unknown option '${token.rawName}'. ` +
+                        `To specify a positional argument starting with a '-', ` +
+                        `place it at the end of the command after '--', as in ` +
+                        `'-- ${token.rawName}'`, {
+                        cause: {
+                            found: token.rawName + (token.value ? `=${token.value}` : ''),
+                        },
+                    });
+                }
+                if (value === undefined) {
+                    if (token.value === undefined) {
+                        if (my.type !== 'boolean') {
+                            throw new Error(`No value provided for ${token.rawName}, expected ${my.type}`, {
+                                cause: {
+                                    name: token.rawName,
+                                    wanted: valueType(my),
+                                },
+                            });
+                        }
+                        value = true;
+                    }
+                    else {
+                        if (my.type === 'boolean') {
+                            throw new Error(`Flag ${token.rawName} does not take a value, received '${token.value}'`, { cause: { found: token } });
+                        }
+                        if (my.type === 'string') {
+                            value = token.value;
+                        }
+                        else {
+                            value = +token.value;
+                            // self-inequality is true only for NaN
+                            if (value !== value) {
+                                throw new Error(`Invalid value '${token.value}' provided for ` +
+                                    `'${token.rawName}' option, expected number`, {
+                                    cause: {
+                                        name: token.rawName,
+                                        found: token.value,
+                                        wanted: 'number',
+                                    },
+                                });
+                            }
+                        }
+                    }
+                }
+                if (my.multiple) {
+                    const pv = p.values;
+                    const tn = pv[token.name] ?? [];
+                    pv[token.name] = tn;
+                    tn.push(value);
+                }
+                else {
+                    const pv = p.values;
+                    pv[token.name] = value;
+                }
+            }
+        }
+        for (const [field, value] of Object.entries(p.values)) {
+            const valid = this.#configSet[field]?.validate;
+            const validOptions = this.#configSet[field]?.validOptions;
+            let cause;
+            if (validOptions && !isValidOption(value, validOptions)) {
+                cause = { name: field, found: value, validOptions: validOptions };
+            }
+            if (valid && !valid(value)) {
+                cause = cause || { name: field, found: value };
+            }
+            if (cause) {
+                throw new Error(`Invalid value provided for --${field}: ${JSON.stringify(value)}`, { cause });
+            }
+        }
+        return p;
+    }
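+    // Illustrative: parseRaw(['--port', '80', 'file.txt']) returns
+    // { values: { port: 80 }, positionals: ['file.txt'] } without reading
+    // the env, writing it, or applying defaults (assuming a numeric
+    // `port` option).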
+    /**
+     * do not set fields as 'no-foo' if 'foo' exists and both are bools
+     * just set foo.
+     */
+    #noNoFields(f, val, s = f) {
+        if (!f.startsWith('no-') || typeof val !== 'boolean')
+            return;
+        const yes = f.substring('no-'.length);
+        // recurse so we get the core config key we care about.
+        this.#noNoFields(yes, val, s);
+        if (this.#configSet[yes]?.type === 'boolean') {
+            throw new Error(`do not set '${s}', instead set '${yes}' as desired.`, { cause: { found: s, wanted: yes } });
+        }
+    }
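+    // e.g. validating { 'no-color': true } when a boolean `color` option
+    // exists throws, directing the caller to set `color` instead.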
+    /**
+     * Validate that any arbitrary object is a valid configuration `values`
+     * object.  Useful when loading config files or other sources.
+     */
+    validate(o) {
+        if (!o || typeof o !== 'object') {
+            throw new Error('Invalid config: not an object', {
+                cause: { found: o },
+            });
+        }
+        const opts = o;
+        for (const field in o) {
+            const value = opts[field];
+            /* c8 ignore next - for TS */
+            if (value === undefined)
+                continue;
+            this.#noNoFields(field, value);
+            const config = this.#configSet[field];
+            if (!config) {
+                throw new Error(`Unknown config option: ${field}`, {
+                    cause: { found: field },
+                });
+            }
+            if (!isValidValue(value, config.type, !!config.multiple)) {
+                throw new Error(`Invalid value ${valueType(value)} for ${field}, expected ${valueType(config)}`, {
+                    cause: {
+                        name: field,
+                        found: value,
+                        wanted: valueType(config),
+                    },
+                });
+            }
+            let cause;
+            if (config.validOptions &&
+                !isValidOption(value, config.validOptions)) {
+                cause = {
+                    name: field,
+                    found: value,
+                    validOptions: config.validOptions,
+                };
+            }
+            if (config.validate && !config.validate(value)) {
+                cause = cause || { name: field, found: value };
+            }
+            if (cause) {
+                throw new Error(`Invalid config value for ${field}: ${value}`, {
+                    cause,
+                });
+            }
+        }
+    }
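+    // Illustrative: jack().num({ port: {} }).validate({ port: '80' })
+    // throws, since '80' is a string where a number is expected.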
+    writeEnv(p) {
+        if (!this.#env || !this.#envPrefix)
+            return;
+        for (const [field, value] of Object.entries(p.values)) {
+            const my = this.#configSet[field];
+            this.#env[toEnvKey(this.#envPrefix, field)] = toEnvVal(value, my?.delim);
+        }
+    }
+    /**
+     * Add a heading to the usage output banner
+     */
+    heading(text, level, { pre = false } = {}) {
+        if (level === undefined) {
+            level = this.#fields.some(r => isHeading(r)) ? 2 : 1;
+        }
+        this.#fields.push({ type: 'heading', text, level, pre });
+        return this;
+    }
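+    // e.g. .heading('my-cli').heading('Options') renders a level-1 then a
+    // level-2 heading, since only the first defaults to level 1.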
+    /**
+     * Add a long-form description to the usage output at this position.
+     */
+    description(text, { pre } = {}) {
+        this.#fields.push({ type: 'description', text, pre });
+        return this;
+    }
+    /**
+     * Add one or more number fields.
+     */
+    num(fields) {
+        return this.#addFields(fields, num);
+    }
+    /**
+     * Add one or more multiple number fields.
+     */
+    numList(fields) {
+        return this.#addFields(fields, numList);
+    }
+    /**
+     * Add one or more string option fields.
+     */
+    opt(fields) {
+        return this.#addFields(fields, opt);
+    }
+    /**
+     * Add one or more multiple string option fields.
+     */
+    optList(fields) {
+        return this.#addFields(fields, optList);
+    }
+    /**
+     * Add one or more flag fields.
+     */
+    flag(fields) {
+        return this.#addFields(fields, flag);
+    }
+    /**
+     * Add one or more multiple flag fields.
+     */
+    flagList(fields) {
+        return this.#addFields(fields, flagList);
+    }
+    /**
+     * Generic field definition method. Similar to flag/flagList/num/etc.,
+     * but you must specify the `type` (and optionally `multiple` and `delim`)
+     * fields on each one, or Jack won't know how to define them.
+     */
+    addFields(fields) {
+        const next = this;
+        for (const [name, field] of Object.entries(fields)) {
+            this.#validateName(name, field);
+            next.#fields.push({
+                type: 'config',
+                name,
+                value: field,
+            });
+        }
+        Object.assign(next.#configSet, fields);
+        return next;
+    }
+    #addFields(fields, fn) {
+        const next = this;
+        Object.assign(next.#configSet, Object.fromEntries(Object.entries(fields).map(([name, field]) => {
+            this.#validateName(name, field);
+            const option = fn(field);
+            next.#fields.push({
+                type: 'config',
+                name,
+                value: option,
+            });
+            return [name, option];
+        })));
+        return next;
+    }
+    #validateName(name, field) {
+        if (!/^[a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?$/.test(name)) {
+            throw new TypeError(`Invalid option name: ${name}, ` +
+                `must be '-' delimited ASCII alphanumeric`);
+        }
+        if (this.#configSet[name]) {
+            throw new TypeError(`Cannot redefine option ${name}`);
+        }
+        if (this.#shorts[name]) {
+            throw new TypeError(`Cannot redefine option ${name}, already ` +
+                `in use for ${this.#shorts[name]}`);
+        }
+        if (field.short) {
+            if (!/^[a-zA-Z0-9]$/.test(field.short)) {
+                throw new TypeError(`Invalid ${name} short option: ${field.short}, ` +
+                    'must be 1 ASCII alphanumeric character');
+            }
+            if (this.#shorts[field.short]) {
+                throw new TypeError(`Invalid ${name} short option: ${field.short}, ` +
+                    `already in use for ${this.#shorts[field.short]}`);
+            }
+            this.#shorts[field.short] = name;
+            this.#shorts[name] = name;
+        }
+    }
+    /**
+     * Return the usage banner for the given configuration
+     */
+    usage() {
+        if (this.#usage)
+            return this.#usage;
+        let headingLevel = 1;
+        const ui = cliui({ width });
+        const first = this.#fields[0];
+        let start = first?.type === 'heading' ? 1 : 0;
+        if (first?.type === 'heading') {
+            ui.div({
+                padding: [0, 0, 0, 0],
+                text: normalize(first.text),
+            });
+        }
+        ui.div({ padding: [0, 0, 0, 0], text: 'Usage:' });
+        if (this.#options.usage) {
+            ui.div({
+                text: this.#options.usage,
+                padding: [0, 0, 0, 2],
+            });
+        }
+        else {
+            const cmd = basename(String(process.argv[1]));
+            const shortFlags = [];
+            const shorts = [];
+            const flags = [];
+            const opts = [];
+            for (const [field, config] of Object.entries(this.#configSet)) {
+                if (config.short) {
+                    if (config.type === 'boolean')
+                        shortFlags.push(config.short);
+                    else
+                        shorts.push([config.short, config.hint || field]);
+                }
+                else {
+                    if (config.type === 'boolean')
+                        flags.push(field);
+                    else
+                        opts.push([field, config.hint || field]);
+                }
+            }
+            const sf = shortFlags.length ? ' -' + shortFlags.join('') : '';
+            const so = shorts.map(([k, v]) => ` --${k}=<${v}>`).join('');
+            const lf = flags.map(k => ` --${k}`).join('');
+            const lo = opts.map(([k, v]) => ` --${k}=<${v}>`).join('');
+            const usage = `${cmd}${sf}${so}${lf}${lo}`.trim();
+            ui.div({
+                text: usage,
+                padding: [0, 0, 0, 2],
+            });
+        }
+        ui.div({ padding: [0, 0, 0, 0], text: '' });
+        const maybeDesc = this.#fields[start];
+        if (maybeDesc && isDescription(maybeDesc)) {
+            const print = normalize(maybeDesc.text, maybeDesc.pre);
+            start++;
+            ui.div({ padding: [0, 0, 0, 0], text: print });
+            ui.div({ padding: [0, 0, 0, 0], text: '' });
+        }
+        const { rows, maxWidth } = this.#usageRows(start);
+        // every heading/description after the first gets indented by 2
+        // extra spaces.
+        for (const row of rows) {
+            if (row.left) {
+                // If the row is too long, don't wrap it
+                // Bump the right-hand side down a line to make room
+                const configIndent = indent(Math.max(headingLevel, 2));
+                if (row.left.length > maxWidth - 3) {
+                    ui.div({ text: row.left, padding: [0, 0, 0, configIndent] });
+                    ui.div({ text: row.text, padding: [0, 0, 0, maxWidth] });
+                }
+                else {
+                    ui.div({
+                        text: row.left,
+                        padding: [0, 1, 0, configIndent],
+                        width: maxWidth,
+                    }, { padding: [0, 0, 0, 0], text: row.text });
+                }
+                if (row.skipLine) {
+                    ui.div({ padding: [0, 0, 0, 0], text: '' });
+                }
+            }
+            else {
+                if (isHeading(row)) {
+                    const { level } = row;
+                    headingLevel = level;
+                    // only h1 and h2 have bottom padding
+                    // h3-h6 do not
+                    const b = level <= 2 ? 1 : 0;
+                    ui.div({ ...row, padding: [0, 0, b, indent(level)] });
+                }
+                else {
+                    ui.div({ ...row, padding: [0, 0, 1, indent(headingLevel + 1)] });
+                }
+            }
+        }
+        return (this.#usage = ui.toString());
+    }
+    /**
+     * Return the usage banner markdown for the given configuration
+     */
+    usageMarkdown() {
+        if (this.#usageMarkdown)
+            return this.#usageMarkdown;
+        const out = [];
+        let headingLevel = 1;
+        const first = this.#fields[0];
+        let start = first?.type === 'heading' ? 1 : 0;
+        if (first?.type === 'heading') {
+            out.push(`# ${normalizeOneLine(first.text)}`);
+        }
+        out.push('Usage:');
+        if (this.#options.usage) {
+            out.push(normalizeMarkdown(this.#options.usage, true));
+        }
+        else {
+            const cmd = basename(String(process.argv[1]));
+            const shortFlags = [];
+            const shorts = [];
+            const flags = [];
+            const opts = [];
+            for (const [field, config] of Object.entries(this.#configSet)) {
+                if (config.short) {
+                    if (config.type === 'boolean')
+                        shortFlags.push(config.short);
+                    else
+                        shorts.push([config.short, config.hint || field]);
+                }
+                else {
+                    if (config.type === 'boolean')
+                        flags.push(field);
+                    else
+                        opts.push([field, config.hint || field]);
+                }
+            }
+            const sf = shortFlags.length ? ' -' + shortFlags.join('') : '';
+            const so = shorts.map(([k, v]) => ` --${k}=<${v}>`).join('');
+            const lf = flags.map(k => ` --${k}`).join('');
+            const lo = opts.map(([k, v]) => ` --${k}=<${v}>`).join('');
+            const usage = `${cmd}${sf}${so}${lf}${lo}`.trim();
+            out.push(normalizeMarkdown(usage, true));
+        }
+        const maybeDesc = this.#fields[start];
+        if (maybeDesc && isDescription(maybeDesc)) {
+            out.push(normalizeMarkdown(maybeDesc.text, maybeDesc.pre));
+            start++;
+        }
+        const { rows } = this.#usageRows(start);
+        // heading level in markdown is number of # ahead of text
+        for (const row of rows) {
+            if (row.left) {
+                out.push('#'.repeat(headingLevel + 1) +
+                    ' ' +
+                    normalizeOneLine(row.left, true));
+                if (row.text)
+                    out.push(normalizeMarkdown(row.text));
+            }
+            else if (isHeading(row)) {
+                const { level } = row;
+                headingLevel = level;
+                out.push(`${'#'.repeat(headingLevel)} ${normalizeOneLine(row.text, row.pre)}`);
+            }
+            else {
+                out.push(normalizeMarkdown(row.text, !!row.pre));
+            }
+        }
+        return (this.#usageMarkdown = out.join('\n\n') + '\n');
+    }
+    #usageRows(start) {
+        // turn each config type into a row, and figure out the width of the
+        // left hand indentation for the option descriptions.
+        let maxMax = Math.max(12, Math.min(26, Math.floor(width / 3)));
+        let maxWidth = 8;
+        let prev = undefined;
+        const rows = [];
+        for (const field of this.#fields.slice(start)) {
+            if (field.type !== 'config') {
+                if (prev?.type === 'config')
+                    prev.skipLine = true;
+                prev = undefined;
+                field.text = normalize(field.text, !!field.pre);
+                rows.push(field);
+                continue;
+            }
+            const { value } = field;
+            const desc = value.description || '';
+            const mult = value.multiple ? 'Can be set multiple times' : '';
+            const opts = value.validOptions?.length ?
+                // join explicitly; interpolating the array relied on its
+                // implicit comma join
+                `Valid options:${value.validOptions.map(v => ` ${JSON.stringify(v)}`).join(',')}`
+                : '';
+            const dmDelim = desc.includes('\n') ? '\n\n' : '\n';
+            const extra = [opts, mult].join(dmDelim).trim();
+            const text = (normalize(desc) + dmDelim + extra).trim();
+            const hint = value.hint ||
+                (value.type === 'number' ? 'n'
+                    : value.type === 'string' ? field.name
+                        : undefined);
+            const short = !value.short ? ''
+                : value.type === 'boolean' ? `-${value.short} `
+                    : `-${value.short}<${hint}> `;
+            const left = value.type === 'boolean' ?
+                `${short}--${field.name}`
+                : `${short}--${field.name}=<${hint}>`;
+            const row = { text, left, type: 'config' };
+            if (text.length > width - maxMax) {
+                row.skipLine = true;
+            }
+            if (prev && left.length > maxMax)
+                prev.skipLine = true;
+            prev = row;
+            const len = left.length + 4;
+            if (len > maxWidth && len < maxMax) {
+                maxWidth = len;
+            }
+            rows.push(row);
+        }
+        return { rows, maxWidth };
+    }
+    /**
+     * Return the configuration options as a plain object
+     */
+    toJSON() {
+        return Object.fromEntries(Object.entries(this.#configSet).map(([field, def]) => [
+            field,
+            {
+                type: def.type,
+                ...(def.multiple ? { multiple: true } : {}),
+                ...(def.delim ? { delim: def.delim } : {}),
+                ...(def.short ? { short: def.short } : {}),
+                ...(def.description ?
+                    { description: normalize(def.description) }
+                    : {}),
+                ...(def.validate ? { validate: def.validate } : {}),
+                ...(def.validOptions ? { validOptions: def.validOptions } : {}),
+                ...(def.default !== undefined ? { default: def.default } : {}),
+                ...(def.hint ? { hint: def.hint } : {}),
+            },
+        ]));
+    }
+    /**
+     * Custom printer for `util.inspect`
+     */
+    [inspect.custom](_, options) {
+        return `Jack ${inspect(this.toJSON(), options)}`;
+    }
+}
+// Unwrap and un-indent, so we can wrap description
+// strings however makes them look nice in the code.
+const normalize = (s, pre = false) => {
+    if (pre)
+        // prepend a ZWSP to each line so cliui doesn't strip it.
+        return s
+            .split('\n')
+            .map(l => `\u200b${l}`)
+            .join('\n');
+    return s
+        .split(/^\s*```\s*$/gm)
+        .map((s, i) => {
+        if (i % 2 === 1) {
+            if (!s.trim()) {
+                return `\`\`\`\n\`\`\`\n`;
+            }
+            // outdent the ``` blocks, but preserve whitespace otherwise.
+            const split = s.split('\n');
+            // throw out the \n at the start and end
+            split.pop();
+            split.shift();
+            const si = split.reduce((shortest, l) => {
+                /* c8 ignore next */
+                const ind = l.match(/^\s*/)?.[0] ?? '';
+                if (ind.length)
+                    return Math.min(ind.length, shortest);
+                else
+                    return shortest;
+            }, Infinity);
+            /* c8 ignore next */
+            const i = isFinite(si) ? si : 0;
+            return ('\n```\n' +
+                split.map(s => `\u200b${s.substring(i)}`).join('\n') +
+                '\n```\n');
+        }
+        return (s
+            // remove single line breaks, except for lists
+            .replace(/([^\n])\n[ \t]*([^\n])/g, (_, $1, $2) => !/^[-*]/.test($2) ? `${$1} ${$2}` : `${$1}\n${$2}`)
+            // normalize mid-line whitespace
+            .replace(/([^\n])[ \t]+([^\n])/g, '$1 $2')
+            // two line breaks are enough
+            .replace(/\n{3,}/g, '\n\n')
+            // remove any spaces at the start of a line
+            .replace(/\n[ \t]+/g, '\n')
+            .trim());
+    })
+        .join('\n');
+};
+// normalize for markdown printing, remove leading spaces on lines
+const normalizeMarkdown = (s, pre = false) => {
+    const n = normalize(s, pre).replace(/\\/g, '\\\\');
+    return pre ?
+        `\`\`\`\n${n.replace(/\u200b/g, '')}\n\`\`\``
+        : n.replace(/\n +/g, '\n').trim();
+};
+const normalizeOneLine = (s, pre = false) => {
+    const n = normalize(s, pre)
+        .replace(/[\s\u200b]+/g, ' ')
+        .trim();
+    return pre ? `\`${n}\`` : n;
+};
+/**
+ * Main entry point. Create and return a {@link Jack} object.
+ */
+export const jack = (options = {}) => new Jack(options);
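+// Minimal usage sketch (illustrative option names):
+//   import { jack } from 'jackspeak'
+//   const { values, positionals } = jack({ envPrefix: 'MYAPP' })
+//     .flag({ verbose: { short: 'v', description: 'extra output' } })
+//     .num({ port: { default: 8080 } })
+//     .parse()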
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/node-gyp/node_modules/path-scurry/dist/commonjs/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/node-gyp/node_modules/path-scurry/dist/commonjs/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..555de62f04c90ed9e3f1cf69f6d51895ed018964
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/node-gyp/node_modules/path-scurry/dist/commonjs/index.js
@@ -0,0 +1,2014 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.PathScurry = exports.Path = exports.PathScurryDarwin = exports.PathScurryPosix = exports.PathScurryWin32 = exports.PathScurryBase = exports.PathPosix = exports.PathWin32 = exports.PathBase = exports.ChildrenCache = exports.ResolveCache = void 0;
+const lru_cache_1 = require("lru-cache");
+const node_path_1 = require("node:path");
+const node_url_1 = require("node:url");
+const fs_1 = require("fs");
+const actualFS = __importStar(require("node:fs"));
+const realpathSync = fs_1.realpathSync.native;
+// TODO: test perf of fs/promises realpath vs realpathCB,
+// since the promises one uses realpath.native
+const promises_1 = require("node:fs/promises");
+const minipass_1 = require("minipass");
+const defaultFS = {
+    lstatSync: fs_1.lstatSync,
+    readdir: fs_1.readdir,
+    readdirSync: fs_1.readdirSync,
+    readlinkSync: fs_1.readlinkSync,
+    realpathSync,
+    promises: {
+        lstat: promises_1.lstat,
+        readdir: promises_1.readdir,
+        readlink: promises_1.readlink,
+        realpath: promises_1.realpath,
+    },
+};
+// if they just gave us require('fs') then use our default
+const fsFromOption = (fsOption) => !fsOption || fsOption === defaultFS || fsOption === actualFS ?
+    defaultFS
+    : {
+        ...defaultFS,
+        ...fsOption,
+        promises: {
+            ...defaultFS.promises,
+            ...(fsOption.promises || {}),
+        },
+    };
+// turn something like //?/c:/ into c:\
+const uncDriveRegexp = /^\\\\\?\\([a-z]:)\\?$/i;
+const uncToDrive = (rootPath) => rootPath.replace(/\//g, '\\').replace(uncDriveRegexp, '$1\\');
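+// e.g. uncToDrive('//?/c:/') === 'c:\\' after the slash normalization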
+// windows paths are separated by either / or \
+const eitherSep = /[\\\/]/;
+const UNKNOWN = 0; // may not even exist, for all we know
+const IFIFO = 0b0001;
+const IFCHR = 0b0010;
+const IFDIR = 0b0100;
+const IFBLK = 0b0110;
+const IFREG = 0b1000;
+const IFLNK = 0b1010;
+const IFSOCK = 0b1100;
+const IFMT = 0b1111;
+// mask to unset low 4 bits
+const IFMT_UNKNOWN = ~IFMT;
+// set after successfully calling readdir() and getting entries.
+const READDIR_CALLED = 0b0000_0001_0000;
+// set after a successful lstat()
+const LSTAT_CALLED = 0b0000_0010_0000;
+// set if an entry (or one of its parents) is definitely not a dir
+const ENOTDIR = 0b0000_0100_0000;
+// set if an entry (or one of its parents) does not exist
+// (can also be set on lstat errors like EACCES or ENAMETOOLONG)
+const ENOENT = 0b0000_1000_0000;
+// set if we fail to readlink
+const ENOREADLINK = 0b0001_0000_0000;
+// set if we know realpath() will fail
+const ENOREALPATH = 0b0010_0000_0000;
+// cannot have child entries -- also verify &IFMT is either IFDIR or IFLNK
+const ENOCHILD = ENOTDIR | ENOENT | ENOREALPATH;
+const TYPEMASK = 0b0011_1111_1111;
+const entToType = (s) => s.isFile() ? IFREG
+    : s.isDirectory() ? IFDIR
+        : s.isSymbolicLink() ? IFLNK
+            : s.isCharacterDevice() ? IFCHR
+                : s.isBlockDevice() ? IFBLK
+                    : s.isSocket() ? IFSOCK
+                        : s.isFIFO() ? IFIFO
+                            : UNKNOWN;
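+// Illustrative sketch (not part of upstream): the low four bits act as a
+// compacted S_IFMT, so type checks are a single mask-and-compare, e.g.
+//   ((IFDIR | READDIR_CALLED) & IFMT) === IFDIR  // flags don't disturb ifmt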
+// normalize unicode path names
+const normalizeCache = new Map();
+const normalize = (s) => {
+    const c = normalizeCache.get(s);
+    if (c)
+        return c;
+    const n = s.normalize('NFKD');
+    normalizeCache.set(s, n);
+    return n;
+};
+const normalizeNocaseCache = new Map();
+const normalizeNocase = (s) => {
+    const c = normalizeNocaseCache.get(s);
+    if (c)
+        return c;
+    const n = normalize(s.toLowerCase());
+    normalizeNocaseCache.set(s, n);
+    return n;
+};
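+// Illustrative sketch (not part of upstream): NFKD makes composed and
+// decomposed forms of the same name compare equal, e.g.
+//   normalize('\u00e9') === normalize('e\u0301')  // true ('é' both ways)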
+/**
+ * An LRUCache for storing resolved path strings or Path objects.
+ * @internal
+ */
+class ResolveCache extends lru_cache_1.LRUCache {
+    constructor() {
+        super({ max: 256 });
+    }
+}
+exports.ResolveCache = ResolveCache;
+// In order to prevent blowing out the js heap by allocating hundreds of
+// thousands of Path entries when walking extremely large trees, the "children"
+// in this tree are represented by storing an array of Path entries in an
+// LRUCache, indexed by the parent.  At any time, Path.children() may return an
+// empty array, indicating that it doesn't know about any of its children, and
+// thus has to rebuild that cache.  This is fine, it just means that we don't
+// benefit as much from having the cached entries, but huge directory walks
+// don't blow out the stack, and smaller ones are still as fast as possible.
+//
+// It does impose some complexity when building up the readdir data, because we
+// need to pass a reference to the children array that we started with.
+/**
+ * an LRUCache for storing child entries.
+ * @internal
+ */
+class ChildrenCache extends lru_cache_1.LRUCache {
+    constructor(maxSize = 16 * 1024) {
+        super({
+            maxSize,
+            // parent + children
+            sizeCalculation: a => a.length + 1,
+        });
+    }
+}
+exports.ChildrenCache = ChildrenCache;
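+// Illustrative sketch (not part of upstream): with the default maxSize of
+// 16 * 1024 and sizeCalculation = children + parent, a directory listing of
+// 1000 entries costs 1001 units, so roughly 16 such listings stay cached
+// before the LRU begins evicting.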
+const setAsCwd = Symbol('PathScurry setAsCwd');
+/**
+ * Path objects are sort of like a super-powered
+ * {@link https://nodejs.org/docs/latest/api/fs.html#class-fsdirent fs.Dirent}
+ *
+ * Each one represents a single filesystem entry on disk, which may or may not
+ * exist. It includes methods for reading various types of information via
+ * lstat, readlink, and readdir, and caches all information to the greatest
+ * degree possible.
+ *
+ * Note that fs operations that would normally throw will instead return an
+ * "empty" value. This is in order to prevent excessive overhead from error
+ * stack traces.
+ */
+class PathBase {
+    /**
+     * the basename of this path
+     *
+     * **Important**: *always* test the path name against any test string
+     * using the {@link isNamed} method, and not by directly comparing this
+     * string. Otherwise, unicode path strings that the system sees as identical
+     * will not be properly treated as the same path, leading to incorrect
+     * behavior and possible security issues.
+     */
+    name;
+    /**
+     * the Path entry corresponding to the path root.
+     *
+     * @internal
+     */
+    root;
+    /**
+     * All roots found within the current PathScurry family
+     *
+     * @internal
+     */
+    roots;
+    /**
+     * a reference to the parent path, or undefined in the case of root entries
+     *
+     * @internal
+     */
+    parent;
+    /**
+     * boolean indicating whether paths are compared case-insensitively
+     * @internal
+     */
+    nocase;
+    /**
+     * boolean indicating that this path is the current working directory
+     * of the PathScurry collection that contains it.
+     */
+    isCWD = false;
+    // potential default fs override
+    #fs;
+    // Stats fields
+    #dev;
+    get dev() {
+        return this.#dev;
+    }
+    #mode;
+    get mode() {
+        return this.#mode;
+    }
+    #nlink;
+    get nlink() {
+        return this.#nlink;
+    }
+    #uid;
+    get uid() {
+        return this.#uid;
+    }
+    #gid;
+    get gid() {
+        return this.#gid;
+    }
+    #rdev;
+    get rdev() {
+        return this.#rdev;
+    }
+    #blksize;
+    get blksize() {
+        return this.#blksize;
+    }
+    #ino;
+    get ino() {
+        return this.#ino;
+    }
+    #size;
+    get size() {
+        return this.#size;
+    }
+    #blocks;
+    get blocks() {
+        return this.#blocks;
+    }
+    #atimeMs;
+    get atimeMs() {
+        return this.#atimeMs;
+    }
+    #mtimeMs;
+    get mtimeMs() {
+        return this.#mtimeMs;
+    }
+    #ctimeMs;
+    get ctimeMs() {
+        return this.#ctimeMs;
+    }
+    #birthtimeMs;
+    get birthtimeMs() {
+        return this.#birthtimeMs;
+    }
+    #atime;
+    get atime() {
+        return this.#atime;
+    }
+    #mtime;
+    get mtime() {
+        return this.#mtime;
+    }
+    #ctime;
+    get ctime() {
+        return this.#ctime;
+    }
+    #birthtime;
+    get birthtime() {
+        return this.#birthtime;
+    }
+    #matchName;
+    #depth;
+    #fullpath;
+    #fullpathPosix;
+    #relative;
+    #relativePosix;
+    #type;
+    #children;
+    #linkTarget;
+    #realpath;
+    /**
+     * This property is for compatibility with the Dirent class as of
+     * Node v20, where Dirent['parentPath'] refers to the path of the
+     * directory that was passed to readdir. For root entries, it's the path
+     * to the entry itself.
+     */
+    get parentPath() {
+        return (this.parent || this).fullpath();
+    }
+    /**
+     * Deprecated alias for Dirent['parentPath']. Somewhat counterintuitively,
+     * this property refers to the *parent* path, not the path object itself.
+     */
+    get path() {
+        return this.parentPath;
+    }
+    /**
+     * Do not create new Path objects directly.  They should always be accessed
+     * via the PathScurry class or other methods on the Path class.
+     *
+     * @internal
+     */
+    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
+        this.name = name;
+        this.#matchName = nocase ? normalizeNocase(name) : normalize(name);
+        this.#type = type & TYPEMASK;
+        this.nocase = nocase;
+        this.roots = roots;
+        this.root = root || this;
+        this.#children = children;
+        this.#fullpath = opts.fullpath;
+        this.#relative = opts.relative;
+        this.#relativePosix = opts.relativePosix;
+        this.parent = opts.parent;
+        if (this.parent) {
+            this.#fs = this.parent.#fs;
+        }
+        else {
+            this.#fs = fsFromOption(opts.fs);
+        }
+    }
+    /**
+     * Returns the depth of the Path object from its root.
+     *
+     * For example, a path at `/foo/bar` would have a depth of 2.
+     */
+    depth() {
+        if (this.#depth !== undefined)
+            return this.#depth;
+        if (!this.parent)
+            return (this.#depth = 0);
+        return (this.#depth = this.parent.depth() + 1);
+    }
+    /**
+     * @internal
+     */
+    childrenCache() {
+        return this.#children;
+    }
+    /**
+     * Get the Path object referenced by the string path, resolved from this Path
+     */
+    resolve(path) {
+        if (!path) {
+            return this;
+        }
+        const rootPath = this.getRootString(path);
+        const dir = path.substring(rootPath.length);
+        const dirParts = dir.split(this.splitSep);
+        const result = rootPath ?
+            this.getRoot(rootPath).#resolveParts(dirParts)
+            : this.#resolveParts(dirParts);
+        return result;
+    }
+    #resolveParts(dirParts) {
+        let p = this;
+        for (const part of dirParts) {
+            p = p.child(part);
+        }
+        return p;
+    }
+    /**
+     * Returns the cached children Path objects, if still available.  If they
+     * have fallen out of the cache, then returns an empty array, and resets the
+     * READDIR_CALLED bit, so that future calls to readdir() will require an fs
+     * lookup.
+     *
+     * @internal
+     */
+    children() {
+        const cached = this.#children.get(this);
+        if (cached) {
+            return cached;
+        }
+        const children = Object.assign([], { provisional: 0 });
+        this.#children.set(this, children);
+        this.#type &= ~READDIR_CALLED;
+        return children;
+    }
+    /**
+     * Resolves a path portion and returns or creates the child Path.
+     *
+     * Returns `this` if pathPart is `''` or `'.'`, or `parent` if pathPart is
+     * `'..'`.
+     *
+     * This should not be called directly.  If `pathPart` contains any path
+     * separators, it will lead to unsafe undefined behavior.
+     *
+     * Use `Path.resolve()` instead.
+     *
+     * @internal
+     */
+    child(pathPart, opts) {
+        if (pathPart === '' || pathPart === '.') {
+            return this;
+        }
+        if (pathPart === '..') {
+            return this.parent || this;
+        }
+        // find the child
+        const children = this.children();
+        const name = this.nocase ? normalizeNocase(pathPart) : normalize(pathPart);
+        for (const p of children) {
+            if (p.#matchName === name) {
+                return p;
+            }
+        }
+        // didn't find it, create provisional child, since it might not
+        // actually exist.  If we know the parent isn't a dir, then
+        // in fact it CAN'T exist.
+        const s = this.parent ? this.sep : '';
+        const fullpath = this.#fullpath ? this.#fullpath + s + pathPart : undefined;
+        const pchild = this.newChild(pathPart, UNKNOWN, {
+            ...opts,
+            parent: this,
+            fullpath,
+        });
+        if (!this.canReaddir()) {
+            pchild.#type |= ENOENT;
+        }
+        // don't have to update provisional, because if we have real children,
+        // then provisional is set to children.length, otherwise a lower number
+        children.push(pchild);
+        return pchild;
+    }
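+    // Illustrative sketch (not part of upstream, hypothetical PathScurry
+    // instance `scurry`): resolving an unseen path creates provisional
+    // children, e.g.
+    //   scurry.cwd.resolve('a/b')  // provisional entries for 'a' and 'b'
+    // a later successful readdir() either promotes them to real entries or
+    // marks the leftovers ENOENT.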
+    /**
+     * The relative path from the cwd. If it does not share an ancestor with
+     * the cwd, then this ends up being equivalent to the fullpath()
+     */
+    relative() {
+        if (this.isCWD)
+            return '';
+        if (this.#relative !== undefined) {
+            return this.#relative;
+        }
+        const name = this.name;
+        const p = this.parent;
+        if (!p) {
+            return (this.#relative = this.name);
+        }
+        const pv = p.relative();
+        return pv + (!pv || !p.parent ? '' : this.sep) + name;
+    }
+    /**
+     * The relative path from the cwd, using / as the path separator.
+     * If it does not share an ancestor with
+     * the cwd, then this ends up being equivalent to the fullpathPosix()
+     * On posix systems, this is identical to relative().
+     */
+    relativePosix() {
+        if (this.sep === '/')
+            return this.relative();
+        if (this.isCWD)
+            return '';
+        if (this.#relativePosix !== undefined)
+            return this.#relativePosix;
+        const name = this.name;
+        const p = this.parent;
+        if (!p) {
+            return (this.#relativePosix = this.fullpathPosix());
+        }
+        const pv = p.relativePosix();
+        return pv + (!pv || !p.parent ? '' : '/') + name;
+    }
+    /**
+     * The fully resolved path string for this Path entry
+     */
+    fullpath() {
+        if (this.#fullpath !== undefined) {
+            return this.#fullpath;
+        }
+        const name = this.name;
+        const p = this.parent;
+        if (!p) {
+            return (this.#fullpath = this.name);
+        }
+        const pv = p.fullpath();
+        const fp = pv + (!p.parent ? '' : this.sep) + name;
+        return (this.#fullpath = fp);
+    }
+    /**
+     * On platforms other than windows, this is identical to fullpath.
+     *
+     * On windows, this is overridden to return the forward-slash form of the
+     * full UNC path.
+     */
+    fullpathPosix() {
+        if (this.#fullpathPosix !== undefined)
+            return this.#fullpathPosix;
+        if (this.sep === '/')
+            return (this.#fullpathPosix = this.fullpath());
+        if (!this.parent) {
+            const p = this.fullpath().replace(/\\/g, '/');
+            if (/^[a-z]:\//i.test(p)) {
+                return (this.#fullpathPosix = `//?/${p}`);
+            }
+            else {
+                return (this.#fullpathPosix = p);
+            }
+        }
+        const p = this.parent;
+        const pfpp = p.fullpathPosix();
+        const fpp = pfpp + (!pfpp || !p.parent ? '' : '/') + this.name;
+        return (this.#fullpathPosix = fpp);
+    }
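+    // Illustrative sketch (not part of upstream), on a Windows root:
+    //   fullpath()      // 'C:\\Users\\me\\project'
+    //   fullpathPosix() // '//?/C:/Users/me/project'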
+    /**
+     * Is the Path of an unknown type?
+     *
+     * Note that we might know *something* about it if there has been a previous
+     * filesystem operation, for example that it does not exist, or is not a
+     * link, or whether it has child entries.
+     */
+    isUnknown() {
+        return (this.#type & IFMT) === UNKNOWN;
+    }
+    isType(type) {
+        return this[`is${type}`]();
+    }
+    getType() {
+        return (this.isUnknown() ? 'Unknown'
+            : this.isDirectory() ? 'Directory'
+                : this.isFile() ? 'File'
+                    : this.isSymbolicLink() ? 'SymbolicLink'
+                        : this.isFIFO() ? 'FIFO'
+                            : this.isCharacterDevice() ? 'CharacterDevice'
+                                : this.isBlockDevice() ? 'BlockDevice'
+                                    : /* c8 ignore start */ this.isSocket() ? 'Socket'
+                                        : 'Unknown');
+        /* c8 ignore stop */
+    }
+    /**
+     * Is the Path a regular file?
+     */
+    isFile() {
+        return (this.#type & IFMT) === IFREG;
+    }
+    /**
+     * Is the Path a directory?
+     */
+    isDirectory() {
+        return (this.#type & IFMT) === IFDIR;
+    }
+    /**
+     * Is the path a character device?
+     */
+    isCharacterDevice() {
+        return (this.#type & IFMT) === IFCHR;
+    }
+    /**
+     * Is the path a block device?
+     */
+    isBlockDevice() {
+        return (this.#type & IFMT) === IFBLK;
+    }
+    /**
+     * Is the path a FIFO pipe?
+     */
+    isFIFO() {
+        return (this.#type & IFMT) === IFIFO;
+    }
+    /**
+     * Is the path a socket?
+     */
+    isSocket() {
+        return (this.#type & IFMT) === IFSOCK;
+    }
+    /**
+     * Is the path a symbolic link?
+     */
+    isSymbolicLink() {
+        return (this.#type & IFLNK) === IFLNK;
+    }
+    /**
+     * Return the entry if it has been subject of a successful lstat, or
+     * undefined otherwise.
+     *
+     * Does not read the filesystem, so an undefined result *could* simply
+     * mean that we haven't called lstat on it.
+     */
+    lstatCached() {
+        return this.#type & LSTAT_CALLED ? this : undefined;
+    }
+    /**
+     * Return the cached link target if the entry has been the subject of a
+     * successful readlink, or undefined otherwise.
+     *
+     * Does not read the filesystem, so an undefined result *could* just mean we
+     * don't have any cached data. Only use it if you are very sure that a
+     * readlink() has been called at some point.
+     */
+    readlinkCached() {
+        return this.#linkTarget;
+    }
+    /**
+     * Returns the cached realpath target if the entry has been the subject
+     * of a successful realpath, or undefined otherwise.
+     *
+     * Does not read the filesystem, so an undefined result *could* just mean we
+     * don't have any cached data. Only use it if you are very sure that a
+     * realpath() has been called at some point.
+     */
+    realpathCached() {
+        return this.#realpath;
+    }
+    /**
+     * Returns the cached child Path entries array if the entry has been the
+     * subject of a successful readdir(), or [] otherwise.
+     *
+     * Does not read the filesystem, so an empty array *could* just mean we
+     * don't have any cached data. Only use it if you are very sure that a
+     * readdir() has been called recently enough to still be valid.
+     */
+    readdirCached() {
+        const children = this.children();
+        return children.slice(0, children.provisional);
+    }
+    /**
+     * Return true if it's worth trying to readlink.  Ie, we don't (yet) have
+     * any indication that readlink will definitely fail.
+     *
+     * Returns false if the path is known to not be a symlink, if a previous
+     * readlink failed, or if the entry does not exist.
+     */
+    canReadlink() {
+        if (this.#linkTarget)
+            return true;
+        if (!this.parent)
+            return false;
+        // cases where it cannot possibly succeed
+        const ifmt = this.#type & IFMT;
+        return !((ifmt !== UNKNOWN && ifmt !== IFLNK) ||
+            this.#type & ENOREADLINK ||
+            this.#type & ENOENT);
+    }
+    /**
+     * Return true if readdir has previously been successfully called on this
+     * path, indicating that cachedReaddir() is likely valid.
+     */
+    calledReaddir() {
+        return !!(this.#type & READDIR_CALLED);
+    }
+    /**
+     * Returns true if the path is known to not exist. That is, a previous lstat
+     * or readdir failed to verify its existence when that would have been
+     * expected, or a parent entry was marked either enoent or enotdir.
+     */
+    isENOENT() {
+        return !!(this.#type & ENOENT);
+    }
+    /**
+     * Return true if the path is a match for the given path name.  This handles
+     * case sensitivity and unicode normalization.
+     *
+     * Note: even on case-sensitive systems, it is **not** safe to test the
+     * equality of the `.name` property to determine whether a given pathname
+     * matches, due to unicode normalization mismatches.
+     *
+     * Always use this method instead of testing the `path.name` property
+     * directly.
+     */
+    isNamed(n) {
+        return !this.nocase ?
+            this.#matchName === normalize(n)
+            : this.#matchName === normalizeNocase(n);
+    }
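+    // Illustrative sketch (not part of upstream): prefer
+    //   if (entry.isNamed('caf\u00e9')) { ... }
+    // over entry.name === 'caf\u00e9', which misses a name stored on disk in
+    // the decomposed form 'cafe\u0301'.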
+    /**
+     * Return the Path object corresponding to the target of a symbolic link.
+     *
+     * If the Path is not a symbolic link, or if the readlink call fails for any
+     * reason, `undefined` is returned.
+     *
+     * Result is cached, and thus may be outdated if the filesystem is mutated.
+     */
+    async readlink() {
+        const target = this.#linkTarget;
+        if (target) {
+            return target;
+        }
+        if (!this.canReadlink()) {
+            return undefined;
+        }
+        /* c8 ignore start */
+        // already covered by the canReadlink test, here for ts grumples
+        if (!this.parent) {
+            return undefined;
+        }
+        /* c8 ignore stop */
+        try {
+            const read = await this.#fs.promises.readlink(this.fullpath());
+            const linkTarget = (await this.parent.realpath())?.resolve(read);
+            if (linkTarget) {
+                return (this.#linkTarget = linkTarget);
+            }
+        }
+        catch (er) {
+            this.#readlinkFail(er.code);
+            return undefined;
+        }
+    }
+    /**
+     * Synchronous {@link PathBase.readlink}
+     */
+    readlinkSync() {
+        const target = this.#linkTarget;
+        if (target) {
+            return target;
+        }
+        if (!this.canReadlink()) {
+            return undefined;
+        }
+        /* c8 ignore start */
+        // already covered by the canReadlink test, here for ts grumples
+        if (!this.parent) {
+            return undefined;
+        }
+        /* c8 ignore stop */
+        try {
+            const read = this.#fs.readlinkSync(this.fullpath());
+            const linkTarget = this.parent.realpathSync()?.resolve(read);
+            if (linkTarget) {
+                return (this.#linkTarget = linkTarget);
+            }
+        }
+        catch (er) {
+            this.#readlinkFail(er.code);
+            return undefined;
+        }
+    }
+    #readdirSuccess(children) {
+        // succeeded, mark readdir called bit
+        this.#type |= READDIR_CALLED;
+        // mark all remaining provisional children as ENOENT
+        for (let p = children.provisional; p < children.length; p++) {
+            const c = children[p];
+            if (c)
+                c.#markENOENT();
+        }
+    }
+    #markENOENT() {
+        // mark as UNKNOWN and ENOENT
+        if (this.#type & ENOENT)
+            return;
+        this.#type = (this.#type | ENOENT) & IFMT_UNKNOWN;
+        this.#markChildrenENOENT();
+    }
+    #markChildrenENOENT() {
+        // all children are provisional and do not exist
+        const children = this.children();
+        children.provisional = 0;
+        for (const p of children) {
+            p.#markENOENT();
+        }
+    }
+    #markENOREALPATH() {
+        this.#type |= ENOREALPATH;
+        this.#markENOTDIR();
+    }
+    // save the information when we know the entry is not a dir
+    #markENOTDIR() {
+        // entry is not a directory, so any children can't exist.
+        // this *should* be impossible, since any children created
+        // after it's been marked ENOTDIR should be marked ENOENT,
+        // so it won't even get to this point.
+        /* c8 ignore start */
+        if (this.#type & ENOTDIR)
+            return;
+        /* c8 ignore stop */
+        let t = this.#type;
+        // this could happen if we stat a dir, then delete it,
+        // then try to read it or one of its children.
+        if ((t & IFMT) === IFDIR)
+            t &= IFMT_UNKNOWN;
+        this.#type = t | ENOTDIR;
+        this.#markChildrenENOENT();
+    }
+    #readdirFail(code = '') {
+        // markENOTDIR and markENOENT also set provisional=0
+        if (code === 'ENOTDIR' || code === 'EPERM') {
+            this.#markENOTDIR();
+        }
+        else if (code === 'ENOENT') {
+            this.#markENOENT();
+        }
+        else {
+            this.children().provisional = 0;
+        }
+    }
+    #lstatFail(code = '') {
+        // Windows just raises ENOENT in this case, disable for win CI
+        /* c8 ignore start */
+        if (code === 'ENOTDIR') {
+            // already know it has a parent by this point
+            const p = this.parent;
+            p.#markENOTDIR();
+        }
+        else if (code === 'ENOENT') {
+            /* c8 ignore stop */
+            this.#markENOENT();
+        }
+    }
+    #readlinkFail(code = '') {
+        let ter = this.#type;
+        ter |= ENOREADLINK;
+        if (code === 'ENOENT')
+            ter |= ENOENT;
+        // windows gets a weird error when you try to readlink a file
+        if (code === 'EINVAL' || code === 'UNKNOWN') {
+            // exists, but not a symlink, we don't know WHAT it is, so remove
+            // all IFMT bits.
+            ter &= IFMT_UNKNOWN;
+        }
+        this.#type = ter;
+        // windows just gets ENOENT in this case.  We do cover the case,
+        // just disabled because it's impossible on Windows CI
+        /* c8 ignore start */
+        if (code === 'ENOTDIR' && this.parent) {
+            this.parent.#markENOTDIR();
+        }
+        /* c8 ignore stop */
+    }
+    #readdirAddChild(e, c) {
+        return (this.#readdirMaybePromoteChild(e, c) ||
+            this.#readdirAddNewChild(e, c));
+    }
+    #readdirAddNewChild(e, c) {
+        // alloc new entry at head, so it's never provisional
+        const type = entToType(e);
+        const child = this.newChild(e.name, type, { parent: this });
+        const ifmt = child.#type & IFMT;
+        if (ifmt !== IFDIR && ifmt !== IFLNK && ifmt !== UNKNOWN) {
+            child.#type |= ENOTDIR;
+        }
+        c.unshift(child);
+        c.provisional++;
+        return child;
+    }
+    #readdirMaybePromoteChild(e, c) {
+        for (let p = c.provisional; p < c.length; p++) {
+            const pchild = c[p];
+            const name = this.nocase ? normalizeNocase(e.name) : normalize(e.name);
+            if (name !== pchild.#matchName) {
+                continue;
+            }
+            return this.#readdirPromoteChild(e, pchild, p, c);
+        }
+    }
+    #readdirPromoteChild(e, p, index, c) {
+        const v = p.name;
+        // retain any other flags, but set ifmt from dirent
+        p.#type = (p.#type & IFMT_UNKNOWN) | entToType(e);
+        // case sensitivity fixing when we learn the true name.
+        if (v !== e.name)
+            p.name = e.name;
+        // just advance provisional index (potentially off the list),
+        // otherwise we have to splice/pop it out and re-insert at head
+        if (index !== c.provisional) {
+            if (index === c.length - 1)
+                c.pop();
+            else
+                c.splice(index, 1);
+            c.unshift(p);
+        }
+        c.provisional++;
+        return p;
+    }
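+    // Illustrative sketch (not part of upstream): with children
+    //   [realA, realB, provC, provD] and provisional === 2,
+    // promoting provD pops it and unshifts it to the head:
+    //   [provD, realA, realB, provC] with provisional === 3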
+    /**
+     * Call lstat() on this Path, and update all known information that can be
+     * determined.
+     *
+     * Note that unlike `fs.lstat()`, the returned value does not contain some
+     * information, such as `mode`, `dev`, `nlink`, and `ino`.  If that
+     * information is required, you will need to call `fs.lstat` yourself.
+     *
+     * If the Path refers to a nonexistent file, or if the lstat call fails for
+     * any reason, `undefined` is returned.  Otherwise the updated Path object is
+     * returned.
+     *
+     * Results are cached, and thus may be out of date if the filesystem is
+     * mutated.
+     */
+    async lstat() {
+        if ((this.#type & ENOENT) === 0) {
+            try {
+                this.#applyStat(await this.#fs.promises.lstat(this.fullpath()));
+                return this;
+            }
+            catch (er) {
+                this.#lstatFail(er.code);
+            }
+        }
+    }
+    /**
+     * synchronous {@link PathBase.lstat}
+     */
+    lstatSync() {
+        if ((this.#type & ENOENT) === 0) {
+            try {
+                this.#applyStat(this.#fs.lstatSync(this.fullpath()));
+                return this;
+            }
+            catch (er) {
+                this.#lstatFail(er.code);
+            }
+        }
+    }
+    #applyStat(st) {
+        const { atime, atimeMs, birthtime, birthtimeMs, blksize, blocks, ctime, ctimeMs, dev, gid, ino, mode, mtime, mtimeMs, nlink, rdev, size, uid, } = st;
+        this.#atime = atime;
+        this.#atimeMs = atimeMs;
+        this.#birthtime = birthtime;
+        this.#birthtimeMs = birthtimeMs;
+        this.#blksize = blksize;
+        this.#blocks = blocks;
+        this.#ctime = ctime;
+        this.#ctimeMs = ctimeMs;
+        this.#dev = dev;
+        this.#gid = gid;
+        this.#ino = ino;
+        this.#mode = mode;
+        this.#mtime = mtime;
+        this.#mtimeMs = mtimeMs;
+        this.#nlink = nlink;
+        this.#rdev = rdev;
+        this.#size = size;
+        this.#uid = uid;
+        const ifmt = entToType(st);
+        // retain any other flags, but set the ifmt
+        this.#type = (this.#type & IFMT_UNKNOWN) | ifmt | LSTAT_CALLED;
+        if (ifmt !== UNKNOWN && ifmt !== IFDIR && ifmt !== IFLNK) {
+            this.#type |= ENOTDIR;
+        }
+    }
+    #onReaddirCB = [];
+    #readdirCBInFlight = false;
+    #callOnReaddirCB(children) {
+        this.#readdirCBInFlight = false;
+        const cbs = this.#onReaddirCB.slice();
+        this.#onReaddirCB.length = 0;
+        cbs.forEach(cb => cb(null, children));
+    }
+    /**
+     * Standard node-style callback interface to get list of directory entries.
+     *
+     * If the Path cannot or does not contain any children, then an empty array
+     * is returned.
+     *
+     * Results are cached, and thus may be out of date if the filesystem is
+     * mutated.
+     *
+     * @param cb The callback called with (er, entries).  Note that the `er`
+     * param is somewhat extraneous, as all readdir() errors are handled and
+     * simply result in an empty set of entries being returned.
+     * @param allowZalgo Boolean indicating that immediately known results should
+     * *not* be deferred with `queueMicrotask`. Defaults to `false`. Release
+     * zalgo at your peril, the dark pony lord is devious and unforgiving.
+     */
+    readdirCB(cb, allowZalgo = false) {
+        if (!this.canReaddir()) {
+            if (allowZalgo)
+                cb(null, []);
+            else
+                queueMicrotask(() => cb(null, []));
+            return;
+        }
+        const children = this.children();
+        if (this.calledReaddir()) {
+            const c = children.slice(0, children.provisional);
+            if (allowZalgo)
+                cb(null, c);
+            else
+                queueMicrotask(() => cb(null, c));
+            return;
+        }
+        // don't have to worry about zalgo at this point.
+        this.#onReaddirCB.push(cb);
+        if (this.#readdirCBInFlight) {
+            return;
+        }
+        this.#readdirCBInFlight = true;
+        // else read the directory, fill up children
+        // de-provisionalize any provisional children.
+        const fullpath = this.fullpath();
+        this.#fs.readdir(fullpath, { withFileTypes: true }, (er, entries) => {
+            if (er) {
+                this.#readdirFail(er.code);
+                children.provisional = 0;
+            }
+            else {
+                // if we didn't get an error, we always get entries.
+                //@ts-ignore
+                for (const e of entries) {
+                    this.#readdirAddChild(e, children);
+                }
+                this.#readdirSuccess(children);
+            }
+            this.#callOnReaddirCB(children.slice(0, children.provisional));
+            return;
+        });
+    }
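+    // Illustrative sketch (not part of upstream): with the default
+    // allowZalgo=false, even a fully cached result is delivered in a
+    // microtask, so callers always observe async ordering:
+    //   entry.readdirCB((er, entries) => { /* runs after the current task */ });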
+    #asyncReaddirInFlight;
+    /**
+     * Return an array of known child entries.
+     *
+     * If the Path cannot or does not contain any children, then an empty array
+     * is returned.
+     *
+     * Results are cached, and thus may be out of date if the filesystem is
+     * mutated.
+     */
+    async readdir() {
+        if (!this.canReaddir()) {
+            return [];
+        }
+        const children = this.children();
+        if (this.calledReaddir()) {
+            return children.slice(0, children.provisional);
+        }
+        // else read the directory, fill up children
+        // de-provisionalize any provisional children.
+        const fullpath = this.fullpath();
+        if (this.#asyncReaddirInFlight) {
+            await this.#asyncReaddirInFlight;
+        }
+        else {
+            /* c8 ignore start */
+            let resolve = () => { };
+            /* c8 ignore stop */
+            this.#asyncReaddirInFlight = new Promise(res => (resolve = res));
+            try {
+                for (const e of await this.#fs.promises.readdir(fullpath, {
+                    withFileTypes: true,
+                })) {
+                    this.#readdirAddChild(e, children);
+                }
+                this.#readdirSuccess(children);
+            }
+            catch (er) {
+                this.#readdirFail(er.code);
+                children.provisional = 0;
+            }
+            this.#asyncReaddirInFlight = undefined;
+            resolve();
+        }
+        return children.slice(0, children.provisional);
+    }
+    /**
+     * synchronous {@link PathBase.readdir}
+     */
+    readdirSync() {
+        if (!this.canReaddir()) {
+            return [];
+        }
+        const children = this.children();
+        if (this.calledReaddir()) {
+            return children.slice(0, children.provisional);
+        }
+        // else read the directory, fill up children
+        // de-provisionalize any provisional children.
+        const fullpath = this.fullpath();
+        try {
+            for (const e of this.#fs.readdirSync(fullpath, {
+                withFileTypes: true,
+            })) {
+                this.#readdirAddChild(e, children);
+            }
+            this.#readdirSuccess(children);
+        }
+        catch (er) {
+            this.#readdirFail(er.code);
+            children.provisional = 0;
+        }
+        return children.slice(0, children.provisional);
+    }
+    canReaddir() {
+        if (this.#type & ENOCHILD)
+            return false;
+        const ifmt = IFMT & this.#type;
+        // we always set ENOTDIR when setting IFMT, so should be impossible
+        /* c8 ignore start */
+        if (!(ifmt === UNKNOWN || ifmt === IFDIR || ifmt === IFLNK)) {
+            return false;
+        }
+        /* c8 ignore stop */
+        return true;
+    }
+    shouldWalk(dirs, walkFilter) {
+        return ((this.#type & IFDIR) === IFDIR &&
+            !(this.#type & ENOCHILD) &&
+            !dirs.has(this) &&
+            (!walkFilter || walkFilter(this)));
+    }
+    /**
+     * Return the Path object corresponding to path as resolved
+     * by realpath(3).
+     *
+     * If the realpath call fails for any reason, `undefined` is returned.
+     *
+     * Result is cached, and thus may be outdated if the filesystem is mutated.
+     * On success, returns a Path object.
+     */
+    async realpath() {
+        if (this.#realpath)
+            return this.#realpath;
+        if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
+            return undefined;
+        try {
+            const rp = await this.#fs.promises.realpath(this.fullpath());
+            return (this.#realpath = this.resolve(rp));
+        }
+        catch (_) {
+            this.#markENOREALPATH();
+        }
+    }
+    /**
+     * Synchronous {@link realpath}
+     */
+    realpathSync() {
+        if (this.#realpath)
+            return this.#realpath;
+        if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
+            return undefined;
+        try {
+            const rp = this.#fs.realpathSync(this.fullpath());
+            return (this.#realpath = this.resolve(rp));
+        }
+        catch (_) {
+            this.#markENOREALPATH();
+        }
+    }
+    /**
+     * Internal method to mark this Path object as the scurry cwd,
+     * called by {@link PathScurry#chdir}
+     *
+     * @internal
+     */
+    [setAsCwd](oldCwd) {
+        if (oldCwd === this)
+            return;
+        oldCwd.isCWD = false;
+        this.isCWD = true;
+        const changed = new Set([]);
+        let rp = [];
+        let p = this;
+        while (p && p.parent) {
+            changed.add(p);
+            p.#relative = rp.join(this.sep);
+            p.#relativePosix = rp.join('/');
+            p = p.parent;
+            rp.push('..');
+        }
+        // now un-memoize parents of old cwd
+        p = oldCwd;
+        while (p && p.parent && !changed.has(p)) {
+            p.#relative = undefined;
+            p.#relativePosix = undefined;
+            p = p.parent;
+        }
+    }
+}
+exports.PathBase = PathBase;
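+// Illustrative usage sketch (not part of upstream; assumes a hypothetical
+// PathScurry instance `scurry` as constructed further below):
+//   const entry = scurry.cwd.resolve('src/index.js');
+//   const st = await entry.lstat();  // undefined if the entry doesn't exist
+//   if (st?.isFile()) console.log(entry.fullpath());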
+/**
+ * Path class used on win32 systems
+ *
+ * Uses `'\\'` as the path separator for returned paths, either `'\\'` or `'/'`
+ * as the path separator for parsing paths.
+ */
+class PathWin32 extends PathBase {
+    /**
+     * Separator for generating path strings.
+     */
+    sep = '\\';
+    /**
+     * Separator for parsing path strings.
+     */
+    splitSep = eitherSep;
+    /**
+     * Do not create new Path objects directly.  They should always be accessed
+     * via the PathScurry class or other methods on the Path class.
+     *
+     * @internal
+     */
+    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
+        super(name, type, root, roots, nocase, children, opts);
+    }
+    /**
+     * @internal
+     */
+    newChild(name, type = UNKNOWN, opts = {}) {
+        return new PathWin32(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
+    }
+    /**
+     * @internal
+     */
+    getRootString(path) {
+        return node_path_1.win32.parse(path).root;
+    }
+    /**
+     * @internal
+     */
+    getRoot(rootPath) {
+        rootPath = uncToDrive(rootPath.toUpperCase());
+        if (rootPath === this.root.name) {
+            return this.root;
+        }
+        // ok, not that one, check if it matches another we know about
+        for (const [compare, root] of Object.entries(this.roots)) {
+            if (this.sameRoot(rootPath, compare)) {
+                return (this.roots[rootPath] = root);
+            }
+        }
+        // otherwise, have to create a new one.
+        return (this.roots[rootPath] = new PathScurryWin32(rootPath, this).root);
+    }
+    /**
+     * @internal
+     */
+    sameRoot(rootPath, compare = this.root.name) {
+        // windows can (rarely) have case-sensitive filesystem, but
+        // UNC and drive letters are always case-insensitive, and canonically
+        // represented uppercase.
+        rootPath = rootPath
+            .toUpperCase()
+            .replace(/\//g, '\\')
+            .replace(uncDriveRegexp, '$1\\');
+        return rootPath === compare;
+    }
+}
+exports.PathWin32 = PathWin32;
+/**
+ * Path class used on all posix systems.
+ *
+ * Uses `'/'` as the path separator.
+ */
+class PathPosix extends PathBase {
+    /**
+     * separator for parsing path strings
+     */
+    splitSep = '/';
+    /**
+     * separator for generating path strings
+     */
+    sep = '/';
+    /**
+     * Do not create new Path objects directly.  They should always be accessed
+     * via the PathScurry class or other methods on the Path class.
+     *
+     * @internal
+     */
+    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
+        super(name, type, root, roots, nocase, children, opts);
+    }
+    /**
+     * @internal
+     */
+    getRootString(path) {
+        return path.startsWith('/') ? '/' : '';
+    }
+    /**
+     * @internal
+     */
+    getRoot(_rootPath) {
+        return this.root;
+    }
+    /**
+     * @internal
+     */
+    newChild(name, type = UNKNOWN, opts = {}) {
+        return new PathPosix(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
+    }
+}
+exports.PathPosix = PathPosix;
+/**
+ * The base class for all PathScurry classes, providing the interface for path
+ * resolution and filesystem operations.
+ *
+ * Typically, you should *not* instantiate this class directly, but rather one
+ * of the platform-specific classes, or the exported {@link PathScurry} which
+ * defaults to the current platform.
+ */
+class PathScurryBase {
+    /**
+     * The root Path entry for the current working directory of this Scurry
+     */
+    root;
+    /**
+     * The string path for the root of this Scurry's current working directory
+     */
+    rootPath;
+    /**
+     * A collection of all roots encountered, referenced by rootPath
+     */
+    roots;
+    /**
+     * The Path entry corresponding to this PathScurry's current working directory.
+     */
+    cwd;
+    #resolveCache;
+    #resolvePosixCache;
+    #children;
+    /**
+     * Perform path comparisons case-insensitively.
+     *
+     * Defaults true on Darwin and Windows systems, false elsewhere.
+     */
+    nocase;
+    #fs;
+    /**
+     * This class should not be instantiated directly.
+     *
+     * Use PathScurryWin32, PathScurryDarwin, PathScurryPosix, or PathScurry
+     *
+     * @internal
+     */
+    constructor(cwd = process.cwd(), pathImpl, sep, { nocase, childrenCacheSize = 16 * 1024, fs = defaultFS, } = {}) {
+        this.#fs = fsFromOption(fs);
+        if (cwd instanceof URL || cwd.startsWith('file://')) {
+            cwd = (0, node_url_1.fileURLToPath)(cwd);
+        }
+        // resolve and split root, and then add to the store.
+        // this is the only time we call path.resolve()
+        const cwdPath = pathImpl.resolve(cwd);
+        this.roots = Object.create(null);
+        this.rootPath = this.parseRootPath(cwdPath);
+        this.#resolveCache = new ResolveCache();
+        this.#resolvePosixCache = new ResolveCache();
+        this.#children = new ChildrenCache(childrenCacheSize);
+        const split = cwdPath.substring(this.rootPath.length).split(sep);
+        // resolve('/') leaves '', splits to [''], we don't want that.
+        if (split.length === 1 && !split[0]) {
+            split.pop();
+        }
+        /* c8 ignore start */
+        if (nocase === undefined) {
+            throw new TypeError('must provide nocase setting to PathScurryBase ctor');
+        }
+        /* c8 ignore stop */
+        this.nocase = nocase;
+        this.root = this.newRoot(this.#fs);
+        this.roots[this.rootPath] = this.root;
+        let prev = this.root;
+        let len = split.length - 1;
+        const joinSep = pathImpl.sep;
+        let abs = this.rootPath;
+        let sawFirst = false;
+        for (const part of split) {
+            const l = len--;
+            prev = prev.child(part, {
+                relative: new Array(l).fill('..').join(joinSep),
+                relativePosix: new Array(l).fill('..').join('/'),
+                fullpath: (abs += (sawFirst ? '' : joinSep) + part),
+            });
+            sawFirst = true;
+        }
+        this.cwd = prev;
+    }
+    /**
+     * Get the depth of a provided path, string, or the cwd
+     */
+    depth(path = this.cwd) {
+        if (typeof path === 'string') {
+            path = this.cwd.resolve(path);
+        }
+        return path.depth();
+    }
+    /**
+     * Return the cache of child entries.  Exposed so subclasses can create
+     * child Path objects in a platform-specific way.
+     *
+     * @internal
+     */
+    childrenCache() {
+        return this.#children;
+    }
+    /**
+     * Resolve one or more path strings to a resolved string
+     *
+     * Same interface as require('path').resolve.
+     *
+     * Much faster than path.resolve() when called multiple times for the same
+     * path, because the resolved Path objects are cached.  Much slower
+     * otherwise.
+     */
+    resolve(...paths) {
+        // first figure out the minimum number of paths we have to test
+        // we always start at cwd, but any absolutes will bump the start
+        let r = '';
+        for (let i = paths.length - 1; i >= 0; i--) {
+            const p = paths[i];
+            if (!p || p === '.')
+                continue;
+            r = r ? `${p}/${r}` : p;
+            if (this.isAbsolute(p)) {
+                break;
+            }
+        }
+        const cached = this.#resolveCache.get(r);
+        if (cached !== undefined) {
+            return cached;
+        }
+        const result = this.cwd.resolve(r).fullpath();
+        this.#resolveCache.set(r, result);
+        return result;
+    }
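+    // Illustrative sketch (not part of upstream, hypothetical `scurry`):
+    //   scurry.resolve('a', '/b', 'c')  // 'a' is dropped: '/b' is absolute,
+    //                                   // so the cache key becomes '/b/c'
+    //   scurry.resolve('a', '/b', 'c')  // second call returns the cached string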
+    /**
+     * Resolve one or more path strings to a resolved string, returning
+     * the posix path.  Identical to .resolve() on posix systems, but on
+     * windows will return a forward-slash separated UNC path.
+     *
+     * Same interface as require('path').resolve.
+     *
+     * Much faster than path.resolve() when called multiple times for the same
+     * path, because the resolved Path objects are cached.  Much slower
+     * otherwise.
+     */
+    resolvePosix(...paths) {
+        // first figure out the minimum number of paths we have to test
+        // we always start at cwd, but any absolutes will bump the start
+        let r = '';
+        for (let i = paths.length - 1; i >= 0; i--) {
+            const p = paths[i];
+            if (!p || p === '.')
+                continue;
+            r = r ? `${p}/${r}` : p;
+            if (this.isAbsolute(p)) {
+                break;
+            }
+        }
+        const cached = this.#resolvePosixCache.get(r);
+        if (cached !== undefined) {
+            return cached;
+        }
+        const result = this.cwd.resolve(r).fullpathPosix();
+        this.#resolvePosixCache.set(r, result);
+        return result;
+    }
+    /**
+     * find the relative path from the cwd to the supplied path string or entry
+     */
+    relative(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.relative();
+    }
+    /**
+     * find the relative path from the cwd to the supplied path string or
+     * entry, using / as the path delimiter, even on Windows.
+     */
+    relativePosix(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.relativePosix();
+    }
+    /**
+     * Return the basename for the provided string or Path object
+     */
+    basename(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.name;
+    }
+    /**
+     * Return the dirname for the provided string or Path object
+     */
+    dirname(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return (entry.parent || entry).fullpath();
+    }
+    async readdir(entry = this.cwd, opts = {
+        withFileTypes: true,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes } = opts;
+        if (!entry.canReaddir()) {
+            return [];
+        }
+        else {
+            const p = await entry.readdir();
+            return withFileTypes ? p : p.map(e => e.name);
+        }
+    }
+    readdirSync(entry = this.cwd, opts = {
+        withFileTypes: true,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true } = opts;
+        if (!entry.canReaddir()) {
+            return [];
+        }
+        else if (withFileTypes) {
+            return entry.readdirSync();
+        }
+        else {
+            return entry.readdirSync().map(e => e.name);
+        }
+    }
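+    // Illustrative sketch (not part of upstream, hypothetical `scurry`):
+    //   scurry.readdirSync('src')                            // Path entries
+    //   scurry.readdirSync('src', { withFileTypes: false })  // name strings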
+    /**
+     * Call lstat() on the string or Path object, and update all known
+     * information that can be determined.
+     *
+     * Note that unlike `fs.lstat()`, the returned value does not contain some
+     * information, such as `mode`, `dev`, `nlink`, and `ino`.  If that
+     * information is required, you will need to call `fs.lstat` yourself.
+     *
+     * If the Path refers to a nonexistent file, or if the lstat call fails for
+     * any reason, `undefined` is returned.  Otherwise the updated Path object is
+     * returned.
+     *
+     * Results are cached, and thus may be out of date if the filesystem is
+     * mutated.
+     */
+    async lstat(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.lstat();
+    }
+    /**
+     * synchronous {@link PathScurryBase.lstat}
+     */
+    lstatSync(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.lstatSync();
+    }
+    async readlink(entry = this.cwd, { withFileTypes } = {
+        withFileTypes: false,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            withFileTypes = entry.withFileTypes;
+            entry = this.cwd;
+        }
+        const e = await entry.readlink();
+        return withFileTypes ? e : e?.fullpath();
+    }
+    readlinkSync(entry = this.cwd, { withFileTypes } = {
+        withFileTypes: false,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            withFileTypes = entry.withFileTypes;
+            entry = this.cwd;
+        }
+        const e = entry.readlinkSync();
+        return withFileTypes ? e : e?.fullpath();
+    }
+    async realpath(entry = this.cwd, { withFileTypes } = {
+        withFileTypes: false,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            withFileTypes = entry.withFileTypes;
+            entry = this.cwd;
+        }
+        const e = await entry.realpath();
+        return withFileTypes ? e : e?.fullpath();
+    }
+    realpathSync(entry = this.cwd, { withFileTypes } = {
+        withFileTypes: false,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            withFileTypes = entry.withFileTypes;
+            entry = this.cwd;
+        }
+        const e = entry.realpathSync();
+        return withFileTypes ? e : e?.fullpath();
+    }
+    async walk(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        const results = [];
+        if (!filter || filter(entry)) {
+            results.push(withFileTypes ? entry : entry.fullpath());
+        }
+        const dirs = new Set();
+        const walk = (dir, cb) => {
+            dirs.add(dir);
+            dir.readdirCB((er, entries) => {
+                /* c8 ignore start */
+                if (er) {
+                    return cb(er);
+                }
+                /* c8 ignore stop */
+                let len = entries.length;
+                if (!len)
+                    return cb();
+                const next = () => {
+                    if (--len === 0) {
+                        cb();
+                    }
+                };
+                for (const e of entries) {
+                    if (!filter || filter(e)) {
+                        results.push(withFileTypes ? e : e.fullpath());
+                    }
+                    if (follow && e.isSymbolicLink()) {
+                        e.realpath()
+                            .then(r => (r?.isUnknown() ? r.lstat() : r))
+                            .then(r => r?.shouldWalk(dirs, walkFilter) ? walk(r, next) : next());
+                    }
+                    else {
+                        if (e.shouldWalk(dirs, walkFilter)) {
+                            walk(e, next);
+                        }
+                        else {
+                            next();
+                        }
+                    }
+                }
+            }, true); // zalgooooooo
+        };
+        const start = entry;
+        return new Promise((res, rej) => {
+            walk(start, er => {
+                /* c8 ignore start */
+                if (er)
+                    return rej(er);
+                /* c8 ignore stop */
+                res(results);
+            });
+        });
+    }
+    walkSync(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        const results = [];
+        if (!filter || filter(entry)) {
+            results.push(withFileTypes ? entry : entry.fullpath());
+        }
+        const dirs = new Set([entry]);
+        for (const dir of dirs) {
+            const entries = dir.readdirSync();
+            for (const e of entries) {
+                if (!filter || filter(e)) {
+                    results.push(withFileTypes ? e : e.fullpath());
+                }
+                let r = e;
+                if (e.isSymbolicLink()) {
+                    if (!(follow && (r = e.realpathSync())))
+                        continue;
+                    if (r.isUnknown())
+                        r.lstatSync();
+                }
+                if (r.shouldWalk(dirs, walkFilter)) {
+                    dirs.add(r);
+                }
+            }
+        }
+        return results;
+    }
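+    // Illustrative sketch (not part of upstream, hypothetical `scurry`):
+    //   const files = scurry.walkSync('.', {
+    //     withFileTypes: false,                        // collect path strings
+    //     filter: e => e.isFile(),                     // only files in results
+    //     walkFilter: e => !e.isNamed('node_modules'), // don't descend here
+    //   });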
+    /**
+     * Support for `for await`
+     *
+     * Alias for {@link PathScurryBase.iterate}
+     *
+     * Note: As of Node 19, this is very slow, compared to other methods of
+     * walking.  Consider using {@link PathScurryBase.stream} if memory overhead
+     * and backpressure are concerns, or {@link PathScurryBase.walk} if not.
+     */
+    [Symbol.asyncIterator]() {
+        return this.iterate();
+    }
+    iterate(entry = this.cwd, options = {}) {
+        // iterating async over the stream is significantly more performant,
+        // especially in the warm-cache scenario, because it buffers up directory
+        // entries in the background instead of waiting for a yield for each one.
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            options = entry;
+            entry = this.cwd;
+        }
+        return this.stream(entry, options)[Symbol.asyncIterator]();
+    }
+    /**
+     * Iterating over a PathScurry performs a synchronous walk.
+     *
+     * Alias for {@link PathScurryBase.iterateSync}
+     */
+    [Symbol.iterator]() {
+        return this.iterateSync();
+    }
+    *iterateSync(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        if (!filter || filter(entry)) {
+            yield withFileTypes ? entry : entry.fullpath();
+        }
+        const dirs = new Set([entry]);
+        for (const dir of dirs) {
+            const entries = dir.readdirSync();
+            for (const e of entries) {
+                if (!filter || filter(e)) {
+                    yield withFileTypes ? e : e.fullpath();
+                }
+                let r = e;
+                if (e.isSymbolicLink()) {
+                    if (!(follow && (r = e.realpathSync())))
+                        continue;
+                    if (r.isUnknown())
+                        r.lstatSync();
+                }
+                if (r.shouldWalk(dirs, walkFilter)) {
+                    dirs.add(r);
+                }
+            }
+        }
+    }
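+    // Illustrative sketch (not part of upstream, hypothetical `scurry`):
+    //   for (const entry of scurry) { ... }        // synchronous walk from cwd
+    //   for await (const entry of scurry) { ... }  // async, backed by stream()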
+    stream(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        const results = new minipass_1.Minipass({ objectMode: true });
+        if (!filter || filter(entry)) {
+            results.write(withFileTypes ? entry : entry.fullpath());
+        }
+        const dirs = new Set();
+        const queue = [entry];
+        let processing = 0;
+        const process = () => {
+            let paused = false;
+            while (!paused) {
+                const dir = queue.shift();
+                if (!dir) {
+                    if (processing === 0)
+                        results.end();
+                    return;
+                }
+                processing++;
+                dirs.add(dir);
+                const onReaddir = (er, entries, didRealpaths = false) => {
+                    /* c8 ignore start */
+                    if (er)
+                        return results.emit('error', er);
+                    /* c8 ignore stop */
+                    if (follow && !didRealpaths) {
+                        const promises = [];
+                        for (const e of entries) {
+                            if (e.isSymbolicLink()) {
+                                promises.push(e
+                                    .realpath()
+                                    .then((r) => r?.isUnknown() ? r.lstat() : r));
+                            }
+                        }
+                        if (promises.length) {
+                            Promise.all(promises).then(() => onReaddir(null, entries, true));
+                            return;
+                        }
+                    }
+                    for (const e of entries) {
+                        if (e && (!filter || filter(e))) {
+                            if (!results.write(withFileTypes ? e : e.fullpath())) {
+                                paused = true;
+                            }
+                        }
+                    }
+                    processing--;
+                    for (const e of entries) {
+                        const r = e.realpathCached() || e;
+                        if (r.shouldWalk(dirs, walkFilter)) {
+                            queue.push(r);
+                        }
+                    }
+                    if (paused && !results.flowing) {
+                        results.once('drain', process);
+                    }
+                    else if (!sync) {
+                        process();
+                    }
+                };
+                // zalgo containment
+                let sync = true;
+                dir.readdirCB(onReaddir, true);
+                sync = false;
+            }
+        };
+        process();
+        return results;
+    }
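+    /* Example (sketch): the returned Minipass stream supports backpressure;
+     * a rejected write() pauses the walk until 'drain', so a slow consumer
+     * bounds memory. With an assumed `scurry` instance:
+     *
+     *   scurry.stream({ withFileTypes: false })
+     *     .on('data', p => console.log(p))  // p is a fullpath() string here
+     *     .on('end', () => console.log('walk complete'))
+     */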
+    streamSync(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        const results = new minipass_1.Minipass({ objectMode: true });
+        const dirs = new Set();
+        if (!filter || filter(entry)) {
+            results.write(withFileTypes ? entry : entry.fullpath());
+        }
+        const queue = [entry];
+        let processing = 0;
+        const process = () => {
+            let paused = false;
+            while (!paused) {
+                const dir = queue.shift();
+                if (!dir) {
+                    if (processing === 0)
+                        results.end();
+                    return;
+                }
+                processing++;
+                dirs.add(dir);
+                const entries = dir.readdirSync();
+                for (const e of entries) {
+                    if (!filter || filter(e)) {
+                        if (!results.write(withFileTypes ? e : e.fullpath())) {
+                            paused = true;
+                        }
+                    }
+                }
+                processing--;
+                for (const e of entries) {
+                    let r = e;
+                    if (e.isSymbolicLink()) {
+                        if (!(follow && (r = e.realpathSync())))
+                            continue;
+                        if (r.isUnknown())
+                            r.lstatSync();
+                    }
+                    if (r.shouldWalk(dirs, walkFilter)) {
+                        queue.push(r);
+                    }
+                }
+            }
+            if (paused && !results.flowing)
+                results.once('drain', process);
+        };
+        process();
+        return results;
+    }
+    chdir(path = this.cwd) {
+        const oldCwd = this.cwd;
+        this.cwd = typeof path === 'string' ? this.cwd.resolve(path) : path;
+        this.cwd[setAsCwd](oldCwd);
+    }
+}
+exports.PathScurryBase = PathScurryBase;
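+/* Example (sketch): chdir() re-bases relative() without touching the disk;
+ * Path objects and their cached stat data are shared across the change.
+ * The directory names below are illustrative only:
+ *
+ *   const s = new PathScurry('/tmp')
+ *   s.chdir('/tmp/project')
+ *   s.relative('/tmp/project/src') // => 'src'
+ */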
+/**
+ * Windows implementation of {@link PathScurryBase}
+ *
+ * Defaults to case insensitive, uses `'\\'` to generate path strings.  Uses
+ * {@link PathWin32} for Path objects.
+ */
+class PathScurryWin32 extends PathScurryBase {
+    /**
+     * separator for generating path strings
+     */
+    sep = '\\';
+    constructor(cwd = process.cwd(), opts = {}) {
+        const { nocase = true } = opts;
+        super(cwd, node_path_1.win32, '\\', { ...opts, nocase });
+        this.nocase = nocase;
+        for (let p = this.cwd; p; p = p.parent) {
+            p.nocase = this.nocase;
+        }
+    }
+    /**
+     * @internal
+     */
+    parseRootPath(dir) {
+        // if the path starts with a single separator, it's not a UNC, and
+        // we'll just get the separator as the root, and uncToDrive will
+        // return \. In that case, mount \ on the root from the cwd.
+        return node_path_1.win32.parse(dir).root.toUpperCase();
+    }
+    /**
+     * @internal
+     */
+    newRoot(fs) {
+        return new PathWin32(this.rootPath, IFDIR, undefined, this.roots, this.nocase, this.childrenCache(), { fs });
+    }
+    /**
+     * Return true if the provided path string is an absolute path
+     */
+    isAbsolute(p) {
+        return (p.startsWith('/') || p.startsWith('\\') || /^[a-z]:(\/|\\)/i.test(p));
+    }
+}
+exports.PathScurryWin32 = PathScurryWin32;
+/**
+ * {@link PathScurryBase} implementation for all posix systems other than Darwin.
+ *
+ * Defaults to case-sensitive matching, uses `'/'` to generate path strings.
+ *
+ * Uses {@link PathPosix} for Path objects.
+ */
+class PathScurryPosix extends PathScurryBase {
+    /**
+     * separator for generating path strings
+     */
+    sep = '/';
+    constructor(cwd = process.cwd(), opts = {}) {
+        const { nocase = false } = opts;
+        super(cwd, node_path_1.posix, '/', { ...opts, nocase });
+        this.nocase = nocase;
+    }
+    /**
+     * @internal
+     */
+    parseRootPath(_dir) {
+        return '/';
+    }
+    /**
+     * @internal
+     */
+    newRoot(fs) {
+        return new PathPosix(this.rootPath, IFDIR, undefined, this.roots, this.nocase, this.childrenCache(), { fs });
+    }
+    /**
+     * Return true if the provided path string is an absolute path
+     */
+    isAbsolute(p) {
+        return p.startsWith('/');
+    }
+}
+exports.PathScurryPosix = PathScurryPosix;
+/**
+ * {@link PathScurryBase} implementation for Darwin (macOS) systems.
+ *
+ * Defaults to case-insensitive matching, uses `'/'` for generating path
+ * strings.
+ *
+ * Uses {@link PathPosix} for Path objects.
+ */
+class PathScurryDarwin extends PathScurryPosix {
+    constructor(cwd = process.cwd(), opts = {}) {
+        const { nocase = true } = opts;
+        super(cwd, { ...opts, nocase });
+    }
+}
+exports.PathScurryDarwin = PathScurryDarwin;
+/**
+ * Default {@link PathBase} implementation for the current platform.
+ *
+ * {@link PathWin32} on Windows systems, {@link PathPosix} on all others.
+ */
+exports.Path = process.platform === 'win32' ? PathWin32 : PathPosix;
+/**
+ * Default {@link PathScurryBase} implementation for the current platform.
+ *
+ * {@link PathScurryWin32} on Windows systems, {@link PathScurryDarwin} on
+ * Darwin (macOS) systems, {@link PathScurryPosix} on all others.
+ */
+exports.PathScurry = process.platform === 'win32' ? PathScurryWin32
+    : process.platform === 'darwin' ? PathScurryDarwin
+        : PathScurryPosix;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/node-gyp/node_modules/path-scurry/dist/commonjs/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/node-gyp/node_modules/path-scurry/dist/commonjs/package.json
new file mode 100644
index 0000000000000000000000000000000000000000..5bbefffbabee392d1855491b84dc0a716b6a3bf2
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/node-gyp/node_modules/path-scurry/dist/commonjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/node-gyp/node_modules/path-scurry/dist/esm/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/node-gyp/node_modules/path-scurry/dist/esm/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..3b11b819faece5028d3c7a390069f387b3ec3573
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/node-gyp/node_modules/path-scurry/dist/esm/index.js
@@ -0,0 +1,1979 @@
+import { LRUCache } from 'lru-cache';
+import { posix, win32 } from 'node:path';
+import { fileURLToPath } from 'node:url';
+import { lstatSync, readdir as readdirCB, readdirSync, readlinkSync, realpathSync as rps, } from 'fs';
+import * as actualFS from 'node:fs';
+const realpathSync = rps.native;
+// TODO: test perf of fs/promises realpath vs realpathCB,
+// since the promises one uses realpath.native
+import { lstat, readdir, readlink, realpath } from 'node:fs/promises';
+import { Minipass } from 'minipass';
+const defaultFS = {
+    lstatSync,
+    readdir: readdirCB,
+    readdirSync,
+    readlinkSync,
+    realpathSync,
+    promises: {
+        lstat,
+        readdir,
+        readlink,
+        realpath,
+    },
+};
+// if they just gave us require('fs') then use our default
+const fsFromOption = (fsOption) => !fsOption || fsOption === defaultFS || fsOption === actualFS ?
+    defaultFS
+    : {
+        ...defaultFS,
+        ...fsOption,
+        promises: {
+            ...defaultFS.promises,
+            ...(fsOption.promises || {}),
+        },
+    };
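+// Example (sketch): callers may pass a partial fs override; anything omitted
+// falls back to the real bindings merged above. `myReaddir` is hypothetical:
+//
+//   const scurry = new PathScurry('/some/dir', {
+//     fs: { readdir: myReaddir }, // must accept (path, { withFileTypes: true }, cb)
+//   })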
+// turn a UNC-style drive root like \\?\c:\ (or //?/c:/) into c:\
+const uncDriveRegexp = /^\\\\\?\\([a-z]:)\\?$/i;
+const uncToDrive = (rootPath) => rootPath.replace(/\//g, '\\').replace(uncDriveRegexp, '$1\\');
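+// Sanity sketch for the two spellings this accepts (JS string literals):
+//   uncToDrive('\\\\?\\C:\\') === 'C:\\'
+//   uncToDrive('//?/c:/')     === 'c:\\'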
+// windows paths are separated by either / or \
+const eitherSep = /[\\\/]/;
+const UNKNOWN = 0; // may not even exist, for all we know
+const IFIFO = 0b0001;
+const IFCHR = 0b0010;
+const IFDIR = 0b0100;
+const IFBLK = 0b0110;
+const IFREG = 0b1000;
+const IFLNK = 0b1010;
+const IFSOCK = 0b1100;
+const IFMT = 0b1111;
+// mask to unset low 4 bits
+const IFMT_UNKNOWN = ~IFMT;
+// set after successfully calling readdir() and getting entries.
+const READDIR_CALLED = 0b0000_0001_0000;
+// set after a successful lstat()
+const LSTAT_CALLED = 0b0000_0010_0000;
+// set if an entry (or one of its parents) is definitely not a dir
+const ENOTDIR = 0b0000_0100_0000;
+// set if an entry (or one of its parents) does not exist
+// (can also be set on lstat errors like EACCES or ENAMETOOLONG)
+const ENOENT = 0b0000_1000_0000;
+// set if we fail to readlink
+const ENOREADLINK = 0b0001_0000_0000;
+// set if we know realpath() will fail
+const ENOREALPATH = 0b0010_0000_0000;
+// cannot have child entries -- also verify &IFMT is either IFDIR or IFLNK
+const ENOCHILD = ENOTDIR | ENOENT | ENOREALPATH;
+const TYPEMASK = 0b0011_1111_1111;
+const entToType = (s) => s.isFile() ? IFREG
+    : s.isDirectory() ? IFDIR
+        : s.isSymbolicLink() ? IFLNK
+            : s.isCharacterDevice() ? IFCHR
+                : s.isBlockDevice() ? IFBLK
+                    : s.isSocket() ? IFSOCK
+                        : s.isFIFO() ? IFIFO
+                            : UNKNOWN;
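+// Worked sketch of the bitfield: a directory that has had a successful
+// readdir() carries (IFDIR | READDIR_CALLED) === 0b0000_0001_0100;
+// `x & IFMT` recovers the dirent type, `x & IFMT_UNKNOWN` strips it.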
+// normalize unicode path names
+const normalizeCache = new Map();
+const normalize = (s) => {
+    const c = normalizeCache.get(s);
+    if (c)
+        return c;
+    const n = s.normalize('NFKD');
+    normalizeCache.set(s, n);
+    return n;
+};
+const normalizeNocaseCache = new Map();
+const normalizeNocase = (s) => {
+    const c = normalizeNocaseCache.get(s);
+    if (c)
+        return c;
+    const n = normalize(s.toLowerCase());
+    normalizeNocaseCache.set(s, n);
+    return n;
+};
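+// Sketch: composed and decomposed spellings normalize to a single cache key,
+// e.g. normalize('caf\u00e9') === normalize('cafe\u0301') // true (both NFKD)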
+/**
+ * An LRUCache for storing resolved path strings or Path objects.
+ * @internal
+ */
+export class ResolveCache extends LRUCache {
+    constructor() {
+        super({ max: 256 });
+    }
+}
+// In order to prevent blowing out the js heap by allocating hundreds of
+// thousands of Path entries when walking extremely large trees, the "children"
+// in this tree are represented by storing an array of Path entries in an
+// LRUCache, indexed by the parent.  At any time, Path.children() may return an
+// empty array, indicating that it doesn't know about any of its children, and
+// thus has to rebuild that cache.  This is fine, it just means that we don't
+// benefit as much from having the cached entries, but huge directory walks
+// don't blow out the stack, and smaller ones are still as fast as possible.
+//
+// It does impose some complexity when building up the readdir data, because
+// we need to pass a reference to the children array that we started with.
+/**
+ * an LRUCache for storing child entries.
+ * @internal
+ */
+export class ChildrenCache extends LRUCache {
+    constructor(maxSize = 16 * 1024) {
+        super({
+            maxSize,
+            // parent + children
+            sizeCalculation: a => a.length + 1,
+        });
+    }
+}
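+// Sketch: each cached entry costs children.length + 1 units, so the default
+// 16 * 1024 budget holds roughly 16k Path objects (parents included) before
+// the LRU evicts whole child arrays; evicted dirs simply readdir() again.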
+const setAsCwd = Symbol('PathScurry setAsCwd');
+/**
+ * Path objects are sort of like a super-powered
+ * {@link https://nodejs.org/docs/latest/api/fs.html#class-fsdirent fs.Dirent}
+ *
+ * Each one represents a single filesystem entry on disk, which may or may not
+ * exist. It includes methods for reading various types of information via
+ * lstat, readlink, and readdir, and caches all information to the greatest
+ * degree possible.
+ *
+ * Note that fs operations that would normally throw will instead return an
+ * "empty" value. This is in order to prevent excessive overhead from error
+ * stack traces.
+ */
+export class PathBase {
+    /**
+     * the basename of this path
+     *
+     * **Important**: *always* test the path name against any test string
+     * using the {@link isNamed} method, and not by directly comparing this
+     * string. Otherwise, unicode path strings that the system sees as identical
+     * will not be properly treated as the same path, leading to incorrect
+     * behavior and possible security issues.
+     */
+    name;
+    /**
+     * the Path entry corresponding to the path root.
+     *
+     * @internal
+     */
+    root;
+    /**
+     * All roots found within the current PathScurry family
+     *
+     * @internal
+     */
+    roots;
+    /**
+     * a reference to the parent path, or undefined in the case of root entries
+     *
+     * @internal
+     */
+    parent;
+    /**
+     * boolean indicating whether paths are compared case-insensitively
+     * @internal
+     */
+    nocase;
+    /**
+     * boolean indicating that this path is the current working directory
+     * of the PathScurry collection that contains it.
+     */
+    isCWD = false;
+    // potential default fs override
+    #fs;
+    // Stats fields
+    #dev;
+    get dev() {
+        return this.#dev;
+    }
+    #mode;
+    get mode() {
+        return this.#mode;
+    }
+    #nlink;
+    get nlink() {
+        return this.#nlink;
+    }
+    #uid;
+    get uid() {
+        return this.#uid;
+    }
+    #gid;
+    get gid() {
+        return this.#gid;
+    }
+    #rdev;
+    get rdev() {
+        return this.#rdev;
+    }
+    #blksize;
+    get blksize() {
+        return this.#blksize;
+    }
+    #ino;
+    get ino() {
+        return this.#ino;
+    }
+    #size;
+    get size() {
+        return this.#size;
+    }
+    #blocks;
+    get blocks() {
+        return this.#blocks;
+    }
+    #atimeMs;
+    get atimeMs() {
+        return this.#atimeMs;
+    }
+    #mtimeMs;
+    get mtimeMs() {
+        return this.#mtimeMs;
+    }
+    #ctimeMs;
+    get ctimeMs() {
+        return this.#ctimeMs;
+    }
+    #birthtimeMs;
+    get birthtimeMs() {
+        return this.#birthtimeMs;
+    }
+    #atime;
+    get atime() {
+        return this.#atime;
+    }
+    #mtime;
+    get mtime() {
+        return this.#mtime;
+    }
+    #ctime;
+    get ctime() {
+        return this.#ctime;
+    }
+    #birthtime;
+    get birthtime() {
+        return this.#birthtime;
+    }
+    #matchName;
+    #depth;
+    #fullpath;
+    #fullpathPosix;
+    #relative;
+    #relativePosix;
+    #type;
+    #children;
+    #linkTarget;
+    #realpath;
+    /**
+     * This property is for compatibility with the Dirent class as of
+     * Node v20, where Dirent['parentPath'] refers to the path of the
+     * directory that was passed to readdir. For root entries, it's the path
+     * to the entry itself.
+     */
+    get parentPath() {
+        return (this.parent || this).fullpath();
+    }
+    /**
+     * Deprecated alias for Dirent['parentPath']. Somewhat counterintuitively,
+     * this property refers to the *parent* path, not the path object itself.
+     */
+    get path() {
+        return this.parentPath;
+    }
+    /**
+     * Do not create new Path objects directly.  They should always be accessed
+     * via the PathScurry class or other methods on the Path class.
+     *
+     * @internal
+     */
+    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
+        this.name = name;
+        this.#matchName = nocase ? normalizeNocase(name) : normalize(name);
+        this.#type = type & TYPEMASK;
+        this.nocase = nocase;
+        this.roots = roots;
+        this.root = root || this;
+        this.#children = children;
+        this.#fullpath = opts.fullpath;
+        this.#relative = opts.relative;
+        this.#relativePosix = opts.relativePosix;
+        this.parent = opts.parent;
+        if (this.parent) {
+            this.#fs = this.parent.#fs;
+        }
+        else {
+            this.#fs = fsFromOption(opts.fs);
+        }
+    }
+    /**
+     * Returns the depth of the Path object from its root.
+     *
+     * For example, a path at `/foo/bar` would have a depth of 2.
+     */
+    depth() {
+        if (this.#depth !== undefined)
+            return this.#depth;
+        if (!this.parent)
+            return (this.#depth = 0);
+        return (this.#depth = this.parent.depth() + 1);
+    }
+    /**
+     * @internal
+     */
+    childrenCache() {
+        return this.#children;
+    }
+    /**
+     * Get the Path object referenced by the string path, resolved from this Path
+     */
+    resolve(path) {
+        if (!path) {
+            return this;
+        }
+        const rootPath = this.getRootString(path);
+        const dir = path.substring(rootPath.length);
+        const dirParts = dir.split(this.splitSep);
+        const result = rootPath ?
+            this.getRoot(rootPath).#resolveParts(dirParts)
+            : this.#resolveParts(dirParts);
+        return result;
+    }
+    #resolveParts(dirParts) {
+        let p = this;
+        for (const part of dirParts) {
+            p = p.child(part);
+        }
+        return p;
+    }
+    /**
+     * Returns the cached children Path objects, if still available.  If they
+     * have fallen out of the cache, then returns an empty array, and resets the
+     * READDIR_CALLED bit, so that future calls to readdir() will require an fs
+     * lookup.
+     *
+     * @internal
+     */
+    children() {
+        const cached = this.#children.get(this);
+        if (cached) {
+            return cached;
+        }
+        const children = Object.assign([], { provisional: 0 });
+        this.#children.set(this, children);
+        this.#type &= ~READDIR_CALLED;
+        return children;
+    }
+    /**
+     * Resolves a path portion and returns or creates the child Path.
+     *
+     * Returns `this` if pathPart is `''` or `'.'`, or `parent` if pathPart is
+     * `'..'`.
+     *
+     * This should not be called directly.  If `pathPart` contains any path
+     * separators, it will lead to unsafe undefined behavior.
+     *
+     * Use `Path.resolve()` instead.
+     *
+     * @internal
+     */
+    child(pathPart, opts) {
+        if (pathPart === '' || pathPart === '.') {
+            return this;
+        }
+        if (pathPart === '..') {
+            return this.parent || this;
+        }
+        // find the child
+        const children = this.children();
+        const name = this.nocase ? normalizeNocase(pathPart) : normalize(pathPart);
+        for (const p of children) {
+            if (p.#matchName === name) {
+                return p;
+            }
+        }
+        // didn't find it, create provisional child, since it might not
+        // actually exist.  If we know the parent isn't a dir, then
+        // in fact it CAN'T exist.
+        const s = this.parent ? this.sep : '';
+        const fullpath = this.#fullpath ? this.#fullpath + s + pathPart : undefined;
+        const pchild = this.newChild(pathPart, UNKNOWN, {
+            ...opts,
+            parent: this,
+            fullpath,
+        });
+        if (!this.canReaddir()) {
+            pchild.#type |= ENOENT;
+        }
+        // don't have to update provisional, because if we have real children,
+        // then provisional is set to children.length, otherwise a lower number
+        children.push(pchild);
+        return pchild;
+    }
+    /**
+     * The relative path from the cwd. If it does not share an ancestor with
+     * the cwd, then this ends up being equivalent to the fullpath()
+     */
+    relative() {
+        if (this.isCWD)
+            return '';
+        if (this.#relative !== undefined) {
+            return this.#relative;
+        }
+        const name = this.name;
+        const p = this.parent;
+        if (!p) {
+            return (this.#relative = this.name);
+        }
+        const pv = p.relative();
+        return pv + (!pv || !p.parent ? '' : this.sep) + name;
+    }
+    /**
+     * The relative path from the cwd, using / as the path separator.
+     * If it does not share an ancestor with
+     * the cwd, then this ends up being equivalent to the fullpathPosix()
+     * On posix systems, this is identical to relative().
+     */
+    relativePosix() {
+        if (this.sep === '/')
+            return this.relative();
+        if (this.isCWD)
+            return '';
+        if (this.#relativePosix !== undefined)
+            return this.#relativePosix;
+        const name = this.name;
+        const p = this.parent;
+        if (!p) {
+            return (this.#relativePosix = this.fullpathPosix());
+        }
+        const pv = p.relativePosix();
+        return pv + (!pv || !p.parent ? '' : '/') + name;
+    }
+    /**
+     * The fully resolved path string for this Path entry
+     */
+    fullpath() {
+        if (this.#fullpath !== undefined) {
+            return this.#fullpath;
+        }
+        const name = this.name;
+        const p = this.parent;
+        if (!p) {
+            return (this.#fullpath = this.name);
+        }
+        const pv = p.fullpath();
+        const fp = pv + (!p.parent ? '' : this.sep) + name;
+        return (this.#fullpath = fp);
+    }
+    /**
+     * On platforms other than windows, this is identical to fullpath.
+     *
+     * On windows, this is overridden to return the forward-slash form of the
+     * full UNC path.
+     */
+    fullpathPosix() {
+        if (this.#fullpathPosix !== undefined)
+            return this.#fullpathPosix;
+        if (this.sep === '/')
+            return (this.#fullpathPosix = this.fullpath());
+        if (!this.parent) {
+            const p = this.fullpath().replace(/\\/g, '/');
+            if (/^[a-z]:\//i.test(p)) {
+                return (this.#fullpathPosix = `//?/${p}`);
+            }
+            else {
+                return (this.#fullpathPosix = p);
+            }
+        }
+        const p = this.parent;
+        const pfpp = p.fullpathPosix();
+        const fpp = pfpp + (!pfpp || !p.parent ? '' : '/') + this.name;
+        return (this.#fullpathPosix = fpp);
+    }
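+    /* Worked sketch (Windows): for a drive root the branches above produce
+     *   fullpath()      => 'C:\\foo\\bar'
+     *   fullpathPosix() => '//?/C:/foo/bar'
+     * while on posix systems both return the same '/'-separated string.
+     */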
+    /**
+     * Is the Path of an unknown type?
+     *
+     * Note that we might know *something* about it if there has been a previous
+     * filesystem operation, for example that it does not exist, or is not a
+     * link, or whether it has child entries.
+     */
+    isUnknown() {
+        return (this.#type & IFMT) === UNKNOWN;
+    }
+    isType(type) {
+        return this[`is${type}`]();
+    }
+    getType() {
+        return (this.isUnknown() ? 'Unknown'
+            : this.isDirectory() ? 'Directory'
+                : this.isFile() ? 'File'
+                    : this.isSymbolicLink() ? 'SymbolicLink'
+                        : this.isFIFO() ? 'FIFO'
+                            : this.isCharacterDevice() ? 'CharacterDevice'
+                                : this.isBlockDevice() ? 'BlockDevice'
+                                    : /* c8 ignore start */ this.isSocket() ? 'Socket'
+                                        : 'Unknown');
+        /* c8 ignore stop */
+    }
+    /**
+     * Is the Path a regular file?
+     */
+    isFile() {
+        return (this.#type & IFMT) === IFREG;
+    }
+    /**
+     * Is the Path a directory?
+     */
+    isDirectory() {
+        return (this.#type & IFMT) === IFDIR;
+    }
+    /**
+     * Is the path a character device?
+     */
+    isCharacterDevice() {
+        return (this.#type & IFMT) === IFCHR;
+    }
+    /**
+     * Is the path a block device?
+     */
+    isBlockDevice() {
+        return (this.#type & IFMT) === IFBLK;
+    }
+    /**
+     * Is the path a FIFO pipe?
+     */
+    isFIFO() {
+        return (this.#type & IFMT) === IFIFO;
+    }
+    /**
+     * Is the path a socket?
+     */
+    isSocket() {
+        return (this.#type & IFMT) === IFSOCK;
+    }
+    /**
+     * Is the path a symbolic link?
+     */
+    isSymbolicLink() {
+        return (this.#type & IFLNK) === IFLNK;
+    }
+    /**
+     * Return the entry if it has been subject of a successful lstat, or
+     * undefined otherwise.
+     *
+     * Does not read the filesystem, so an undefined result *could* simply
+     * mean that we haven't called lstat on it.
+     */
+    lstatCached() {
+        return this.#type & LSTAT_CALLED ? this : undefined;
+    }
+    /**
+     * Return the cached link target if the entry has been the subject of a
+     * successful readlink, or undefined otherwise.
+     *
+     * Does not read the filesystem, so an undefined result *could* just mean we
+     * don't have any cached data. Only use it if you are very sure that a
+     * readlink() has been called at some point.
+     */
+    readlinkCached() {
+        return this.#linkTarget;
+    }
+    /**
+     * Returns the cached realpath target if the entry has been the subject
+     * of a successful realpath, or undefined otherwise.
+     *
+     * Does not read the filesystem, so an undefined result *could* just mean we
+     * don't have any cached data. Only use it if you are very sure that a
+     * realpath() has been called at some point.
+     */
+    realpathCached() {
+        return this.#realpath;
+    }
+    /**
+     * Returns the cached child Path entries array if the entry has been the
+     * subject of a successful readdir(), or [] otherwise.
+     *
+     * Does not read the filesystem, so an empty array *could* just mean we
+     * don't have any cached data. Only use it if you are very sure that a
+     * readdir() has been called recently enough to still be valid.
+     */
+    readdirCached() {
+        const children = this.children();
+        return children.slice(0, children.provisional);
+    }
+    /**
+     * Return true if it's worth trying to readlink.  Ie, we don't (yet) have
+     * any indication that readlink will definitely fail.
+     *
+     * Returns false if the path is known to not be a symlink, if a previous
+     * readlink failed, or if the entry does not exist.
+     */
+    canReadlink() {
+        if (this.#linkTarget)
+            return true;
+        if (!this.parent)
+            return false;
+        // cases where it cannot possibly succeed
+        const ifmt = this.#type & IFMT;
+        return !((ifmt !== UNKNOWN && ifmt !== IFLNK) ||
+            this.#type & ENOREADLINK ||
+            this.#type & ENOENT);
+    }
+    /**
+     * Return true if readdir has previously been successfully called on this
+     * path, indicating that readdirCached() is likely valid.
+     */
+    calledReaddir() {
+        return !!(this.#type & READDIR_CALLED);
+    }
+    /**
+     * Returns true if the path is known to not exist. That is, a previous lstat
+     * or readdir failed to verify its existence when that would have been
+     * expected, or a parent entry was marked either enoent or enotdir.
+     */
+    isENOENT() {
+        return !!(this.#type & ENOENT);
+    }
+    /**
+     * Return true if the path is a match for the given path name.  This handles
+     * case sensitivity and unicode normalization.
+     *
+     * Note: even on case-sensitive systems, it is **not** safe to test the
+     * equality of the `.name` property to determine whether a given pathname
+     * matches, due to unicode normalization mismatches.
+     *
+     * Always use this method instead of testing the `path.name` property
+     * directly.
+     */
+    isNamed(n) {
+        return !this.nocase ?
+            this.#matchName === normalize(n)
+            : this.#matchName === normalizeNocase(n);
+    }
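+    /* Sketch: 'caf\u00e9' (composed) and 'cafe\u0301' (decomposed) are
+     * different JS strings that name the same file, so test with isNamed:
+     *
+     *   entry.isNamed('cafe\u0301') // true for an entry named 'caf\u00e9'
+     */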
+    /**
+     * Return the Path object corresponding to the target of a symbolic link.
+     *
+     * If the Path is not a symbolic link, or if the readlink call fails for any
+     * reason, `undefined` is returned.
+     *
+     * Result is cached, and thus may be outdated if the filesystem is mutated.
+     */
+    async readlink() {
+        const target = this.#linkTarget;
+        if (target) {
+            return target;
+        }
+        if (!this.canReadlink()) {
+            return undefined;
+        }
+        /* c8 ignore start */
+        // already covered by the canReadlink test, here for ts grumples
+        if (!this.parent) {
+            return undefined;
+        }
+        /* c8 ignore stop */
+        try {
+            const read = await this.#fs.promises.readlink(this.fullpath());
+            const linkTarget = (await this.parent.realpath())?.resolve(read);
+            if (linkTarget) {
+                return (this.#linkTarget = linkTarget);
+            }
+        }
+        catch (er) {
+            this.#readlinkFail(er.code);
+            return undefined;
+        }
+    }
+    /**
+     * Synchronous {@link PathBase.readlink}
+     */
+    readlinkSync() {
+        const target = this.#linkTarget;
+        if (target) {
+            return target;
+        }
+        if (!this.canReadlink()) {
+            return undefined;
+        }
+        /* c8 ignore start */
+        // already covered by the canReadlink test, here for ts grumples
+        if (!this.parent) {
+            return undefined;
+        }
+        /* c8 ignore stop */
+        try {
+            const read = this.#fs.readlinkSync(this.fullpath());
+            const linkTarget = this.parent.realpathSync()?.resolve(read);
+            if (linkTarget) {
+                return (this.#linkTarget = linkTarget);
+            }
+        }
+        catch (er) {
+            this.#readlinkFail(er.code);
+            return undefined;
+        }
+    }
+    #readdirSuccess(children) {
+        // succeeded, mark readdir called bit
+        this.#type |= READDIR_CALLED;
+        // mark all remaining provisional children as ENOENT
+        for (let p = children.provisional; p < children.length; p++) {
+            const c = children[p];
+            if (c)
+                c.#markENOENT();
+        }
+    }
+    #markENOENT() {
+        // mark as UNKNOWN and ENOENT
+        if (this.#type & ENOENT)
+            return;
+        this.#type = (this.#type | ENOENT) & IFMT_UNKNOWN;
+        this.#markChildrenENOENT();
+    }
+    #markChildrenENOENT() {
+        // all children are provisional and do not exist
+        const children = this.children();
+        children.provisional = 0;
+        for (const p of children) {
+            p.#markENOENT();
+        }
+    }
+    #markENOREALPATH() {
+        this.#type |= ENOREALPATH;
+        this.#markENOTDIR();
+    }
+    // save the information when we know the entry is not a dir
+    #markENOTDIR() {
+        // entry is not a directory, so any children can't exist.
+        // this *should* be impossible, since any children created
+        // after it's been marked ENOTDIR should be marked ENOENT,
+        // so it won't even get to this point.
+        /* c8 ignore start */
+        if (this.#type & ENOTDIR)
+            return;
+        /* c8 ignore stop */
+        let t = this.#type;
+        // this could happen if we stat a dir, then delete it,
+        // then try to read it or one of its children.
+        if ((t & IFMT) === IFDIR)
+            t &= IFMT_UNKNOWN;
+        this.#type = t | ENOTDIR;
+        this.#markChildrenENOENT();
+    }
+    #readdirFail(code = '') {
+        // markENOTDIR and markENOENT also set provisional=0
+        if (code === 'ENOTDIR' || code === 'EPERM') {
+            this.#markENOTDIR();
+        }
+        else if (code === 'ENOENT') {
+            this.#markENOENT();
+        }
+        else {
+            this.children().provisional = 0;
+        }
+    }
+    #lstatFail(code = '') {
+        // Windows just raises ENOENT in this case, disable for win CI
+        /* c8 ignore start */
+        if (code === 'ENOTDIR') {
+            // already know it has a parent by this point
+            const p = this.parent;
+            p.#markENOTDIR();
+        }
+        else if (code === 'ENOENT') {
+            /* c8 ignore stop */
+            this.#markENOENT();
+        }
+    }
+    #readlinkFail(code = '') {
+        let ter = this.#type;
+        ter |= ENOREADLINK;
+        if (code === 'ENOENT')
+            ter |= ENOENT;
+        // windows gets a weird error when you try to readlink a file
+        if (code === 'EINVAL' || code === 'UNKNOWN') {
+            // exists, but not a symlink, we don't know WHAT it is, so remove
+            // all IFMT bits.
+            ter &= IFMT_UNKNOWN;
+        }
+        this.#type = ter;
+        // windows just gets ENOENT in this case.  We do cover the case,
+        // just disabled because it's impossible on Windows CI
+        /* c8 ignore start */
+        if (code === 'ENOTDIR' && this.parent) {
+            this.parent.#markENOTDIR();
+        }
+        /* c8 ignore stop */
+    }
+    #readdirAddChild(e, c) {
+        return (this.#readdirMaybePromoteChild(e, c) ||
+            this.#readdirAddNewChild(e, c));
+    }
+    #readdirAddNewChild(e, c) {
+        // alloc new entry at head, so it's never provisional
+        const type = entToType(e);
+        const child = this.newChild(e.name, type, { parent: this });
+        const ifmt = child.#type & IFMT;
+        if (ifmt !== IFDIR && ifmt !== IFLNK && ifmt !== UNKNOWN) {
+            child.#type |= ENOTDIR;
+        }
+        c.unshift(child);
+        c.provisional++;
+        return child;
+    }
+    #readdirMaybePromoteChild(e, c) {
+        for (let p = c.provisional; p < c.length; p++) {
+            const pchild = c[p];
+            const name = this.nocase ? normalizeNocase(e.name) : normalize(e.name);
+            if (name !== pchild.#matchName) {
+                continue;
+            }
+            return this.#readdirPromoteChild(e, pchild, p, c);
+        }
+    }
+    #readdirPromoteChild(e, p, index, c) {
+        const v = p.name;
+        // retain any other flags, but set ifmt from dirent
+        p.#type = (p.#type & IFMT_UNKNOWN) | entToType(e);
+        // case sensitivity fixing when we learn the true name.
+        if (v !== e.name)
+            p.name = e.name;
+        // just advance provisional index (potentially off the list),
+        // otherwise we have to splice/pop it out and re-insert at head
+        if (index !== c.provisional) {
+            if (index === c.length - 1)
+                c.pop();
+            else
+                c.splice(index, 1);
+            c.unshift(p);
+        }
+        c.provisional++;
+        return p;
+    }
+    /**
+     * Call lstat() on this Path, and update all known information that can be
+     * determined.
+     *
+     * Note that unlike `fs.lstat()`, the returned value does not contain some
+     * information, such as `mode`, `dev`, `nlink`, and `ino`.  If that
+     * information is required, you will need to call `fs.lstat` yourself.
+     *
+     * If the Path refers to a nonexistent file, or if the lstat call fails for
+     * any reason, `undefined` is returned.  Otherwise the updated Path object is
+     * returned.
+     *
+     * Results are cached, and thus may be out of date if the filesystem is
+     * mutated.
+     */
+    async lstat() {
+        if ((this.#type & ENOENT) === 0) {
+            try {
+                this.#applyStat(await this.#fs.promises.lstat(this.fullpath()));
+                return this;
+            }
+            catch (er) {
+                this.#lstatFail(er.code);
+            }
+        }
+    }
+    /**
+     * synchronous {@link PathBase.lstat}
+     */
+    lstatSync() {
+        if ((this.#type & ENOENT) === 0) {
+            try {
+                this.#applyStat(this.#fs.lstatSync(this.fullpath()));
+                return this;
+            }
+            catch (er) {
+                this.#lstatFail(er.code);
+            }
+        }
+    }
+    #applyStat(st) {
+        const { atime, atimeMs, birthtime, birthtimeMs, blksize, blocks, ctime, ctimeMs, dev, gid, ino, mode, mtime, mtimeMs, nlink, rdev, size, uid, } = st;
+        this.#atime = atime;
+        this.#atimeMs = atimeMs;
+        this.#birthtime = birthtime;
+        this.#birthtimeMs = birthtimeMs;
+        this.#blksize = blksize;
+        this.#blocks = blocks;
+        this.#ctime = ctime;
+        this.#ctimeMs = ctimeMs;
+        this.#dev = dev;
+        this.#gid = gid;
+        this.#ino = ino;
+        this.#mode = mode;
+        this.#mtime = mtime;
+        this.#mtimeMs = mtimeMs;
+        this.#nlink = nlink;
+        this.#rdev = rdev;
+        this.#size = size;
+        this.#uid = uid;
+        const ifmt = entToType(st);
+        // retain any other flags, but set the ifmt
+        this.#type = (this.#type & IFMT_UNKNOWN) | ifmt | LSTAT_CALLED;
+        if (ifmt !== UNKNOWN && ifmt !== IFDIR && ifmt !== IFLNK) {
+            this.#type |= ENOTDIR;
+        }
+    }
+    #onReaddirCB = [];
+    #readdirCBInFlight = false;
+    #callOnReaddirCB(children) {
+        this.#readdirCBInFlight = false;
+        const cbs = this.#onReaddirCB.slice();
+        this.#onReaddirCB.length = 0;
+        cbs.forEach(cb => cb(null, children));
+    }
+    /**
+     * Standard node-style callback interface to get list of directory entries.
+     *
+     * If the Path cannot or does not contain any children, then an empty array
+     * is returned.
+     *
+     * Results are cached, and thus may be out of date if the filesystem is
+     * mutated.
+     *
+     * @param cb The callback called with (er, entries).  Note that the `er`
+     * param is somewhat extraneous, as all readdir() errors are handled and
+     * simply result in an empty set of entries being returned.
+     * @param allowZalgo Boolean indicating that immediately known results should
+     * *not* be deferred with `queueMicrotask`. Defaults to `false`. Release
+     * zalgo at your peril, the dark pony lord is devious and unforgiving.
+     */
+    readdirCB(cb, allowZalgo = false) {
+        if (!this.canReaddir()) {
+            if (allowZalgo)
+                cb(null, []);
+            else
+                queueMicrotask(() => cb(null, []));
+            return;
+        }
+        const children = this.children();
+        if (this.calledReaddir()) {
+            const c = children.slice(0, children.provisional);
+            if (allowZalgo)
+                cb(null, c);
+            else
+                queueMicrotask(() => cb(null, c));
+            return;
+        }
+        // don't have to worry about zalgo at this point.
+        this.#onReaddirCB.push(cb);
+        if (this.#readdirCBInFlight) {
+            return;
+        }
+        this.#readdirCBInFlight = true;
+        // else read the directory, fill up children
+        // de-provisionalize any provisional children.
+        const fullpath = this.fullpath();
+        this.#fs.readdir(fullpath, { withFileTypes: true }, (er, entries) => {
+            if (er) {
+                this.#readdirFail(er.code);
+                children.provisional = 0;
+            }
+            else {
+                // if we didn't get an error, we always get entries.
+                //@ts-ignore
+                for (const e of entries) {
+                    this.#readdirAddChild(e, children);
+                }
+                this.#readdirSuccess(children);
+            }
+            this.#callOnReaddirCB(children.slice(0, children.provisional));
+            return;
+        });
+    }
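+    /* Example (sketch): by default even cached results call back through
+     * queueMicrotask, so callers always observe async delivery; the stream()
+     * walker opts into allowZalgo because it guards re-entry itself:
+     *
+     *   dir.readdirCB((er, entries) => console.log(entries.length))
+     */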
+    #asyncReaddirInFlight;
+    /**
+     * Return an array of known child entries.
+     *
+     * If the Path cannot or does not contain any children, then an empty array
+     * is returned.
+     *
+     * Results are cached, and thus may be out of date if the filesystem is
+     * mutated.
+     */
+    async readdir() {
+        if (!this.canReaddir()) {
+            return [];
+        }
+        const children = this.children();
+        if (this.calledReaddir()) {
+            return children.slice(0, children.provisional);
+        }
+        // else read the directory, fill up children
+        // de-provisionalize any provisional children.
+        const fullpath = this.fullpath();
+        if (this.#asyncReaddirInFlight) {
+            await this.#asyncReaddirInFlight;
+        }
+        else {
+            /* c8 ignore start */
+            let resolve = () => { };
+            /* c8 ignore stop */
+            this.#asyncReaddirInFlight = new Promise(res => (resolve = res));
+            try {
+                for (const e of await this.#fs.promises.readdir(fullpath, {
+                    withFileTypes: true,
+                })) {
+                    this.#readdirAddChild(e, children);
+                }
+                this.#readdirSuccess(children);
+            }
+            catch (er) {
+                this.#readdirFail(er.code);
+                children.provisional = 0;
+            }
+            this.#asyncReaddirInFlight = undefined;
+            resolve();
+        }
+        return children.slice(0, children.provisional);
+    }
+    /**
+     * synchronous {@link PathBase.readdir}
+     */
+    readdirSync() {
+        if (!this.canReaddir()) {
+            return [];
+        }
+        const children = this.children();
+        if (this.calledReaddir()) {
+            return children.slice(0, children.provisional);
+        }
+        // else read the directory, fill up children
+        // de-provisionalize any provisional children.
+        const fullpath = this.fullpath();
+        try {
+            for (const e of this.#fs.readdirSync(fullpath, {
+                withFileTypes: true,
+            })) {
+                this.#readdirAddChild(e, children);
+            }
+            this.#readdirSuccess(children);
+        }
+        catch (er) {
+            this.#readdirFail(er.code);
+            children.provisional = 0;
+        }
+        return children.slice(0, children.provisional);
+    }
+    canReaddir() {
+        if (this.#type & ENOCHILD)
+            return false;
+        const ifmt = IFMT & this.#type;
+        // we always set ENOTDIR when setting IFMT, so should be impossible
+        /* c8 ignore start */
+        if (!(ifmt === UNKNOWN || ifmt === IFDIR || ifmt === IFLNK)) {
+            return false;
+        }
+        /* c8 ignore stop */
+        return true;
+    }
+    shouldWalk(dirs, walkFilter) {
+        return ((this.#type & IFDIR) === IFDIR &&
+            !(this.#type & ENOCHILD) &&
+            !dirs.has(this) &&
+            (!walkFilter || walkFilter(this)));
+    }
+    /**
+     * Return the Path object corresponding to path as resolved
+     * by realpath(3).
+     *
+     * If the realpath call fails for any reason, `undefined` is returned.
+     *
+     * Result is cached, and thus may be outdated if the filesystem is mutated.
+     * On success, returns a Path object.
+     */
+    async realpath() {
+        if (this.#realpath)
+            return this.#realpath;
+        if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
+            return undefined;
+        try {
+            const rp = await this.#fs.promises.realpath(this.fullpath());
+            return (this.#realpath = this.resolve(rp));
+        }
+        catch (_) {
+            this.#markENOREALPATH();
+        }
+    }
+    /**
+     * Synchronous {@link realpath}
+     */
+    realpathSync() {
+        if (this.#realpath)
+            return this.#realpath;
+        if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
+            return undefined;
+        try {
+            const rp = this.#fs.realpathSync(this.fullpath());
+            return (this.#realpath = this.resolve(rp));
+        }
+        catch (_) {
+            this.#markENOREALPATH();
+        }
+    }
+    /**
+     * Internal method to mark this Path object as the scurry cwd,
+     * called by {@link PathScurry#chdir}
+     *
+     * @internal
+     */
+    [setAsCwd](oldCwd) {
+        if (oldCwd === this)
+            return;
+        oldCwd.isCWD = false;
+        this.isCWD = true;
+        const changed = new Set([]);
+        let rp = [];
+        let p = this;
+        while (p && p.parent) {
+            changed.add(p);
+            p.#relative = rp.join(this.sep);
+            p.#relativePosix = rp.join('/');
+            p = p.parent;
+            rp.push('..');
+        }
+        // now un-memoize parents of old cwd
+        p = oldCwd;
+        while (p && p.parent && !changed.has(p)) {
+            p.#relative = undefined;
+            p.#relativePosix = undefined;
+            p = p.parent;
+        }
+    }
+}
+/**
+ * Path class used on win32 systems
+ *
+ * Uses `'\\'` as the path separator for returned paths, either `'\\'` or `'/'`
+ * as the path separator for parsing paths.
+ */
+export class PathWin32 extends PathBase {
+    /**
+     * Separator for generating path strings.
+     */
+    sep = '\\';
+    /**
+     * Separator for parsing path strings.
+     */
+    splitSep = eitherSep;
+    /**
+     * Do not create new Path objects directly.  They should always be accessed
+     * via the PathScurry class or other methods on the Path class.
+     *
+     * @internal
+     */
+    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
+        super(name, type, root, roots, nocase, children, opts);
+    }
+    /**
+     * @internal
+     */
+    newChild(name, type = UNKNOWN, opts = {}) {
+        return new PathWin32(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
+    }
+    /**
+     * @internal
+     */
+    getRootString(path) {
+        return win32.parse(path).root;
+    }
+    /**
+     * @internal
+     */
+    getRoot(rootPath) {
+        rootPath = uncToDrive(rootPath.toUpperCase());
+        if (rootPath === this.root.name) {
+            return this.root;
+        }
+        // ok, not that one, check if it matches another we know about
+        for (const [compare, root] of Object.entries(this.roots)) {
+            if (this.sameRoot(rootPath, compare)) {
+                return (this.roots[rootPath] = root);
+            }
+        }
+        // otherwise, have to create a new one.
+        return (this.roots[rootPath] = new PathScurryWin32(rootPath, this).root);
+    }
+    /**
+     * @internal
+     */
+    sameRoot(rootPath, compare = this.root.name) {
+        // windows can (rarely) have case-sensitive filesystem, but
+        // UNC and drive letters are always case-insensitive, and canonically
+        // represented uppercase.
+        rootPath = rootPath
+            .toUpperCase()
+            .replace(/\//g, '\\')
+            .replace(uncDriveRegexp, '$1\\');
+        return rootPath === compare;
+    }
+}
+/**
+ * Path class used on all posix systems.
+ *
+ * Uses `'/'` as the path separator.
+ */
+export class PathPosix extends PathBase {
+    /**
+     * separator for parsing path strings
+     */
+    splitSep = '/';
+    /**
+     * separator for generating path strings
+     */
+    sep = '/';
+    /**
+     * Do not create new Path objects directly.  They should always be accessed
+     * via the PathScurry class or other methods on the Path class.
+     *
+     * @internal
+     */
+    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
+        super(name, type, root, roots, nocase, children, opts);
+    }
+    /**
+     * @internal
+     */
+    getRootString(path) {
+        return path.startsWith('/') ? '/' : '';
+    }
+    /**
+     * @internal
+     */
+    getRoot(_rootPath) {
+        return this.root;
+    }
+    /**
+     * @internal
+     */
+    newChild(name, type = UNKNOWN, opts = {}) {
+        return new PathPosix(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
+    }
+}
+/**
+ * The base class for all PathScurry classes, providing the interface for path
+ * resolution and filesystem operations.
+ *
+ * Typically, you should *not* instantiate this class directly, but rather one
+ * of the platform-specific classes, or the exported {@link PathScurry} which
+ * defaults to the current platform.
+ */
+export class PathScurryBase {
+    /**
+     * The root Path entry for the current working directory of this Scurry
+     */
+    root;
+    /**
+     * The string path for the root of this Scurry's current working directory
+     */
+    rootPath;
+    /**
+     * A collection of all roots encountered, referenced by rootPath
+     */
+    roots;
+    /**
+     * The Path entry corresponding to this PathScurry's current working directory.
+     */
+    cwd;
+    #resolveCache;
+    #resolvePosixCache;
+    #children;
+    /**
+     * Perform path comparisons case-insensitively.
+     *
+     * Defaults to true on Darwin and Windows systems, false elsewhere.
+     */
+    nocase;
+    #fs;
+    /**
+     * This class should not be instantiated directly.
+     *
+     * Use PathScurryWin32, PathScurryDarwin, PathScurryPosix, or PathScurry
+     *
+     * @internal
+     */
+    constructor(cwd = process.cwd(), pathImpl, sep, { nocase, childrenCacheSize = 16 * 1024, fs = defaultFS, } = {}) {
+        this.#fs = fsFromOption(fs);
+        if (cwd instanceof URL || cwd.startsWith('file://')) {
+            cwd = fileURLToPath(cwd);
+        }
+        // resolve and split root, and then add to the store.
+        // this is the only time we call path.resolve()
+        const cwdPath = pathImpl.resolve(cwd);
+        this.roots = Object.create(null);
+        this.rootPath = this.parseRootPath(cwdPath);
+        this.#resolveCache = new ResolveCache();
+        this.#resolvePosixCache = new ResolveCache();
+        this.#children = new ChildrenCache(childrenCacheSize);
+        const split = cwdPath.substring(this.rootPath.length).split(sep);
+        // resolve('/') leaves '', splits to [''], we don't want that.
+        if (split.length === 1 && !split[0]) {
+            split.pop();
+        }
+        /* c8 ignore start */
+        if (nocase === undefined) {
+            throw new TypeError('must provide nocase setting to PathScurryBase ctor');
+        }
+        /* c8 ignore stop */
+        this.nocase = nocase;
+        this.root = this.newRoot(this.#fs);
+        this.roots[this.rootPath] = this.root;
+        let prev = this.root;
+        let len = split.length - 1;
+        const joinSep = pathImpl.sep;
+        let abs = this.rootPath;
+        let sawFirst = false;
+        for (const part of split) {
+            const l = len--;
+            prev = prev.child(part, {
+                relative: new Array(l).fill('..').join(joinSep),
+                relativePosix: new Array(l).fill('..').join('/'),
+                fullpath: (abs += (sawFirst ? '' : joinSep) + part),
+            });
+            sawFirst = true;
+        }
+        this.cwd = prev;
+    }
+    /**
+     * Get the depth of a provided path, string, or the cwd
+     */
+    depth(path = this.cwd) {
+        if (typeof path === 'string') {
+            path = this.cwd.resolve(path);
+        }
+        return path.depth();
+    }
+    /**
+     * Return the cache of child entries.  Exposed so subclasses can create
+     * child Path objects in a platform-specific way.
+     *
+     * @internal
+     */
+    childrenCache() {
+        return this.#children;
+    }
+    /**
+     * Resolve one or more path strings to a resolved string
+     *
+     * Same interface as require('path').resolve.
+     *
+     * Much faster than path.resolve() when called multiple times for the same
+     * path, because the resolved Path objects are cached.  Much slower
+     * otherwise.
+     */
+    resolve(...paths) {
+        // first figure out the minimum number of paths we have to test
+        // we always start at cwd, but any absolutes will bump the start
+        let r = '';
+        for (let i = paths.length - 1; i >= 0; i--) {
+            const p = paths[i];
+            if (!p || p === '.')
+                continue;
+            r = r ? `${p}/${r}` : p;
+            if (this.isAbsolute(p)) {
+                break;
+            }
+        }
+        const cached = this.#resolveCache.get(r);
+        if (cached !== undefined) {
+            return cached;
+        }
+        const result = this.cwd.resolve(r).fullpath();
+        this.#resolveCache.set(r, result);
+        return result;
+    }
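+    /* Example (sketch, posix): a later absolute argument resets the join,
+     * and repeats are served from the ResolveCache:
+     *
+     *   scurry.resolve('a', '/b', 'c')  // => '/b/c'
+     *   scurry.resolve('a', '/b', 'c')  // cached string on the second call
+     */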
+    /**
+     * Resolve one or more path strings to a resolved string, returning
+     * the posix path.  Identical to .resolve() on posix systems, but on
+     * windows will return a forward-slash separated UNC path.
+     *
+     * Same interface as require('path').resolve.
+     *
+     * Much faster than path.resolve() when called multiple times for the same
+     * path, because the resolved Path objects are cached.  Much slower
+     * otherwise.
+     */
+    resolvePosix(...paths) {
+        // first figure out the minimum number of paths we have to test
+        // we always start at cwd, but any absolutes will bump the start
+        let r = '';
+        for (let i = paths.length - 1; i >= 0; i--) {
+            const p = paths[i];
+            if (!p || p === '.')
+                continue;
+            r = r ? `${p}/${r}` : p;
+            if (this.isAbsolute(p)) {
+                break;
+            }
+        }
+        const cached = this.#resolvePosixCache.get(r);
+        if (cached !== undefined) {
+            return cached;
+        }
+        const result = this.cwd.resolve(r).fullpathPosix();
+        this.#resolvePosixCache.set(r, result);
+        return result;
+    }
+    /**
+     * find the relative path from the cwd to the supplied path string or entry
+     */
+    relative(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.relative();
+    }
+    /**
+     * find the relative path from the cwd to the supplied path string or
+     * entry, using / as the path delimiter, even on Windows.
+     */
+    relativePosix(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.relativePosix();
+    }
+    /**
+     * Return the basename for the provided string or Path object
+     */
+    basename(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.name;
+    }
+    /**
+     * Return the dirname for the provided string or Path object
+     */
+    dirname(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return (entry.parent || entry).fullpath();
+    }
+    async readdir(entry = this.cwd, opts = {
+        withFileTypes: true,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes } = opts;
+        if (!entry.canReaddir()) {
+            return [];
+        }
+        else {
+            const p = await entry.readdir();
+            return withFileTypes ? p : p.map(e => e.name);
+        }
+    }
+    readdirSync(entry = this.cwd, opts = {
+        withFileTypes: true,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true } = opts;
+        if (!entry.canReaddir()) {
+            return [];
+        }
+        else if (withFileTypes) {
+            return entry.readdirSync();
+        }
+        else {
+            return entry.readdirSync().map(e => e.name);
+        }
+    }
+    /**
+     * Call lstat() on the string or Path object, and update all known
+     * information that can be determined.
+     *
+     * Note that unlike `fs.lstat()`, the returned value does not contain some
+     * information, such as `mode`, `dev`, `nlink`, and `ino`.  If that
+     * information is required, you will need to call `fs.lstat` yourself.
+     *
+     * If the Path refers to a nonexistent file, or if the lstat call fails for
+     * any reason, `undefined` is returned.  Otherwise the updated Path object is
+     * returned.
+     *
+     * Results are cached, and thus may be out of date if the filesystem is
+     * mutated.
+     */
+    async lstat(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.lstat();
+    }
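+    // Illustrative usage (hedged; `ps` is an assumed PathScurry instance):
+    //
+    //   const entry = await ps.lstat('package.json')
+    //   if (entry) console.log(entry.isFile(), entry.name)
+    //   // entry is undefined if the path is missing or lstat failed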
+    /**
+     * synchronous {@link PathScurryBase.lstat}
+     */
+    lstatSync(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.lstatSync();
+    }
+    async readlink(entry = this.cwd, { withFileTypes } = {
+        withFileTypes: false,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            withFileTypes = entry.withFileTypes;
+            entry = this.cwd;
+        }
+        const e = await entry.readlink();
+        return withFileTypes ? e : e?.fullpath();
+    }
+    readlinkSync(entry = this.cwd, { withFileTypes } = {
+        withFileTypes: false,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            withFileTypes = entry.withFileTypes;
+            entry = this.cwd;
+        }
+        const e = entry.readlinkSync();
+        return withFileTypes ? e : e?.fullpath();
+    }
+    async realpath(entry = this.cwd, { withFileTypes } = {
+        withFileTypes: false,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            withFileTypes = entry.withFileTypes;
+            entry = this.cwd;
+        }
+        const e = await entry.realpath();
+        return withFileTypes ? e : e?.fullpath();
+    }
+    realpathSync(entry = this.cwd, { withFileTypes } = {
+        withFileTypes: false,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            withFileTypes = entry.withFileTypes;
+            entry = this.cwd;
+        }
+        const e = entry.realpathSync();
+        return withFileTypes ? e : e?.fullpath();
+    }
+    async walk(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        const results = [];
+        if (!filter || filter(entry)) {
+            results.push(withFileTypes ? entry : entry.fullpath());
+        }
+        const dirs = new Set();
+        const walk = (dir, cb) => {
+            dirs.add(dir);
+            dir.readdirCB((er, entries) => {
+                /* c8 ignore start */
+                if (er) {
+                    return cb(er);
+                }
+                /* c8 ignore stop */
+                let len = entries.length;
+                if (!len)
+                    return cb();
+                const next = () => {
+                    if (--len === 0) {
+                        cb();
+                    }
+                };
+                for (const e of entries) {
+                    if (!filter || filter(e)) {
+                        results.push(withFileTypes ? e : e.fullpath());
+                    }
+                    if (follow && e.isSymbolicLink()) {
+                        e.realpath()
+                            .then(r => (r?.isUnknown() ? r.lstat() : r))
+                            .then(r => r?.shouldWalk(dirs, walkFilter) ? walk(r, next) : next());
+                    }
+                    else {
+                        if (e.shouldWalk(dirs, walkFilter)) {
+                            walk(e, next);
+                        }
+                        else {
+                            next();
+                        }
+                    }
+                }
+            }, true); // zalgo: allow the callback to fire synchronously when cached
+        };
+        const start = entry;
+        return new Promise((res, rej) => {
+            walk(start, er => {
+                /* c8 ignore start */
+                if (er)
+                    return rej(er);
+                /* c8 ignore stop */
+                res(results);
+            });
+        });
+    }
+    walkSync(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        const results = [];
+        if (!filter || filter(entry)) {
+            results.push(withFileTypes ? entry : entry.fullpath());
+        }
+        const dirs = new Set([entry]);
+        for (const dir of dirs) {
+            const entries = dir.readdirSync();
+            for (const e of entries) {
+                if (!filter || filter(e)) {
+                    results.push(withFileTypes ? e : e.fullpath());
+                }
+                let r = e;
+                if (e.isSymbolicLink()) {
+                    if (!(follow && (r = e.realpathSync())))
+                        continue;
+                    if (r.isUnknown())
+                        r.lstatSync();
+                }
+                if (r.shouldWalk(dirs, walkFilter)) {
+                    dirs.add(r);
+                }
+            }
+        }
+        return results;
+    }
+    /**
+     * Support for `for await`
+     *
+     * Alias for {@link PathScurryBase.iterate}
+     *
+     * Note: As of Node 19, this is very slow, compared to other methods of
+     * walking.  Consider using {@link PathScurryBase.stream} if memory overhead
+     * and backpressure are concerns, or {@link PathScurryBase.walk} if not.
+     */
+    [Symbol.asyncIterator]() {
+        return this.iterate();
+    }
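+    // Illustrative usage (hedged sketch):
+    //
+    //   for await (const entry of new PathScurry('/tmp')) {
+    //     console.log(entry.fullpath())
+    //   }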
+    iterate(entry = this.cwd, options = {}) {
+        // iterating async over the stream is significantly more performant,
+        // especially in the warm-cache scenario, because it buffers up directory
+        // entries in the background instead of waiting for a yield for each one.
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            options = entry;
+            entry = this.cwd;
+        }
+        return this.stream(entry, options)[Symbol.asyncIterator]();
+    }
+    /**
+     * Iterating over a PathScurry performs a synchronous walk.
+     *
+     * Alias for {@link PathScurryBase.iterateSync}
+     */
+    [Symbol.iterator]() {
+        return this.iterateSync();
+    }
+    *iterateSync(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        if (!filter || filter(entry)) {
+            yield withFileTypes ? entry : entry.fullpath();
+        }
+        const dirs = new Set([entry]);
+        for (const dir of dirs) {
+            const entries = dir.readdirSync();
+            for (const e of entries) {
+                if (!filter || filter(e)) {
+                    yield withFileTypes ? e : e.fullpath();
+                }
+                let r = e;
+                if (e.isSymbolicLink()) {
+                    if (!(follow && (r = e.realpathSync())))
+                        continue;
+                    if (r.isUnknown())
+                        r.lstatSync();
+                }
+                if (r.shouldWalk(dirs, walkFilter)) {
+                    dirs.add(r);
+                }
+            }
+        }
+    }
+    stream(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        const results = new Minipass({ objectMode: true });
+        if (!filter || filter(entry)) {
+            results.write(withFileTypes ? entry : entry.fullpath());
+        }
+        const dirs = new Set();
+        const queue = [entry];
+        let processing = 0;
+        const process = () => {
+            let paused = false;
+            while (!paused) {
+                const dir = queue.shift();
+                if (!dir) {
+                    if (processing === 0)
+                        results.end();
+                    return;
+                }
+                processing++;
+                dirs.add(dir);
+                const onReaddir = (er, entries, didRealpaths = false) => {
+                    /* c8 ignore start */
+                    if (er)
+                        return results.emit('error', er);
+                    /* c8 ignore stop */
+                    if (follow && !didRealpaths) {
+                        const promises = [];
+                        for (const e of entries) {
+                            if (e.isSymbolicLink()) {
+                                promises.push(e
+                                    .realpath()
+                                    .then((r) => r?.isUnknown() ? r.lstat() : r));
+                            }
+                        }
+                        if (promises.length) {
+                            Promise.all(promises).then(() => onReaddir(null, entries, true));
+                            return;
+                        }
+                    }
+                    for (const e of entries) {
+                        if (e && (!filter || filter(e))) {
+                            if (!results.write(withFileTypes ? e : e.fullpath())) {
+                                paused = true;
+                            }
+                        }
+                    }
+                    processing--;
+                    for (const e of entries) {
+                        const r = e.realpathCached() || e;
+                        if (r.shouldWalk(dirs, walkFilter)) {
+                            queue.push(r);
+                        }
+                    }
+                    if (paused && !results.flowing) {
+                        results.once('drain', process);
+                    }
+                    else if (!sync) {
+                        process();
+                    }
+                };
+                // zalgo containment: if readdirCB calls back synchronously
+                // (warm cache), `sync` is still true and onReaddir defers to
+                // the while loop instead of recursing into process()
+                let sync = true;
+                dir.readdirCB(onReaddir, true);
+                sync = false;
+            }
+        };
+        process();
+        return results;
+    }
+    streamSync(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        const results = new Minipass({ objectMode: true });
+        const dirs = new Set();
+        if (!filter || filter(entry)) {
+            results.write(withFileTypes ? entry : entry.fullpath());
+        }
+        const queue = [entry];
+        let processing = 0;
+        const process = () => {
+            let paused = false;
+            while (!paused) {
+                const dir = queue.shift();
+                if (!dir) {
+                    if (processing === 0)
+                        results.end();
+                    return;
+                }
+                processing++;
+                dirs.add(dir);
+                const entries = dir.readdirSync();
+                for (const e of entries) {
+                    if (!filter || filter(e)) {
+                        if (!results.write(withFileTypes ? e : e.fullpath())) {
+                            paused = true;
+                        }
+                    }
+                }
+                processing--;
+                for (const e of entries) {
+                    let r = e;
+                    if (e.isSymbolicLink()) {
+                        if (!(follow && (r = e.realpathSync())))
+                            continue;
+                        if (r.isUnknown())
+                            r.lstatSync();
+                    }
+                    if (r.shouldWalk(dirs, walkFilter)) {
+                        queue.push(r);
+                    }
+                }
+            }
+            if (paused && !results.flowing)
+                results.once('drain', process);
+        };
+        process();
+        return results;
+    }
+    chdir(path = this.cwd) {
+        const oldCwd = this.cwd;
+        this.cwd = typeof path === 'string' ? this.cwd.resolve(path) : path;
+        this.cwd[setAsCwd](oldCwd);
+    }
+}
+/**
+ * Windows implementation of {@link PathScurryBase}
+ *
+ * Defaults to case insensitive, uses `'\\'` to generate path strings.  Uses
+ * {@link PathWin32} for Path objects.
+ */
+export class PathScurryWin32 extends PathScurryBase {
+    /**
+     * separator for generating path strings
+     */
+    sep = '\\';
+    constructor(cwd = process.cwd(), opts = {}) {
+        const { nocase = true } = opts;
+        super(cwd, win32, '\\', { ...opts, nocase });
+        this.nocase = nocase;
+        for (let p = this.cwd; p; p = p.parent) {
+            p.nocase = this.nocase;
+        }
+    }
+    /**
+     * @internal
+     */
+    parseRootPath(dir) {
+        // if the path starts with a single separator, it's not a UNC, and we'll
+        // just get separator as the root, and driveFromUNC will return \
+        // In that case, mount \ on the root from the cwd.
+        return win32.parse(dir).root.toUpperCase();
+    }
+    /**
+     * @internal
+     */
+    newRoot(fs) {
+        return new PathWin32(this.rootPath, IFDIR, undefined, this.roots, this.nocase, this.childrenCache(), { fs });
+    }
+    /**
+     * Return true if the provided path string is an absolute path
+     */
+    isAbsolute(p) {
+        return (p.startsWith('/') || p.startsWith('\\') || /^[a-z]:(\/|\\)/i.test(p));
+    }
+}
+/**
+ * {@link PathScurryBase} implementation for all posix systems other than Darwin.
+ *
+ * Defaults to case-sensitive matching, uses `'/'` to generate path strings.
+ *
+ * Uses {@link PathPosix} for Path objects.
+ */
+export class PathScurryPosix extends PathScurryBase {
+    /**
+     * separator for generating path strings
+     */
+    sep = '/';
+    constructor(cwd = process.cwd(), opts = {}) {
+        const { nocase = false } = opts;
+        super(cwd, posix, '/', { ...opts, nocase });
+        this.nocase = nocase;
+    }
+    /**
+     * @internal
+     */
+    parseRootPath(_dir) {
+        return '/';
+    }
+    /**
+     * @internal
+     */
+    newRoot(fs) {
+        return new PathPosix(this.rootPath, IFDIR, undefined, this.roots, this.nocase, this.childrenCache(), { fs });
+    }
+    /**
+     * Return true if the provided path string is an absolute path
+     */
+    isAbsolute(p) {
+        return p.startsWith('/');
+    }
+}
+/**
+ * {@link PathScurryBase} implementation for Darwin (macOS) systems.
+ *
+ * Defaults to case-insensitive matching, uses `'/'` for generating path
+ * strings.
+ *
+ * Uses {@link PathPosix} for Path objects.
+ */
+export class PathScurryDarwin extends PathScurryPosix {
+    constructor(cwd = process.cwd(), opts = {}) {
+        const { nocase = true } = opts;
+        super(cwd, { ...opts, nocase });
+    }
+}
+/**
+ * Default {@link PathBase} implementation for the current platform.
+ *
+ * {@link PathWin32} on Windows systems, {@link PathPosix} on all others.
+ */
+export const Path = process.platform === 'win32' ? PathWin32 : PathPosix;
+/**
+ * Default {@link PathScurryBase} implementation for the current platform.
+ *
+ * {@link PathScurryWin32} on Windows systems, {@link PathScurryDarwin} on
+ * Darwin (macOS) systems, {@link PathScurryPosix} on all others.
+ */
+export const PathScurry = process.platform === 'win32' ? PathScurryWin32
+    : process.platform === 'darwin' ? PathScurryDarwin
+        : PathScurryPosix;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/node-gyp/node_modules/path-scurry/dist/esm/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/node-gyp/node_modules/path-scurry/dist/esm/package.json
new file mode 100644
index 0000000000000000000000000000000000000000..3dbc1ca591c0557e35b6004aeba250e6a70b56e3
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/node-gyp/node_modules/path-scurry/dist/esm/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-audit-report/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-audit-report/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..8d28acf866d9325ba4d3d882a167368bc3356bee
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-audit-report/LICENSE
@@ -0,0 +1,16 @@
+ISC License
+
+Copyright (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for
+any purpose with or without fee is hereby granted, provided that the
+above copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
+ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
+CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
+USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-audit-report/lib/colors.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-audit-report/lib/colors.js
new file mode 100644
index 0000000000000000000000000000000000000000..e6688f2f1c8c69c4c74e0b6be27d43b17296ed40
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-audit-report/lib/colors.js
@@ -0,0 +1,22 @@
+module.exports = (chalk) => {
+  const green = s => chalk.green.bold(s)
+  const red = s => chalk.red.bold(s)
+  const magenta = s => chalk.magenta.bold(s)
+  const yellow = s => chalk.yellow.bold(s)
+  const white = s => chalk.bold(s)
+  const severity = (sev, s) => sev.toLowerCase() === 'moderate' ? yellow(s || sev)
+    : sev.toLowerCase() === 'high' ? red(s || sev)
+    : sev.toLowerCase() === 'critical' ? magenta(s || sev)
+    : white(s || sev)
+  const dim = s => chalk.dim(s)
+
+  return {
+    dim,
+    green,
+    red,
+    magenta,
+    yellow,
+    white,
+    severity,
+  }
+}
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-audit-report/lib/exit-code.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-audit-report/lib/exit-code.js
new file mode 100644
index 0000000000000000000000000000000000000000..fcb580b5126716d30c7ef84436dac46c8260a274
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-audit-report/lib/exit-code.js
@@ -0,0 +1,14 @@
+// return 1 if any vulns in the set are at or above the specified severity
+// Object.entries on an array yields [index, value] pairs; reversing each pair
+// maps each severity name to its rank (the stringified array index)
+const severities = new Map(Object.entries([
+  'info',
+  'low',
+  'moderate',
+  'high',
+  'critical',
+  'none',
+]).map(s => s.reverse()))
+
+module.exports = (data, level) =>
+  Object.entries(data.metadata.vulnerabilities)
+    .some(([sev, count]) => count > 0 && severities.has(sev) &&
+      severities.get(sev) >= severities.get(level)) ? 1 : 0
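+
+// Illustrative usage from a consumer (hedged; `data` follows the npm audit
+// JSON shape):
+//
+//   const exitCode = require('./exit-code.js')
+//   exitCode({ metadata: { vulnerabilities: { high: 2 } } }, 'moderate')  // 1
+//   exitCode({ metadata: { vulnerabilities: { low: 3 } } }, 'moderate')   // 0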
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-audit-report/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-audit-report/lib/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..d0ced01efefec9bb644f30f76f060b363b71a0bb
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-audit-report/lib/index.js
@@ -0,0 +1,41 @@
+'use strict'
+
+const reporters = {
+  install: require('./reporters/install'),
+  detail: require('./reporters/detail'),
+  json: require('./reporters/json'),
+  quiet: require('./reporters/quiet'),
+}
+
+const exitCode = require('./exit-code.js')
+
+module.exports = Object.assign((data, options = {}) => {
+  const {
+    reporter = 'install',
+    chalk,
+    unicode = true,
+    indent = 2,
+  } = options
+
+  // the CLI defaults this to `null`, so the destructuring default above won't apply
+  const auditLevel = options.auditLevel || 'low'
+
+  if (!data) {
+    throw Object.assign(
+      new TypeError('ENOAUDITDATA'),
+      {
+        code: 'ENOAUDITDATA',
+        message: 'missing audit data',
+      }
+    )
+  }
+
+  if (typeof data.toJSON === 'function') {
+    data = data.toJSON()
+  }
+
+  return {
+    report: reporters[reporter](data, { chalk, unicode, indent }),
+    exitCode: exitCode(data, auditLevel),
+  }
+}, { reporters })
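+
+// Illustrative usage from a consumer (hedged; `data` and a chalk instance
+// are assumed to exist):
+//
+//   const auditReport = require('npm-audit-report')
+//   const { report, exitCode } = auditReport(data, { reporter: 'detail', chalk })
+//   console.log(report)
+//   process.exitCode = exitCode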
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-audit-report/lib/reporters/detail.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-audit-report/lib/reporters/detail.js
new file mode 100644
index 0000000000000000000000000000000000000000..6dde8ec88de447cafc8b39a56a806d20d19ea5bb
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-audit-report/lib/reporters/detail.js
@@ -0,0 +1,86 @@
+'use strict'
+
+const colors = require('../colors.js')
+const install = require('./install.js')
+
+module.exports = (data, { chalk }) => {
+  const summary = install.summary(data, { chalk })
+  const none = data.metadata.vulnerabilities.total === 0
+  return none ? summary : fullReport(data, { chalk, summary })
+}
+
+const fullReport = (data, { chalk, summary }) => {
+  const c = colors(chalk)
+  const output = [c.white('# npm audit report'), '']
+
+  const printed = new Set()
+  for (const [, vuln] of Object.entries(data.vulnerabilities)) {
+    // only print starting from the top-level advisories
+    if (vuln.via.filter(v => typeof v !== 'string').length !== 0) {
+      output.push(printVuln(vuln, c, data.vulnerabilities, printed))
+    }
+  }
+
+  output.push(summary)
+
+  return output.join('\n')
+}
+
+const printVuln = (vuln, c, vulnerabilities, printed, indent = '') => {
+  if (printed.has(vuln)) {
+    return null
+  }
+
+  printed.add(vuln)
+  const output = []
+
+  output.push(c.white(vuln.name) + '  ' + vuln.range)
+
+  // the severity line is shown for everything except 'low'
+  if (indent === '' && vuln.severity !== 'low') {
+    output.push(`Severity: ${c.severity(vuln.severity)}`)
+  }
+
+  for (const via of vuln.via) {
+    if (typeof via === 'string') {
+      output.push(`Depends on vulnerable versions of ${c.white(via)}`)
+    } else if (indent === '') {
+      output.push(`${c.white(via.title)} - ${via.url}`)
+    }
+  }
+
+  if (indent === '') {
+    const { fixAvailable: fa } = vuln
+    if (fa === false) {
+      output.push(c.red('No fix available'))
+    } else if (fa === true) {
+      output.push(c.green('fix available') + ' via `npm audit fix`')
+    } else {
+      /* istanbul ignore else - should be impossible, just being cautious */
+      if (typeof fa === 'object' && indent === '') {
+        output.push(
+          `${c.yellow('fix available')} via \`npm audit fix --force\``,
+          `Will install ${fa.name}@${fa.version}` +
+          `, which is ${fa.isSemVerMajor ? 'a breaking change' :
+            'outside the stated dependency range'}`
+        )
+      }
+    }
+  }
+
+  for (const path of vuln.nodes) {
+    output.push(c.dim(path))
+  }
+
+  for (const effect of vuln.effects) {
+    const e = printVuln(vulnerabilities[effect], c, vulnerabilities, printed, '  ')
+    if (e) {
+      output.push(...e.split('\n'))
+    }
+  }
+
+  if (indent === '') {
+    output.push('')
+  }
+
+  return output.map(l => `${indent}${l}`).join('\n')
+}
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-audit-report/lib/reporters/json.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-audit-report/lib/reporters/json.js
new file mode 100644
index 0000000000000000000000000000000000000000..6714720a54e64ea42640f4fd907cd036e8a1ca7a
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-audit-report/lib/reporters/json.js
@@ -0,0 +1 @@
+module.exports = (data, { indent }) => JSON.stringify(data, null, indent)
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-audit-report/lib/reporters/quiet.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-audit-report/lib/reporters/quiet.js
new file mode 100644
index 0000000000000000000000000000000000000000..57517aae8f19f3be6710df44e5461cba6aa3d1db
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-audit-report/lib/reporters/quiet.js
@@ -0,0 +1 @@
+module.exports = () => ''
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-audit-report/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-audit-report/package.json
new file mode 100644
index 0000000000000000000000000000000000000000..22b16099e23d664a72ef00102eef3b8c25119bc8
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-audit-report/package.json
@@ -0,0 +1,62 @@
+{
+  "name": "npm-audit-report",
+  "version": "6.0.0",
+  "description": "Given a response from the npm security api, render it into a variety of security reports",
+  "main": "lib/index.js",
+  "scripts": {
+    "test": "tap",
+    "snap": "tap",
+    "lint": "npm run eslint",
+    "postlint": "template-oss-check",
+    "template-oss-apply": "template-oss-apply --force",
+    "lintfix": "npm run eslint -- --fix",
+    "posttest": "npm run lint",
+    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
+  },
+  "tap": {
+    "check-coverage": true,
+    "coverage-map": "map.js",
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "keywords": [
+    "npm",
+    "security",
+    "report",
+    "audit"
+  ],
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "devDependencies": {
+    "@npmcli/eslint-config": "^5.0.0",
+    "@npmcli/template-oss": "4.23.3",
+    "chalk": "^5.2.0",
+    "tap": "^16.0.0"
+  },
+  "directories": {
+    "lib": "lib",
+    "test": "test"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/npm/npm-audit-report.git"
+  },
+  "bugs": {
+    "url": "https://github.com/npm/npm-audit-report/issues"
+  },
+  "homepage": "https://github.com/npm/npm-audit-report#readme",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "engines": {
+    "node": "^18.17.0 || >=20.5.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.23.3",
+    "publish": true
+  }
+}
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-bundled/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-bundled/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..20a476254092375ee52bb87e5e36d070074cc5a7
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-bundled/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) npm, Inc. and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-bundled/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-bundled/lib/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..f5ee0bb3ea76534baff32c6b2beecc19acf45b60
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-bundled/lib/index.js
@@ -0,0 +1,254 @@
+'use strict'
+
+// walk the tree of deps starting from the top level list of bundled deps
+// Any deps at the top level that are depended on by a bundled dep that
+// does not have that dep in its own node_modules folder are considered
+// bundled deps as well.  This list of names can be passed to npm-packlist
+// as the "bundled" argument.  Additionally, packageJsonCache is shared so
+// packlist doesn't have to re-read files already consumed in this pass
+
+const fs = require('fs')
+const path = require('path')
+const EE = require('events').EventEmitter
+// we don't care about the package bins, but we share a pj cache
+// with other modules that DO care about it, so keep it nice.
+const normalizePackageBin = require('npm-normalize-package-bin')
+
+class BundleWalker extends EE {
+  constructor (opt) {
+    opt = opt || {}
+    super(opt)
+    this.path = path.resolve(opt.path || process.cwd())
+
+    this.parent = opt.parent || null
+    if (this.parent) {
+      this.result = this.parent.result
+      // only collect results in node_modules folders at the top level
+      // since the node_modules in a bundled dep is included always
+      if (!this.parent.parent) {
+        const base = path.basename(this.path)
+        const scope = path.basename(path.dirname(this.path))
+        this.result.add(/^@/.test(scope) ? scope + '/' + base : base)
+      }
+      this.root = this.parent.root
+      this.packageJsonCache = this.parent.packageJsonCache
+    } else {
+      this.result = new Set()
+      this.root = this.path
+      this.packageJsonCache = opt.packageJsonCache || new Map()
+    }
+
+    this.seen = new Set()
+    this.didDone = false
+    this.children = 0
+    this.node_modules = []
+    this.package = null
+    this.bundle = null
+  }
+
+  addListener (ev, fn) {
+    return this.on(ev, fn)
+  }
+
+  on (ev, fn) {
+    const ret = super.on(ev, fn)
+    if (ev === 'done' && this.didDone) {
+      this.emit('done', this.result)
+    }
+    return ret
+  }
+
+  done () {
+    if (!this.didDone) {
+      this.didDone = true
+      if (!this.parent) {
+        const res = Array.from(this.result)
+        this.result = res
+        this.emit('done', res)
+      } else {
+        this.emit('done')
+      }
+    }
+  }
+
+  start () {
+    const pj = path.resolve(this.path, 'package.json')
+    if (this.packageJsonCache.has(pj)) {
+      this.onPackage(this.packageJsonCache.get(pj))
+    } else {
+      this.readPackageJson(pj)
+    }
+    return this
+  }
+
+  readPackageJson (pj) {
+    fs.readFile(pj, (er, data) =>
+      er ? this.done() : this.onPackageJson(pj, data))
+  }
+
+  onPackageJson (pj, data) {
+    try {
+      this.package = normalizePackageBin(JSON.parse(data + ''))
+    } catch (er) {
+      return this.done()
+    }
+    this.packageJsonCache.set(pj, this.package)
+    this.onPackage(this.package)
+  }
+
+  allDepsBundled (pkg) {
+    return Object.keys(pkg.dependencies || {}).concat(
+      Object.keys(pkg.optionalDependencies || {}))
+  }
+
+  onPackage (pkg) {
+    // all deps are bundled if we got here as a child.
+    // otherwise, only bundle bundledDeps
+    // Get a unique-ified array with a short-lived Set
+    const bdRaw = this.parent ? this.allDepsBundled(pkg)
+      : pkg.bundleDependencies || pkg.bundledDependencies || []
+
+    const bd = Array.from(new Set(
+      Array.isArray(bdRaw) ? bdRaw
+      : bdRaw === true ? this.allDepsBundled(pkg)
+      : Object.keys(bdRaw)))
+
+    if (!bd.length) {
+      return this.done()
+    }
+
+    this.bundle = bd
+    this.readModules()
+  }
+
+  readModules () {
+    readdirNodeModules(this.path + '/node_modules', (er, nm) =>
+      er ? this.onReaddir([]) : this.onReaddir(nm))
+  }
+
+  onReaddir (nm) {
+    // keep track of what we have, in case children need it
+    this.node_modules = nm
+
+    this.bundle.forEach(dep => this.childDep(dep))
+    if (this.children === 0) {
+      this.done()
+    }
+  }
+
+  childDep (dep) {
+    if (this.node_modules.indexOf(dep) !== -1) {
+      if (!this.seen.has(dep)) {
+        this.seen.add(dep)
+        this.child(dep)
+      }
+    } else if (this.parent) {
+      this.parent.childDep(dep)
+    }
+  }
+
+  child (dep) {
+    const p = this.path + '/node_modules/' + dep
+    this.children += 1
+    const child = new BundleWalker({
+      path: p,
+      parent: this,
+    })
+    child.on('done', () => {
+      if (--this.children === 0) {
+        this.done()
+      }
+    })
+    child.start()
+  }
+}
+
+class BundleWalkerSync extends BundleWalker {
+  start () {
+    super.start()
+    this.done()
+    return this
+  }
+
+  readPackageJson (pj) {
+    try {
+      this.onPackageJson(pj, fs.readFileSync(pj))
+    } catch {
+      // empty catch
+    }
+    return this
+  }
+
+  readModules () {
+    try {
+      this.onReaddir(readdirNodeModulesSync(this.path + '/node_modules'))
+    } catch {
+      this.onReaddir([])
+    }
+  }
+
+  child (dep) {
+    new BundleWalkerSync({
+      path: this.path + '/node_modules/' + dep,
+      parent: this,
+    }).start()
+  }
+}
+
+const readdirNodeModules = (nm, cb) => {
+  fs.readdir(nm, (er, set) => {
+    if (er) {
+      cb(er)
+    } else {
+      const scopes = set.filter(f => /^@/.test(f))
+      if (!scopes.length) {
+        cb(null, set)
+      } else {
+        const unscoped = set.filter(f => !/^@/.test(f))
+        let count = scopes.length
+        scopes.forEach(scope => {
+          fs.readdir(nm + '/' + scope, (readdirEr, pkgs) => {
+            if (readdirEr || !pkgs.length) {
+              unscoped.push(scope)
+            } else {
+              unscoped.push.apply(unscoped, pkgs.map(p => scope + '/' + p))
+            }
+            if (--count === 0) {
+              cb(null, unscoped)
+            }
+          })
+        })
+      }
+    }
+  })
+}
+
+const readdirNodeModulesSync = nm => {
+  const set = fs.readdirSync(nm)
+  const unscoped = set.filter(f => !/^@/.test(f))
+  const scopes = set.filter(f => /^@/.test(f)).map(scope => {
+    try {
+      const pkgs = fs.readdirSync(nm + '/' + scope)
+      return pkgs.length ? pkgs.map(p => scope + '/' + p) : [scope]
+    } catch (er) {
+      return [scope]
+    }
+  }).reduce((a, b) => a.concat(b), [])
+  return unscoped.concat(scopes)
+}
+
+const walk = (options, callback) => {
+  const p = new Promise((resolve, reject) => {
+    new BundleWalker(options).on('done', resolve).on('error', reject).start()
+  })
+  return callback ? p.then(res => callback(null, res), callback) : p
+}
+
+const walkSync = options => {
+  return new BundleWalkerSync(options).start().result
+}
+
+module.exports = walk
+walk.sync = walkSync
+walk.BundleWalker = BundleWalker
+walk.BundleWalkerSync = BundleWalkerSync
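+
+// Illustrative usage (hedged; the package path is an assumption):
+//
+//   walk({ path: '/path/to/pkg' }).then(names => console.log(names))
+//   // or synchronously:
+//   console.log(walkSync({ path: '/path/to/pkg' }))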
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-bundled/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-bundled/package.json
new file mode 100644
index 0000000000000000000000000000000000000000..c5daf35dbaa841fe75ab5e324d7079e6770014f5
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-bundled/package.json
@@ -0,0 +1,49 @@
+{
+  "name": "npm-bundled",
+  "version": "4.0.0",
+  "description": "list things in node_modules that are bundledDependencies, or transitive dependencies thereof",
+  "main": "lib/index.js",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/npm/npm-bundled.git"
+  },
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "devDependencies": {
+    "@npmcli/eslint-config": "^5.0.0",
+    "@npmcli/template-oss": "4.23.3",
+    "mutate-fs": "^2.1.1",
+    "tap": "^16.3.0"
+  },
+  "scripts": {
+    "test": "tap",
+    "lint": "npm run eslint",
+    "postlint": "template-oss-check",
+    "template-oss-apply": "template-oss-apply --force",
+    "lintfix": "npm run eslint -- --fix",
+    "snap": "tap",
+    "posttest": "npm run lint",
+    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
+  },
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "dependencies": {
+    "npm-normalize-package-bin": "^4.0.0"
+  },
+  "engines": {
+    "node": "^18.17.0 || >=20.5.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.23.3",
+    "publish": true
+  },
+  "tap": {
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  }
+}
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-install-checks/lib/current-env.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-install-checks/lib/current-env.js
new file mode 100644
index 0000000000000000000000000000000000000000..31f154aac59b323bafde5a3a5823000dbbee17d9
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-install-checks/lib/current-env.js
@@ -0,0 +1,91 @@
+const process = require('node:process')
+const nodeOs = require('node:os')
+const fs = require('node:fs')
+
+function isMusl (file) {
+  return file.includes('libc.musl-') || file.includes('ld-musl-')
+}
+
+function os () {
+  return process.platform
+}
+
+function cpu () {
+  return process.arch
+}
+
+const LDD_PATH = '/usr/bin/ldd'
+function getFamilyFromFilesystem () {
+  try {
+    const content = fs.readFileSync(LDD_PATH, 'utf-8')
+    if (content.includes('musl')) {
+      return 'musl'
+    }
+    if (content.includes('GNU C Library')) {
+      return 'glibc'
+    }
+    return null
+  } catch {
+    return undefined
+  }
+}
+
+function getFamilyFromReport () {
+  const originalExclude = process.report.excludeNetwork
+  process.report.excludeNetwork = true
+  const report = process.report.getReport()
+  process.report.excludeNetwork = originalExclude
+  if (report.header?.glibcVersionRuntime) {
+    family = 'glibc'
+  } else if (Array.isArray(report.sharedObjects) && report.sharedObjects.some(isMusl)) {
+    family = 'musl'
+  } else {
+    family = null
+  }
+  return family
+}
+
+// memoized libc family: 'glibc', 'musl', or null; undefined means not yet probed
+let family
+function libc (osName) {
+  if (osName !== 'linux') {
+    return undefined
+  }
+  if (family === undefined) {
+    family = getFamilyFromFilesystem()
+    if (family === undefined) {
+      family = getFamilyFromReport()
+    }
+  }
+  return family
+}
+
+function devEngines (env = {}) {
+  const osName = env.os || os()
+  return {
+    cpu: {
+      name: env.cpu || cpu(),
+    },
+    libc: {
+      name: env.libc || libc(osName),
+    },
+    os: {
+      name: osName,
+      version: env.osVersion || nodeOs.release(),
+    },
+    packageManager: {
+      name: 'npm',
+      version: env.npmVersion,
+    },
+    runtime: {
+      name: 'node',
+      version: env.nodeVersion || process.version,
+    },
+  }
+}
+
+module.exports = {
+  cpu,
+  libc,
+  os,
+  devEngines,
+}
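+
+// Illustrative results (hedged; actual values depend on the host):
+//
+//   libc('linux')   // 'glibc', 'musl', or null if undetermined
+//   libc('darwin')  // undefined: libc is only meaningful on linux
+//   devEngines({ npmVersion: '11.0.0' })  // snapshot of the current host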
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-install-checks/lib/dev-engines.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-install-checks/lib/dev-engines.js
new file mode 100644
index 0000000000000000000000000000000000000000..2c483349ae70a9ce4eb5527ea52b191f3e4d4cab
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-install-checks/lib/dev-engines.js
@@ -0,0 +1,145 @@
+const satisfies = require('semver/functions/satisfies')
+const validRange = require('semver/ranges/valid')
+
+const recognizedOnFail = [
+  'ignore',
+  'warn',
+  'error',
+  'download',
+]
+
+const recognizedProperties = [
+  'name',
+  'version',
+  'onFail',
+]
+
+const recognizedEngines = [
+  'packageManager',
+  'runtime',
+  'cpu',
+  'libc',
+  'os',
+]
+
+/** checks a devEngine dependency */
+function checkDependency (wanted, current, opts) {
+  const { engine } = opts
+
+  if ((typeof wanted !== 'object' || wanted === null) || Array.isArray(wanted)) {
+    throw new Error(`Invalid non-object value for "${engine}"`)
+  }
+
+  const properties = Object.keys(wanted)
+
+  for (const prop of properties) {
+    if (!recognizedProperties.includes(prop)) {
+      throw new Error(`Invalid property "${prop}" for "${engine}"`)
+    }
+  }
+
+  if (!properties.includes('name')) {
+    throw new Error(`Missing "name" property for "${engine}"`)
+  }
+
+  if (typeof wanted.name !== 'string') {
+    throw new Error(`Invalid non-string value for "name" within "${engine}"`)
+  }
+
+  if (typeof current.name !== 'string' || current.name === '') {
+    throw new Error(`Unable to determine "name" for "${engine}"`)
+  }
+
+  if (properties.includes('onFail')) {
+    if (typeof wanted.onFail !== 'string') {
+      throw new Error(`Invalid non-string value for "onFail" within "${engine}"`)
+    }
+    if (!recognizedOnFail.includes(wanted.onFail)) {
+      throw new Error(`Invalid onFail value "${wanted.onFail}" for "${engine}"`)
+    }
+  }
+
+  if (wanted.name !== current.name) {
+    return new Error(
+      `Invalid name "${wanted.name}" does not match "${current.name}" for "${engine}"`
+    )
+  }
+
+  if (properties.includes('version')) {
+    if (typeof wanted.version !== 'string') {
+      throw new Error(`Invalid non-string value for "version" within "${engine}"`)
+    }
+    if (typeof current.version !== 'string' || current.version === '') {
+      throw new Error(`Unable to determine "version" for "${engine}" "${wanted.name}"`)
+    }
+    if (validRange(wanted.version)) {
+      if (!satisfies(current.version, wanted.version, opts.semver)) {
+        return new Error(
+          // eslint-disable-next-line max-len
+          `Invalid semver version "${wanted.version}" does not match "${current.version}" for "${engine}"`
+        )
+      }
+    } else if (wanted.version !== current.version) {
+      return new Error(
+        `Invalid version "${wanted.version}" does not match "${current.version}" for "${engine}"`
+      )
+    }
+  }
+}
+
+/** checks devEngines package property and returns array of warnings / errors */
+function checkDevEngines (wanted, current = {}, opts = {}) {
+  if ((typeof wanted !== 'object' || wanted === null) || Array.isArray(wanted)) {
+    throw new Error(`Invalid non-object value for "devEngines"`)
+  }
+
+  const errors = []
+
+  for (const engine of Object.keys(wanted)) {
+    if (!recognizedEngines.includes(engine)) {
+      throw new Error(`Invalid property "devEngines.${engine}"`)
+    }
+    const dependencyAsAuthored = wanted[engine]
+    const dependencies = [dependencyAsAuthored].flat()
+    const currentEngine = current[engine] || {}
+
+    // this accounts for an empty array, e.g. { runtime: [] }, and ignores it
+    if (dependencies.length === 0) {
+      continue
+    }
+
+    const depErrors = []
+    for (const dep of dependencies) {
+      const result = checkDependency(dep, currentEngine, { ...opts, engine })
+      if (result) {
+        depErrors.push(result)
+      }
+    }
+
+    const invalid = depErrors.length === dependencies.length
+
+    if (invalid) {
+      const lastDependency = dependencies[dependencies.length - 1]
+      let onFail = lastDependency.onFail || 'error'
+      if (onFail === 'download') {
+        onFail = 'error'
+      }
+
+      const err = Object.assign(new Error(`Invalid devEngines.${engine}`), {
+        errors: depErrors,
+        engine,
+        isWarn: onFail === 'warn',
+        isError: onFail === 'error',
+        current: currentEngine,
+        required: dependencyAsAuthored,
+      })
+
+      errors.push(err)
+    }
+  }
+  return errors
+}
+
+module.exports = {
+  checkDevEngines,
+}
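+
+// Illustrative usage (hedged sketch; the manifest values are assumptions):
+//
+//   const errors = checkDevEngines(
+//     { runtime: { name: 'node', version: '>=20.0.0', onFail: 'warn' } },
+//     { runtime: { name: 'node', version: '22.1.0' } }
+//   )
+//   // [] when satisfied; otherwise one Error per failing engine, with
+//   // isWarn/isError reflecting the authored onFail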
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-install-checks/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-install-checks/lib/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..717029208730805e14c57f8f402bb181775300b4
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-install-checks/lib/index.js
@@ -0,0 +1,90 @@
+const semver = require('semver')
+const currentEnv = require('./current-env')
+const { checkDevEngines } = require('./dev-engines')
+
+const checkEngine = (target, npmVer, nodeVer, force = false) => {
+  const nodev = force ? null : nodeVer
+  const eng = target.engines
+  const opt = { includePrerelease: true }
+  if (!eng) {
+    return
+  }
+
+  const nodeFail = nodev && eng.node && !semver.satisfies(nodev, eng.node, opt)
+  const npmFail = npmVer && eng.npm && !semver.satisfies(npmVer, eng.npm, opt)
+  if (nodeFail || npmFail) {
+    throw Object.assign(new Error('Unsupported engine'), {
+      pkgid: target._id,
+      current: { node: nodeVer, npm: npmVer },
+      required: eng,
+      code: 'EBADENGINE',
+    })
+  }
+}
+
+const checkPlatform = (target, force = false, environment = {}) => {
+  if (force) {
+    return
+  }
+
+  const os = environment.os || currentEnv.os()
+  const cpu = environment.cpu || currentEnv.cpu()
+  const libc = environment.libc || currentEnv.libc(os)
+
+  const osOk = target.os ? checkList(os, target.os) : true
+  const cpuOk = target.cpu ? checkList(cpu, target.cpu) : true
+  let libcOk = target.libc ? checkList(libc, target.libc) : true
+  if (target.libc && !libc) {
+    libcOk = false
+  }
+
+  if (!osOk || !cpuOk || !libcOk) {
+    throw Object.assign(new Error('Unsupported platform'), {
+      pkgid: target._id,
+      current: {
+        os,
+        cpu,
+        libc,
+      },
+      required: {
+        os: target.os,
+        cpu: target.cpu,
+        libc: target.libc,
+      },
+      code: 'EBADPLATFORM',
+    })
+  }
+}
+
+const checkList = (value, list) => {
+  if (typeof list === 'string') {
+    list = [list]
+  }
+  if (list.length === 1 && list[0] === 'any') {
+    return true
+  }
+  // match none of the negated values, and at least one of the
+  // non-negated values, if any are present.
+  let negated = 0
+  let match = false
+  for (const entry of list) {
+    const negate = entry.charAt(0) === '!'
+    const test = negate ? entry.slice(1) : entry
+    if (negate) {
+      negated++
+      if (value === test) {
+        return false
+      }
+    } else {
+      match = match || value === test
+    }
+  }
+  return match || negated === list.length
+}
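+
+// Illustrative checkList results (hedged):
+//   checkList('linux', ['linux'])         // true: explicit match
+//   checkList('darwin', ['!win32'])       // true: only negations, none hit
+//   checkList('win32', ['!win32'])        // false: negated value matched
+//   checkList('arm64', ['x64', '!mips'])  // false: no non-negated match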
+
+module.exports = {
+  checkEngine,
+  checkPlatform,
+  checkDevEngines,
+  currentEnv,
+}
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-normalize-package-bin/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-normalize-package-bin/lib/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..3cb8478cf6e2f3e4ad802f6423fbbaacd605ae37
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-normalize-package-bin/lib/index.js
@@ -0,0 +1,64 @@
+// pass in a manifest with a 'bin' field here, and it'll turn it
+// into a properly sanitized bin object
+const { join, basename } = require('path')
+
+const normalize = pkg =>
+  !pkg.bin ? removeBin(pkg)
+  : typeof pkg.bin === 'string' ? normalizeString(pkg)
+  : Array.isArray(pkg.bin) ? normalizeArray(pkg)
+  : typeof pkg.bin === 'object' ? normalizeObject(pkg)
+  : removeBin(pkg)
+
+const normalizeString = pkg => {
+  if (!pkg.name) {
+    return removeBin(pkg)
+  }
+  pkg.bin = { [pkg.name]: pkg.bin }
+  return normalizeObject(pkg)
+}
+
+const normalizeArray = pkg => {
+  pkg.bin = pkg.bin.reduce((acc, k) => {
+    acc[basename(k)] = k
+    return acc
+  }, {})
+  return normalizeObject(pkg)
+}
+
+const removeBin = pkg => {
+  delete pkg.bin
+  return pkg
+}
+
+const normalizeObject = pkg => {
+  const orig = pkg.bin
+  const clean = {}
+  let hasBins = false
+  Object.keys(orig).forEach(binKey => {
+    const base = join('/', basename(binKey.replace(/\\|:/g, '/'))).slice(1)
+
+    if (typeof orig[binKey] !== 'string' || !base) {
+      return
+    }
+
+    const binTarget = join('/', orig[binKey].replace(/\\/g, '/'))
+      .replace(/\\/g, '/').slice(1)
+
+    if (!binTarget) {
+      return
+    }
+
+    clean[base] = binTarget
+    hasBins = true
+  })
+
+  if (hasBins) {
+    pkg.bin = clean
+  } else {
+    delete pkg.bin
+  }
+
+  return pkg
+}
+
+module.exports = normalize
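+
+// Illustrative results (hedged; the manifests are assumptions):
+//
+//   normalize({ name: 'foo', bin: './cli.js' })
+//   // -> { name: 'foo', bin: { foo: 'cli.js' } }
+//   normalize({ name: 'foo', bin: ['bin/a.js', 'bin/b.js'] })
+//   // -> { name: 'foo', bin: { 'a.js': 'bin/a.js', 'b.js': 'bin/b.js' } }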
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-package-arg/lib/npa.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-package-arg/lib/npa.js
new file mode 100644
index 0000000000000000000000000000000000000000..50121b99efbe362de2770f97b98d8a79c327cb9e
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-package-arg/lib/npa.js
@@ -0,0 +1,481 @@
+'use strict'
+
+const isWindows = process.platform === 'win32'
+
+const { URL } = require('node:url')
+// We need to use path/win32 so that we get consistent results in tests, but
+// this also means we need to manually convert backslashes to forward slashes
+// when generating file: urls with paths.
+const path = isWindows ? require('node:path/win32') : require('node:path')
+const { homedir } = require('node:os')
+const HostedGit = require('hosted-git-info')
+const semver = require('semver')
+const validatePackageName = require('validate-npm-package-name')
+const { log } = require('proc-log')
+
+const hasSlashes = isWindows ? /\\|[/]/ : /[/]/
+const isURL = /^(?:git[+])?[a-z]+:/i
+const isGit = /^[^@]+@[^:.]+\.[^:]+:.+$/i
+const isFileType = /[.](?:tgz|tar\.gz|tar)$/i
+const isPortNumber = /:[0-9]+(\/|$)/i
+const isWindowsFile = /^(?:[.]|~[/]|[/\\]|[a-zA-Z]:)/
+const isPosixFile = /^(?:[.]|~[/]|[/]|[a-zA-Z]:)/
+const defaultRegistry = 'https://registry.npmjs.org'
+
+function npa (arg, where) {
+  let name
+  let spec
+  if (typeof arg === 'object') {
+    if (arg instanceof Result && (!where || where === arg.where)) {
+      return arg
+    } else if (arg.name && arg.rawSpec) {
+      return npa.resolve(arg.name, arg.rawSpec, where || arg.where)
+    } else {
+      return npa(arg.raw, where || arg.where)
+    }
+  }
+  const nameEndsAt = arg.indexOf('@', 1) // Skip possible leading @
+  const namePart = nameEndsAt > 0 ? arg.slice(0, nameEndsAt) : arg
+  if (isURL.test(arg)) {
+    spec = arg
+  } else if (isGit.test(arg)) {
+    spec = `git+ssh://${arg}`
+  // eslint-disable-next-line max-len
+  } else if (!namePart.startsWith('@') && (hasSlashes.test(namePart) || isFileType.test(namePart))) {
+    spec = arg
+  } else if (nameEndsAt > 0) {
+    name = namePart
+    spec = arg.slice(nameEndsAt + 1) || '*'
+  } else {
+    const valid = validatePackageName(arg)
+    if (valid.validForOldPackages) {
+      name = arg
+      spec = '*'
+    } else {
+      spec = arg
+    }
+  }
+  return resolve(name, spec, where, arg)
+}
+
+function isFileSpec (spec) {
+  if (!spec) {
+    return false
+  }
+  if (spec.toLowerCase().startsWith('file:')) {
+    return true
+  }
+  if (isWindows) {
+    return isWindowsFile.test(spec)
+  }
+  // We never hit this in windows tests, obviously
+  /* istanbul ignore next */
+  return isPosixFile.test(spec)
+}
+
+function isAliasSpec (spec) {
+  if (!spec) {
+    return false
+  }
+  return spec.toLowerCase().startsWith('npm:')
+}
+
+function resolve (name, spec, where, arg) {
+  const res = new Result({
+    raw: arg,
+    name: name,
+    rawSpec: spec,
+    fromArgument: arg != null,
+  })
+
+  if (name) {
+    res.name = name
+  }
+
+  if (!where) {
+    where = process.cwd()
+  }
+
+  if (isFileSpec(spec)) {
+    return fromFile(res, where)
+  } else if (isAliasSpec(spec)) {
+    return fromAlias(res, where)
+  }
+
+  const hosted = HostedGit.fromUrl(spec, {
+    noGitPlus: true,
+    noCommittish: true,
+  })
+  if (hosted) {
+    return fromHostedGit(res, hosted)
+  } else if (spec && isURL.test(spec)) {
+    return fromURL(res)
+  } else if (spec && (hasSlashes.test(spec) || isFileType.test(spec))) {
+    return fromFile(res, where)
+  } else {
+    return fromRegistry(res)
+  }
+}
+
+function toPurl (arg, reg = defaultRegistry) {
+  const res = npa(arg)
+
+  if (res.type !== 'version') {
+    throw invalidPurlType(res.type, res.raw)
+  }
+
+  // URI-encode leading @ of scoped packages
+  let purl = 'pkg:npm/' + res.name.replace(/^@/, '%40') + '@' + res.rawSpec
+  if (reg !== defaultRegistry) {
+    purl += '?repository_url=' + reg
+  }
+
+  return purl
+}
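+
+// Illustrative toPurl results (hedged):
+//   toPurl('foo@1.2.3')         // 'pkg:npm/foo@1.2.3'
+//   toPurl('@scope/foo@1.2.3')  // 'pkg:npm/%40scope/foo@1.2.3'
+//   toPurl('foo@^1.0.0')        // throws EINVALIDPURLTYPE (range, not version)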
+
+function invalidPackageName (name, valid, raw) {
+  // eslint-disable-next-line max-len
+  const err = new Error(`Invalid package name "${name}" of package "${raw}": ${valid.errors.join('; ')}.`)
+  err.code = 'EINVALIDPACKAGENAME'
+  return err
+}
+
+function invalidTagName (name, raw) {
+  // eslint-disable-next-line max-len
+  const err = new Error(`Invalid tag name "${name}" of package "${raw}": Tags may not have any characters that encodeURIComponent encodes.`)
+  err.code = 'EINVALIDTAGNAME'
+  return err
+}
+
+function invalidPurlType (type, raw) {
+  // eslint-disable-next-line max-len
+  const err = new Error(`Invalid type "${type}" of package "${raw}": Purl can only be generated for "version" types.`)
+  err.code = 'EINVALIDPURLTYPE'
+  return err
+}
+
+class Result {
+  constructor (opts) {
+    this.type = opts.type
+    this.registry = opts.registry
+    this.where = opts.where
+    if (opts.raw == null) {
+      this.raw = opts.name ? `${opts.name}@${opts.rawSpec}` : opts.rawSpec
+    } else {
+      this.raw = opts.raw
+    }
+    this.name = undefined
+    this.escapedName = undefined
+    this.scope = undefined
+    this.rawSpec = opts.rawSpec || ''
+    this.saveSpec = opts.saveSpec
+    this.fetchSpec = opts.fetchSpec
+    if (opts.name) {
+      this.setName(opts.name)
+    }
+    this.gitRange = opts.gitRange
+    this.gitCommittish = opts.gitCommittish
+    this.gitSubdir = opts.gitSubdir
+    this.hosted = opts.hosted
+  }
+
+  // TODO move this to a getter/setter in a semver major
+  setName (name) {
+    const valid = validatePackageName(name)
+    if (!valid.validForOldPackages) {
+      throw invalidPackageName(name, valid, this.raw)
+    }
+
+    this.name = name
+    this.scope = name[0] === '@' ? name.slice(0, name.indexOf('/')) : undefined
+    // scoped packages in couch must have slash url-encoded, e.g. @foo%2Fbar
+    this.escapedName = name.replace('/', '%2f')
+    return this
+  }
+
+  toString () {
+    const full = []
+    if (this.name != null && this.name !== '') {
+      full.push(this.name)
+    }
+    const spec = this.saveSpec || this.fetchSpec || this.rawSpec
+    if (spec != null && spec !== '') {
+      full.push(spec)
+    }
+    return full.length ? full.join('@') : this.raw
+  }
+
+  toJSON () {
+    const result = Object.assign({}, this)
+    delete result.hosted
+    return result
+  }
+}
+
+// sets res.gitCommittish, res.gitRange, and res.gitSubdir
+function setGitAttrs (res, committish) {
+  if (!committish) {
+    res.gitCommittish = null
+    return
+  }
+
+  // for each :: separated item:
+  for (const part of committish.split('::')) {
+    // if the item has no ':' then it is a commit-ish
+    if (!part.includes(':')) {
+      if (res.gitRange) {
+        throw new Error('cannot override existing semver range with a committish')
+      }
+      if (res.gitCommittish) {
+        throw new Error('cannot override existing committish with a second committish')
+      }
+      res.gitCommittish = part
+      continue
+    }
+    // split on name:value
+    const [name, value] = part.split(':')
+    // if name is semver do semver lookup of ref or tag
+    if (name === 'semver') {
+      if (res.gitCommittish) {
+        throw new Error('cannot override existing committish with a semver range')
+      }
+      if (res.gitRange) {
+        throw new Error('cannot override existing semver range with a second semver range')
+      }
+      res.gitRange = decodeURIComponent(value)
+      continue
+    }
+    if (name === 'path') {
+      if (res.gitSubdir) {
+        throw new Error('cannot override existing path with a second path')
+      }
+      res.gitSubdir = `/${value}`
+      continue
+    }
+    log.warn('npm-package-arg', `ignoring unknown key "${name}"`)
+  }
+}
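+
+// Illustrative sketch (assumption): the '::'-separated committish grammar
+// parsed above. Each part is either a bare commit-ish or a 'name:value'
+// pair, and only the 'semver' and 'path' keys are recognized.
+//
+//   'deadbeef'                    -> gitCommittish 'deadbeef'
+//   'semver:%5E1.0.0'             -> gitRange '^1.0.0' (value is URI-decoded)
+//   'deadbeef::path:packages/foo' -> gitCommittish 'deadbeef', gitSubdir '/packages/foo'
+//   'semver:1.x::semver:2.x'      -> throws: a second range cannot override the first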
+
+// Taken from: EncodePathChars and lookup_table in src/node_url.cc
+// url.pathToFileURL only returns absolute references.  We can't use it to encode paths.
+// encodeURI mangles windows paths. We can't use it to encode paths.
+// Under the hood, url.pathToFileURL does a limited set of encoding, with an extra windows step, and then calls path.resolve.
+// The encoding node does without path.resolve is not available outside of the source, so we are recreating it here.
+const encodedPathChars = new Map([
+  ['\0', '%00'],
+  ['\t', '%09'],
+  ['\n', '%0A'],
+  ['\r', '%0D'],
+  [' ', '%20'],
+  ['"', '%22'],
+  ['#', '%23'],
+  ['%', '%25'],
+  ['?', '%3F'],
+  ['[', '%5B'],
+  ['\\', isWindows ? '/' : '%5C'],
+  [']', '%5D'],
+  ['^', '%5E'],
+  ['|', '%7C'],
+  ['~', '%7E'],
+])
+
+function pathToFileURL (str) {
+  let result = ''
+  for (let i = 0; i < str.length; i++) {
+    result = `${result}${encodedPathChars.get(str[i]) ?? str[i]}`
+  }
+  if (result.startsWith('file:')) {
+    return result
+  }
+  return `file:${result}`
+}
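+
+// Illustrative sketch (assumption): the limited encoding performed above,
+// which deliberately preserves relative references (unlike url.pathToFileURL).
+//
+//   pathToFileURL('./a b#c')  // -> 'file:./a%20b%23c'
+//   pathToFileURL('C:\\a b')  // on windows '\' becomes '/' -> 'file:C:/a%20b'
+//   pathToFileURL('file:./x') // already prefixed, returned unchanged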
+
+function fromFile (res, where) {
+  res.type = isFileType.test(res.rawSpec) ? 'file' : 'directory'
+  res.where = where
+
+  let rawSpec = pathToFileURL(res.rawSpec)
+
+  if (rawSpec.startsWith('file:/')) {
+    // XXX backwards compatibility: knowingly non-compliant with RFC 8089
+
+    // turn file://path into file:/path
+    if (/^file:\/\/[^/]/.test(rawSpec)) {
+      rawSpec = `file:/${rawSpec.slice(5)}`
+    }
+
+    // turn file:/../path into file:../path
+    // for 1 or 3 leading slashes (2 is already ruled out from handling file:// explicitly above)
+    if (/^\/{1,3}\.\.?(\/|$)/.test(rawSpec.slice(5))) {
+      rawSpec = rawSpec.replace(/^file:\/{1,3}/, 'file:')
+    }
+  }
+
+  let resolvedUrl
+  let specUrl
+  try {
+    // always put the '/' on "where", or else file:foo from /path/to/bar
+    // goes to /path/to/foo, when we want it to be /path/to/bar/foo
+    resolvedUrl = new URL(rawSpec, `${pathToFileURL(path.resolve(where))}/`)
+    specUrl = new URL(rawSpec)
+  } catch (originalError) {
+    const er = new Error('Invalid file: URL, must comply with RFC 8089')
+    throw Object.assign(er, {
+      raw: res.rawSpec,
+      spec: res,
+      where,
+      originalError,
+    })
+  }
+
+  // turn /C:/blah into just C:/blah on windows
+  let specPath = decodeURIComponent(specUrl.pathname)
+  let resolvedPath = decodeURIComponent(resolvedUrl.pathname)
+  if (isWindows) {
+    specPath = specPath.replace(/^\/+([a-z]:\/)/i, '$1')
+    resolvedPath = resolvedPath.replace(/^\/+([a-z]:\/)/i, '$1')
+  }
+
+  // replace ~ with homedir, but keep the ~ in the saveSpec
+  // otherwise, make it relative to where param
+  if (/^\/~(\/|$)/.test(specPath)) {
+    res.saveSpec = `file:${specPath.substr(1)}`
+    resolvedPath = path.resolve(homedir(), specPath.substr(3))
+  } else if (!path.isAbsolute(rawSpec.slice(5))) {
+    res.saveSpec = `file:${path.relative(where, resolvedPath)}`
+  } else {
+    res.saveSpec = `file:${path.resolve(resolvedPath)}`
+  }
+
+  res.fetchSpec = path.resolve(where, resolvedPath)
+  // re-normalize the slashes in saveSpec due to node:path/win32 behavior in windows
+  res.saveSpec = res.saveSpec.split('\\').join('/')
+  // Ignoring because this only happens in windows
+  /* istanbul ignore next */
+  if (res.saveSpec.startsWith('file://')) {
+    // normalization of \\win32\root paths can cause a double / which we don't want
+    res.saveSpec = `file:/${res.saveSpec.slice(7)}`
+  }
+  return res
+}
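+
+// Illustrative sketch (assumption): saveSpec vs fetchSpec for file specs,
+// with where = '/project' on a posix machine.
+//
+//   './lib/foo.tgz'  -> type 'file',      saveSpec 'file:lib/foo.tgz', fetchSpec '/project/lib/foo.tgz'
+//   '~/pkgs/bar'     -> type 'directory', saveSpec 'file:~/pkgs/bar',  fetchSpec under homedir()
+//   'file:/abs/dir'  -> type 'directory', saveSpec 'file:/abs/dir',    fetchSpec '/abs/dir'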
+
+function fromHostedGit (res, hosted) {
+  res.type = 'git'
+  res.hosted = hosted
+  res.saveSpec = hosted.toString({ noGitPlus: false, noCommittish: false })
+  res.fetchSpec = hosted.getDefaultRepresentation() === 'shortcut' ? null : hosted.toString()
+  setGitAttrs(res, hosted.committish)
+  return res
+}
+
+function unsupportedURLType (protocol, spec) {
+  const err = new Error(`Unsupported URL Type "${protocol}": ${spec}`)
+  err.code = 'EUNSUPPORTEDPROTOCOL'
+  return err
+}
+
+function fromURL (res) {
+  let rawSpec = res.rawSpec
+  res.saveSpec = rawSpec
+  if (rawSpec.startsWith('git+ssh:')) {
+    // git ssh specifiers are overloaded to also use scp-style git
+    // specifiers, so we have to parse those out and treat them special.
+    // They are NOT true URIs, so we can't hand them to URL.
+
+    // This regex looks for things that look like:
+    // git+ssh://git@my.custom.git.com:username/project.git#deadbeef
+    // ...and various combinations. The username in the beginning is *required*.
+    const matched = rawSpec.match(/^git\+ssh:\/\/([^:#]+:[^#]+(?:\.git)?)(?:#(.*))?$/i)
+    // Filter out all-number "usernames" which are really port numbers
+    // They can either be :1234 :1234/ or :1234/path but not :12abc
+    if (matched && !matched[1].match(isPortNumber)) {
+      res.type = 'git'
+      setGitAttrs(res, matched[2])
+      res.fetchSpec = matched[1]
+      return res
+    }
+  } else if (rawSpec.startsWith('git+file://')) {
+    // URL can't handle windows paths
+    rawSpec = rawSpec.replace(/\\/g, '/')
+  }
+  const parsedUrl = new URL(rawSpec)
+  // check the protocol, and then see if it's git or not
+  switch (parsedUrl.protocol) {
+    case 'git:':
+    case 'git+http:':
+    case 'git+https:':
+    case 'git+rsync:':
+    case 'git+ftp:':
+    case 'git+file:':
+    case 'git+ssh:':
+      res.type = 'git'
+      setGitAttrs(res, parsedUrl.hash.slice(1))
+      if (parsedUrl.protocol === 'git+file:' && /^git\+file:\/\/[a-z]:/i.test(rawSpec)) {
+        // URL can't handle drive letters on windows file paths, the host can't contain a :
+        res.fetchSpec = `git+file://${parsedUrl.host.toLowerCase()}:${parsedUrl.pathname}`
+      } else {
+        parsedUrl.hash = ''
+        res.fetchSpec = parsedUrl.toString()
+      }
+      if (res.fetchSpec.startsWith('git+')) {
+        res.fetchSpec = res.fetchSpec.slice(4)
+      }
+      break
+    case 'http:':
+    case 'https:':
+      res.type = 'remote'
+      res.fetchSpec = res.saveSpec
+      break
+
+    default:
+      throw unsupportedURLType(parsedUrl.protocol, rawSpec)
+  }
+
+  return res
+}
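+
+// Illustrative sketch (assumption; the example.com hosts are placeholders):
+//
+//   'git+https://git.example.com/repo.git#v1.0.0'
+//     -> type 'git', gitCommittish 'v1.0.0',
+//        fetchSpec 'https://git.example.com/repo.git' ('git+' prefix stripped)
+//   'https://example.com/x.tgz'
+//     -> type 'remote', fetchSpec === saveSpec
+//   'ftp://example.com/x.tgz'
+//     -> throws EUNSUPPORTEDPROTOCOL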
+
+function fromAlias (res, where) {
+  const subSpec = npa(res.rawSpec.substr(4), where)
+  if (subSpec.type === 'alias') {
+    throw new Error('nested aliases not supported')
+  }
+
+  if (!subSpec.registry) {
+    throw new Error('aliases only work for registry deps')
+  }
+
+  if (!subSpec.name) {
+    throw new Error('aliases must have a name')
+  }
+
+  res.subSpec = subSpec
+  res.registry = true
+  res.type = 'alias'
+  res.saveSpec = null
+  res.fetchSpec = null
+  return res
+}
+
+function fromRegistry (res) {
+  res.registry = true
+  const spec = res.rawSpec.trim()
+  // no save spec for registry components, as we save based on the fetched
+  // version, not on the argument, so it can't be computed here
+  res.saveSpec = null
+  res.fetchSpec = spec
+  const version = semver.valid(spec, true)
+  const range = semver.validRange(spec, true)
+  if (version) {
+    res.type = 'version'
+  } else if (range) {
+    res.type = 'range'
+  } else {
+    if (encodeURIComponent(spec) !== spec) {
+      throw invalidTagName(spec, res.raw)
+    }
+    res.type = 'tag'
+  }
+  return res
+}
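+
+// Illustrative sketch (assumption): how registry specs classify above.
+//
+//   '1.2.3'       -> type 'version' (semver.valid)
+//   '^1.2.3'      -> type 'range'   (semver.validRange)
+//   'latest'      -> type 'tag'
+//   'not a tag!'  -> throws EINVALIDTAGNAME (encodeURIComponent would alter it)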
+
+module.exports = npa
+module.exports.resolve = resolve
+module.exports.toPurl = toPurl
+module.exports.Result = Result
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-packlist/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-packlist/lib/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..ada704de4575dad5c272e4a04c914ecfc0622978
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-packlist/lib/index.js
@@ -0,0 +1,464 @@
+'use strict'
+
+const { Walker: IgnoreWalker } = require('ignore-walk')
+const { lstatSync: lstat, readFileSync: readFile } = require('fs')
+const { basename, dirname, extname, join, relative, resolve, sep } = require('path')
+const { log } = require('proc-log')
+
+// symbols used to represent synthetic rule sets
+const defaultRules = Symbol('npm-packlist.rules.default')
+const strictRules = Symbol('npm-packlist.rules.strict')
+
+// There may be others, but :?|<> are handled by node-tar
+const nameIsBadForWindows = file => /\*/.test(file)
+
+// these are the default rules that are applied to everything except for non-link bundled deps
+const defaults = [
+  '.npmignore',
+  '.gitignore',
+  '**/.git',
+  '**/.svn',
+  '**/.hg',
+  '**/CVS',
+  '**/.git/**',
+  '**/.svn/**',
+  '**/.hg/**',
+  '**/CVS/**',
+  '/.lock-wscript',
+  '/.wafpickle-*',
+  '/build/config.gypi',
+  'npm-debug.log',
+  '**/.npmrc',
+  '.*.swp',
+  '.DS_Store',
+  '**/.DS_Store/**',
+  '._*',
+  '**/._*/**',
+  '*.orig',
+  '/archived-packages/**',
+]
+
+const strictDefaults = [
+  // these are forcibly excluded
+  '/.git',
+]
+
+const normalizePath = (path) => path.split('\\').join('/')
+
+const readOutOfTreeIgnoreFiles = (root, rel, result = []) => {
+  for (const file of ['.npmignore', '.gitignore']) {
+    try {
+      const ignoreContent = readFile(join(root, file), { encoding: 'utf8' })
+      result.push(ignoreContent)
+      // break the loop immediately after reading; this allows us to prioritize
+      // the .npmignore and discard the .gitignore if one is present
+      break
+    } catch (err) {
+      // we ignore ENOENT errors completely because we don't care if the file doesn't exist
+      // but we throw everything else because failing to read a file that does exist is
+      // something that the user likely wants to know about
+      // istanbul ignore next -- we do not need to test a thrown error
+      if (err.code !== 'ENOENT') {
+        throw err
+      }
+    }
+  }
+
+  if (!rel) {
+    return result
+  }
+
+  const firstRel = rel.split(sep, 1)[0]
+  const newRoot = join(root, firstRel)
+  const newRel = relative(newRoot, join(root, rel))
+
+  return readOutOfTreeIgnoreFiles(newRoot, newRel, result)
+}
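+
+// Illustrative sketch (assumption): for root '/repo' and rel 'packages/a',
+// the recursion above reads ignore files at '/repo', '/repo/packages', and
+// '/repo/packages/a', preferring .npmignore over .gitignore at each level.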
+
+class PackWalker extends IgnoreWalker {
+  constructor (tree, opts) {
+    const options = {
+      ...opts,
+      includeEmpty: false,
+      follow: false,
+      // we path.resolve() here because ignore-walk doesn't do it and we want full paths
+      path: resolve(opts?.path || tree.path).replace(/\\/g, '/'),
+      ignoreFiles: opts?.ignoreFiles || [
+        defaultRules,
+        'package.json',
+        '.npmignore',
+        '.gitignore',
+        strictRules,
+      ],
+    }
+
+    super(options)
+
+    this.isPackage = options.isPackage
+    this.seen = options.seen || new Set()
+    this.tree = tree
+    this.requiredFiles = options.requiredFiles || []
+
+    const additionalDefaults = []
+    if (options.prefix && options.workspaces) {
+      const path = normalizePath(options.path)
+      const prefix = normalizePath(options.prefix)
+      const workspaces = options.workspaces.map((ws) => normalizePath(ws))
+
+      // istanbul ignore else - this does nothing unless we need it to
+      if (path !== prefix && workspaces.includes(path)) {
+        // if path and prefix are not the same directory, and workspaces has path in it
+        // then we know path is a workspace directory. in order to not drop ignore rules
+        // from directories between the workspaces root (prefix) and the workspace itself
+        // (path) we need to find and read those now
+        const relpath = relative(options.prefix, dirname(options.path))
+        additionalDefaults.push(...readOutOfTreeIgnoreFiles(options.prefix, relpath))
+      } else if (path === prefix) {
+        // on the other hand, if the path and prefix are the same, then we ignore workspaces
+        // so that we don't pack a workspace as part of the root project. append them as
+        // normalized relative paths from the root
+        additionalDefaults.push(...workspaces.map((w) => normalizePath(relative(options.path, w))))
+      }
+    }
+
+    // go ahead and inject the default rules now
+    this.injectRules(defaultRules, [...defaults, ...additionalDefaults])
+
+    if (!this.isPackage) {
+      // if this instance is not a package, then place some strict default rules, and append
+      // known required files for this directory
+      this.injectRules(strictRules, [
+        ...strictDefaults,
+        ...this.requiredFiles.map((file) => `!${file}`),
+      ])
+    }
+  }
+
+  // overridden method: we intercept the reading of the package.json file here so that we can
+  // process it into both the package.json file rules as well as the strictRules synthetic rule set
+  addIgnoreFile (file, callback) {
+    // if we're adding anything other than package.json, then let ignore-walk handle it
+    if (file !== 'package.json' || !this.isPackage) {
+      return super.addIgnoreFile(file, callback)
+    }
+
+    return this.processPackage(callback)
+  }
+
+  // overridden method: if we're done, but we're a package, then we also need to evaluate bundles
+  // before we actually emit our done event
+  emit (ev, data) {
+    if (ev !== 'done' || !this.isPackage) {
+      return super.emit(ev, data)
+    }
+
+    // we intentionally delay the done event while keeping the function sync here
+    // eslint-disable-next-line promise/catch-or-return, promise/always-return
+    this.gatherBundles().then(() => {
+      super.emit('done', this.result)
+    })
+    return true
+  }
+
+  // overridden method: before actually filtering, we make sure that we've removed the rules for
+  // files that should no longer take effect due to our order of precedence
+  filterEntries () {
+    if (this.ignoreRules['package.json']) {
+      // package.json means no .npmignore or .gitignore
+      this.ignoreRules['.npmignore'] = null
+      this.ignoreRules['.gitignore'] = null
+    } else if (this.ignoreRules['.npmignore']) {
+      // .npmignore means no .gitignore
+      this.ignoreRules['.gitignore'] = null
+    } else if (this.ignoreRules['.gitignore'] && !this.ignoreRules['.npmignore']) {
+      log.warn(
+        'gitignore-fallback',
+        'No .npmignore file found, using .gitignore for file exclusion. Consider creating a .npmignore file to explicitly control published files.'
+      )
+    }
+
+    return super.filterEntries()
+  }
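+
+  // Precedence sketch (assumption), restating the branches above:
+  //
+  //   package.json "files" present -> both .npmignore and .gitignore ignored
+  //   .npmignore present           -> .gitignore ignored
+  //   only .gitignore present      -> used, with the fallback warning logged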
+
+  // overridden method: we never want to include anything that isn't a file or directory
+  onstat (opts, callback) {
+    if (!opts.st.isFile() && !opts.st.isDirectory()) {
+      return callback()
+    }
+
+    return super.onstat(opts, callback)
+  }
+
+  // overridden method: we want to refuse to pack files that are invalid, node-tar protects us from
+  // a lot of them but not all
+  stat (opts, callback) {
+    if (nameIsBadForWindows(opts.entry)) {
+      return callback()
+    }
+
+    return super.stat(opts, callback)
+  }
+
+  // overridden method: this is called to create options for a child walker when we step
+  // in to a normal child directory (this will never be a bundle). the default method here
+  // copies the root's `ignoreFiles` value, but we don't want to respect package.json for
+  // subdirectories, so we override it with a list that intentionally omits package.json
+  walkerOpt (entry, opts) {
+    let ignoreFiles = null
+
+    // however, if we have a tree, and we have workspaces, and the directory we're about
+    // to step into is a workspace, then we _do_ want to respect its package.json
+    if (this.tree.workspaces) {
+      const workspaceDirs = [...this.tree.workspaces.values()]
+        .map((dir) => dir.replace(/\\/g, '/'))
+
+      const entryPath = join(this.path, entry).replace(/\\/g, '/')
+      if (workspaceDirs.includes(entryPath)) {
+        ignoreFiles = [
+          defaultRules,
+          'package.json',
+          '.npmignore',
+          '.gitignore',
+          strictRules,
+        ]
+      }
+    } else {
+      ignoreFiles = [
+        defaultRules,
+        '.npmignore',
+        '.gitignore',
+        strictRules,
+      ]
+    }
+
+    return {
+      ...super.walkerOpt(entry, opts),
+      ignoreFiles,
+      // we map over our own requiredFiles and pass ones that are within this entry
+      requiredFiles: this.requiredFiles
+        .map((file) => {
+          if (relative(file, entry) === '..') {
+            return relative(entry, file).replace(/\\/g, '/')
+          }
+          return false
+        })
+        .filter(Boolean),
+    }
+  }
+
+  // overridden method: we want child walkers to be instances of this class, not ignore-walk
+  walker (entry, opts, callback) {
+    new PackWalker(this.tree, this.walkerOpt(entry, opts)).on('done', callback).start()
+  }
+
+  // overridden method: we use a custom sort method to help compressibility
+  sort (a, b) {
+    // optimize for compressibility
+    // extname, then basename, then locale alphabetically
+    // https://twitter.com/isntitvacant/status/1131094910923231232
+    const exta = extname(a).toLowerCase()
+    const extb = extname(b).toLowerCase()
+    const basea = basename(a).toLowerCase()
+    const baseb = basename(b).toLowerCase()
+
+    return exta.localeCompare(extb, 'en') ||
+      basea.localeCompare(baseb, 'en') ||
+      a.localeCompare(b, 'en')
+  }
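+
+  // Illustrative sketch (assumption): sorting with this comparator groups by
+  // extension first, then basename, then the full path:
+  //
+  //   ['b.js', 'a.css', 'lib/a.js']  ->  ['a.css', 'lib/a.js', 'b.js']
+  //   // '.css' sorts before '.js'; among the .js files, basename 'a.js'
+  //   // sorts before 'b.js' regardless of directory depth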
+
+  // convenience method: this joins the given rules with newlines, appends a trailing newline,
+  // and calls the internal onReadIgnoreFile method
+  injectRules (filename, rules, callback = () => {}) {
+    this.onReadIgnoreFile(filename, `${rules.join('\n')}\n`, callback)
+  }
+
+  // custom method: this is called by addIgnoreFile when we find a package.json, it uses the
+  // arborist tree to pull both default rules and strict rules for the package
+  processPackage (callback) {
+    const {
+      bin,
+      browser,
+      files,
+      main,
+    } = this.tree.package
+
+    // rules in these arrays are inverted since they are patterns we want to _not_ ignore
+    const ignores = []
+    const strict = [
+      ...strictDefaults,
+      '!/package.json',
+      '!/readme{,.*[^~$]}',
+      '!/copying{,.*[^~$]}',
+      '!/license{,.*[^~$]}',
+      '!/licence{,.*[^~$]}',
+      '/.git',
+      '/node_modules',
+      '.npmrc',
+      '/package-lock.json',
+      '/yarn.lock',
+      '/pnpm-lock.yaml',
+      '/bun.lockb',
+    ]
+
+    // if we have a files array in our package, we need to pull rules from it
+    if (files) {
+      for (let file of files) {
+        // invert the rule because these are things we want to include
+        if (file.startsWith('./')) {
+          file = file.slice(1)
+        }
+        if (file.endsWith('/*')) {
+          file += '*'
+        }
+        const inverse = `!${file}`
+        try {
+          // if an entry in the files array is a specific file, then we need to include it as a
+          // strict requirement for this package. if it's a directory or a pattern, it's a default
+          // pattern instead. this is ugly, but we have to stat to find out if it's a file
+          const stat = lstat(join(this.path, file.replace(/^!+/, '')).replace(/\\/g, '/'))
+          // if we have a file and we know that, it's strictly required
+          if (stat.isFile()) {
+            strict.unshift(inverse)
+            this.requiredFiles.push(file.startsWith('/') ? file.slice(1) : file)
+          } else if (stat.isDirectory()) {
+            // otherwise, it's a default ignore, and since we got here we know it's not a pattern
+            // so we include the directory contents
+            ignores.push(inverse)
+            ignores.push(`${inverse}/**`)
+          }
+          // if the thing exists, but is neither a file nor a directory, we don't want it at all
+        } catch (err) {
+          // if lstat throws, then we assume we're looking at a pattern and treat it as a default
+          ignores.push(inverse)
+        }
+      }
+
+      // we prepend a '*' to exclude everything, followed by our inverted file rules
+      // which now mean to include those
+      this.injectRules('package.json', ['*', ...ignores])
+    }
+
+    // browser is required
+    if (browser) {
+      strict.push(`!/${browser}`)
+    }
+
+    // main is required
+    if (main) {
+      strict.push(`!/${main}`)
+    }
+
+    // each bin is required
+    if (bin) {
+      for (const key in bin) {
+        strict.push(`!/${bin[key]}`)
+      }
+    }
+
+    // and now we add all of the strict rules to our synthetic file
+    this.injectRules(strictRules, strict, callback)
+  }
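+
+  // Illustrative sketch (assumption): with package.json "files": ["lib", "cli.js"],
+  // the rules injected above amount to:
+  //
+  //   package.json rule set: '*', '!lib', '!lib/**'  (ignore all, re-include lib)
+  //   strict rule set: '!cli.js' (a plain file becomes strictly required),
+  //     plus the always-kept entries (package.json, readme/license variants)
+  //     and the always-dropped ones (/.git, /node_modules, lockfiles)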
+
+  // custom method: after we've finished gathering the files for the root package, we call this
+  // before emitting the 'done' event in order to gather all of the files for bundled deps
+  async gatherBundles () {
+    if (this.seen.has(this.tree)) {
+      return
+    }
+
+    // add this node to our seen tracker
+    this.seen.add(this.tree)
+
+    // if we're the project root, then we look at our bundleDependencies, otherwise we got here
+    // because we're a bundled dependency of the root, which means we need to include all prod
+    // and optional dependencies in the bundle
+    let toBundle
+    if (this.tree.isProjectRoot) {
+      const { bundleDependencies } = this.tree.package
+      toBundle = bundleDependencies || []
+    } else {
+      const { dependencies, optionalDependencies } = this.tree.package
+      toBundle = Object.keys(dependencies || {}).concat(Object.keys(optionalDependencies || {}))
+    }
+
+    for (const dep of toBundle) {
+      const edge = this.tree.edgesOut.get(dep)
+      // no edgeOut = missing node, so skip it. we can't pack it if it's not here
+      // we also refuse to pack peer dependencies and dev dependencies
+      if (!edge || edge.peer || edge.dev) {
+        continue
+      }
+
+      // get a reference to the node we're bundling
+      const node = this.tree.edgesOut.get(dep).to
+      // if there's no node, this is most likely an optional dependency that hasn't been
+      // installed. just skip it.
+      if (!node) {
+        continue
+      }
+      // we use node.path for the path because we want the location the node was linked to,
+      // not where it actually lives on disk
+      const path = node.path
+      // but link nodes don't have edgesOut, so we need to pass in the target of the node
+      // in order to make sure we correctly traverse its dependencies
+      const tree = node.target
+
+      // and start building options to be passed to the walker for this package
+      const walkerOpts = {
+        path,
+        isPackage: true,
+        ignoreFiles: [],
+        seen: this.seen, // pass through seen so we can prevent infinite circular loops
+      }
+
+      // if our node is a link, we apply defaultRules. we don't do this for regular bundled
+      // deps because their .npmignore and .gitignore files are excluded by default and may
+      // override defaults
+      if (node.isLink) {
+        walkerOpts.ignoreFiles.push(defaultRules)
+      }
+
+      // _all_ nodes will follow package.json rules from their package root
+      walkerOpts.ignoreFiles.push('package.json')
+
+      // only link nodes will obey .npmignore or .gitignore
+      if (node.isLink) {
+        walkerOpts.ignoreFiles.push('.npmignore')
+        walkerOpts.ignoreFiles.push('.gitignore')
+      }
+
+      // _all_ nodes follow strict rules
+      walkerOpts.ignoreFiles.push(strictRules)
+
+      // create a walker for this dependency and gather its results
+      const walker = new PackWalker(tree, walkerOpts)
+      const bundled = await new Promise((pResolve, pReject) => {
+        walker.on('error', pReject)
+        walker.on('done', pResolve)
+        walker.start()
+      })
+
+      // now we make sure we have our paths correct from the root, and accumulate everything into
+      // our own result set to deduplicate
+      const relativeFrom = relative(this.root, walker.path)
+      for (const file of bundled) {
+        this.result.add(join(relativeFrom, file).replace(/\\/g, '/'))
+      }
+    }
+  }
+}
+
+const walk = (tree, options, callback) => {
+  if (typeof options === 'function') {
+    callback = options
+    options = {}
+  }
+  const p = new Promise((pResolve, pReject) => {
+    new PackWalker(tree, { ...options, isPackage: true })
+      .on('done', pResolve).on('error', pReject).start()
+  })
+  return callback ? p.then(res => callback(null, res), callback) : p
+}
+
+module.exports = walk
+walk.Walker = PackWalker
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-pick-manifest/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-pick-manifest/lib/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..985c78df7a9bf202cdd65cd067ef0789397d04dc
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-pick-manifest/lib/index.js
@@ -0,0 +1,219 @@
+'use strict'
+
+const npa = require('npm-package-arg')
+const semver = require('semver')
+const { checkEngine } = require('npm-install-checks')
+const normalizeBin = require('npm-normalize-package-bin')
+
+const engineOk = (manifest, npmVersion, nodeVersion) => {
+  try {
+    checkEngine(manifest, npmVersion, nodeVersion)
+    return true
+  } catch (_) {
+    return false
+  }
+}
+
+const isBefore = (verTimes, ver, time) =>
+  !verTimes || !verTimes[ver] || Date.parse(verTimes[ver]) <= time
+
+const avoidSemverOpt = { includePrerelease: true, loose: true }
+const shouldAvoid = (ver, avoid) =>
+  avoid && semver.satisfies(ver, avoid, avoidSemverOpt)
+
+const decorateAvoid = (result, avoid) =>
+  result && shouldAvoid(result.version, avoid)
+    ? { ...result, _shouldAvoid: true }
+    : result
+
+const pickManifest = (packument, wanted, opts) => {
+  const {
+    defaultTag = 'latest',
+    before = null,
+    nodeVersion = process.version,
+    npmVersion = null,
+    includeStaged = false,
+    avoid = null,
+    avoidStrict = false,
+  } = opts
+
+  const { name, time: verTimes } = packument
+  const versions = packument.versions || {}
+
+  if (avoidStrict) {
+    const looseOpts = {
+      ...opts,
+      avoidStrict: false,
+    }
+
+    const result = pickManifest(packument, wanted, looseOpts)
+    if (!result || !result._shouldAvoid) {
+      return result
+    }
+
+    const caret = pickManifest(packument, `^${result.version}`, looseOpts)
+    if (!caret || !caret._shouldAvoid) {
+      return {
+        ...caret,
+        _outsideDependencyRange: true,
+        _isSemVerMajor: false,
+      }
+    }
+
+    const star = pickManifest(packument, '*', looseOpts)
+    if (!star || !star._shouldAvoid) {
+      return {
+        ...star,
+        _outsideDependencyRange: true,
+        _isSemVerMajor: true,
+      }
+    }
+
+    throw Object.assign(new Error(`No avoidable versions for ${name}`), {
+      code: 'ETARGET',
+      name,
+      wanted,
+      avoid,
+      before,
+      versions: Object.keys(versions),
+    })
+  }
+
+  const staged = (includeStaged && packument.stagedVersions &&
+    packument.stagedVersions.versions) || {}
+  const restricted = (packument.policyRestrictions &&
+    packument.policyRestrictions.versions) || {}
+
+  const time = before && verTimes ? +(new Date(before)) : Infinity
+  const spec = npa.resolve(name, wanted || defaultTag)
+  const type = spec.type
+  const distTags = packument['dist-tags'] || {}
+
+  if (type !== 'tag' && type !== 'version' && type !== 'range') {
+    throw new Error('Only tag, version, and range are supported')
+  }
+
+  // if the type is 'tag', and not just the implicit default, then it must
+  // be that exactly, or nothing else will do.
+  if (wanted && type === 'tag') {
+    const ver = distTags[wanted]
+    // if the version in the dist-tags is before the before date, then we
+    // use that. Otherwise, we get the highest precedence version prior to
+    // the dist-tag.
+    if (isBefore(verTimes, ver, time)) {
+      return decorateAvoid(versions[ver] || staged[ver] || restricted[ver], avoid)
+    } else {
+      return pickManifest(packument, `<=${ver}`, opts)
+    }
+  }
+
+  // similarly, if a specific version, then only that version will do
+  if (wanted && type === 'version') {
+    const ver = semver.clean(wanted, { loose: true })
+    const mani = versions[ver] || staged[ver] || restricted[ver]
+    return isBefore(verTimes, ver, time) ? decorateAvoid(mani, avoid) : null
+  }
+
+  // ok, sort based on our heuristics, and pick the best fit
+  const range = type === 'range' ? wanted : '*'
+
+  // if the range is *, then we prefer the 'latest' if available, but skip
+  // this if it should be avoided; in that case we have to try a little
+  // harder.
+  const defaultVer = distTags[defaultTag]
+  if (defaultVer &&
+      (range === '*' || semver.satisfies(defaultVer, range, { loose: true })) &&
+      !restricted[defaultVer] &&
+      !shouldAvoid(defaultVer, avoid)) {
+    const mani = versions[defaultVer]
+    const ok = mani &&
+      isBefore(verTimes, defaultVer, time) &&
+      engineOk(mani, npmVersion, nodeVersion) &&
+      !mani.deprecated &&
+      !staged[defaultVer]
+    if (ok) {
+      return mani
+    }
+  }
+
+  // ok, actually have to sort the list and take the winner
+  const allEntries = Object.entries(versions)
+    .concat(Object.entries(staged))
+    .concat(Object.entries(restricted))
+    .filter(([ver]) => isBefore(verTimes, ver, time))
+
+  if (!allEntries.length) {
+    throw Object.assign(new Error(`No versions available for ${name}`), {
+      code: 'ENOVERSIONS',
+      name,
+      type,
+      wanted,
+      before,
+      versions: Object.keys(versions),
+    })
+  }
+
+  const sortSemverOpt = { loose: true }
+  const entries = allEntries.filter(([ver]) =>
+    semver.satisfies(ver, range, { loose: true }))
+    .sort((a, b) => {
+      const [vera, mania] = a
+      const [verb, manib] = b
+      const notavoida = !shouldAvoid(vera, avoid)
+      const notavoidb = !shouldAvoid(verb, avoid)
+      const notrestra = !restricted[vera]
+      const notrestrb = !restricted[verb]
+      const notstagea = !staged[vera]
+      const notstageb = !staged[verb]
+      const notdepra = !mania.deprecated
+      const notdeprb = !manib.deprecated
+      const enginea = engineOk(mania, npmVersion, nodeVersion)
+      const engineb = engineOk(manib, npmVersion, nodeVersion)
+      // sort by:
+      // - not an avoided version
+      // - not restricted
+      // - not staged
+      // - not deprecated and engine ok
+      // - engine ok
+      // - not deprecated
+      // - semver
+      return (notavoidb - notavoida) ||
+        (notrestrb - notrestra) ||
+        (notstageb - notstagea) ||
+        ((notdeprb && engineb) - (notdepra && enginea)) ||
+        (engineb - enginea) ||
+        (notdeprb - notdepra) ||
+        semver.rcompare(vera, verb, sortSemverOpt)
+    })
+
+  return decorateAvoid(entries[0] && entries[0][1], avoid)
+}
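+
+// Illustrative sketch (assumption): the tie-breaking above prefers a
+// non-deprecated version over a newer deprecated one within the same range:
+//
+//   pickManifest({
+//     name: 'x',
+//     'dist-tags': { latest: '2.0.0' },
+//     versions: {
+//       '1.9.0': { version: '1.9.0' },
+//       '2.0.0': { version: '2.0.0', deprecated: 'use 1.9.0' },
+//     },
+//   }, '*', {})  // -> the 1.9.0 manifest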
+
+module.exports = (packument, wanted, opts = {}) => {
+  const mani = pickManifest(packument, wanted, opts)
+  const picked = mani && normalizeBin(mani)
+  const policyRestrictions = packument.policyRestrictions
+  const restricted = (policyRestrictions && policyRestrictions.versions) || {}
+
+  if (picked && !restricted[picked.version]) {
+    return picked
+  }
+
+  const { before = null, defaultTag = 'latest' } = opts
+  const bstr = before ? new Date(before).toLocaleString() : ''
+  const { name } = packument
+  const pckg = `${name}@${wanted}` +
+    (before ? ` with a date before ${bstr}` : '')
+
+  const isForbidden = picked && !!restricted[picked.version]
+  const polMsg = isForbidden ? policyRestrictions.message : ''
+
+  const msg = !isForbidden ? `No matching version found for ${pckg}.`
+    : `Could not download ${pckg} due to policy violations:\n${polMsg}`
+
+  const code = isForbidden ? 'E403' : 'ETARGET'
+  throw Object.assign(new Error(msg), {
+    code,
+    type: npa.resolve(packument.name, wanted).type,
+    wanted,
+    versions: Object.keys(packument.versions ?? {}),
+    name,
+    distTags: packument['dist-tags'],
+    defaultTag,
+  })
+}
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-profile/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-profile/lib/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..83ab5e1b46b68edf2bf72cd33e9e1790b8667709
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-profile/lib/index.js
@@ -0,0 +1,278 @@
+const { URL } = require('node:url')
+const timers = require('node:timers/promises')
+const fetch = require('npm-registry-fetch')
+const { HttpErrorBase } = require('npm-registry-fetch/lib/errors')
+const { log } = require('proc-log')
+
+// try loginWeb, catch the "not supported" message and fall back to couch
+const login = async (opener, prompter, opts = {}) => {
+  try {
+    return await loginWeb(opener, opts)
+  } catch (er) {
+    if (er instanceof WebLoginNotSupported) {
+      log.verbose('web login', 'not supported, trying couch')
+      const { username, password } = await prompter(opts.creds)
+      return loginCouch(username, password, opts)
+    }
+    throw er
+  }
+}
+
+const adduser = async (opener, prompter, opts = {}) => {
+  try {
+    return await adduserWeb(opener, opts)
+  } catch (er) {
+    if (er instanceof WebLoginNotSupported) {
+      log.verbose('web adduser', 'not supported, trying couch')
+      const { username, email, password } = await prompter(opts.creds)
+      return adduserCouch(username, email, password, opts)
+    }
+    throw er
+  }
+}
+
+const adduserWeb = (opener, opts = {}) => {
+  log.verbose('web adduser', 'before first POST')
+  return webAuth(opener, opts, { create: true })
+}
+
+const loginWeb = (opener, opts = {}) => {
+  log.verbose('web login', 'before first POST')
+  return webAuth(opener, opts, {})
+}
+
+const isValidUrl = u => {
+  try {
+    return /^https?:$/.test(new URL(u).protocol)
+  } catch {
+    return false
+  }
+}
+
+const webAuth = async (opener, opts, body) => {
+  try {
+    const res = await fetch('/-/v1/login', {
+      ...opts,
+      method: 'POST',
+      body,
+    })
+
+    const content = await res.json()
+    log.verbose('web auth', 'got response', content)
+
+    const { doneUrl, loginUrl } = content
+    if (!isValidUrl(doneUrl) || !isValidUrl(loginUrl)) {
+      throw new WebLoginInvalidResponse('POST', res, content)
+    }
+
+    return await webAuthOpener(opener, loginUrl, doneUrl, opts)
+  } catch (er) {
+    if ((er.statusCode >= 400 && er.statusCode <= 499) || er.statusCode === 500) {
+      throw new WebLoginNotSupported('POST', {
+        status: er.statusCode,
+        headers: er.headers,
+      }, er.body)
+    }
+    throw er
+  }
+}
+
+const webAuthOpener = async (opener, loginUrl, doneUrl, opts) => {
+  const abortController = new AbortController()
+  const { signal } = abortController
+  try {
+    log.verbose('web auth', 'opening url pair')
+    const [, authResult] = await Promise.all([
+      opener(loginUrl, { signal }).catch((err) => {
+        if (err.name === 'AbortError') {
+          abortController.abort()
+          return
+        }
+        throw err
+      }),
+      webAuthCheckLogin(doneUrl, { ...opts, cache: false }, { signal }).then((r) => {
+        log.verbose('web auth', 'done-check finished')
+        abortController.abort()
+        return r
+      }),
+    ])
+    return authResult
+  } catch (er) {
+    abortController.abort()
+    throw er
+  }
+}
+
+const webAuthCheckLogin = async (doneUrl, opts, { signal } = {}) => {
+  signal?.throwIfAborted()
+
+  const res = await fetch(doneUrl, opts)
+  const content = await res.json()
+
+  if (res.status === 200) {
+    if (!content.token) {
+      throw new WebLoginInvalidResponse('GET', res, content)
+    }
+    return content
+  }
+
+  if (res.status === 202) {
+    const retry = +res.headers.get('retry-after') * 1000
+    if (retry > 0) {
+      await timers.setTimeout(retry, null, { signal })
+    }
+    return webAuthCheckLogin(doneUrl, opts, { signal })
+  }
+
+  throw new WebLoginInvalidResponse('GET', res, content)
+}
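+
+// Illustrative sketch (assumption): the polling contract implemented above.
+//
+//   200 + { token: ... } -> resolve with the response body
+//   202                  -> wait 'retry-after' seconds (if > 0), poll again
+//   anything else        -> throw WebLoginInvalidResponse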
+
+const couchEndpoint = (username) => `/-/user/org.couchdb.user:${encodeURIComponent(username)}`
+
+const putCouch = async (path, username, body, opts) => {
+  const result = await fetch.json(`${couchEndpoint(username)}${path}`, {
+    ...opts,
+    method: 'PUT',
+    body,
+  })
+  result.username = username
+  return result
+}
+
+const adduserCouch = async (username, email, password, opts = {}) => {
+  const body = {
+    _id: `org.couchdb.user:${username}`,
+    name: username,
+    password: password,
+    email: email,
+    type: 'user',
+    roles: [],
+    date: new Date().toISOString(),
+  }
+
+  log.verbose('adduser', 'before first PUT', {
+    ...body,
+    password: 'XXXXX',
+  })
+
+  return putCouch('', username, body, opts)
+}
+
+const loginCouch = async (username, password, opts = {}) => {
+  const body = {
+    _id: `org.couchdb.user:${username}`,
+    name: username,
+    password: password,
+    type: 'user',
+    roles: [],
+    date: new Date().toISOString(),
+  }
+
+  log.verbose('login', 'before first PUT', {
+    ...body,
+    password: 'XXXXX',
+  })
+
+  try {
+    return await putCouch('', username, body, opts)
+  } catch (err) {
+    if (err.code === 'E400') {
+      err.message = `There is no user with the username "${username}".`
+      throw err
+    }
+
+    if (err.code !== 'E409') {
+      throw err
+    }
+  }
+
+  const result = await fetch.json(couchEndpoint(username), {
+    ...opts,
+    query: { write: true },
+  })
+
+  for (const k of Object.keys(result)) {
+    if (!body[k] || k === 'roles') {
+      body[k] = result[k]
+    }
+  }
+
+  return putCouch(`/-rev/${body._rev}`, username, body, {
+    ...opts,
+    forceAuth: {
+      username,
+      password: Buffer.from(password, 'utf8').toString('base64'),
+      otp: opts.otp,
+    },
+  })
+}
+
+const get = (opts = {}) => fetch.json('/-/npm/v1/user', opts)
+
+const set = (profile, opts = {}) => fetch.json('/-/npm/v1/user', {
+  ...opts,
+  method: 'POST',
+  // profile keys can't be empty strings, but they CAN be null
+  body: Object.fromEntries(Object.entries(profile).map(([k, v]) => [k, v === '' ? null : v])),
+})
+
+const paginate = async (href, opts, items = []) => {
+  const result = await fetch.json(href, opts)
+  items = items.concat(result.objects)
+  if (result.urls.next) {
+    return paginate(result.urls.next, opts, items)
+  }
+  return items
+}
+
+const listTokens = (opts = {}) => paginate('/-/npm/v1/tokens', opts)
+
+const removeToken = async (tokenKey, opts = {}) => {
+  await fetch(`/-/npm/v1/tokens/token/${tokenKey}`, {
+    ...opts,
+    method: 'DELETE',
+    ignoreBody: true,
+  })
+  return null
+}
+
+const createToken = (password, readonly, cidrs, opts = {}) => fetch.json('/-/npm/v1/tokens', {
+  ...opts,
+  method: 'POST',
+  body: {
+    password: password,
+    readonly: readonly,
+    cidr_whitelist: cidrs,
+  },
+})
+
+class WebLoginInvalidResponse extends HttpErrorBase {
+  constructor (method, res, body) {
+    super(method, res, body)
+    this.message = 'Invalid response from web login endpoint'
+  }
+}
+
+class WebLoginNotSupported extends HttpErrorBase {
+  constructor (method, res, body) {
+    super(method, res, body)
+    this.message = 'Web login not supported'
+    this.code = 'ENYI'
+  }
+}
+
+module.exports = {
+  adduserCouch,
+  loginCouch,
+  adduserWeb,
+  loginWeb,
+  login,
+  adduser,
+  get,
+  set,
+  listTokens,
+  removeToken,
+  createToken,
+  webAuthCheckLogin,
+  webAuthOpener,
+}
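+
+// Illustrative usage sketch (assumption: openBrowser and promptCreds are
+// hypothetical caller-supplied callbacks; option names follow
+// npm-registry-fetch):
+//
+//   const profile = require('npm-profile')
+//   const result = await profile.login(
+//     (url, { signal }) => openBrowser(url, signal),
+//     (creds) => promptCreds(creds),
+//     { registry: 'https://registry.npmjs.org/' }
+//   )
+//   // web login resolves with { token }; the couch fallback result also
+//   // carries result.username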
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-registry-fetch/LICENSE.md b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-registry-fetch/LICENSE.md
new file mode 100644
index 0000000000000000000000000000000000000000..5fc208ff122e08e2ca9777f80b0551617b30ba2a
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-registry-fetch/LICENSE.md
@@ -0,0 +1,20 @@
+
+
+ISC License
+
+Copyright npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this
+software for any purpose with or without fee is hereby
+granted, provided that the above copyright notice and this
+permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL
+WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO
+EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
+WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
+USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-registry-fetch/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-registry-fetch/package.json
new file mode 100644
index 0000000000000000000000000000000000000000..a8e954cdf3c1456f41c35b6c8309497e906d5876
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-registry-fetch/package.json
@@ -0,0 +1,68 @@
+{
+  "name": "npm-registry-fetch",
+  "version": "19.0.0",
+  "description": "Fetch-based http client for use with npm registry APIs",
+  "main": "lib",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "scripts": {
+    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
+    "lint": "npm run eslint",
+    "lintfix": "npm run eslint -- --fix",
+    "test": "tap",
+    "posttest": "npm run lint",
+    "npmclilint": "npmcli-lint",
+    "postsnap": "npm run lintfix --",
+    "postlint": "template-oss-check",
+    "snap": "tap",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/npm/npm-registry-fetch.git"
+  },
+  "keywords": [
+    "npm",
+    "registry",
+    "fetch"
+  ],
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "dependencies": {
+    "@npmcli/redact": "^3.0.0",
+    "jsonparse": "^1.3.1",
+    "make-fetch-happen": "^15.0.0",
+    "minipass": "^7.0.2",
+    "minipass-fetch": "^4.0.0",
+    "minizlib": "^3.0.1",
+    "npm-package-arg": "^13.0.0",
+    "proc-log": "^5.0.0"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^5.0.0",
+    "@npmcli/template-oss": "4.25.0",
+    "cacache": "^20.0.0",
+    "nock": "^13.2.4",
+    "require-inject": "^1.4.4",
+    "ssri": "^12.0.0",
+    "tap": "^16.0.1"
+  },
+  "tap": {
+    "check-coverage": true,
+    "test-ignore": "test[\\\\/](util|cache)[\\\\/]",
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "engines": {
+    "node": "^20.17.0 || >=22.9.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.25.0",
+    "publish": "true"
+  }
+}
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-user-validate/LICENSE b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-user-validate/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..79128b23d319faef2724dceaff426d13dc2b189f
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-user-validate/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) Robert Kowalski
+All rights reserved.
+
+The BSD License
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+1. Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
\ No newline at end of file
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-user-validate/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-user-validate/package.json
new file mode 100644
index 0000000000000000000000000000000000000000..8c66f8f3e47b9b4e308c67567295c4897f68916a
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/npm-user-validate/package.json
@@ -0,0 +1,50 @@
+{
+  "name": "npm-user-validate",
+  "version": "3.0.0",
+  "description": "User validations for npm",
+  "main": "lib/index.js",
+  "devDependencies": {
+    "@npmcli/eslint-config": "^5.0.0",
+    "@npmcli/template-oss": "4.23.3",
+    "tap": "^16.3.2"
+  },
+  "scripts": {
+    "test": "tap",
+    "lint": "npm run eslint",
+    "postlint": "template-oss-check",
+    "template-oss-apply": "template-oss-apply --force",
+    "lintfix": "npm run eslint -- --fix",
+    "snap": "tap",
+    "posttest": "npm run lint",
+    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/npm/npm-user-validate.git"
+  },
+  "keywords": [
+    "npm",
+    "validation",
+    "registry"
+  ],
+  "author": "GitHub Inc.",
+  "license": "BSD-2-Clause",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "engines": {
+    "node": "^18.17.0 || >=20.5.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.23.3",
+    "publish": true
+  },
+  "tap": {
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  }
+}
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/bin/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/bin/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..f35b62ca71a537115d33c197e93a185a91958aae
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/bin/index.js
@@ -0,0 +1,158 @@
+#!/usr/bin/env node
+
+const run = conf => {
+  const pacote = require('../')
+  switch (conf._[0]) {
+    case 'resolve':
+    case 'manifest':
+    case 'packument':
+      if (conf._[0] === 'resolve' && conf.long) {
+        return pacote.manifest(conf._[1], conf).then(mani => ({
+          resolved: mani._resolved,
+          integrity: mani._integrity,
+          from: mani._from,
+        }))
+      }
+      return pacote[conf._[0]](conf._[1], conf)
+
+    case 'tarball':
+      if (!conf._[2] || conf._[2] === '-') {
+        return pacote.tarball.stream(conf._[1], stream => {
+          stream.pipe(
+            conf.testStdout ||
+            /* istanbul ignore next */
+            process.stdout
+          )
+          // make sure it resolves to something falsey
+          return stream.promise().then(() => {
+            return false
+          })
+        }, conf)
+      } else {
+        return pacote.tarball.file(conf._[1], conf._[2], conf)
+      }
+
+    case 'extract':
+      return pacote.extract(conf._[1], conf._[2], conf)
+
+    default: /* istanbul ignore next */ {
+      throw new Error(`bad command: ${conf._[0]}`)
+    }
+  }
+}
+
+const version = require('../package.json').version
+const usage = () =>
+`Pacote - The JavaScript Package Handler, v${version}
+
+Usage:
+
+  pacote resolve <spec>
+    Resolve a specifier and output the fully resolved target
+    Returns integrity and from if '--long' flag is set.
+
+  pacote manifest <spec>
+    Fetch a manifest and print to stdout
+
+  pacote packument <spec>
+    Fetch a full packument and print to stdout
+
+  pacote tarball <spec> [<filename>]
+    Fetch a package tarball and save to <filename>
+    If <filename> is missing or '-', the tarball will be streamed to stdout.
+
+  pacote extract <spec> <folder>
+    Extract a package to the destination folder.
+
+Configuration values all match the names of configs passed to npm, or
+options passed to Pacote.  Additional flags for this executable:
+
+  --long     Print an object from 'resolve', including integrity and spec.
+  --json     Print result objects as JSON rather than node's default.
+             (This is the default if stdout is not a TTY.)
+  --help -h  Print this helpful text.
+
+For example '--cache=/path/to/folder' will use that folder as the cache.
+`
+
+const shouldJSON = (conf, result) =>
+  conf.json ||
+  !process.stdout.isTTY &&
+  conf.json === undefined &&
+  result &&
+  typeof result === 'object'
+
+const pretty = (conf, result) =>
+  shouldJSON(conf, result) ? JSON.stringify(result, 0, 2) : result
+
+let addedLogListener = false
+const main = args => {
+  const conf = parse(args)
+  if (conf.help || conf.h) {
+    return console.log(usage())
+  }
+
+  if (!addedLogListener) {
+    process.on('log', console.error)
+    addedLogListener = true
+  }
+
+  try {
+    return run(conf)
+      .then(result => result && console.log(pretty(conf, result)))
+      .catch(er => {
+        console.error(er)
+        process.exit(1)
+      })
+  } catch (er) {
+    console.error(er.message)
+    console.error(usage())
+  }
+}
+
+const parseArg = arg => {
+  const split = arg.slice(2).split('=')
+  const k = split.shift()
+  const v = split.join('=')
+  const no = /^no-/.test(k) && !v
+  const key = (no ? k.slice(3) : k)
+    .replace(/^tag$/, 'defaultTag')
+    .replace(/-([a-z])/g, (_, c) => c.toUpperCase())
+  const value = v ? v.replace(/^~/, process.env.HOME) : !no
+  return { key, value }
+}
+
+const parse = args => {
+  const conf = {
+    _: [],
+    cache: process.env.HOME + '/.npm/_cacache',
+  }
+  let dashdash = false
+  args.forEach(arg => {
+    if (dashdash) {
+      conf._.push(arg)
+    } else if (arg === '--') {
+      dashdash = true
+    } else if (arg === '-h') {
+      conf.help = true
+    } else if (/^--/.test(arg)) {
+      const { key, value } = parseArg(arg)
+      conf[key] = value
+    } else {
+      conf._.push(arg)
+    }
+  })
+  return conf
+}
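+
+// Illustrative sketch (assumption): how the flag parser above normalizes
+// arguments.
+//
+//   parse(['manifest', 'foo@1.x', '--tag=beta', '--no-color', '--cache=~/c'])
+//   // -> { _: ['manifest', 'foo@1.x'], defaultTag: 'beta',
+//   //      color: false, cache: process.env.HOME + '/c' }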
+
+if (module === require.main) {
+  main(process.argv.slice(2))
+} else {
+  module.exports = {
+    main,
+    run,
+    usage,
+    parseArg,
+    parse,
+  }
+}
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/dir.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/dir.js
new file mode 100644
index 0000000000000000000000000000000000000000..04846eb8a6e2210553723480142a81fbe7a78cbf
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/dir.js
@@ -0,0 +1,105 @@
+const { resolve } = require('node:path')
+const packlist = require('npm-packlist')
+const runScript = require('@npmcli/run-script')
+const tar = require('tar')
+const { Minipass } = require('minipass')
+const Fetcher = require('./fetcher.js')
+const FileFetcher = require('./file.js')
+const _ = require('./util/protected.js')
+const tarCreateOptions = require('./util/tar-create-options.js')
+
+class DirFetcher extends Fetcher {
+  constructor (spec, opts) {
+    super(spec, opts)
+    // just the fully resolved filename
+    this.resolved = this.spec.fetchSpec
+
+    this.tree = opts.tree || null
+    this.Arborist = opts.Arborist || null
+  }
+
+  // exposes tarCreateOptions as public API
+  static tarCreateOptions (manifest) {
+    return tarCreateOptions(manifest)
+  }
+
+  get types () {
+    return ['directory']
+  }
+
+  #prepareDir () {
+    return this.manifest().then(mani => {
+      if (!mani.scripts || !mani.scripts.prepare) {
+        return
+      }
+      if (this.opts.ignoreScripts) {
+        return
+      }
+
+      // we *only* run prepare.
+      // pre/post-pack is run by the npm CLI for publish and pack,
+      // but this function is *also* run when installing git deps
+      const stdio = this.opts.foregroundScripts ? 'inherit' : 'pipe'
+
+      return runScript({
+        // this || undefined is because runScript will be unhappy with the default null value
+        scriptShell: this.opts.scriptShell || undefined,
+        pkg: mani,
+        event: 'prepare',
+        path: this.resolved,
+        stdio,
+        env: {
+          npm_package_resolved: this.resolved,
+          npm_package_integrity: this.integrity,
+          npm_package_json: resolve(this.resolved, 'package.json'),
+        },
+      })
+    })
+  }
+
+  [_.tarballFromResolved] () {
+    if (!this.tree && !this.Arborist) {
+      throw new Error('DirFetcher requires either a tree or an Arborist constructor to pack')
+    }
+
+    const stream = new Minipass()
+    stream.resolved = this.resolved
+    stream.integrity = this.integrity
+
+    const { prefix, workspaces } = this.opts
+
+    // run the prepare script, get the list of files, and tar it up
+    // pipe to the stream, and proxy errors along the chain.
+    this.#prepareDir()
+      .then(async () => {
+        if (!this.tree) {
+          const arb = new this.Arborist({ path: this.resolved })
+          this.tree = await arb.loadActual()
+        }
+        return packlist(this.tree, { path: this.resolved, prefix, workspaces })
+      })
+      .then(files => tar.c(tarCreateOptions(this.package), files)
+        .on('error', er => stream.emit('error', er)).pipe(stream))
+      .catch(er => stream.emit('error', er))
+    return stream
+  }
+
+  manifest () {
+    if (this.package) {
+      return Promise.resolve(this.package)
+    }
+
+    return this[_.readPackageJson](this.resolved)
+      .then(mani => this.package = {
+        ...mani,
+        _integrity: this.integrity && String(this.integrity),
+        _resolved: this.resolved,
+        _from: this.from,
+      })
+  }
+
+  packument () {
+    return FileFetcher.prototype.packument.apply(this)
+  }
+}
+module.exports = DirFetcher
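+
+// Usage sketch (illustrative, not part of this file): packing a directory
+// walks its installed tree, so callers must pass an Arborist constructor
+// (or a pre-loaded tree), e.g.:
+//
+//   const pacote = require('pacote')
+//   const Arborist = require('@npmcli/arborist')
+//   await pacote.tarball('file:./my-pkg', { Arborist })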
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/fetcher.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/fetcher.js
new file mode 100644
index 0000000000000000000000000000000000000000..f2ac97619d3af1f99905fc6054d7f103d963baa3
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/fetcher.js
@@ -0,0 +1,497 @@
+// This is the base class that the other fetcher types in lib
+// all descend from.
+// It handles the unpacking and retry logic that is shared among
+// all of the other Fetcher types.
+
+const { basename, dirname } = require('node:path')
+const { rm, mkdir } = require('node:fs/promises')
+const PackageJson = require('@npmcli/package-json')
+const cacache = require('cacache')
+const fsm = require('fs-minipass')
+const getContents = require('@npmcli/installed-package-contents')
+const npa = require('npm-package-arg')
+const retry = require('promise-retry')
+const ssri = require('ssri')
+const tar = require('tar')
+const { Minipass } = require('minipass')
+const { log } = require('proc-log')
+const _ = require('./util/protected.js')
+const cacheDir = require('./util/cache-dir.js')
+const isPackageBin = require('./util/is-package-bin.js')
+const removeTrailingSlashes = require('./util/trailing-slashes.js')
+
+// Pacote is only concerned with the package.json contents
+const packageJsonPrepare = (p) => PackageJson.prepare(p).then(pkg => pkg.content)
+const packageJsonNormalize = (p) => PackageJson.normalize(p).then(pkg => pkg.content)
+
+class FetcherBase {
+  constructor (spec, opts) {
+    if (!opts || typeof opts !== 'object') {
+      throw new TypeError('options object is required')
+    }
+    this.spec = npa(spec, opts.where)
+
+    this.allowGitIgnore = !!opts.allowGitIgnore
+
+    // a bit redundant because presumably the caller already knows this,
+    // but it makes it easier to not have to keep track of the requested
+    // spec when we're dispatching thousands of these at once, and normalizing
+    // is nice.  saveSpec is preferred if set, because it turns stuff like
+    // x/y#committish into github:x/y#committish.  use name@rawSpec for
+    // registry deps so that we turn xyz and xyz@ -> xyz@
+    this.from = this.spec.registry
+      ? `${this.spec.name}@${this.spec.rawSpec}` : this.spec.saveSpec
+
+    this.#assertType()
+    // clone the opts object so that others aren't upset when we mutate it
+    // by adding/modifying the integrity value.
+    this.opts = { ...opts }
+
+    this.cache = opts.cache || cacheDir().cacache
+    this.tufCache = opts.tufCache || cacheDir().tufcache
+    this.resolved = opts.resolved || null
+
+    // default to caching/verifying with sha512; that's what we usually have.
+    // We'll need to change this default, or start overriding it, when sha512
+    // is no longer strong enough.
+    this.defaultIntegrityAlgorithm = opts.defaultIntegrityAlgorithm || 'sha512'
+
+    if (typeof opts.integrity === 'string') {
+      this.opts.integrity = ssri.parse(opts.integrity)
+    }
+
+    this.package = null
+    this.type = this.constructor.name
+    this.fmode = opts.fmode || 0o666
+    this.dmode = opts.dmode || 0o777
+    // we don't need a default umask, because we don't chmod files coming
+    // out of package tarballs.  they're forced to have a mode that is
+    // valid, regardless of what's in the tarball entry, and then we let
+    // the process's umask setting do its job.  but if configured, we do
+    // respect it.
+    this.umask = opts.umask || 0
+
+    this.preferOnline = !!opts.preferOnline
+    this.preferOffline = !!opts.preferOffline
+    this.offline = !!opts.offline
+
+    this.before = opts.before
+    this.fullMetadata = this.before ? true : !!opts.fullMetadata
+    this.fullReadJson = !!opts.fullReadJson
+    this[_.readPackageJson] = this.fullReadJson
+      ? packageJsonPrepare
+      : packageJsonNormalize
+
+    // replaceRegistryHost is a registry hostname, or 'never' or 'always';
+    // defaults to registry.npmjs.org
+    this.replaceRegistryHost = (!opts.replaceRegistryHost || opts.replaceRegistryHost === 'npmjs') ?
+      'registry.npmjs.org' : opts.replaceRegistryHost
+
+    this.defaultTag = opts.defaultTag || 'latest'
+    this.registry = removeTrailingSlashes(opts.registry || 'https://registry.npmjs.org')
+
+    // command to run 'prepare' scripts on directories and git dirs
+    // To use pacote with yarn, for example, set npmBin to 'yarn'
+    // and npmCliConfig with yarn's equivalents.
+    this.npmBin = opts.npmBin || 'npm'
+
+    // command to install deps for preparing
+    this.npmInstallCmd = opts.npmInstallCmd || ['install', '--force']
+
+    // XXX fill more of this in based on what we know from this.opts
+    // we explicitly DO NOT fill in --tag, though, since we are often
+    // going to be packing in the context of a publish, which may set
+    // a dist-tag, but certainly wants to keep defaulting to latest.
+    this.npmCliConfig = opts.npmCliConfig || [
+      `--cache=${dirname(this.cache)}`,
+      `--prefer-offline=${!!this.preferOffline}`,
+      `--prefer-online=${!!this.preferOnline}`,
+      `--offline=${!!this.offline}`,
+      ...(this.before ? [`--before=${this.before.toISOString()}`] : []),
+      '--no-progress',
+      '--no-save',
+      '--no-audit',
+      // override any omit settings from the environment
+      '--include=dev',
+      '--include=peer',
+      '--include=optional',
+      // we need the actual things, not just the lockfile
+      '--no-package-lock-only',
+      '--no-dry-run',
+    ]
+  }
+
+  get integrity () {
+    return this.opts.integrity || null
+  }
+
+  set integrity (i) {
+    if (!i) {
+      return
+    }
+
+    i = ssri.parse(i)
+    const current = this.opts.integrity
+
+    // do not ever update an existing hash value, but do
+    // merge in NEW algos and hashes that we don't already have.
+    if (current) {
+      current.merge(i)
+    } else {
+      this.opts.integrity = i
+    }
+  }
+
+  get notImplementedError () {
+    return new Error('not implemented in this fetcher type: ' + this.type)
+  }
+
+  // override in child classes
+  // Returns a Promise that resolves to this.resolved string value
+  resolve () {
+    return this.resolved ? Promise.resolve(this.resolved)
+      : Promise.reject(this.notImplementedError)
+  }
+
+  packument () {
+    return Promise.reject(this.notImplementedError)
+  }
+
+  // override in child class
+  // returns a manifest containing:
+  // - name
+  // - version
+  // - _resolved
+  // - _integrity
+  // - plus whatever else was in there (corgi, full metadata, or pj file)
+  manifest () {
+    return Promise.reject(this.notImplementedError)
+  }
+
+  // private, should be overridden.
+  // Note that implementations should *not* calculate or check integrity
+  // or cache, but *just* return the raw tarball data stream.
+  [_.tarballFromResolved] () {
+    throw this.notImplementedError
+  }
+
+  // public, should not be overridden
+  tarball () {
+    return this.tarballStream(stream => stream.concat().then(data => {
+      data.integrity = this.integrity && String(this.integrity)
+      data.resolved = this.resolved
+      data.from = this.from
+      return data
+    }))
+  }
+
+  // private
+  // Note: cacache will raise an EINTEGRITY error if the integrity doesn't match
+  #tarballFromCache () {
+    const startTime = Date.now()
+    const stream = cacache.get.stream.byDigest(this.cache, this.integrity, this.opts)
+    const elapsedTime = Date.now() - startTime
+    // cache is good, so log it as a hit, in particular since there was no fetch logged
+    log.http(
+      'cache',
+      `${this.spec} ${elapsedTime}ms (cache hit)`
+    )
+    return stream
+  }
+
+  get [_.cacheFetches] () {
+    return true
+  }
+
+  #istream (stream) {
+    // if not caching this, just return it
+    if (!this.opts.cache || !this[_.cacheFetches]) {
+      // instead of creating a new integrity stream, we only piggyback on the
+      // provided stream's events
+      if (stream.hasIntegrityEmitter) {
+        stream.on('integrity', i => this.integrity = i)
+        return stream
+      }
+
+      const istream = ssri.integrityStream(this.opts)
+      istream.on('integrity', i => this.integrity = i)
+      stream.on('error', err => istream.emit('error', err))
+      return stream.pipe(istream)
+    }
+
+    // we have to return a stream that gets ALL the data, and proxies errors,
+    // but then pipe from the original tarball stream into the cache as well.
+    // To do this without losing any data, and since the cacache put stream
+    // is not a passthrough, we have to pipe from the original stream into
+    // the cache AFTER we pipe into the middleStream.  Since the cache stream
+    // has an asynchronous flush to write its contents to disk, we need to
+    // defer the middleStream end until the cache stream ends.
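+    //
+    //   source --> middleStream (end deferred) --> consumer
+    //     \--> cstream --> cacache --(flush done)--> middleStream.end()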
+    const middleStream = new Minipass()
+    stream.on('error', err => middleStream.emit('error', err))
+    stream.pipe(middleStream, { end: false })
+    const cstream = cacache.put.stream(
+      this.opts.cache,
+      `pacote:tarball:${this.from}`,
+      this.opts
+    )
+    cstream.on('integrity', i => this.integrity = i)
+    cstream.on('error', err => stream.emit('error', err))
+    stream.pipe(cstream)
+
+    // eslint-disable-next-line promise/catch-or-return
+    cstream.promise().catch(() => {}).then(() => middleStream.end())
+    return middleStream
+  }
+
+  pickIntegrityAlgorithm () {
+    return this.integrity ? this.integrity.pickAlgorithm(this.opts)
+      : this.defaultIntegrityAlgorithm
+  }
+
+  // TODO: check error class, once those are rolled out to our deps
+  isDataCorruptionError (er) {
+    return er.code === 'EINTEGRITY' || er.code === 'Z_DATA_ERROR'
+  }
+
+  // override the types getter
+  get types () {
+    return false
+  }
+
+  #assertType () {
+    if (this.types && !this.types.includes(this.spec.type)) {
+      throw new TypeError(`Wrong spec type (${
+        this.spec.type
+      }) for ${
+        this.constructor.name
+      }. Supported types: ${this.types.join(', ')}`)
+    }
+  }
+
+  // We allow ENOENTs from cacache, but not anywhere else.
+  // An ENOENT trying to read a tgz file, for example, is Right Out.
+  isRetriableError (er) {
+    // TODO: check error class, once those are rolled out to our deps
+    return this.isDataCorruptionError(er) ||
+      er.code === 'ENOENT' ||
+      er.code === 'EISDIR'
+  }
+
+  // Mostly internal, but has some uses
+  // Pass in a function which returns a promise
+  // Function will be called 1 or more times with streams that may fail.
+  // Retries:
+  // Function MUST handle errors on the stream by rejecting the promise,
+  // so that retry logic can pick it up and either retry or fail whatever
+  // promise it was making (ie, failing extraction, etc.)
+  //
+  // The return value of this method is a Promise that resolves the same
+  // as whatever the streamHandler resolves to.
+  //
+  // This should never be overridden by child classes, but it is public.
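+  //
+  // For illustration, a handler that buffers the whole tarball looks like
+  // the one used by tarball() above: fetcher.tarballStream(s => s.concat())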
+  tarballStream (streamHandler) {
+    // Only short-circuit via cache if we have everything else we'll need,
+    // and the user has not expressed a preference for checking online.
+
+    const fromCache = (
+      !this.preferOnline &&
+      this.integrity &&
+      this.resolved
+    ) ? streamHandler(this.#tarballFromCache()).catch(er => {
+        if (this.isDataCorruptionError(er)) {
+          log.warn('tarball', `cached data for ${
+          this.spec
+        } (${this.integrity}) seems to be corrupted. Refreshing cache.`)
+          return this.cleanupCached().then(() => {
+            throw er
+          })
+        } else {
+          throw er
+        }
+      }) : null
+
+    const fromResolved = er => {
+      if (er) {
+        if (!this.isRetriableError(er)) {
+          throw er
+        }
+        log.silly('tarball', `no local data for ${
+          this.spec
+        }. Extracting by manifest.`)
+      }
+      return this.resolve().then(() => retry(tryAgain =>
+        streamHandler(this.#istream(this[_.tarballFromResolved]()))
+          .catch(streamErr => {
+          // Most likely a data integrity error.  A cache ENOENT error is unlikely
+          // here, since we're definitely not reading from the cache, but it
+          // IS possible that the fetch subsystem accessed the cache, and the
+          // entry got blown away or something.  Try one more time to be sure.
+            if (this.isRetriableError(streamErr)) {
+              log.warn('tarball', `tarball data for ${
+              this.spec
+            } (${this.integrity}) seems to be corrupted. Trying again.`)
+              return this.cleanupCached().then(() => tryAgain(streamErr))
+            }
+            throw streamErr
+          }), { retries: 1, minTimeout: 0, maxTimeout: 0 }))
+    }
+
+    return fromCache ? fromCache.catch(fromResolved) : fromResolved()
+  }
+
+  cleanupCached () {
+    return cacache.rm.content(this.cache, this.integrity, this.opts)
+  }
+
+  #empty (path) {
+    return getContents({ path, depth: 1 }).then(contents => Promise.all(
+      contents.map(entry => rm(entry, { recursive: true, force: true }))))
+  }
+
+  async #mkdir (dest) {
+    await this.#empty(dest)
+    return await mkdir(dest, { recursive: true })
+  }
+
+  // extraction is always the same.  the only difference is where
+  // the tarball comes from.
+  async extract (dest) {
+    await this.#mkdir(dest)
+    return this.tarballStream((tarball) => this.#extract(dest, tarball))
+  }
+
+  #toFile (dest) {
+    return this.tarballStream(str => new Promise((res, rej) => {
+      const writer = new fsm.WriteStream(dest)
+      str.on('error', er => writer.emit('error', er))
+      writer.on('error', er => rej(er))
+      writer.on('close', () => res({
+        integrity: this.integrity && String(this.integrity),
+        resolved: this.resolved,
+        from: this.from,
+      }))
+      str.pipe(writer)
+    }))
+  }
+
+  // don't use this.#mkdir because we don't want to rimraf anything
+  async tarballFile (dest) {
+    const dir = dirname(dest)
+    await mkdir(dir, { recursive: true })
+    return this.#toFile(dest)
+  }
+
+  #extract (dest, tarball) {
+    const extractor = tar.x(this.#tarxOptions({ cwd: dest }))
+    const p = new Promise((resolve, reject) => {
+      extractor.on('end', () => {
+        resolve({
+          resolved: this.resolved,
+          integrity: this.integrity && String(this.integrity),
+          from: this.from,
+        })
+      })
+
+      extractor.on('error', er => {
+        log.warn('tar', er.message)
+        log.silly('tar', er)
+        reject(er)
+      })
+
+      tarball.on('error', er => reject(er))
+    })
+
+    tarball.pipe(extractor)
+    return p
+  }
+
+  // always ensure that entries are at least as permissive as our configured
+  // dmode/fmode, but never more permissive than the umask allows.
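+  // e.g. (sketch): a regular file entry with mode 0o444 under umask 0 comes
+  // out as ((0o444 | 0o666) & ~0) | 0o600 = 0o666; a package bin gets the
+  // extra 0o111, yielding 0o777.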
+  #entryMode (path, mode, type) {
+    const m = /Directory|GNUDumpDir/.test(type) ? this.dmode
+      : /File$/.test(type) ? this.fmode
+      : /* istanbul ignore next - should never happen in a pkg */ 0
+
+    // make sure package bins are executable
+    const exe = isPackageBin(this.package, path) ? 0o111 : 0
+    // always ensure that files are read/writable by the owner
+    return ((mode | m) & ~this.umask) | exe | 0o600
+  }
+
+  #tarxOptions ({ cwd }) {
+    const sawIgnores = new Set()
+    return {
+      cwd,
+      noChmod: true,
+      noMtime: true,
+      filter: (name, entry) => {
+        if (/Link$/.test(entry.type)) {
+          return false
+        }
+        entry.mode = this.#entryMode(entry.path, entry.mode, entry.type)
+        // this replicates the npm pack behavior where .gitignore files
+        // are treated like .npmignore files, but only if a .npmignore
+        // file is not present.
+        if (/File$/.test(entry.type)) {
+          const base = basename(entry.path)
+          if (base === '.npmignore') {
+            sawIgnores.add(entry.path)
+          } else if (base === '.gitignore' && !this.allowGitIgnore) {
+            // rename, but only if there's not already a .npmignore
+            const ni = entry.path.replace(/\.gitignore$/, '.npmignore')
+            if (sawIgnores.has(ni)) {
+              return false
+            }
+            entry.path = ni
+          }
+          return true
+        }
+      },
+      strip: 1,
+      onwarn: /* istanbul ignore next - we can trust that tar logs */
+      (code, msg, data) => {
+        log.warn('tar', code, msg)
+        log.silly('tar', code, msg, data)
+      },
+      umask: this.umask,
+      // always ignore ownership info from tarball metadata
+      preserveOwner: false,
+    }
+  }
+}
+
+module.exports = FetcherBase
+
+// Child classes
+const GitFetcher = require('./git.js')
+const RegistryFetcher = require('./registry.js')
+const FileFetcher = require('./file.js')
+const DirFetcher = require('./dir.js')
+const RemoteFetcher = require('./remote.js')
+
+// Get an appropriate fetcher object from a spec and options
+FetcherBase.get = (rawSpec, opts = {}) => {
+  const spec = npa(rawSpec, opts.where)
+  switch (spec.type) {
+    case 'git':
+      return new GitFetcher(spec, opts)
+
+    case 'remote':
+      return new RemoteFetcher(spec, opts)
+
+    case 'version':
+    case 'range':
+    case 'tag':
+    case 'alias':
+      return new RegistryFetcher(spec.subSpec || spec, opts)
+
+    case 'file':
+      return new FileFetcher(spec, opts)
+
+    case 'directory':
+      return new DirFetcher(spec, opts)
+
+    default:
+      throw new TypeError('Unknown spec type: ' + spec.type)
+  }
+}
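+
+// Dispatch examples (sketch): FetcherBase.get('abbrev@^1.0.0') returns a
+// RegistryFetcher, get('https://example.com/a.tgz') a RemoteFetcher, and
+// get('file:./a.tgz') a FileFetcher, per the npa spec types above.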
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/file.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/file.js
new file mode 100644
index 0000000000000000000000000000000000000000..2021325085e4f0d16e0224eb7fd9e23c92cebe3d
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/file.js
@@ -0,0 +1,94 @@
+const { resolve } = require('node:path')
+const { stat, chmod } = require('node:fs/promises')
+const cacache = require('cacache')
+const fsm = require('fs-minipass')
+const Fetcher = require('./fetcher.js')
+const _ = require('./util/protected.js')
+
+class FileFetcher extends Fetcher {
+  constructor (spec, opts) {
+    super(spec, opts)
+    // just the fully resolved filename
+    this.resolved = this.spec.fetchSpec
+  }
+
+  get types () {
+    return ['file']
+  }
+
+  manifest () {
+    if (this.package) {
+      return Promise.resolve(this.package)
+    }
+
+    // have to unpack the tarball for this.
+    return cacache.tmp.withTmp(this.cache, this.opts, dir =>
+      this.extract(dir)
+        .then(() => this[_.readPackageJson](dir))
+        .then(mani => this.package = {
+          ...mani,
+          _integrity: this.integrity && String(this.integrity),
+          _resolved: this.resolved,
+          _from: this.from,
+        }))
+  }
+
+  #exeBins (pkg, dest) {
+    if (!pkg.bin) {
+      return Promise.resolve()
+    }
+
+    return Promise.all(Object.keys(pkg.bin).map(async k => {
+      const script = resolve(dest, pkg.bin[k])
+      // Best effort.  Ignore errors here, the only result is that
+      // a bin script is not executable.  But if it's missing or
+      // something, we just leave it for a later stage to trip over
+      // when we can provide a more useful contextual error.
+      try {
+        const st = await stat(script)
+        const mode = st.mode | 0o111
+        if (mode === st.mode) {
+          return
+        }
+        await chmod(script, mode)
+      } catch {
+        // Ignore errors here
+      }
+    }))
+  }
+
+  extract (dest) {
+    // if we've already loaded the manifest, then the super got it.
+    // but if not, read the unpacked manifest and chmod properly.
+    return super.extract(dest)
+      .then(result => this.package ? result
+      : this[_.readPackageJson](dest).then(pkg =>
+        this.#exeBins(pkg, dest)).then(() => result))
+  }
+
+  [_.tarballFromResolved] () {
+    // create a read stream and return it
+    return new fsm.ReadStream(this.resolved)
+  }
+
+  packument () {
+    // simulate based on manifest
+    return this.manifest().then(mani => ({
+      name: mani.name,
+      'dist-tags': {
+        [this.defaultTag]: mani.version,
+      },
+      versions: {
+        [mani.version]: {
+          ...mani,
+          dist: {
+            tarball: `file:${this.resolved}`,
+            integrity: this.integrity && String(this.integrity),
+          },
+        },
+      },
+    }))
+  }
+}
+
+module.exports = FileFetcher
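+
+// Example (sketch): for file:./foo-1.0.0.tgz, packument() simulates a
+// one-version registry document from the unpacked manifest, roughly:
+//
+//   { name: 'foo',
+//     'dist-tags': { latest: '1.0.0' },
+//     versions: { '1.0.0': { ...manifest, dist: { tarball: 'file:...' } } } }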
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/git.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/git.js
new file mode 100644
index 0000000000000000000000000000000000000000..077193a86f026ffc3283d83eab0f3482c5ce0b78
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/git.js
@@ -0,0 +1,317 @@
+const cacache = require('cacache')
+const git = require('@npmcli/git')
+const npa = require('npm-package-arg')
+const pickManifest = require('npm-pick-manifest')
+const { Minipass } = require('minipass')
+const { log } = require('proc-log')
+const DirFetcher = require('./dir.js')
+const Fetcher = require('./fetcher.js')
+const FileFetcher = require('./file.js')
+const RemoteFetcher = require('./remote.js')
+const _ = require('./util/protected.js')
+const addGitSha = require('./util/add-git-sha.js')
+const npm = require('./util/npm.js')
+
+const hashre = /^[a-f0-9]{40}$/
+
+// get the repository url.
+// prefer https if there's auth, since ssh will drop that.
+// otherwise, prefer ssh if available (more secure).
+// We have to add the git+ back because npa suppresses it.
+const repoUrl = (h, opts) =>
+  h.sshurl && !(h.https && h.auth) && addGitPlus(h.sshurl(opts)) ||
+  h.https && addGitPlus(h.https(opts))
+
+// add git+ to the url, but only one time.
+const addGitPlus = url => url && `git+${url}`.replace(/^(git\+)+/, 'git+')
+
+class GitFetcher extends Fetcher {
+  constructor (spec, opts) {
+    super(spec, opts)
+
+    // we never want to compare integrity for git dependencies: npm/rfcs#525
+    if (this.opts.integrity) {
+      delete this.opts.integrity
+      log.warn(`skipping integrity check for git dependency ${this.spec.fetchSpec}`)
+    }
+
+    this.resolvedRef = null
+    if (this.spec.hosted) {
+      this.from = this.spec.hosted.shortcut({ noCommittish: false })
+    }
+
+    // shortcut: avoid full clone when we can go straight to the tgz
+    // if we have the full sha and it's a hosted git platform
+    if (this.spec.gitCommittish && hashre.test(this.spec.gitCommittish)) {
+      this.resolvedSha = this.spec.gitCommittish
+      // use hosted.tarball() when we shell to RemoteFetcher later
+      this.resolved = this.spec.hosted
+        ? repoUrl(this.spec.hosted, { noCommittish: false })
+        : this.spec.rawSpec
+    } else {
+      this.resolvedSha = ''
+    }
+
+    this.Arborist = opts.Arborist || null
+  }
+
+  // just exposed to make it easier to test all the combinations
+  static repoUrl (hosted, opts) {
+    return repoUrl(hosted, opts)
+  }
+
+  get types () {
+    return ['git']
+  }
+
+  resolve () {
+    // likely a hosted git repo with a sha, so get the tarball url
+    // but in general, no reason to resolve() more than necessary!
+    if (this.resolved) {
+      return super.resolve()
+    }
+
+    // fetch the git repo and then look at the current hash
+    const h = this.spec.hosted
+    // try to use ssh, fall back to git.
+    return h
+      ? this.#resolvedFromHosted(h)
+      : this.#resolvedFromRepo(this.spec.fetchSpec)
+  }
+
+  // first try https, since that's faster and passphrase-less for
+  // public repos, and supports private repos when auth is provided.
+  // Fall back to SSH to support private repos
+  // NB: we always store the https url in resolved field if auth
+  // is present, otherwise ssh if the hosted type provides it
+  #resolvedFromHosted (hosted) {
+    return this.#resolvedFromRepo(hosted.https && hosted.https()).catch(er => {
+      // Throw early since we know pathspec errors will fail again if retried
+      if (er instanceof git.errors.GitPathspecError) {
+        throw er
+      }
+      const ssh = hosted.sshurl && hosted.sshurl()
+      // no fallthrough if we can't fall through or have https auth
+      if (!ssh || hosted.auth) {
+        throw er
+      }
+      return this.#resolvedFromRepo(ssh)
+    })
+  }
+
+  #resolvedFromRepo (gitRemote) {
+    // XXX make this a custom error class
+    if (!gitRemote) {
+      return Promise.reject(new Error(`No git url for ${this.spec}`))
+    }
+    const gitRange = this.spec.gitRange
+    const name = this.spec.name
+    return git.revs(gitRemote, this.opts).then(remoteRefs => {
+      return gitRange ? pickManifest({
+        versions: remoteRefs.versions,
+        'dist-tags': remoteRefs['dist-tags'],
+        name,
+      }, gitRange, this.opts)
+        : this.spec.gitCommittish ?
+          remoteRefs.refs[this.spec.gitCommittish] ||
+          remoteRefs.refs[remoteRefs.shas[this.spec.gitCommittish]]
+          : remoteRefs.refs.HEAD // no git committish, get default head
+    }).then(revDoc => {
+      // the committish provided isn't in the rev list
+      // things like HEAD~3 or @yesterday can land here.
+      if (!revDoc || !revDoc.sha) {
+        return this.#resolvedFromClone()
+      }
+
+      this.resolvedRef = revDoc
+      this.resolvedSha = revDoc.sha
+      this.#addGitSha(revDoc.sha)
+      return this.resolved
+    })
+  }
+
+  #setResolvedWithSha (withSha) {
+    // we haven't cloned, so a tgz download is still faster.
+    // of course, if it's not a known host, we can't do that.
+    this.resolved = !this.spec.hosted ? withSha
+      : repoUrl(npa(withSha).hosted, { noCommittish: false })
+  }
+
+  // when we get the git sha, we affix it to our spec to build up
+  // either a git url with a hash, or a tarball download URL
+  #addGitSha (sha) {
+    this.#setResolvedWithSha(addGitSha(this.spec, sha))
+  }
+
+  #resolvedFromClone () {
+    // do a full or shallow clone, then look at the HEAD
+    // kind of wasteful, but no other option, really
+    return this.#clone(() => this.resolved)
+  }
+
+  #prepareDir (dir) {
+    return this[_.readPackageJson](dir).then(mani => {
+      // no need if we aren't going to do any preparation.
+      const scripts = mani.scripts
+      if (!mani.workspaces && (!scripts || !(
+        scripts.postinstall ||
+          scripts.build ||
+          scripts.preinstall ||
+          scripts.install ||
+          scripts.prepack ||
+          scripts.prepare))) {
+        return
+      }
+
+      // to avoid cases where we have a cycle of git deps that depend
+      // on one another, we only ever do preparation for one instance
+      // of a given git dep along the chain of installations.
+      // Note that this does mean that a dependency MAY in theory end up
+      // trying to run its prepare script using a dependency that has not
+      // been properly prepared itself, but that edge case is smaller
+      // and less hazardous than a fork bomb of npm and git commands.
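+      // (sketch) the env var is a newline-separated list of resolved git
+      // urls, e.g. 'git+ssh://git@host/a/b.git#<sha>' entries.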
+      const noPrepare = !process.env._PACOTE_NO_PREPARE_ ? []
+        : process.env._PACOTE_NO_PREPARE_.split('\n')
+      if (noPrepare.includes(this.resolved)) {
+        log.info('prepare', 'skip prepare, already seen', this.resolved)
+        return
+      }
+      noPrepare.push(this.resolved)
+
+      // the DirFetcher will do its own preparation to run the prepare scripts
+      // All we have to do is put the deps in place so that it can succeed.
+      return npm(
+        this.npmBin,
+        [].concat(this.npmInstallCmd).concat(this.npmCliConfig),
+        dir,
+        { ...process.env, _PACOTE_NO_PREPARE_: noPrepare.join('\n') },
+        { message: 'git dep preparation failed' }
+      )
+    })
+  }
+
+  [_.tarballFromResolved] () {
+    const stream = new Minipass()
+    stream.resolved = this.resolved
+    stream.from = this.from
+
+    // check it out and then shell out to the DirFetcher tarball packer
+    this.#clone(dir => this.#prepareDir(dir)
+      .then(() => new Promise((res, rej) => {
+        if (!this.Arborist) {
+          throw new Error('GitFetcher requires an Arborist constructor to pack a tarball')
+        }
+        const df = new DirFetcher(`file:${dir}`, {
+          ...this.opts,
+          Arborist: this.Arborist,
+          resolved: null,
+          integrity: null,
+        })
+        const dirStream = df[_.tarballFromResolved]()
+        dirStream.on('error', rej)
+        dirStream.on('end', res)
+        dirStream.pipe(stream)
+      }))).catch(
+      /* istanbul ignore next: very unlikely and hard to test */
+      er => stream.emit('error', er)
+    )
+    return stream
+  }
+
+  // clone a git repo into a temp folder (or fetch and unpack if possible)
+  // handler accepts a directory, and returns a promise that resolves
+  // when we're done with it, at which point, cacache deletes it
+  //
+  // TODO: after cloning, create a tarball of the folder, and add to the cache
+  // with cacache.put.stream(), using a key that's deterministic based on the
+  // spec and repo, so that we don't ever clone the same thing multiple times.
+  #clone (handler, tarballOk = true) {
+    const o = { tmpPrefix: 'git-clone' }
+    const ref = this.resolvedSha || this.spec.gitCommittish
+    const h = this.spec.hosted
+    const resolved = this.resolved
+
+    // can be set manually to false to fall back to actual git clone
+    tarballOk = tarballOk &&
+      h && resolved === repoUrl(h, { noCommittish: false }) && h.tarball
+
+    return cacache.tmp.withTmp(this.cache, o, async tmp => {
+      // if we're resolved, and have a tarball url, shell out to RemoteFetcher
+      if (tarballOk) {
+        const nameat = this.spec.name ? `${this.spec.name}@` : ''
+        return new RemoteFetcher(h.tarball({ noCommittish: false }), {
+          ...this.opts,
+          allowGitIgnore: true,
+          pkgid: `git:${nameat}${this.resolved}`,
+          resolved: this.resolved,
+          integrity: null, // it'll always be different, if we have one
+        }).extract(tmp).then(() => handler(tmp), er => {
+          // fall back to ssh download if tarball fails
+          if (er.constructor.name.match(/^Http/)) {
+            return this.#clone(handler, false)
+          } else {
+            throw er
+          }
+        })
+      }
+
+      const sha = await (
+        h ? this.#cloneHosted(ref, tmp)
+        : this.#cloneRepo(this.spec.fetchSpec, ref, tmp)
+      )
+      this.resolvedSha = sha
+      if (!this.resolved) {
+        await this.#addGitSha(sha)
+      }
+      return handler(tmp)
+    })
+  }
+
+  // first try https, since that's faster and passphrase-less for
+  // public repos, and supports private repos when auth is provided.
+  // Fall back to SSH to support private repos
+  // NB: we always store the https url in resolved field if auth
+  // is present, otherwise ssh if the hosted type provides it
+  #cloneHosted (ref, tmp) {
+    const hosted = this.spec.hosted
+    return this.#cloneRepo(hosted.https({ noCommittish: true }), ref, tmp)
+      .catch(er => {
+        // Throw early since we know pathspec errors will fail again if retried
+        if (er instanceof git.errors.GitPathspecError) {
+          throw er
+        }
+        const ssh = hosted.sshurl && hosted.sshurl({ noCommittish: true })
+        // no fallthrough if we can't fall through or have https auth
+        if (!ssh || hosted.auth) {
+          throw er
+        }
+        return this.#cloneRepo(ssh, ref, tmp)
+      })
+  }
+
+  #cloneRepo (repo, ref, tmp) {
+    const { opts, spec } = this
+    return git.clone(repo, ref, tmp, { ...opts, spec })
+  }
+
+  manifest () {
+    if (this.package) {
+      return Promise.resolve(this.package)
+    }
+
+    return this.spec.hosted && this.resolved
+      ? FileFetcher.prototype.manifest.apply(this)
+      : this.#clone(dir =>
+        this[_.readPackageJson](dir)
+          .then(mani => this.package = {
+            ...mani,
+            _resolved: this.resolved,
+            _from: this.from,
+          }))
+  }
+
+  packument () {
+    return FileFetcher.prototype.packument.apply(this)
+  }
+}
+module.exports = GitFetcher
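+
+// Usage sketch (illustrative; the repo spec is a placeholder): as with
+// DirFetcher, packing a git dep needs an Arborist constructor, since the
+// clone is prepared and packed as a directory:
+//
+//   const pacote = require('pacote')
+//   const Arborist = require('@npmcli/arborist')
+//   await pacote.extract('github:some-user/some-repo', './dest', { Arborist })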
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..f35314d275d5fd1bf8c0d186531a7252b45c47a4
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/index.js
@@ -0,0 +1,23 @@
+const { get } = require('./fetcher.js')
+const GitFetcher = require('./git.js')
+const RegistryFetcher = require('./registry.js')
+const FileFetcher = require('./file.js')
+const DirFetcher = require('./dir.js')
+const RemoteFetcher = require('./remote.js')
+
+const tarball = (spec, opts) => get(spec, opts).tarball()
+tarball.stream = (spec, handler, opts) => get(spec, opts).tarballStream(handler)
+tarball.file = (spec, dest, opts) => get(spec, opts).tarballFile(dest)
+
+module.exports = {
+  GitFetcher,
+  RegistryFetcher,
+  FileFetcher,
+  DirFetcher,
+  RemoteFetcher,
+  resolve: (spec, opts) => get(spec, opts).resolve(),
+  extract: (spec, dest, opts) => get(spec, opts).extract(dest),
+  manifest: (spec, opts) => get(spec, opts).manifest(),
+  packument: (spec, opts) => get(spec, opts).packument(),
+  tarball,
+}
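+
+// Usage sketch (illustrative; 'abbrev' stands in for any package spec):
+//
+//   const pacote = require('pacote')
+//   const manifest = await pacote.manifest('abbrev@latest')
+//   const packument = await pacote.packument('abbrev')
+//   await pacote.extract('abbrev@1.1.1', '/tmp/abbrev')
+//   const tarData = await pacote.tarball('abbrev@1.1.1')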
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/registry.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/registry.js
new file mode 100644
index 0000000000000000000000000000000000000000..1ecf4ee1773499cda10f8c6dde3d4410c36ec27e
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/registry.js
@@ -0,0 +1,369 @@
+const crypto = require('node:crypto')
+const PackageJson = require('@npmcli/package-json')
+const pickManifest = require('npm-pick-manifest')
+const ssri = require('ssri')
+const npa = require('npm-package-arg')
+const sigstore = require('sigstore')
+const fetch = require('npm-registry-fetch')
+const Fetcher = require('./fetcher.js')
+const RemoteFetcher = require('./remote.js')
+const pacoteVersion = require('../package.json').version
+const removeTrailingSlashes = require('./util/trailing-slashes.js')
+const _ = require('./util/protected.js')
+
+// Corgis are cute. 🐕🐶
+const corgiDoc = 'application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*'
+const fullDoc = 'application/json'
+
+// Some really old packages have no time field in their packument so we need a
+// cutoff date.
+const MISSING_TIME_CUTOFF = '2015-01-01T00:00:00.000Z'
+
+class RegistryFetcher extends Fetcher {
+  #cacheKey
+  constructor (spec, opts) {
+    super(spec, opts)
+
+    // you usually don't want to fetch the same packument multiple times in
+    // the span of a given script or command, no matter how many pacote calls
+    // are made, so this lets us avoid doing that.  It's only relevant for
+    // registry fetchers, because other types simulate their packument from
+    // the manifest, which they memoize on this.package, so it's very cheap
+    // already.
+    this.packumentCache = this.opts.packumentCache || null
+
+    this.registry = fetch.pickRegistry(spec, opts)
+    this.packumentUrl = `${removeTrailingSlashes(this.registry)}/${this.spec.escapedName}`
+    this.#cacheKey = `${this.fullMetadata ? 'full' : 'corgi'}:${this.packumentUrl}`
+
+    const parsed = new URL(this.registry)
+    const regKey = `//${parsed.host}${parsed.pathname}`
+    // unlike the nerf-darted auth keys, this one does *not* allow a mismatch
+    // of trailing slashes.  It must match exactly.
+    if (this.opts[`${regKey}:_keys`]) {
+      this.registryKeys = this.opts[`${regKey}:_keys`]
+    }
+
+    // XXX pacote <=9 has some logic to ignore opts.resolved if
+    // the resolved URL doesn't go to the same registry.
+    // Consider reproducing that here, to throw away this.resolved
+    // in that case.
+  }
+
+  async resolve () {
+    // fetching the manifest sets resolved and (if present) integrity
+    await this.manifest()
+    if (!this.resolved) {
+      throw Object.assign(
+        new Error('Invalid package manifest: no `dist.tarball` field'),
+        { package: this.spec.toString() }
+      )
+    }
+    return this.resolved
+  }
+
+  #headers () {
+    return {
+      // npm will override UA, but ensure that we always send *something*
+      'user-agent': this.opts.userAgent ||
+        `pacote/${pacoteVersion} node/${process.version}`,
+      ...(this.opts.headers || {}),
+      'pacote-version': pacoteVersion,
+      'pacote-req-type': 'packument',
+      'pacote-pkg-id': `registry:${this.spec.name}`,
+      accept: this.fullMetadata ? fullDoc : corgiDoc,
+    }
+  }
+
+  async packument () {
+    // note this might be either an in-flight promise for a request,
+    // or the actual packument, but we never want to make more than
+    // one request at a time for the same thing regardless.
+    if (this.packumentCache?.has(this.#cacheKey)) {
+      return this.packumentCache.get(this.#cacheKey)
+    }
+
+    // npm-registry-fetch the packument
+    // set the appropriate header for corgis if fullMetadata isn't set
+    // return the res.json() promise
+    try {
+      const res = await fetch(this.packumentUrl, {
+        ...this.opts,
+        headers: this.#headers(),
+        spec: this.spec,
+
+        // never check integrity for packuments themselves
+        integrity: null,
+      })
+      const packument = await res.json()
+      const contentLength = res.headers.get('content-length')
+      if (contentLength) {
+        packument._contentLength = Number(contentLength)
+      }
+      this.packumentCache?.set(this.#cacheKey, packument)
+      return packument
+    } catch (err) {
+      this.packumentCache?.delete(this.#cacheKey)
+      if (err.code !== 'E404' || this.fullMetadata) {
+        throw err
+      }
+      // possible that corgis are not supported by this registry
+      this.fullMetadata = true
+      return this.packument()
+    }
+  }
+
+  async manifest () {
+    if (this.package) {
+      return this.package
+    }
+
+    // When verifying signatures, we need to fetch the full/uncompressed
+    // packument to get publish time as this is not included in the
+    // corgi/compressed packument.
+    if (this.opts.verifySignatures) {
+      this.fullMetadata = true
+    }
+
+    const packument = await this.packument()
+    const steps = PackageJson.normalizeSteps.filter(s => s !== '_attributes')
+    const mani = await new PackageJson().fromContent(pickManifest(packument, this.spec.fetchSpec, {
+      ...this.opts,
+      defaultTag: this.defaultTag,
+      before: this.before,
+    })).normalize({ steps }).then(p => p.content)
+
+    /* XXX add ETARGET and E403 revalidation of cached packuments here */
+
+    // add _time from packument if fetched with fullMetadata
+    const time = packument.time?.[mani.version]
+    if (time) {
+      mani._time = time
+    }
+
+    // add _resolved and _integrity from dist object
+    const { dist } = mani
+    if (dist) {
+      this.resolved = mani._resolved = dist.tarball
+      mani._from = this.from
+      const distIntegrity = dist.integrity ? ssri.parse(dist.integrity)
+        : dist.shasum ? ssri.fromHex(dist.shasum, 'sha1', { ...this.opts })
+        : null
+      if (distIntegrity) {
+        if (this.integrity && !this.integrity.match(distIntegrity)) {
+          // only bork if they have algos in common.
+          // otherwise we end up breaking if we have saved a sha512
+          // previously for the tarball, but the manifest only
+          // provides a sha1, which is possible for older publishes.
+          // Otherwise, this is almost certainly a case of holding it
+          // wrong, and will result in weird or insecure behavior
+          // later on when building package tree.
+          for (const algo of Object.keys(this.integrity)) {
+            if (distIntegrity[algo]) {
+              throw Object.assign(new Error(
+                `Integrity checksum failed when using ${algo}: ` +
+                `wanted ${this.integrity} but got ${distIntegrity}.`
+              ), { code: 'EINTEGRITY' })
+            }
+          }
+        }
+        // made it this far, the integrity is worthwhile.  accept it.
+        // the setter here will take care of merging it into what we already
+        // had.
+        this.integrity = distIntegrity
+      }
+    }
+    if (this.integrity) {
+      mani._integrity = String(this.integrity)
+      if (dist.signatures) {
+        if (this.opts.verifySignatures) {
+          // validate and throw on error, then set _signatures
+          const message = `${mani._id}:${mani._integrity}`
+          for (const signature of dist.signatures) {
+            const publicKey = this.registryKeys &&
+              this.registryKeys.filter(key => (key.keyid === signature.keyid))[0]
+            if (!publicKey) {
+              throw Object.assign(new Error(
+                  `${mani._id} has a registry signature with keyid: ${signature.keyid} ` +
+                  'but no corresponding public key can be found'
+              ), { code: 'EMISSINGSIGNATUREKEY' })
+            }
+
+            const publishedTime = Date.parse(mani._time || MISSING_TIME_CUTOFF)
+            const validPublicKey = !publicKey.expires ||
+              publishedTime < Date.parse(publicKey.expires)
+            if (!validPublicKey) {
+              throw Object.assign(new Error(
+                  `${mani._id} has a registry signature with keyid: ${signature.keyid} ` +
+                  `but the corresponding public key has expired ${publicKey.expires}`
+              ), { code: 'EEXPIREDSIGNATUREKEY' })
+            }
+            const verifier = crypto.createVerify('SHA256')
+            verifier.write(message)
+            verifier.end()
+            const valid = verifier.verify(
+              publicKey.pemkey,
+              signature.sig,
+              'base64'
+            )
+            if (!valid) {
+              throw Object.assign(new Error(
+                  `${mani._id} has an invalid registry signature with ` +
+                  `keyid: ${publicKey.keyid} and signature: ${signature.sig}`
+              ), {
+                code: 'EINTEGRITYSIGNATURE',
+                keyid: publicKey.keyid,
+                signature: signature.sig,
+                resolved: mani._resolved,
+                integrity: mani._integrity,
+              })
+            }
+          }
+          mani._signatures = dist.signatures
+        } else {
+          mani._signatures = dist.signatures
+        }
+      }
+
+      if (dist.attestations) {
+        if (this.opts.verifyAttestations) {
+          // Always fetch attestations from the current registry host
+          const attestationsPath = new URL(dist.attestations.url).pathname
+          const attestationsUrl = removeTrailingSlashes(this.registry) + attestationsPath
+          const res = await fetch(attestationsUrl, {
+            ...this.opts,
+            // disable integrity check for the attestations json payload; we check the
+            // integrity in the verification steps below
+            integrity: null,
+          })
+          const { attestations } = await res.json()
+          const bundles = attestations.map(({ predicateType, bundle }) => {
+            const statement = JSON.parse(
+              Buffer.from(bundle.dsseEnvelope.payload, 'base64').toString('utf8')
+            )
+            const keyid = bundle.dsseEnvelope.signatures[0].keyid
+            const signature = bundle.dsseEnvelope.signatures[0].sig
+
+            return {
+              predicateType,
+              bundle,
+              statement,
+              keyid,
+              signature,
+            }
+          })
+
+          const attestationKeyIds = bundles.map((b) => b.keyid).filter((k) => !!k)
+          const attestationRegistryKeys = (this.registryKeys || [])
+            .filter(key => attestationKeyIds.includes(key.keyid))
+          if (!attestationRegistryKeys.length) {
+            throw Object.assign(new Error(
+              `${mani._id} has attestations but no corresponding public key(s) can be found`
+            ), { code: 'EMISSINGSIGNATUREKEY' })
+          }
+
+          for (const { predicateType, bundle, keyid, signature, statement } of bundles) {
+            const publicKey = attestationRegistryKeys.find(key => key.keyid === keyid)
+            // Publish attestations have a keyid set and a valid public key must be found
+            if (keyid) {
+              if (!publicKey) {
+                throw Object.assign(new Error(
+                  `${mani._id} has attestations with keyid: ${keyid} ` +
+                  'but no corresponding public key can be found'
+                ), { code: 'EMISSINGSIGNATUREKEY' })
+              }
+
+              const integratedTime = new Date(
+                Number(
+                  bundle.verificationMaterial.tlogEntries[0].integratedTime
+                ) * 1000
+              )
+              const validPublicKey = !publicKey.expires ||
+                (integratedTime < Date.parse(publicKey.expires))
+              if (!validPublicKey) {
+                throw Object.assign(new Error(
+                  `${mani._id} has attestations with keyid: ${keyid} ` +
+                  `but the corresponding public key has expired ${publicKey.expires}`
+                ), { code: 'EEXPIREDSIGNATUREKEY' })
+              }
+            }
+
+            const subject = {
+              name: statement.subject[0].name,
+              sha512: statement.subject[0].digest.sha512,
+            }
+
+            // Only type 'version' can be turned into a PURL
+            const purl = this.spec.type === 'version' ? npa.toPurl(this.spec) : this.spec
+            // Verify the statement subject matches the package, version
+            if (subject.name !== purl) {
+              throw Object.assign(new Error(
+                `${mani._id} package name and version (PURL): ${purl} ` +
+                `doesn't match what was signed: ${subject.name}`
+              ), { code: 'EATTESTATIONSUBJECT' })
+            }
+
+            // Verify the statement subject matches the tarball integrity
+            const integrityHexDigest = ssri.parse(this.integrity).hexDigest()
+            if (subject.sha512 !== integrityHexDigest) {
+              throw Object.assign(new Error(
+                `${mani._id} package integrity (hex digest): ` +
+                `${integrityHexDigest} ` +
+                `doesn't match what was signed: ${subject.sha512}`
+              ), { code: 'EATTESTATIONSUBJECT' })
+            }
+
+            try {
+              // Provenance attestations are signed with a signing certificate
+              // (including the key) so we don't need to return a public key.
+              //
+              // Publish attestations are signed with a keyid so we need to
+              // specify a public key from the keys endpoint: `registry-host.tld/-/npm/v1/keys`
+              const options = {
+                tufCachePath: this.tufCache,
+                tufForceCache: true,
+                keySelector: publicKey ? () => publicKey.pemkey : undefined,
+              }
+              await sigstore.verify(bundle, options)
+            } catch (e) {
+              throw Object.assign(new Error(
+                `${mani._id} failed to verify attestation: ${e.message}`
+              ), {
+                code: 'EATTESTATIONVERIFY',
+                predicateType,
+                keyid,
+                signature,
+                resolved: mani._resolved,
+                integrity: mani._integrity,
+              })
+            }
+          }
+          mani._attestations = dist.attestations
+        } else {
+          mani._attestations = dist.attestations
+        }
+      }
+    }
+
+    this.package = mani
+    return this.package
+  }
+
+  [_.tarballFromResolved] () {
+    // we use a RemoteFetcher to get the actual tarball stream
+    return new RemoteFetcher(this.resolved, {
+      ...this.opts,
+      resolved: this.resolved,
+      pkgid: `registry:${this.spec.name}@${this.resolved}`,
+    })[_.tarballFromResolved]()
+  }
+
+  get types () {
+    return [
+      'tag',
+      'version',
+      'range',
+    ]
+  }
+}
+module.exports = RegistryFetcher
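+
+// Note (sketch): signature/attestation verification above is opt-in, e.g.
+//   pacote.manifest('abbrev@latest', { verifySignatures: true })
+// and requires the registry's public keys to be passed in via the
+// '//registry.npmjs.org/:_keys' style option checked in the constructor.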
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/remote.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/remote.js
new file mode 100644
index 0000000000000000000000000000000000000000..bd321e65a1f18a4440310a7dc000c7250a4292d0
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/remote.js
@@ -0,0 +1,89 @@
+const fetch = require('npm-registry-fetch')
+const { Minipass } = require('minipass')
+const Fetcher = require('./fetcher.js')
+const FileFetcher = require('./file.js')
+const _ = require('./util/protected.js')
+const pacoteVersion = require('../package.json').version
+
+class RemoteFetcher extends Fetcher {
+  constructor (spec, opts) {
+    super(spec, opts)
+    this.resolved = this.spec.fetchSpec
+    const resolvedURL = new URL(this.resolved)
+    if (this.replaceRegistryHost !== 'never'
+      && (this.replaceRegistryHost === 'always'
+      || this.replaceRegistryHost === resolvedURL.host)) {
+      this.resolved = new URL(resolvedURL.pathname, this.registry).href
+    }
+
+    // nam is a fermented pork sausage that is good to eat
+    const nameat = this.spec.name ? `${this.spec.name}@` : ''
+    this.pkgid = opts.pkgid ? opts.pkgid : `remote:${nameat}${this.resolved}`
+  }
+
+  // Don't need to cache tarball fetches in pacote, because make-fetch-happen
+  // will write into cacache anyway.
+  get [_.cacheFetches] () {
+    return false
+  }
+
+  [_.tarballFromResolved] () {
+    const stream = new Minipass()
+    stream.hasIntegrityEmitter = true
+
+    const fetchOpts = {
+      ...this.opts,
+      headers: this.#headers(),
+      spec: this.spec,
+      integrity: this.integrity,
+      algorithms: [this.pickIntegrityAlgorithm()],
+    }
+
+    // eslint-disable-next-line promise/always-return
+    fetch(this.resolved, fetchOpts).then(res => {
+      res.body.on('error',
+        /* istanbul ignore next - exceedingly rare and hard to simulate */
+        er => stream.emit('error', er)
+      )
+
+      res.body.on('integrity', i => {
+        this.integrity = i
+        stream.emit('integrity', i)
+      })
+
+      res.body.pipe(stream)
+    }).catch(er => stream.emit('error', er))
+
+    return stream
+  }
+
+  #headers () {
+    return {
+      // npm will override this, but ensure that we always send *something*
+      'user-agent': this.opts.userAgent ||
+        `pacote/${pacoteVersion} node/${process.version}`,
+      ...(this.opts.headers || {}),
+      'pacote-version': pacoteVersion,
+      'pacote-req-type': 'tarball',
+      'pacote-pkg-id': this.pkgid,
+      ...(this.integrity ? { 'pacote-integrity': String(this.integrity) }
+      : {}),
+    }
+  }
+
+  get types () {
+    return ['remote']
+  }
+
+  // getting a packument and/or manifest is the same as with a file: spec.
+  // unpack the tarball stream, and then read from the package.json file.
+  packument () {
+    return FileFetcher.prototype.packument.apply(this)
+  }
+
+  manifest () {
+    return FileFetcher.prototype.manifest.apply(this)
+  }
+}
+module.exports = RemoteFetcher
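+
+// Example (sketch): a remote spec is just a tarball URL, e.g.
+//   await pacote.manifest('https://example.com/pkgs/foo-1.0.0.tgz')
+// which extracts to a temp dir and reads package.json, as with file: specs.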
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/util/add-git-sha.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/util/add-git-sha.js
new file mode 100644
index 0000000000000000000000000000000000000000..843fe5b600cafa783d8473d4de288fa5bc1e398e
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/util/add-git-sha.js
@@ -0,0 +1,15 @@
+// add a sha to a git remote url spec
+const addGitSha = (spec, sha) => {
+  if (spec.hosted) {
+    const h = spec.hosted
+    const opt = { noCommittish: true }
+    const base = h.https && h.auth ? h.https(opt) : h.shortcut(opt)
+
+    return `${base}#${sha}`
+  } else {
+    // don't use new URL for this, because it doesn't handle scp urls
+    return spec.rawSpec.replace(/#.*$/, '') + `#${sha}`
+  }
+}
+
+module.exports = addGitSha
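+
+// e.g. (sketch): for a hosted spec like github:user/repo#main and sha 'abc',
+// this yields 'github:user/repo#abc'; for a non-hosted git url, the existing
+// committish after '#' is simply replaced.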
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/util/cache-dir.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/util/cache-dir.js
new file mode 100644
index 0000000000000000000000000000000000000000..ba5683a7bb5bf3b255c3eb658f1688fefbdcdd84
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/util/cache-dir.js
@@ -0,0 +1,15 @@
+const { resolve } = require('node:path')
+const { tmpdir, homedir } = require('node:os')
+
+module.exports = (fakePlatform = false) => {
+  const temp = tmpdir()
+  const uidOrPid = process.getuid ? process.getuid() : process.pid
+  const home = homedir() || resolve(temp, 'npm-' + uidOrPid)
+  const platform = fakePlatform || process.platform
+  const cacheExtra = platform === 'win32' ? 'npm-cache' : '.npm'
+  const cacheRoot = (platform === 'win32' && process.env.LOCALAPPDATA) || home
+  return {
+    cacache: resolve(cacheRoot, cacheExtra, '_cacache'),
+    tufcache: resolve(cacheRoot, cacheExtra, '_tuf'),
+  }
+}
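+
+// e.g. (sketch): on Linux this resolves to ~/.npm/_cacache and ~/.npm/_tuf;
+// on win32 with LOCALAPPDATA set, to %LOCALAPPDATA%\npm-cache\_cacache and
+// %LOCALAPPDATA%\npm-cache\_tuf.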
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/util/is-package-bin.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/util/is-package-bin.js
new file mode 100644
index 0000000000000000000000000000000000000000..49a3f73f537ce96ec85142308a23165e504cdb9e
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/util/is-package-bin.js
@@ -0,0 +1,25 @@
+// Function to determine whether a path is in the package.bin set.
+// Used to prevent issues when people publish a package from a
+// windows machine, and then install with --no-bin-links.
+//
+// Note: this is not possible in remote or file fetchers, since
+// we don't have the manifest until AFTER we've unpacked.  But the
+// main use case is registry fetching with git a distant second,
+// so that's an acceptable edge case to not handle.
+
+const binObj = (name, bin) =>
+  typeof bin === 'string' ? { [name]: bin } : bin
+
+const hasBin = (pkg, path) => {
+  const bin = binObj(pkg.name, pkg.bin)
+  const p = path.replace(/^[^\\/]*\//, '')
+  for (const kv of Object.entries(bin)) {
+    if (kv[1] === p) {
+      return true
+    }
+  }
+  return false
+}
+
+module.exports = (pkg, path) =>
+  pkg && pkg.bin ? hasBin(pkg, path) : false
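+
+// e.g. (sketch): for pkg { name: 'foo', bin: 'cli.js' } and tarball entry
+// path 'package/cli.js', the leading 'package/' segment is stripped and the
+// lookup matches, so this returns true.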
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/util/npm.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/util/npm.js
new file mode 100644
index 0000000000000000000000000000000000000000..a3005c255565fbe7d73385ed73d2bb101544415c
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/util/npm.js
@@ -0,0 +1,14 @@
+// run an npm command
+const spawn = require('@npmcli/promise-spawn')
+
+module.exports = (npmBin, npmCommand, cwd, env, extra) => {
+  const isJS = npmBin.endsWith('.js')
+  const cmd = isJS ? process.execPath : npmBin
+  const args = (isJS ? [npmBin] : []).concat(npmCommand)
+  // when installing to run the `prepare` script for a git dep, we need
+  // to ensure that we don't run into a cycle of checking out packages
+  // in temp directories.  this lets us link previously-seen repos that
+  // are also being prepared.
+
+  return spawn(cmd, args, { cwd, env }, extra)
+}
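
A usage sketch with hypothetical paths: a `.js` entry point is run through the current node executable, anything else is spawned directly:

const runNpm = require('./npm.js')

// spawns: <process.execPath> /path/to/npm-cli.js install --no-audit
runNpm('/path/to/npm-cli.js', ['install', '--no-audit'], '/tmp/pkg', process.env)
  .then(res => console.log(res.code))
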
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/util/protected.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/util/protected.js
new file mode 100644
index 0000000000000000000000000000000000000000..e05203b481e6aa0ea591df398f3745510ff30495
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/util/protected.js
@@ -0,0 +1,5 @@
+module.exports = {
+  cacheFetches: Symbol.for('pacote.Fetcher._cacheFetches'),
+  readPackageJson: Symbol.for('package.Fetcher._readPackageJson'),
+  tarballFromResolved: Symbol.for('pacote.Fetcher._tarballFromResolved'),
+}
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/util/tar-create-options.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/util/tar-create-options.js
new file mode 100644
index 0000000000000000000000000000000000000000..d070f0f7ba2d4eb49d20383b594f0df1811655e9
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/util/tar-create-options.js
@@ -0,0 +1,31 @@
+const isPackageBin = require('./is-package-bin.js')
+
+const tarCreateOptions = manifest => ({
+  cwd: manifest._resolved,
+  prefix: 'package/',
+  portable: true,
+  gzip: {
+    // forcing the level to 9 seems to avoid some
+    // platform specific optimizations that cause
+    // integrity mismatch errors due to differing
+    // end results after compression
+    level: 9,
+  },
+
+  // ensure that package bins are always executable
+  // Note that npm-packlist is already filtering out
+  // anything that is not a regular file, ignored by
+  // .npmignore or package.json "files", etc.
+  filter: (path, stat) => {
+    if (isPackageBin(manifest, path)) {
+      stat.mode |= 0o111
+    }
+    return true
+  },
+
+  // Provide a specific date in the 1980s for the benefit of zip,
+  // which is confounded by files dated at the Unix epoch 0.
+  mtime: new Date('1985-10-26T08:15:00.000Z'),
+})
+
+module.exports = tarCreateOptions
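
These options are meant to be handed to tar.c() so that packing the same tree is reproducible across platforms (fixed mtime, level-9 gzip, executable bins). A sketch, with a hypothetical manifest:

const tar = require('tar')
const tarCreateOptions = require('./tar-create-options.js')

const manifest = { name: 'demo', bin: { demo: 'bin/run.js' }, _resolved: '/path/to/pkg' }
// with no `file` option, tar.c() returns a stream of the gzipped tarball;
// the bin entry comes out with the executable bits set
const stream = tar.c(tarCreateOptions(manifest), ['bin', 'package.json'])
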
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/util/trailing-slashes.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/util/trailing-slashes.js
new file mode 100644
index 0000000000000000000000000000000000000000..c50cb6173b92eb44ed6f57f339cef9b91830c4f4
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/pacote/lib/util/trailing-slashes.js
@@ -0,0 +1,10 @@
+const removeTrailingSlashes = (input) => {
+  // trim with a loop instead of a regexp such as /\/+$/, so ReDoS scanners have nothing to flag
+  let output = input
+  while (output.endsWith('/')) {
+    output = output.slice(0, -1)
+  }
+  return output
+}
+
+module.exports = removeTrailingSlashes
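
Behaviorally this is equivalent to replacing a trailing run of slashes, just without the regexp:

const removeTrailingSlashes = require('./trailing-slashes.js')

removeTrailingSlashes('https://registry.example.com/pkg///') // 'https://registry.example.com/pkg'
removeTrailingSlashes('no-slashes')                          // unchanged
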
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/parse-conflict-json/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/parse-conflict-json/lib/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..21b295d04b902c21ff40137e19f473f7f2deef79
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/parse-conflict-json/lib/index.js
@@ -0,0 +1,104 @@
+const parseJSON = require('json-parse-even-better-errors')
+const { diff } = require('just-diff')
+const { diffApply } = require('just-diff-apply')
+
+const globalObjectProperties = Object.getOwnPropertyNames(Object.prototype)
+
+const stripBOM = content => {
+  content = content.toString()
+  // Remove byte order marker. This catches EF BB BF (the UTF-8 BOM)
+  // because the buffer-to-string conversion in `fs.readFileSync()`
+  // translates it to FEFF, the UTF-16 BOM.
+  if (content.charCodeAt(0) === 0xFEFF) {
+    content = content.slice(1)
+  }
+  return content
+}
+
+const PARENT_RE = /\|{7,}/g
+const OURS_RE = /<{7,}/g
+const THEIRS_RE = /={7,}/g
+const END_RE = />{7,}/g
+
+const isDiff = str =>
+  str.match(OURS_RE) && str.match(THEIRS_RE) && str.match(END_RE)
+
+const parseConflictJSON = (str, reviver, prefer) => {
+  prefer = prefer || 'ours'
+  if (prefer !== 'theirs' && prefer !== 'ours') {
+    throw new TypeError('prefer param must be "ours" or "theirs" if set')
+  }
+
+  str = stripBOM(str)
+
+  if (!isDiff(str)) {
+    return parseJSON(str)
+  }
+
+  const pieces = str.split(/[\n\r]+/g).reduce((acc, line) => {
+    if (line.match(PARENT_RE)) {
+      acc.state = 'parent'
+    } else if (line.match(OURS_RE)) {
+      acc.state = 'ours'
+    } else if (line.match(THEIRS_RE)) {
+      acc.state = 'theirs'
+    } else if (line.match(END_RE)) {
+      acc.state = 'top'
+    } else {
+      if (acc.state === 'top' || acc.state === 'ours') {
+        acc.ours += line
+      }
+      if (acc.state === 'top' || acc.state === 'theirs') {
+        acc.theirs += line
+      }
+      if (acc.state === 'top' || acc.state === 'parent') {
+        acc.parent += line
+      }
+    }
+    return acc
+  }, {
+    state: 'top',
+    ours: '',
+    theirs: '',
+    parent: '',
+  })
+
+  // this will throw if any of the three pieces is not valid JSON; that's intended
+  const parent = parseJSON(pieces.parent, reviver)
+  const ours = parseJSON(pieces.ours, reviver)
+  const theirs = parseJSON(pieces.theirs, reviver)
+
+  return prefer === 'ours'
+    ? resolve(parent, ours, theirs)
+    : resolve(parent, theirs, ours)
+}
+
+const isObj = obj => obj && typeof obj === 'object'
+
+const copyPath = (to, from, path, i) => {
+  const p = path[i]
+  if (isObj(to[p]) && isObj(from[p]) &&
+      Array.isArray(to[p]) === Array.isArray(from[p])) {
+    return copyPath(to[p], from[p], path, i + 1)
+  }
+  to[p] = from[p]
+}
+
+// compute the diff from parent->ours, then apply our changes on top of theirs.
+// If applying a change fails (e.g. an object was turned into a non-object),
+// copy our value in wholesale at that path.
+const resolve = (parent, ours, theirs) => {
+  const dours = diff(parent, ours)
+  for (let i = 0; i < dours.length; i++) {
+    if (globalObjectProperties.find(prop => dours[i].path.includes(prop))) {
+      continue
+    }
+    try {
+      diffApply(theirs, [dours[i]])
+    } catch (e) {
+      copyPath(theirs, ours, dours[i].path, 0)
+    }
+  }
+  return theirs
+}
+
+module.exports = Object.assign(parseConflictJSON, { isDiff })
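
A usage sketch resolving a merge-conflicted JSON file; each side must parse as valid JSON once the conflict markers are split out:

const parseConflictJSON = require('parse-conflict-json')

const conflicted = [
  '{',
  '<<<<<<< ours',
  '  "version": "1.0.1"',
  '=======',
  '  "version": "1.1.0"',
  '>>>>>>> theirs',
  '}',
].join('\n')

parseConflictJSON.isDiff(conflicted)          // true
parseConflictJSON(conflicted)                 // { version: '1.0.1' } (prefer defaults to 'ours')
parseConflictJSON(conflicted, null, 'theirs') // { version: '1.1.0' }
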
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..995741a7ff3bef1b8942a0a279516684e5e54819
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/index.js
@@ -0,0 +1,17 @@
+"use strict";
+
+exports.__esModule = true;
+exports["default"] = void 0;
+var _processor = _interopRequireDefault(require("./processor"));
+var selectors = _interopRequireWildcard(require("./selectors"));
+function _getRequireWildcardCache(nodeInterop) { if (typeof WeakMap !== "function") return null; var cacheBabelInterop = new WeakMap(); var cacheNodeInterop = new WeakMap(); return (_getRequireWildcardCache = function _getRequireWildcardCache(nodeInterop) { return nodeInterop ? cacheNodeInterop : cacheBabelInterop; })(nodeInterop); }
+function _interopRequireWildcard(obj, nodeInterop) { if (!nodeInterop && obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { "default": obj }; } var cache = _getRequireWildcardCache(nodeInterop); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj["default"] = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
+var parser = function parser(processor) {
+  return new _processor["default"](processor);
+};
+Object.assign(parser, selectors);
+delete parser.__esModule;
+var _default = parser;
+exports["default"] = _default;
+module.exports = exports.default;
\ No newline at end of file
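
A usage sketch of the public API this entry point wires up: parser(fn) builds a Processor whose callback receives the parsed selector AST:

const parser = require('postcss-selector-parser')

const out = parser(selectors => {
  selectors.walkClasses(cls => { cls.value = 'x-' + cls.value })
}).processSync('.btn, .card')
// out === '.x-btn, .x-card'
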
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/parser.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/parser.js
new file mode 100644
index 0000000000000000000000000000000000000000..ada61582777805359a7da8daaa6d6f5b6b6d556a
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/parser.js
@@ -0,0 +1,1015 @@
+"use strict";
+
+exports.__esModule = true;
+exports["default"] = void 0;
+var _root = _interopRequireDefault(require("./selectors/root"));
+var _selector = _interopRequireDefault(require("./selectors/selector"));
+var _className = _interopRequireDefault(require("./selectors/className"));
+var _comment = _interopRequireDefault(require("./selectors/comment"));
+var _id = _interopRequireDefault(require("./selectors/id"));
+var _tag = _interopRequireDefault(require("./selectors/tag"));
+var _string = _interopRequireDefault(require("./selectors/string"));
+var _pseudo = _interopRequireDefault(require("./selectors/pseudo"));
+var _attribute = _interopRequireWildcard(require("./selectors/attribute"));
+var _universal = _interopRequireDefault(require("./selectors/universal"));
+var _combinator = _interopRequireDefault(require("./selectors/combinator"));
+var _nesting = _interopRequireDefault(require("./selectors/nesting"));
+var _sortAscending = _interopRequireDefault(require("./sortAscending"));
+var _tokenize = _interopRequireWildcard(require("./tokenize"));
+var tokens = _interopRequireWildcard(require("./tokenTypes"));
+var types = _interopRequireWildcard(require("./selectors/types"));
+var _util = require("./util");
+var _WHITESPACE_TOKENS, _Object$assign;
+function _getRequireWildcardCache(nodeInterop) { if (typeof WeakMap !== "function") return null; var cacheBabelInterop = new WeakMap(); var cacheNodeInterop = new WeakMap(); return (_getRequireWildcardCache = function _getRequireWildcardCache(nodeInterop) { return nodeInterop ? cacheNodeInterop : cacheBabelInterop; })(nodeInterop); }
+function _interopRequireWildcard(obj, nodeInterop) { if (!nodeInterop && obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { "default": obj }; } var cache = _getRequireWildcardCache(nodeInterop); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj["default"] = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
+function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); Object.defineProperty(Constructor, "prototype", { writable: false }); return Constructor; }
+var WHITESPACE_TOKENS = (_WHITESPACE_TOKENS = {}, _WHITESPACE_TOKENS[tokens.space] = true, _WHITESPACE_TOKENS[tokens.cr] = true, _WHITESPACE_TOKENS[tokens.feed] = true, _WHITESPACE_TOKENS[tokens.newline] = true, _WHITESPACE_TOKENS[tokens.tab] = true, _WHITESPACE_TOKENS);
+var WHITESPACE_EQUIV_TOKENS = Object.assign({}, WHITESPACE_TOKENS, (_Object$assign = {}, _Object$assign[tokens.comment] = true, _Object$assign));
+function tokenStart(token) {
+  return {
+    line: token[_tokenize.FIELDS.START_LINE],
+    column: token[_tokenize.FIELDS.START_COL]
+  };
+}
+function tokenEnd(token) {
+  return {
+    line: token[_tokenize.FIELDS.END_LINE],
+    column: token[_tokenize.FIELDS.END_COL]
+  };
+}
+function getSource(startLine, startColumn, endLine, endColumn) {
+  return {
+    start: {
+      line: startLine,
+      column: startColumn
+    },
+    end: {
+      line: endLine,
+      column: endColumn
+    }
+  };
+}
+function getTokenSource(token) {
+  return getSource(token[_tokenize.FIELDS.START_LINE], token[_tokenize.FIELDS.START_COL], token[_tokenize.FIELDS.END_LINE], token[_tokenize.FIELDS.END_COL]);
+}
+function getTokenSourceSpan(startToken, endToken) {
+  if (!startToken) {
+    return undefined;
+  }
+  return getSource(startToken[_tokenize.FIELDS.START_LINE], startToken[_tokenize.FIELDS.START_COL], endToken[_tokenize.FIELDS.END_LINE], endToken[_tokenize.FIELDS.END_COL]);
+}
+function unescapeProp(node, prop) {
+  var value = node[prop];
+  if (typeof value !== "string") {
+    return;
+  }
+  if (value.indexOf("\\") !== -1) {
+    (0, _util.ensureObject)(node, 'raws');
+    node[prop] = (0, _util.unesc)(value);
+    if (node.raws[prop] === undefined) {
+      node.raws[prop] = value;
+    }
+  }
+  return node;
+}
+function indexesOf(array, item) {
+  var i = -1;
+  var indexes = [];
+  while ((i = array.indexOf(item, i + 1)) !== -1) {
+    indexes.push(i);
+  }
+  return indexes;
+}
+function uniqs() {
+  var list = Array.prototype.concat.apply([], arguments);
+  return list.filter(function (item, i) {
+    return i === list.indexOf(item);
+  });
+}
+var Parser = /*#__PURE__*/function () {
+  function Parser(rule, options) {
+    if (options === void 0) {
+      options = {};
+    }
+    this.rule = rule;
+    this.options = Object.assign({
+      lossy: false,
+      safe: false
+    }, options);
+    this.position = 0;
+    this.css = typeof this.rule === 'string' ? this.rule : this.rule.selector;
+    this.tokens = (0, _tokenize["default"])({
+      css: this.css,
+      error: this._errorGenerator(),
+      safe: this.options.safe
+    });
+    var rootSource = getTokenSourceSpan(this.tokens[0], this.tokens[this.tokens.length - 1]);
+    this.root = new _root["default"]({
+      source: rootSource
+    });
+    this.root.errorGenerator = this._errorGenerator();
+    var selector = new _selector["default"]({
+      source: {
+        start: {
+          line: 1,
+          column: 1
+        }
+      },
+      sourceIndex: 0
+    });
+    this.root.append(selector);
+    this.current = selector;
+    this.loop();
+  }
+  var _proto = Parser.prototype;
+  _proto._errorGenerator = function _errorGenerator() {
+    var _this = this;
+    return function (message, errorOptions) {
+      if (typeof _this.rule === 'string') {
+        return new Error(message);
+      }
+      return _this.rule.error(message, errorOptions);
+    };
+  };
+  _proto.attribute = function attribute() {
+    var attr = [];
+    var startingToken = this.currToken;
+    this.position++;
+    while (this.position < this.tokens.length && this.currToken[_tokenize.FIELDS.TYPE] !== tokens.closeSquare) {
+      attr.push(this.currToken);
+      this.position++;
+    }
+    if (this.currToken[_tokenize.FIELDS.TYPE] !== tokens.closeSquare) {
+      return this.expected('closing square bracket', this.currToken[_tokenize.FIELDS.START_POS]);
+    }
+    var len = attr.length;
+    var node = {
+      source: getSource(startingToken[1], startingToken[2], this.currToken[3], this.currToken[4]),
+      sourceIndex: startingToken[_tokenize.FIELDS.START_POS]
+    };
+    if (len === 1 && !~[tokens.word].indexOf(attr[0][_tokenize.FIELDS.TYPE])) {
+      return this.expected('attribute', attr[0][_tokenize.FIELDS.START_POS]);
+    }
+    var pos = 0;
+    var spaceBefore = '';
+    var commentBefore = '';
+    var lastAdded = null;
+    var spaceAfterMeaningfulToken = false;
+    while (pos < len) {
+      var token = attr[pos];
+      var content = this.content(token);
+      var next = attr[pos + 1];
+      switch (token[_tokenize.FIELDS.TYPE]) {
+        case tokens.space:
+          // if (
+          //     len === 1 ||
+          //     pos === 0 && this.content(next) === '|'
+          // ) {
+          //     return this.expected('attribute', token[TOKEN.START_POS], content);
+          // }
+          spaceAfterMeaningfulToken = true;
+          if (this.options.lossy) {
+            break;
+          }
+          if (lastAdded) {
+            (0, _util.ensureObject)(node, 'spaces', lastAdded);
+            var prevContent = node.spaces[lastAdded].after || '';
+            node.spaces[lastAdded].after = prevContent + content;
+            var existingComment = (0, _util.getProp)(node, 'raws', 'spaces', lastAdded, 'after') || null;
+            if (existingComment) {
+              node.raws.spaces[lastAdded].after = existingComment + content;
+            }
+          } else {
+            spaceBefore = spaceBefore + content;
+            commentBefore = commentBefore + content;
+          }
+          break;
+        case tokens.asterisk:
+          if (next[_tokenize.FIELDS.TYPE] === tokens.equals) {
+            node.operator = content;
+            lastAdded = 'operator';
+          } else if ((!node.namespace || lastAdded === "namespace" && !spaceAfterMeaningfulToken) && next) {
+            if (spaceBefore) {
+              (0, _util.ensureObject)(node, 'spaces', 'attribute');
+              node.spaces.attribute.before = spaceBefore;
+              spaceBefore = '';
+            }
+            if (commentBefore) {
+              (0, _util.ensureObject)(node, 'raws', 'spaces', 'attribute');
+              node.raws.spaces.attribute.before = commentBefore;
+              commentBefore = '';
+            }
+            node.namespace = (node.namespace || "") + content;
+            var rawValue = (0, _util.getProp)(node, 'raws', 'namespace') || null;
+            if (rawValue) {
+              node.raws.namespace += content;
+            }
+            lastAdded = 'namespace';
+          }
+          spaceAfterMeaningfulToken = false;
+          break;
+        case tokens.dollar:
+          if (lastAdded === "value") {
+            var oldRawValue = (0, _util.getProp)(node, 'raws', 'value');
+            node.value += "$";
+            if (oldRawValue) {
+              node.raws.value = oldRawValue + "$";
+            }
+            break;
+          }
+        // Falls through
+        case tokens.caret:
+          if (next[_tokenize.FIELDS.TYPE] === tokens.equals) {
+            node.operator = content;
+            lastAdded = 'operator';
+          }
+          spaceAfterMeaningfulToken = false;
+          break;
+        case tokens.combinator:
+          if (content === '~' && next[_tokenize.FIELDS.TYPE] === tokens.equals) {
+            node.operator = content;
+            lastAdded = 'operator';
+          }
+          if (content !== '|') {
+            spaceAfterMeaningfulToken = false;
+            break;
+          }
+          if (next[_tokenize.FIELDS.TYPE] === tokens.equals) {
+            node.operator = content;
+            lastAdded = 'operator';
+          } else if (!node.namespace && !node.attribute) {
+            node.namespace = true;
+          }
+          spaceAfterMeaningfulToken = false;
+          break;
+        case tokens.word:
+          if (next && this.content(next) === '|' && attr[pos + 2] && attr[pos + 2][_tokenize.FIELDS.TYPE] !== tokens.equals &&
+          // this look-ahead probably fails with comment nodes involved.
+          !node.operator && !node.namespace) {
+            node.namespace = content;
+            lastAdded = 'namespace';
+          } else if (!node.attribute || lastAdded === "attribute" && !spaceAfterMeaningfulToken) {
+            if (spaceBefore) {
+              (0, _util.ensureObject)(node, 'spaces', 'attribute');
+              node.spaces.attribute.before = spaceBefore;
+              spaceBefore = '';
+            }
+            if (commentBefore) {
+              (0, _util.ensureObject)(node, 'raws', 'spaces', 'attribute');
+              node.raws.spaces.attribute.before = commentBefore;
+              commentBefore = '';
+            }
+            node.attribute = (node.attribute || "") + content;
+            var _rawValue = (0, _util.getProp)(node, 'raws', 'attribute') || null;
+            if (_rawValue) {
+              node.raws.attribute += content;
+            }
+            lastAdded = 'attribute';
+          } else if (!node.value && node.value !== "" || lastAdded === "value" && !(spaceAfterMeaningfulToken || node.quoteMark)) {
+            var _unescaped = (0, _util.unesc)(content);
+            var _oldRawValue = (0, _util.getProp)(node, 'raws', 'value') || '';
+            var oldValue = node.value || '';
+            node.value = oldValue + _unescaped;
+            node.quoteMark = null;
+            if (_unescaped !== content || _oldRawValue) {
+              (0, _util.ensureObject)(node, 'raws');
+              node.raws.value = (_oldRawValue || oldValue) + content;
+            }
+            lastAdded = 'value';
+          } else {
+            var insensitive = content === 'i' || content === "I";
+            if ((node.value || node.value === '') && (node.quoteMark || spaceAfterMeaningfulToken)) {
+              node.insensitive = insensitive;
+              if (!insensitive || content === "I") {
+                (0, _util.ensureObject)(node, 'raws');
+                node.raws.insensitiveFlag = content;
+              }
+              lastAdded = 'insensitive';
+              if (spaceBefore) {
+                (0, _util.ensureObject)(node, 'spaces', 'insensitive');
+                node.spaces.insensitive.before = spaceBefore;
+                spaceBefore = '';
+              }
+              if (commentBefore) {
+                (0, _util.ensureObject)(node, 'raws', 'spaces', 'insensitive');
+                node.raws.spaces.insensitive.before = commentBefore;
+                commentBefore = '';
+              }
+            } else if (node.value || node.value === '') {
+              lastAdded = 'value';
+              node.value += content;
+              if (node.raws.value) {
+                node.raws.value += content;
+              }
+            }
+          }
+          spaceAfterMeaningfulToken = false;
+          break;
+        case tokens.str:
+          if (!node.attribute || !node.operator) {
+            return this.error("Expected an attribute followed by an operator preceding the string.", {
+              index: token[_tokenize.FIELDS.START_POS]
+            });
+          }
+          var _unescapeValue = (0, _attribute.unescapeValue)(content),
+            unescaped = _unescapeValue.unescaped,
+            quoteMark = _unescapeValue.quoteMark;
+          node.value = unescaped;
+          node.quoteMark = quoteMark;
+          lastAdded = 'value';
+          (0, _util.ensureObject)(node, 'raws');
+          node.raws.value = content;
+          spaceAfterMeaningfulToken = false;
+          break;
+        case tokens.equals:
+          if (!node.attribute) {
+            return this.expected('attribute', token[_tokenize.FIELDS.START_POS], content);
+          }
+          if (node.value) {
+            return this.error('Unexpected "=" found; an operator was already defined.', {
+              index: token[_tokenize.FIELDS.START_POS]
+            });
+          }
+          node.operator = node.operator ? node.operator + content : content;
+          lastAdded = 'operator';
+          spaceAfterMeaningfulToken = false;
+          break;
+        case tokens.comment:
+          if (lastAdded) {
+            if (spaceAfterMeaningfulToken || next && next[_tokenize.FIELDS.TYPE] === tokens.space || lastAdded === 'insensitive') {
+              var lastComment = (0, _util.getProp)(node, 'spaces', lastAdded, 'after') || '';
+              var rawLastComment = (0, _util.getProp)(node, 'raws', 'spaces', lastAdded, 'after') || lastComment;
+              (0, _util.ensureObject)(node, 'raws', 'spaces', lastAdded);
+              node.raws.spaces[lastAdded].after = rawLastComment + content;
+            } else {
+              var lastValue = node[lastAdded] || '';
+              var rawLastValue = (0, _util.getProp)(node, 'raws', lastAdded) || lastValue;
+              (0, _util.ensureObject)(node, 'raws');
+              node.raws[lastAdded] = rawLastValue + content;
+            }
+          } else {
+            commentBefore = commentBefore + content;
+          }
+          break;
+        default:
+          return this.error("Unexpected \"" + content + "\" found.", {
+            index: token[_tokenize.FIELDS.START_POS]
+          });
+      }
+      pos++;
+    }
+    unescapeProp(node, "attribute");
+    unescapeProp(node, "namespace");
+    this.newNode(new _attribute["default"](node));
+    this.position++;
+  }
+
+  /**
+   * consume tokens that carry no selector meaning (whitespace and comments) up to
+   * (but not including) the specified token position.
+   * if the token position is negative, all remaining tokens are consumed.
+   *
+   * This returns an array containing a single string node if all whitespace,
+   * otherwise an array of comment nodes with space before and after.
+   *
+   * These tokens are not added to the current selector, the caller can add them or use them to amend
+   * a previous node's space metadata.
+   *
+   * In lossy mode, this returns only comments.
+   */;
+  _proto.parseWhitespaceEquivalentTokens = function parseWhitespaceEquivalentTokens(stopPosition) {
+    if (stopPosition < 0) {
+      stopPosition = this.tokens.length;
+    }
+    var startPosition = this.position;
+    var nodes = [];
+    var space = "";
+    var lastComment = undefined;
+    do {
+      if (WHITESPACE_TOKENS[this.currToken[_tokenize.FIELDS.TYPE]]) {
+        if (!this.options.lossy) {
+          space += this.content();
+        }
+      } else if (this.currToken[_tokenize.FIELDS.TYPE] === tokens.comment) {
+        var spaces = {};
+        if (space) {
+          spaces.before = space;
+          space = "";
+        }
+        lastComment = new _comment["default"]({
+          value: this.content(),
+          source: getTokenSource(this.currToken),
+          sourceIndex: this.currToken[_tokenize.FIELDS.START_POS],
+          spaces: spaces
+        });
+        nodes.push(lastComment);
+      }
+    } while (++this.position < stopPosition);
+    if (space) {
+      if (lastComment) {
+        lastComment.spaces.after = space;
+      } else if (!this.options.lossy) {
+        var firstToken = this.tokens[startPosition];
+        var lastToken = this.tokens[this.position - 1];
+        nodes.push(new _string["default"]({
+          value: '',
+          source: getSource(firstToken[_tokenize.FIELDS.START_LINE], firstToken[_tokenize.FIELDS.START_COL], lastToken[_tokenize.FIELDS.END_LINE], lastToken[_tokenize.FIELDS.END_COL]),
+          sourceIndex: firstToken[_tokenize.FIELDS.START_POS],
+          spaces: {
+            before: space,
+            after: ''
+          }
+        }));
+      }
+    }
+    return nodes;
+  }
+
+  /**
+   * Collapse whitespace-equivalent nodes into a normalized space string,
+   * plus a rawSpace string when the raw text (comments included) differs.
+   *
+   * @param {*} nodes nodes produced by parseWhitespaceEquivalentTokens
+   * @param {boolean} requiredSpace when true, lossy mode still emits a single space
+   */;
+  _proto.convertWhitespaceNodesToSpace = function convertWhitespaceNodesToSpace(nodes, requiredSpace) {
+    var _this2 = this;
+    if (requiredSpace === void 0) {
+      requiredSpace = false;
+    }
+    var space = "";
+    var rawSpace = "";
+    nodes.forEach(function (n) {
+      var spaceBefore = _this2.lossySpace(n.spaces.before, requiredSpace);
+      var rawSpaceBefore = _this2.lossySpace(n.rawSpaceBefore, requiredSpace);
+      space += spaceBefore + _this2.lossySpace(n.spaces.after, requiredSpace && spaceBefore.length === 0);
+      rawSpace += spaceBefore + n.value + _this2.lossySpace(n.rawSpaceAfter, requiredSpace && rawSpaceBefore.length === 0);
+    });
+    if (rawSpace === space) {
+      rawSpace = undefined;
+    }
+    var result = {
+      space: space,
+      rawSpace: rawSpace
+    };
+    return result;
+  };
+  _proto.isNamedCombinator = function isNamedCombinator(position) {
+    if (position === void 0) {
+      position = this.position;
+    }
+    return this.tokens[position + 0] && this.tokens[position + 0][_tokenize.FIELDS.TYPE] === tokens.slash && this.tokens[position + 1] && this.tokens[position + 1][_tokenize.FIELDS.TYPE] === tokens.word && this.tokens[position + 2] && this.tokens[position + 2][_tokenize.FIELDS.TYPE] === tokens.slash;
+  };
+  _proto.namedCombinator = function namedCombinator() {
+    if (this.isNamedCombinator()) {
+      var nameRaw = this.content(this.tokens[this.position + 1]);
+      var name = (0, _util.unesc)(nameRaw).toLowerCase();
+      var raws = {};
+      if (name !== nameRaw) {
+        raws.value = "/" + nameRaw + "/";
+      }
+      var node = new _combinator["default"]({
+        value: "/" + name + "/",
+        source: getSource(this.currToken[_tokenize.FIELDS.START_LINE], this.currToken[_tokenize.FIELDS.START_COL], this.tokens[this.position + 2][_tokenize.FIELDS.END_LINE], this.tokens[this.position + 2][_tokenize.FIELDS.END_COL]),
+        sourceIndex: this.currToken[_tokenize.FIELDS.START_POS],
+        raws: raws
+      });
+      this.position = this.position + 3;
+      return node;
+    } else {
+      this.unexpected();
+    }
+  };
+  _proto.combinator = function combinator() {
+    var _this3 = this;
+    if (this.content() === '|') {
+      return this.namespace();
+    }
+    // We need to decide between a space that's a descendant combinator and meaningless whitespace at the end of a selector.
+    var nextSigTokenPos = this.locateNextMeaningfulToken(this.position);
+    if (nextSigTokenPos < 0 || this.tokens[nextSigTokenPos][_tokenize.FIELDS.TYPE] === tokens.comma || this.tokens[nextSigTokenPos][_tokenize.FIELDS.TYPE] === tokens.closeParenthesis) {
+      var nodes = this.parseWhitespaceEquivalentTokens(nextSigTokenPos);
+      if (nodes.length > 0) {
+        var last = this.current.last;
+        if (last) {
+          var _this$convertWhitespa = this.convertWhitespaceNodesToSpace(nodes),
+            space = _this$convertWhitespa.space,
+            rawSpace = _this$convertWhitespa.rawSpace;
+          if (rawSpace !== undefined) {
+            last.rawSpaceAfter += rawSpace;
+          }
+          last.spaces.after += space;
+        } else {
+          nodes.forEach(function (n) {
+            return _this3.newNode(n);
+          });
+        }
+      }
+      return;
+    }
+    var firstToken = this.currToken;
+    var spaceOrDescendantSelectorNodes = undefined;
+    if (nextSigTokenPos > this.position) {
+      spaceOrDescendantSelectorNodes = this.parseWhitespaceEquivalentTokens(nextSigTokenPos);
+    }
+    var node;
+    if (this.isNamedCombinator()) {
+      node = this.namedCombinator();
+    } else if (this.currToken[_tokenize.FIELDS.TYPE] === tokens.combinator) {
+      node = new _combinator["default"]({
+        value: this.content(),
+        source: getTokenSource(this.currToken),
+        sourceIndex: this.currToken[_tokenize.FIELDS.START_POS]
+      });
+      this.position++;
+    } else if (WHITESPACE_TOKENS[this.currToken[_tokenize.FIELDS.TYPE]]) {
+      // pass
+    } else if (!spaceOrDescendantSelectorNodes) {
+      this.unexpected();
+    }
+    if (node) {
+      if (spaceOrDescendantSelectorNodes) {
+        var _this$convertWhitespa2 = this.convertWhitespaceNodesToSpace(spaceOrDescendantSelectorNodes),
+          _space = _this$convertWhitespa2.space,
+          _rawSpace = _this$convertWhitespa2.rawSpace;
+        node.spaces.before = _space;
+        node.rawSpaceBefore = _rawSpace;
+      }
+    } else {
+      // descendant combinator
+      var _this$convertWhitespa3 = this.convertWhitespaceNodesToSpace(spaceOrDescendantSelectorNodes, true),
+        _space2 = _this$convertWhitespa3.space,
+        _rawSpace2 = _this$convertWhitespa3.rawSpace;
+      if (!_rawSpace2) {
+        _rawSpace2 = _space2;
+      }
+      var spaces = {};
+      var raws = {
+        spaces: {}
+      };
+      if (_space2.endsWith(' ') && _rawSpace2.endsWith(' ')) {
+        spaces.before = _space2.slice(0, _space2.length - 1);
+        raws.spaces.before = _rawSpace2.slice(0, _rawSpace2.length - 1);
+      } else if (_space2.startsWith(' ') && _rawSpace2.startsWith(' ')) {
+        spaces.after = _space2.slice(1);
+        raws.spaces.after = _rawSpace2.slice(1);
+      } else {
+        raws.value = _rawSpace2;
+      }
+      node = new _combinator["default"]({
+        value: ' ',
+        source: getTokenSourceSpan(firstToken, this.tokens[this.position - 1]),
+        sourceIndex: firstToken[_tokenize.FIELDS.START_POS],
+        spaces: spaces,
+        raws: raws
+      });
+    }
+    if (this.currToken && this.currToken[_tokenize.FIELDS.TYPE] === tokens.space) {
+      node.spaces.after = this.optionalSpace(this.content());
+      this.position++;
+    }
+    return this.newNode(node);
+  };
+  _proto.comma = function comma() {
+    if (this.position === this.tokens.length - 1) {
+      this.root.trailingComma = true;
+      this.position++;
+      return;
+    }
+    this.current._inferEndPosition();
+    var selector = new _selector["default"]({
+      source: {
+        start: tokenStart(this.tokens[this.position + 1])
+      },
+      sourceIndex: this.tokens[this.position + 1][_tokenize.FIELDS.START_POS]
+    });
+    this.current.parent.append(selector);
+    this.current = selector;
+    this.position++;
+  };
+  _proto.comment = function comment() {
+    var current = this.currToken;
+    this.newNode(new _comment["default"]({
+      value: this.content(),
+      source: getTokenSource(current),
+      sourceIndex: current[_tokenize.FIELDS.START_POS]
+    }));
+    this.position++;
+  };
+  _proto.error = function error(message, opts) {
+    throw this.root.error(message, opts);
+  };
+  _proto.missingBackslash = function missingBackslash() {
+    return this.error('Expected a backslash preceding the semicolon.', {
+      index: this.currToken[_tokenize.FIELDS.START_POS]
+    });
+  };
+  _proto.missingParenthesis = function missingParenthesis() {
+    return this.expected('opening parenthesis', this.currToken[_tokenize.FIELDS.START_POS]);
+  };
+  _proto.missingSquareBracket = function missingSquareBracket() {
+    return this.expected('opening square bracket', this.currToken[_tokenize.FIELDS.START_POS]);
+  };
+  _proto.unexpected = function unexpected() {
+    return this.error("Unexpected '" + this.content() + "'. Escaping special characters with \\ may help.", this.currToken[_tokenize.FIELDS.START_POS]);
+  };
+  _proto.unexpectedPipe = function unexpectedPipe() {
+    return this.error("Unexpected '|'.", this.currToken[_tokenize.FIELDS.START_POS]);
+  };
+  _proto.namespace = function namespace() {
+    var before = this.prevToken && this.content(this.prevToken) || true;
+    if (this.nextToken[_tokenize.FIELDS.TYPE] === tokens.word) {
+      this.position++;
+      return this.word(before);
+    } else if (this.nextToken[_tokenize.FIELDS.TYPE] === tokens.asterisk) {
+      this.position++;
+      return this.universal(before);
+    }
+    this.unexpectedPipe();
+  };
+  _proto.nesting = function nesting() {
+    if (this.nextToken) {
+      var nextContent = this.content(this.nextToken);
+      if (nextContent === "|") {
+        this.position++;
+        return;
+      }
+    }
+    var current = this.currToken;
+    this.newNode(new _nesting["default"]({
+      value: this.content(),
+      source: getTokenSource(current),
+      sourceIndex: current[_tokenize.FIELDS.START_POS]
+    }));
+    this.position++;
+  };
+  _proto.parentheses = function parentheses() {
+    var last = this.current.last;
+    var unbalanced = 1;
+    this.position++;
+    if (last && last.type === types.PSEUDO) {
+      var selector = new _selector["default"]({
+        source: {
+          start: tokenStart(this.tokens[this.position])
+        },
+        sourceIndex: this.tokens[this.position][_tokenize.FIELDS.START_POS]
+      });
+      var cache = this.current;
+      last.append(selector);
+      this.current = selector;
+      while (this.position < this.tokens.length && unbalanced) {
+        if (this.currToken[_tokenize.FIELDS.TYPE] === tokens.openParenthesis) {
+          unbalanced++;
+        }
+        if (this.currToken[_tokenize.FIELDS.TYPE] === tokens.closeParenthesis) {
+          unbalanced--;
+        }
+        if (unbalanced) {
+          this.parse();
+        } else {
+          this.current.source.end = tokenEnd(this.currToken);
+          this.current.parent.source.end = tokenEnd(this.currToken);
+          this.position++;
+        }
+      }
+      this.current = cache;
+    } else {
+      // I think this case should be an error. It's used to implement a basic parse of media queries
+      // but I don't think it's a good idea.
+      var parenStart = this.currToken;
+      var parenValue = "(";
+      var parenEnd;
+      while (this.position < this.tokens.length && unbalanced) {
+        if (this.currToken[_tokenize.FIELDS.TYPE] === tokens.openParenthesis) {
+          unbalanced++;
+        }
+        if (this.currToken[_tokenize.FIELDS.TYPE] === tokens.closeParenthesis) {
+          unbalanced--;
+        }
+        parenEnd = this.currToken;
+        parenValue += this.parseParenthesisToken(this.currToken);
+        this.position++;
+      }
+      if (last) {
+        last.appendToPropertyAndEscape("value", parenValue, parenValue);
+      } else {
+        this.newNode(new _string["default"]({
+          value: parenValue,
+          source: getSource(parenStart[_tokenize.FIELDS.START_LINE], parenStart[_tokenize.FIELDS.START_COL], parenEnd[_tokenize.FIELDS.END_LINE], parenEnd[_tokenize.FIELDS.END_COL]),
+          sourceIndex: parenStart[_tokenize.FIELDS.START_POS]
+        }));
+      }
+    }
+    if (unbalanced) {
+      return this.expected('closing parenthesis', this.currToken[_tokenize.FIELDS.START_POS]);
+    }
+  };
+  _proto.pseudo = function pseudo() {
+    var _this4 = this;
+    var pseudoStr = '';
+    var startingToken = this.currToken;
+    while (this.currToken && this.currToken[_tokenize.FIELDS.TYPE] === tokens.colon) {
+      pseudoStr += this.content();
+      this.position++;
+    }
+    if (!this.currToken) {
+      return this.expected(['pseudo-class', 'pseudo-element'], this.position - 1);
+    }
+    if (this.currToken[_tokenize.FIELDS.TYPE] === tokens.word) {
+      this.splitWord(false, function (first, length) {
+        pseudoStr += first;
+        _this4.newNode(new _pseudo["default"]({
+          value: pseudoStr,
+          source: getTokenSourceSpan(startingToken, _this4.currToken),
+          sourceIndex: startingToken[_tokenize.FIELDS.START_POS]
+        }));
+        if (length > 1 && _this4.nextToken && _this4.nextToken[_tokenize.FIELDS.TYPE] === tokens.openParenthesis) {
+          _this4.error('Misplaced parenthesis.', {
+            index: _this4.nextToken[_tokenize.FIELDS.START_POS]
+          });
+        }
+      });
+    } else {
+      return this.expected(['pseudo-class', 'pseudo-element'], this.currToken[_tokenize.FIELDS.START_POS]);
+    }
+  };
+  _proto.space = function space() {
+    var content = this.content();
+    // Handle space before and after the selector
+    if (this.position === 0 || this.prevToken[_tokenize.FIELDS.TYPE] === tokens.comma || this.prevToken[_tokenize.FIELDS.TYPE] === tokens.openParenthesis || this.current.nodes.every(function (node) {
+      return node.type === 'comment';
+    })) {
+      this.spaces = this.optionalSpace(content);
+      this.position++;
+    } else if (this.position === this.tokens.length - 1 || this.nextToken[_tokenize.FIELDS.TYPE] === tokens.comma || this.nextToken[_tokenize.FIELDS.TYPE] === tokens.closeParenthesis) {
+      this.current.last.spaces.after = this.optionalSpace(content);
+      this.position++;
+    } else {
+      this.combinator();
+    }
+  };
+  _proto.string = function string() {
+    var current = this.currToken;
+    this.newNode(new _string["default"]({
+      value: this.content(),
+      source: getTokenSource(current),
+      sourceIndex: current[_tokenize.FIELDS.START_POS]
+    }));
+    this.position++;
+  };
+  _proto.universal = function universal(namespace) {
+    var nextToken = this.nextToken;
+    if (nextToken && this.content(nextToken) === '|') {
+      this.position++;
+      return this.namespace();
+    }
+    var current = this.currToken;
+    this.newNode(new _universal["default"]({
+      value: this.content(),
+      source: getTokenSource(current),
+      sourceIndex: current[_tokenize.FIELDS.START_POS]
+    }), namespace);
+    this.position++;
+  };
+  _proto.splitWord = function splitWord(namespace, firstCallback) {
+    var _this5 = this;
+    var nextToken = this.nextToken;
+    var word = this.content();
+    while (nextToken && ~[tokens.dollar, tokens.caret, tokens.equals, tokens.word].indexOf(nextToken[_tokenize.FIELDS.TYPE])) {
+      this.position++;
+      var current = this.content();
+      word += current;
+      if (current.lastIndexOf('\\') === current.length - 1) {
+        var next = this.nextToken;
+        if (next && next[_tokenize.FIELDS.TYPE] === tokens.space) {
+          word += this.requiredSpace(this.content(next));
+          this.position++;
+        }
+      }
+      nextToken = this.nextToken;
+    }
+    var hasClass = indexesOf(word, '.').filter(function (i) {
+      // Allow escaped dot within class name
+      var escapedDot = word[i - 1] === '\\';
+      // Allow decimal numbers percent in @keyframes
+      var isKeyframesPercent = /^\d+\.\d+%$/.test(word);
+      return !escapedDot && !isKeyframesPercent;
+    });
+    var hasId = indexesOf(word, '#').filter(function (i) {
+      return word[i - 1] !== '\\';
+    });
+    // Eliminate Sass interpolations from the list of id indexes
+    var interpolations = indexesOf(word, '#{');
+    if (interpolations.length) {
+      hasId = hasId.filter(function (hashIndex) {
+        return !~interpolations.indexOf(hashIndex);
+      });
+    }
+    var indices = (0, _sortAscending["default"])(uniqs([0].concat(hasClass, hasId)));
+    indices.forEach(function (ind, i) {
+      var index = indices[i + 1] || word.length;
+      var value = word.slice(ind, index);
+      if (i === 0 && firstCallback) {
+        return firstCallback.call(_this5, value, indices.length);
+      }
+      var node;
+      var current = _this5.currToken;
+      var sourceIndex = current[_tokenize.FIELDS.START_POS] + indices[i];
+      var source = getSource(current[1], current[2] + ind, current[3], current[2] + (index - 1));
+      if (~hasClass.indexOf(ind)) {
+        var classNameOpts = {
+          value: value.slice(1),
+          source: source,
+          sourceIndex: sourceIndex
+        };
+        node = new _className["default"](unescapeProp(classNameOpts, "value"));
+      } else if (~hasId.indexOf(ind)) {
+        var idOpts = {
+          value: value.slice(1),
+          source: source,
+          sourceIndex: sourceIndex
+        };
+        node = new _id["default"](unescapeProp(idOpts, "value"));
+      } else {
+        var tagOpts = {
+          value: value,
+          source: source,
+          sourceIndex: sourceIndex
+        };
+        unescapeProp(tagOpts, "value");
+        node = new _tag["default"](tagOpts);
+      }
+      _this5.newNode(node, namespace);
+      // Ensure that the namespace is used only once
+      namespace = null;
+    });
+    this.position++;
+  };
+  _proto.word = function word(namespace) {
+    var nextToken = this.nextToken;
+    if (nextToken && this.content(nextToken) === '|') {
+      this.position++;
+      return this.namespace();
+    }
+    return this.splitWord(namespace);
+  };
+  _proto.loop = function loop() {
+    while (this.position < this.tokens.length) {
+      this.parse(true);
+    }
+    this.current._inferEndPosition();
+    return this.root;
+  };
+  _proto.parse = function parse(throwOnParenthesis) {
+    switch (this.currToken[_tokenize.FIELDS.TYPE]) {
+      case tokens.space:
+        this.space();
+        break;
+      case tokens.comment:
+        this.comment();
+        break;
+      case tokens.openParenthesis:
+        this.parentheses();
+        break;
+      case tokens.closeParenthesis:
+        if (throwOnParenthesis) {
+          this.missingParenthesis();
+        }
+        break;
+      case tokens.openSquare:
+        this.attribute();
+        break;
+      case tokens.dollar:
+      case tokens.caret:
+      case tokens.equals:
+      case tokens.word:
+        this.word();
+        break;
+      case tokens.colon:
+        this.pseudo();
+        break;
+      case tokens.comma:
+        this.comma();
+        break;
+      case tokens.asterisk:
+        this.universal();
+        break;
+      case tokens.ampersand:
+        this.nesting();
+        break;
+      case tokens.slash:
+      case tokens.combinator:
+        this.combinator();
+        break;
+      case tokens.str:
+        this.string();
+        break;
+      // These cases throw; no break needed.
+      case tokens.closeSquare:
+        this.missingSquareBracket();
+      case tokens.semicolon:
+        this.missingBackslash();
+      default:
+        this.unexpected();
+    }
+  }
+
+  /**
+   * Helpers
+   */;
+  _proto.expected = function expected(description, index, found) {
+    if (Array.isArray(description)) {
+      var last = description.pop();
+      description = description.join(', ') + " or " + last;
+    }
+    var an = /^[aeiou]/.test(description[0]) ? 'an' : 'a';
+    if (!found) {
+      return this.error("Expected " + an + " " + description + ".", {
+        index: index
+      });
+    }
+    return this.error("Expected " + an + " " + description + ", found \"" + found + "\" instead.", {
+      index: index
+    });
+  };
+  _proto.requiredSpace = function requiredSpace(space) {
+    return this.options.lossy ? ' ' : space;
+  };
+  _proto.optionalSpace = function optionalSpace(space) {
+    return this.options.lossy ? '' : space;
+  };
+  _proto.lossySpace = function lossySpace(space, required) {
+    if (this.options.lossy) {
+      return required ? ' ' : '';
+    } else {
+      return space;
+    }
+  };
+  _proto.parseParenthesisToken = function parseParenthesisToken(token) {
+    var content = this.content(token);
+    if (token[_tokenize.FIELDS.TYPE] === tokens.space) {
+      return this.requiredSpace(content);
+    } else {
+      return content;
+    }
+  };
+  _proto.newNode = function newNode(node, namespace) {
+    if (namespace) {
+      if (/^ +$/.test(namespace)) {
+        if (!this.options.lossy) {
+          this.spaces = (this.spaces || '') + namespace;
+        }
+        namespace = true;
+      }
+      node.namespace = namespace;
+      unescapeProp(node, "namespace");
+    }
+    if (this.spaces) {
+      node.spaces.before = this.spaces;
+      this.spaces = '';
+    }
+    return this.current.append(node);
+  };
+  _proto.content = function content(token) {
+    if (token === void 0) {
+      token = this.currToken;
+    }
+    return this.css.slice(token[_tokenize.FIELDS.START_POS], token[_tokenize.FIELDS.END_POS]);
+  };
+  /**
+   * returns the index of the next non-whitespace, non-comment token.
+   * returns -1 if no meaningful token is found.
+   */
+  _proto.locateNextMeaningfulToken = function locateNextMeaningfulToken(startPosition) {
+    if (startPosition === void 0) {
+      startPosition = this.position + 1;
+    }
+    var searchPosition = startPosition;
+    while (searchPosition < this.tokens.length) {
+      if (WHITESPACE_EQUIV_TOKENS[this.tokens[searchPosition][_tokenize.FIELDS.TYPE]]) {
+        searchPosition++;
+        continue;
+      } else {
+        return searchPosition;
+      }
+    }
+    return -1;
+  };
+  _createClass(Parser, [{
+    key: "currToken",
+    get: function get() {
+      return this.tokens[this.position];
+    }
+  }, {
+    key: "nextToken",
+    get: function get() {
+      return this.tokens[this.position + 1];
+    }
+  }, {
+    key: "prevToken",
+    get: function get() {
+      return this.tokens[this.position - 1];
+    }
+  }]);
+  return Parser;
+}();
+exports["default"] = Parser;
+module.exports = exports.default;
\ No newline at end of file
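
The Parser is normally driven through the Processor, but constructing one directly shows the token loop at work (a sketch; the deep dist/parser require is for illustration only):

const Parser = require('postcss-selector-parser/dist/parser')

const { root } = new Parser('a[href^="https:"]')
root.first.map(node => node.type) // [ 'tag', 'attribute' ]
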
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/processor.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/processor.js
new file mode 100644
index 0000000000000000000000000000000000000000..dbfa09188e63dbf2894e8dba1a1de2cde6866464
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/processor.js
@@ -0,0 +1,170 @@
+"use strict";
+
+exports.__esModule = true;
+exports["default"] = void 0;
+var _parser = _interopRequireDefault(require("./parser"));
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
+var Processor = /*#__PURE__*/function () {
+  function Processor(func, options) {
+    this.func = func || function noop() {};
+    this.funcRes = null;
+    this.options = options;
+  }
+  var _proto = Processor.prototype;
+  _proto._shouldUpdateSelector = function _shouldUpdateSelector(rule, options) {
+    if (options === void 0) {
+      options = {};
+    }
+    var merged = Object.assign({}, this.options, options);
+    if (merged.updateSelector === false) {
+      return false;
+    } else {
+      return typeof rule !== "string";
+    }
+  };
+  _proto._isLossy = function _isLossy(options) {
+    if (options === void 0) {
+      options = {};
+    }
+    var merged = Object.assign({}, this.options, options);
+    if (merged.lossless === false) {
+      return true;
+    } else {
+      return false;
+    }
+  };
+  _proto._root = function _root(rule, options) {
+    if (options === void 0) {
+      options = {};
+    }
+    var parser = new _parser["default"](rule, this._parseOptions(options));
+    return parser.root;
+  };
+  _proto._parseOptions = function _parseOptions(options) {
+    return {
+      lossy: this._isLossy(options)
+    };
+  };
+  _proto._run = function _run(rule, options) {
+    var _this = this;
+    if (options === void 0) {
+      options = {};
+    }
+    return new Promise(function (resolve, reject) {
+      try {
+        var root = _this._root(rule, options);
+        Promise.resolve(_this.func(root)).then(function (transform) {
+          var string = undefined;
+          if (_this._shouldUpdateSelector(rule, options)) {
+            string = root.toString();
+            rule.selector = string;
+          }
+          return {
+            transform: transform,
+            root: root,
+            string: string
+          };
+        }).then(resolve, reject);
+      } catch (e) {
+        reject(e);
+        return;
+      }
+    });
+  };
+  _proto._runSync = function _runSync(rule, options) {
+    if (options === void 0) {
+      options = {};
+    }
+    var root = this._root(rule, options);
+    var transform = this.func(root);
+    if (transform && typeof transform.then === "function") {
+      throw new Error("Selector processor returned a promise to a synchronous call.");
+    }
+    var string = undefined;
+    if (this._shouldUpdateSelector(rule, options)) {
+      string = root.toString();
+      rule.selector = string;
+    }
+    return {
+      transform: transform,
+      root: root,
+      string: string
+    };
+  }
+
+  /**
+   * Process rule into a selector AST.
+   *
+   * @param rule {postcss.Rule | string} The css selector to be processed
+   * @param options The options for processing
+   * @returns {Promise} The AST of the selector after processing it.
+   */;
+  _proto.ast = function ast(rule, options) {
+    return this._run(rule, options).then(function (result) {
+      return result.root;
+    });
+  }
+
+  /**
+   * Process rule into a selector AST synchronously.
+   *
+   * @param rule {postcss.Rule | string} The css selector to be processed
+   * @param options The options for processing
+   * @returns {parser.Root} The AST of the selector after processing it.
+   */;
+  _proto.astSync = function astSync(rule, options) {
+    return this._runSync(rule, options).root;
+  }
+
+  /**
+   * Process a selector into a transformed value asynchronously
+   *
+   * @param rule {postcss.Rule | string} The css selector to be processed
+   * @param options The options for processing
+   * @returns {Promise} The value returned by the processor.
+   */;
+  _proto.transform = function transform(rule, options) {
+    return this._run(rule, options).then(function (result) {
+      return result.transform;
+    });
+  }
+
+  /**
+   * Process a selector into a transformed value synchronously.
+   *
+   * @param rule {postcss.Rule | string} The css selector to be processed
+   * @param options The options for processing
+   * @returns {any} The value returned by the processor.
+   */;
+  _proto.transformSync = function transformSync(rule, options) {
+    return this._runSync(rule, options).transform;
+  }
+
+  /**
+   * Process a selector into a new selector string asynchronously.
+   *
+   * @param rule {postcss.Rule | string} The css selector to be processed
+   * @param options The options for processing
+   * @returns {string} the selector after processing.
+   */;
+  _proto.process = function process(rule, options) {
+    return this._run(rule, options).then(function (result) {
+      return result.string || result.root.toString();
+    });
+  }
+
+  /**
+   * Process a selector into a new selector string synchronously.
+   *
+   * @param rule {postcss.Rule | string} The css selector to be processed
+   * @param options The options for processing
+   * @returns {string} the selector after processing.
+   */;
+  _proto.processSync = function processSync(rule, options) {
+    var result = this._runSync(rule, options);
+    return result.string || result.root.toString();
+  };
+  return Processor;
+}();
+exports["default"] = Processor;
+module.exports = exports.default;
\ No newline at end of file
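
A sketch of the two result shapes: process/processSync yield a selector string, while transform/transformSync yield whatever the callback returned:

const parser = require('postcss-selector-parser')

parser(root => root.nodes.length).transformSync('.a, .b, .c') // 3
parser().processSync('  .a  ,  .b  ', { lossless: false })    // '.a,.b'
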
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/attribute.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/attribute.js
new file mode 100644
index 0000000000000000000000000000000000000000..0351a22bfa597caa3b991433f56b9b3a73de68d2
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/attribute.js
@@ -0,0 +1,448 @@
+"use strict";
+
+exports.__esModule = true;
+exports["default"] = void 0;
+exports.unescapeValue = unescapeValue;
+var _cssesc = _interopRequireDefault(require("cssesc"));
+var _unesc = _interopRequireDefault(require("../util/unesc"));
+var _namespace = _interopRequireDefault(require("./namespace"));
+var _types = require("./types");
+var _CSSESC_QUOTE_OPTIONS;
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
+function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); Object.defineProperty(Constructor, "prototype", { writable: false }); return Constructor; }
+function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
+function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf ? Object.setPrototypeOf.bind() : function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
+var deprecate = require("util-deprecate");
+var WRAPPED_IN_QUOTES = /^('|")([^]*)\1$/;
+var warnOfDeprecatedValueAssignment = deprecate(function () {}, "Assigning an attribute a value containing characters that might need to be escaped is deprecated. " + "Call attribute.setValue() instead.");
+var warnOfDeprecatedQuotedAssignment = deprecate(function () {}, "Assigning attr.quoted is deprecated and has no effect. Assign to attr.quoteMark instead.");
+var warnOfDeprecatedConstructor = deprecate(function () {}, "Constructing an Attribute selector with a value without specifying quoteMark is deprecated. Note: The value should be unescaped now.");
+function unescapeValue(value) {
+  var deprecatedUsage = false;
+  var quoteMark = null;
+  var unescaped = value;
+  var m = unescaped.match(WRAPPED_IN_QUOTES);
+  if (m) {
+    quoteMark = m[1];
+    unescaped = m[2];
+  }
+  unescaped = (0, _unesc["default"])(unescaped);
+  if (unescaped !== value) {
+    deprecatedUsage = true;
+  }
+  return {
+    deprecatedUsage: deprecatedUsage,
+    unescaped: unescaped,
+    quoteMark: quoteMark
+  };
+}
+function handleDeprecatedConstructorOpts(opts) {
+  if (opts.quoteMark !== undefined) {
+    return opts;
+  }
+  if (opts.value === undefined) {
+    return opts;
+  }
+  warnOfDeprecatedConstructor();
+  var _unescapeValue = unescapeValue(opts.value),
+    quoteMark = _unescapeValue.quoteMark,
+    unescaped = _unescapeValue.unescaped;
+  if (!opts.raws) {
+    opts.raws = {};
+  }
+  if (opts.raws.value === undefined) {
+    opts.raws.value = opts.value;
+  }
+  opts.value = unescaped;
+  opts.quoteMark = quoteMark;
+  return opts;
+}
+var Attribute = /*#__PURE__*/function (_Namespace) {
+  _inheritsLoose(Attribute, _Namespace);
+  function Attribute(opts) {
+    var _this;
+    if (opts === void 0) {
+      opts = {};
+    }
+    _this = _Namespace.call(this, handleDeprecatedConstructorOpts(opts)) || this;
+    _this.type = _types.ATTRIBUTE;
+    _this.raws = _this.raws || {};
+    Object.defineProperty(_this.raws, 'unquoted', {
+      get: deprecate(function () {
+        return _this.value;
+      }, "attr.raws.unquoted is deprecated. Call attr.value instead."),
+      set: deprecate(function () {
+        return _this.value;
+      }, "Setting attr.raws.unquoted is deprecated and has no effect. attr.value is unescaped by default now.")
+    });
+    _this._constructed = true;
+    return _this;
+  }
+
+  /**
+   * Returns the Attribute's value quoted such that it would be legal to use
+   * in the value of a css file. The original value's quotation setting
+   * used for stringification is left unchanged. See `setValue(value, options)`
+   * if you want to control the quote settings of a new value for the attribute.
+   *
+   * You can also change the quotation used for the current value by setting quoteMark.
+   *
+   * Options:
+   *   * quoteMark {'"' | "'" | null} - Use this value to quote the value. If this
+   *     option is not set, the original value for quoteMark will be used. If
+   *     indeterminate, a double quote is used. The legal values are:
+   *     * `null` - the value will be unquoted and characters will be escaped as necessary.
+   *     * `'` - the value will be quoted with a single quote and single quotes are escaped.
+   *     * `"` - the value will be quoted with a double quote and double quotes are escaped.
+   *   * preferCurrentQuoteMark {boolean} - if true, prefer the source quote mark
+   *     over the quoteMark option value.
+   *   * smart {boolean} - if true, will select a quote mark based on the value
+   *     and the other options specified here. See the `smartQuoteMark()`
+   *     method.
+   **/
+  var _proto = Attribute.prototype;
+  _proto.getQuotedValue = function getQuotedValue(options) {
+    if (options === void 0) {
+      options = {};
+    }
+    var quoteMark = this._determineQuoteMark(options);
+    var cssescopts = CSSESC_QUOTE_OPTIONS[quoteMark];
+    var escaped = (0, _cssesc["default"])(this._value, cssescopts);
+    return escaped;
+  };
+  _proto._determineQuoteMark = function _determineQuoteMark(options) {
+    return options.smart ? this.smartQuoteMark(options) : this.preferredQuoteMark(options);
+  }
+
+  /**
+   * Set the unescaped value with the specified quotation options. The value
+   * provided must not include any wrapping quote marks -- those quotes will
+   * be interpreted as part of the value and escaped accordingly.
+   */;
+  _proto.setValue = function setValue(value, options) {
+    if (options === void 0) {
+      options = {};
+    }
+    this._value = value;
+    this._quoteMark = this._determineQuoteMark(options);
+    this._syncRawValue();
+  }
+
+  /**
+   * Intelligently select a quoteMark value based on the value's contents. If
+   * the value is a legal CSS ident, it will not be quoted. Otherwise a quote
+   * mark will be picked that minimizes the number of escapes.
+   *
+   * If there's no clear winner, the quote mark from the options is used;
+   * failing that, the source quote mark (these preferences are swapped when
+   * `preferCurrentQuoteMark` is true). If no quote mark can be determined,
+   * a double quote is used.
+   *
+   * @param options This takes the quoteMark and preferCurrentQuoteMark options
+   * from the quoteValue method.
+   */;
+  _proto.smartQuoteMark = function smartQuoteMark(options) {
+    var v = this.value;
+    var numSingleQuotes = v.replace(/[^']/g, '').length;
+    var numDoubleQuotes = v.replace(/[^"]/g, '').length;
+    if (numSingleQuotes + numDoubleQuotes === 0) {
+      var escaped = (0, _cssesc["default"])(v, {
+        isIdentifier: true
+      });
+      if (escaped === v) {
+        return Attribute.NO_QUOTE;
+      } else {
+        var pref = this.preferredQuoteMark(options);
+        if (pref === Attribute.NO_QUOTE) {
+          // pick a quote mark that isn't none and see if it's smaller
+          var quote = this.quoteMark || options.quoteMark || Attribute.DOUBLE_QUOTE;
+          var opts = CSSESC_QUOTE_OPTIONS[quote];
+          var quoteValue = (0, _cssesc["default"])(v, opts);
+          if (quoteValue.length < escaped.length) {
+            return quote;
+          }
+        }
+        return pref;
+      }
+    } else if (numDoubleQuotes === numSingleQuotes) {
+      return this.preferredQuoteMark(options);
+    } else if (numDoubleQuotes < numSingleQuotes) {
+      return Attribute.DOUBLE_QUOTE;
+    } else {
+      return Attribute.SINGLE_QUOTE;
+    }
+  }
+
+  /**
+   * Selects the preferred quote mark based on the options and the current quote mark value.
+   * If you want the quote mark to depend on the attribute value, call `smartQuoteMark(opts)`
+   * instead.
+   */;
+  _proto.preferredQuoteMark = function preferredQuoteMark(options) {
+    var quoteMark = options.preferCurrentQuoteMark ? this.quoteMark : options.quoteMark;
+    if (quoteMark === undefined) {
+      quoteMark = options.preferCurrentQuoteMark ? options.quoteMark : this.quoteMark;
+    }
+    if (quoteMark === undefined) {
+      quoteMark = Attribute.DOUBLE_QUOTE;
+    }
+    return quoteMark;
+  };
+  _proto._syncRawValue = function _syncRawValue() {
+    var rawValue = (0, _cssesc["default"])(this._value, CSSESC_QUOTE_OPTIONS[this.quoteMark]);
+    if (rawValue === this._value) {
+      if (this.raws) {
+        delete this.raws.value;
+      }
+    } else {
+      this.raws.value = rawValue;
+    }
+  };
+  _proto._handleEscapes = function _handleEscapes(prop, value) {
+    if (this._constructed) {
+      var escaped = (0, _cssesc["default"])(value, {
+        isIdentifier: true
+      });
+      if (escaped !== value) {
+        this.raws[prop] = escaped;
+      } else {
+        delete this.raws[prop];
+      }
+    }
+  };
+  _proto._spacesFor = function _spacesFor(name) {
+    var attrSpaces = {
+      before: '',
+      after: ''
+    };
+    var spaces = this.spaces[name] || {};
+    var rawSpaces = this.raws.spaces && this.raws.spaces[name] || {};
+    return Object.assign(attrSpaces, spaces, rawSpaces);
+  };
+  _proto._stringFor = function _stringFor(name, spaceName, concat) {
+    if (spaceName === void 0) {
+      spaceName = name;
+    }
+    if (concat === void 0) {
+      concat = defaultAttrConcat;
+    }
+    var attrSpaces = this._spacesFor(spaceName);
+    return concat(this.stringifyProperty(name), attrSpaces);
+  }
+
+  /**
+   * Returns the offset of the specified attribute part within this node's
+   * stringified output.
+   *
+   * * "ns" - alias for "namespace"
+   * * "namespace" - the namespace if it exists.
+   * * "attribute" - the attribute name
+   * * "attributeNS" - the start of the attribute or its namespace
+   * * "operator" - the match operator of the attribute
+   * * "value" - The value (string or identifier)
+   * * "insensitive" - the case insensitivity flag;
+   * @param part One of the possible values inside an attribute.
+   * @returns -1 if the name is invalid or the value doesn't exist in this attribute.
+   */;
+  _proto.offsetOf = function offsetOf(name) {
+    var count = 1;
+    var attributeSpaces = this._spacesFor("attribute");
+    count += attributeSpaces.before.length;
+    if (name === "namespace" || name === "ns") {
+      return this.namespace ? count : -1;
+    }
+    if (name === "attributeNS") {
+      return count;
+    }
+    count += this.namespaceString.length;
+    if (this.namespace) {
+      count += 1;
+    }
+    if (name === "attribute") {
+      return count;
+    }
+    count += this.stringifyProperty("attribute").length;
+    count += attributeSpaces.after.length;
+    var operatorSpaces = this._spacesFor("operator");
+    count += operatorSpaces.before.length;
+    var operator = this.stringifyProperty("operator");
+    if (name === "operator") {
+      return operator ? count : -1;
+    }
+    count += operator.length;
+    count += operatorSpaces.after.length;
+    var valueSpaces = this._spacesFor("value");
+    count += valueSpaces.before.length;
+    var value = this.stringifyProperty("value");
+    if (name === "value") {
+      return value ? count : -1;
+    }
+    count += value.length;
+    count += valueSpaces.after.length;
+    var insensitiveSpaces = this._spacesFor("insensitive");
+    count += insensitiveSpaces.before.length;
+    if (name === "insensitive") {
+      return this.insensitive ? count : -1;
+    }
+    return -1;
+  };
+  _proto.toString = function toString() {
+    var _this2 = this;
+    var selector = [this.rawSpaceBefore, '['];
+    selector.push(this._stringFor('qualifiedAttribute', 'attribute'));
+    if (this.operator && (this.value || this.value === '')) {
+      selector.push(this._stringFor('operator'));
+      selector.push(this._stringFor('value'));
+      selector.push(this._stringFor('insensitiveFlag', 'insensitive', function (attrValue, attrSpaces) {
+        if (attrValue.length > 0 && !_this2.quoted && attrSpaces.before.length === 0 && !(_this2.spaces.value && _this2.spaces.value.after)) {
+          attrSpaces.before = " ";
+        }
+        return defaultAttrConcat(attrValue, attrSpaces);
+      }));
+    }
+    selector.push(']');
+    selector.push(this.rawSpaceAfter);
+    return selector.join('');
+  };
+  _createClass(Attribute, [{
+    key: "quoted",
+    get: function get() {
+      var qm = this.quoteMark;
+      return qm === "'" || qm === '"';
+    },
+    set: function set(value) {
+      warnOfDeprecatedQuotedAssignment();
+    }
+
+    /**
+     * returns a single (`'`) or double (`"`) quote character if the value is quoted.
+     * returns `null` if the value is not quoted.
+     * returns `undefined` if the quotation state is unknown (this can happen when
+     * the attribute is constructed without specifying a quote mark.)
+     */
+  }, {
+    key: "quoteMark",
+    get: function get() {
+      return this._quoteMark;
+    }
+
+    /**
+     * Set the quote mark to be used by this attribute's value.
+     * If the quote mark changes, the raw (escaped) value stored at
+     * `attr.raws.value` is updated accordingly.
+     *
+     * @param {"'" | '"' | null} quoteMark The quote mark or `null` if the value should be unquoted.
+     */,
+    set: function set(quoteMark) {
+      if (!this._constructed) {
+        this._quoteMark = quoteMark;
+        return;
+      }
+      if (this._quoteMark !== quoteMark) {
+        this._quoteMark = quoteMark;
+        this._syncRawValue();
+      }
+    }
+  }, {
+    key: "qualifiedAttribute",
+    get: function get() {
+      return this.qualifiedName(this.raws.attribute || this.attribute);
+    }
+  }, {
+    key: "insensitiveFlag",
+    get: function get() {
+      return this.insensitive ? 'i' : '';
+    }
+  }, {
+    key: "value",
+    get: function get() {
+      return this._value;
+    },
+    set:
+    /**
+     * Before 3.0, the value had to be set to an escaped value including any wrapped
+     * quote marks. In 3.0, the semantics of `Attribute.value` changed so that the value
+     * is unescaped during parsing and any quote marks are removed.
+     *
+     * Because of the ambiguity of this semantic change, if you set `attr.value = newValue`,
+     * a deprecation warning is raised when the new value contains any characters that would
+     * require escaping (including if it contains wrapped quotes).
+     *
+     * Instead, you should call `attr.setValue(newValue, opts)` and pass options that describe
+     * how the new value is quoted.
+     */
+    function set(v) {
+      if (this._constructed) {
+        var _unescapeValue2 = unescapeValue(v),
+          deprecatedUsage = _unescapeValue2.deprecatedUsage,
+          unescaped = _unescapeValue2.unescaped,
+          quoteMark = _unescapeValue2.quoteMark;
+        if (deprecatedUsage) {
+          warnOfDeprecatedValueAssignment();
+        }
+        if (unescaped === this._value && quoteMark === this._quoteMark) {
+          return;
+        }
+        this._value = unescaped;
+        this._quoteMark = quoteMark;
+        this._syncRawValue();
+      } else {
+        this._value = v;
+      }
+    }
+  }, {
+    key: "insensitive",
+    get: function get() {
+      return this._insensitive;
+    }
+
+    /**
+     * Set the case insensitive flag.
+     * If the case insensitive flag changes, the raw (escaped) value stored at
+     * `attr.raws.insensitiveFlag` is updated accordingly.
+     *
+     * @param {true | false} insensitive true if the attribute should match case-insensitively.
+     */,
+    set: function set(insensitive) {
+      if (!insensitive) {
+        this._insensitive = false;
+
+        // "i" and "I" can be used in "this.raws.insensitiveFlag" to store the original notation.
+        // When setting `attr.insensitive = false` both should be erased to ensure correct serialization.
+        if (this.raws && (this.raws.insensitiveFlag === 'I' || this.raws.insensitiveFlag === 'i')) {
+          this.raws.insensitiveFlag = undefined;
+        }
+      }
+      this._insensitive = insensitive;
+    }
+  }, {
+    key: "attribute",
+    get: function get() {
+      return this._attribute;
+    },
+    set: function set(name) {
+      this._handleEscapes("attribute", name);
+      this._attribute = name;
+    }
+  }]);
+  return Attribute;
+}(_namespace["default"]);
+exports["default"] = Attribute;
+Attribute.NO_QUOTE = null;
+Attribute.SINGLE_QUOTE = "'";
+Attribute.DOUBLE_QUOTE = '"';
+var CSSESC_QUOTE_OPTIONS = (_CSSESC_QUOTE_OPTIONS = {
+  "'": {
+    quotes: 'single',
+    wrap: true
+  },
+  '"': {
+    quotes: 'double',
+    wrap: true
+  }
+}, _CSSESC_QUOTE_OPTIONS[null] = {
+  isIdentifier: true
+}, _CSSESC_QUOTE_OPTIONS);
+function defaultAttrConcat(attrValue, attrSpaces) {
+  return "" + attrSpaces.before + attrValue + attrSpaces.after;
+}
\ No newline at end of file
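To make the quoting rules above concrete, a small sketch; the attribute name and values are made up, and the package is assumed to be installed.

```js
// Sketch: Attribute quoting via getQuotedValue, setValue, and quoteMark.
const { attribute } = require('postcss-selector-parser');

const attr = attribute({
  attribute: 'title',
  operator: '=',
  value: "it's",   // unescaped, with no wrapping quote marks
  quoteMark: '"',
});

console.log(attr.getQuotedValue());                   // "it's"
console.log(attr.getQuotedValue({ quoteMark: "'" })); // 'it\'s'
console.log(attr.getQuotedValue({ smart: true }));    // "it's" (fewest escapes)

// setValue re-escapes and re-selects the quote mark in one step.
attr.setValue('plain', { smart: true });
console.log(attr.quoteMark); // null -- a legal identifier needs no quotes
```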
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/className.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/className.js
new file mode 100644
index 0000000000000000000000000000000000000000..af325977c1146d903cf94aad6836ed7e31fe1094
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/className.js
@@ -0,0 +1,50 @@
+"use strict";
+
+exports.__esModule = true;
+exports["default"] = void 0;
+var _cssesc = _interopRequireDefault(require("cssesc"));
+var _util = require("../util");
+var _node = _interopRequireDefault(require("./node"));
+var _types = require("./types");
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
+function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); Object.defineProperty(Constructor, "prototype", { writable: false }); return Constructor; }
+function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
+function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf ? Object.setPrototypeOf.bind() : function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
+var ClassName = /*#__PURE__*/function (_Node) {
+  _inheritsLoose(ClassName, _Node);
+  function ClassName(opts) {
+    var _this;
+    _this = _Node.call(this, opts) || this;
+    _this.type = _types.CLASS;
+    _this._constructed = true;
+    return _this;
+  }
+  var _proto = ClassName.prototype;
+  _proto.valueToString = function valueToString() {
+    return '.' + _Node.prototype.valueToString.call(this);
+  };
+  _createClass(ClassName, [{
+    key: "value",
+    get: function get() {
+      return this._value;
+    },
+    set: function set(v) {
+      if (this._constructed) {
+        var escaped = (0, _cssesc["default"])(v, {
+          isIdentifier: true
+        });
+        if (escaped !== v) {
+          (0, _util.ensureObject)(this, "raws");
+          this.raws.value = escaped;
+        } else if (this.raws) {
+          delete this.raws.value;
+        }
+      }
+      this._value = v;
+    }
+  }]);
+  return ClassName;
+}(_node["default"]);
+exports["default"] = ClassName;
+module.exports = exports.default;
\ No newline at end of file
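The `value` setter above keeps an escaped copy in `raws.value` whenever the class name is not a clean CSS identifier. A minimal sketch, assuming the package is installed:

```js
// Sketch: ClassName escapes non-identifier characters into raws.value.
const { className } = require('postcss-selector-parser');

const cls = className({ value: 'plain' });
cls.value = 'a b'; // a space is not legal in a css identifier
console.log(cls.value);      // prints: a b   (unescaped, for programmatic use)
console.log(cls.raws.value); // prints: a\ b  (escaped form used in output)
console.log(String(cls));    // prints: .a\ b
```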
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/combinator.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/combinator.js
new file mode 100644
index 0000000000000000000000000000000000000000..c6449f43cfddb72c7ffc21ce050480da8efe2742
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/combinator.js
@@ -0,0 +1,21 @@
+"use strict";
+
+exports.__esModule = true;
+exports["default"] = void 0;
+var _node = _interopRequireDefault(require("./node"));
+var _types = require("./types");
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
+function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
+function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf ? Object.setPrototypeOf.bind() : function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
+var Combinator = /*#__PURE__*/function (_Node) {
+  _inheritsLoose(Combinator, _Node);
+  function Combinator(opts) {
+    var _this;
+    _this = _Node.call(this, opts) || this;
+    _this.type = _types.COMBINATOR;
+    return _this;
+  }
+  return Combinator;
+}(_node["default"]);
+exports["default"] = Combinator;
+module.exports = exports.default;
\ No newline at end of file
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/comment.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/comment.js
new file mode 100644
index 0000000000000000000000000000000000000000..1709d5be925d6f7ab9d65a2de6d0eb73655093ce
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/comment.js
@@ -0,0 +1,21 @@
+"use strict";
+
+exports.__esModule = true;
+exports["default"] = void 0;
+var _node = _interopRequireDefault(require("./node"));
+var _types = require("./types");
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
+function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
+function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf ? Object.setPrototypeOf.bind() : function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
+var Comment = /*#__PURE__*/function (_Node) {
+  _inheritsLoose(Comment, _Node);
+  function Comment(opts) {
+    var _this;
+    _this = _Node.call(this, opts) || this;
+    _this.type = _types.COMMENT;
+    return _this;
+  }
+  return Comment;
+}(_node["default"]);
+exports["default"] = Comment;
+module.exports = exports.default;
\ No newline at end of file
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/constructors.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/constructors.js
new file mode 100644
index 0000000000000000000000000000000000000000..688259324cdd292e4c2351c18ded65ab440a57a1
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/constructors.js
@@ -0,0 +1,65 @@
+"use strict";
+
+exports.__esModule = true;
+exports.universal = exports.tag = exports.string = exports.selector = exports.root = exports.pseudo = exports.nesting = exports.id = exports.comment = exports.combinator = exports.className = exports.attribute = void 0;
+var _attribute = _interopRequireDefault(require("./attribute"));
+var _className = _interopRequireDefault(require("./className"));
+var _combinator = _interopRequireDefault(require("./combinator"));
+var _comment = _interopRequireDefault(require("./comment"));
+var _id = _interopRequireDefault(require("./id"));
+var _nesting = _interopRequireDefault(require("./nesting"));
+var _pseudo = _interopRequireDefault(require("./pseudo"));
+var _root = _interopRequireDefault(require("./root"));
+var _selector = _interopRequireDefault(require("./selector"));
+var _string = _interopRequireDefault(require("./string"));
+var _tag = _interopRequireDefault(require("./tag"));
+var _universal = _interopRequireDefault(require("./universal"));
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
+var attribute = function attribute(opts) {
+  return new _attribute["default"](opts);
+};
+exports.attribute = attribute;
+var className = function className(opts) {
+  return new _className["default"](opts);
+};
+exports.className = className;
+var combinator = function combinator(opts) {
+  return new _combinator["default"](opts);
+};
+exports.combinator = combinator;
+var comment = function comment(opts) {
+  return new _comment["default"](opts);
+};
+exports.comment = comment;
+var id = function id(opts) {
+  return new _id["default"](opts);
+};
+exports.id = id;
+var nesting = function nesting(opts) {
+  return new _nesting["default"](opts);
+};
+exports.nesting = nesting;
+var pseudo = function pseudo(opts) {
+  return new _pseudo["default"](opts);
+};
+exports.pseudo = pseudo;
+var root = function root(opts) {
+  return new _root["default"](opts);
+};
+exports.root = root;
+var selector = function selector(opts) {
+  return new _selector["default"](opts);
+};
+exports.selector = selector;
+var string = function string(opts) {
+  return new _string["default"](opts);
+};
+exports.string = string;
+var tag = function tag(opts) {
+  return new _tag["default"](opts);
+};
+exports.tag = tag;
+var universal = function universal(opts) {
+  return new _universal["default"](opts);
+};
+exports.universal = universal;
\ No newline at end of file
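These factories are the supported way to build selector nodes by hand. A sketch composing a few of them into a tree (package assumed installed; the selector is made up):

```js
// Sketch: assembling the selector "ul .item" from factory helpers.
const { root, selector, tag, combinator, className } = require('postcss-selector-parser');

const sel = selector({ value: '' });
sel.append(tag({ value: 'ul' }));
sel.append(combinator({ value: ' ' })); // descendant combinator
sel.append(className({ value: 'item' }));

const tree = root({ value: '' });
tree.append(sel);
console.log(String(tree)); // "ul .item"
```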
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/container.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/container.js
new file mode 100644
index 0000000000000000000000000000000000000000..84755cbd541a2b3196e249b0aeff0d12c6ed7c42
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/container.js
@@ -0,0 +1,321 @@
+"use strict";
+
+exports.__esModule = true;
+exports["default"] = void 0;
+var _node = _interopRequireDefault(require("./node"));
+var types = _interopRequireWildcard(require("./types"));
+function _getRequireWildcardCache(nodeInterop) { if (typeof WeakMap !== "function") return null; var cacheBabelInterop = new WeakMap(); var cacheNodeInterop = new WeakMap(); return (_getRequireWildcardCache = function _getRequireWildcardCache(nodeInterop) { return nodeInterop ? cacheNodeInterop : cacheBabelInterop; })(nodeInterop); }
+function _interopRequireWildcard(obj, nodeInterop) { if (!nodeInterop && obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { "default": obj }; } var cache = _getRequireWildcardCache(nodeInterop); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj["default"] = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
+function _createForOfIteratorHelperLoose(o, allowArrayLike) { var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"]; if (it) return (it = it.call(o)).next.bind(it); if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; return function () { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); }
+function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
+function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; }
+function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); Object.defineProperty(Constructor, "prototype", { writable: false }); return Constructor; }
+function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
+function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf ? Object.setPrototypeOf.bind() : function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
+var Container = /*#__PURE__*/function (_Node) {
+  _inheritsLoose(Container, _Node);
+  function Container(opts) {
+    var _this;
+    _this = _Node.call(this, opts) || this;
+    if (!_this.nodes) {
+      _this.nodes = [];
+    }
+    return _this;
+  }
+  var _proto = Container.prototype;
+  _proto.append = function append(selector) {
+    selector.parent = this;
+    this.nodes.push(selector);
+    return this;
+  };
+  _proto.prepend = function prepend(selector) {
+    selector.parent = this;
+    this.nodes.unshift(selector);
+    for (var id in this.indexes) {
+      this.indexes[id]++;
+    }
+    return this;
+  };
+  _proto.at = function at(index) {
+    return this.nodes[index];
+  };
+  _proto.index = function index(child) {
+    if (typeof child === 'number') {
+      return child;
+    }
+    return this.nodes.indexOf(child);
+  };
+  _proto.removeChild = function removeChild(child) {
+    child = this.index(child);
+    this.at(child).parent = undefined;
+    this.nodes.splice(child, 1);
+    var index;
+    for (var id in this.indexes) {
+      index = this.indexes[id];
+      if (index >= child) {
+        this.indexes[id] = index - 1;
+      }
+    }
+    return this;
+  };
+  _proto.removeAll = function removeAll() {
+    for (var _iterator = _createForOfIteratorHelperLoose(this.nodes), _step; !(_step = _iterator()).done;) {
+      var node = _step.value;
+      node.parent = undefined;
+    }
+    this.nodes = [];
+    return this;
+  };
+  _proto.empty = function empty() {
+    return this.removeAll();
+  };
+  _proto.insertAfter = function insertAfter(oldNode, newNode) {
+    var _this$nodes;
+    newNode.parent = this;
+    var oldIndex = this.index(oldNode);
+    var resetNode = [];
+    for (var i = 2; i < arguments.length; i++) {
+      resetNode.push(arguments[i]);
+    }
+    (_this$nodes = this.nodes).splice.apply(_this$nodes, [oldIndex + 1, 0, newNode].concat(resetNode));
+    newNode.parent = this;
+    var index;
+    for (var id in this.indexes) {
+      index = this.indexes[id];
+      if (oldIndex < index) {
+        this.indexes[id] = index + arguments.length - 1;
+      }
+    }
+    return this;
+  };
+  _proto.insertBefore = function insertBefore(oldNode, newNode) {
+    var _this$nodes2;
+    newNode.parent = this;
+    var oldIndex = this.index(oldNode);
+    var resetNode = [];
+    for (var i = 2; i < arguments.length; i++) {
+      resetNode.push(arguments[i]);
+    }
+    (_this$nodes2 = this.nodes).splice.apply(_this$nodes2, [oldIndex, 0, newNode].concat(resetNode));
+    newNode.parent = this;
+    var index;
+    for (var id in this.indexes) {
+      index = this.indexes[id];
+      if (index >= oldIndex) {
+        this.indexes[id] = index + arguments.length - 1;
+      }
+    }
+    return this;
+  };
+  _proto._findChildAtPosition = function _findChildAtPosition(line, col) {
+    var found = undefined;
+    this.each(function (node) {
+      if (node.atPosition) {
+        var foundChild = node.atPosition(line, col);
+        if (foundChild) {
+          found = foundChild;
+          return false;
+        }
+      } else if (node.isAtPosition(line, col)) {
+        found = node;
+        return false;
+      }
+    });
+    return found;
+  }
+
+  /**
+   * Return the most specific node at the line and column number given.
+   * The source location is based on the original parsed location; locations
+   * are not updated as selector nodes are mutated.
+   *
+   * Note that this location is relative to the location of the first character
+   * of the selector, and not the location of the selector in the overall document
+   * when used in conjunction with postcss.
+   *
+   * If not found, returns undefined.
+   * @param {number} line The line number of the node to find. (1-based index)
+   * @param {number} col  The column number of the node to find. (1-based index)
+   */;
+  _proto.atPosition = function atPosition(line, col) {
+    if (this.isAtPosition(line, col)) {
+      return this._findChildAtPosition(line, col) || this;
+    } else {
+      return undefined;
+    }
+  };
+  _proto._inferEndPosition = function _inferEndPosition() {
+    if (this.last && this.last.source && this.last.source.end) {
+      this.source = this.source || {};
+      this.source.end = this.source.end || {};
+      Object.assign(this.source.end, this.last.source.end);
+    }
+  };
+  _proto.each = function each(callback) {
+    if (!this.lastEach) {
+      this.lastEach = 0;
+    }
+    if (!this.indexes) {
+      this.indexes = {};
+    }
+    this.lastEach++;
+    var id = this.lastEach;
+    this.indexes[id] = 0;
+    if (!this.length) {
+      return undefined;
+    }
+    var index, result;
+    while (this.indexes[id] < this.length) {
+      index = this.indexes[id];
+      result = callback(this.at(index), index);
+      if (result === false) {
+        break;
+      }
+      this.indexes[id] += 1;
+    }
+    delete this.indexes[id];
+    if (result === false) {
+      return false;
+    }
+  };
+  _proto.walk = function walk(callback) {
+    return this.each(function (node, i) {
+      var result = callback(node, i);
+      if (result !== false && node.length) {
+        result = node.walk(callback);
+      }
+      if (result === false) {
+        return false;
+      }
+    });
+  };
+  _proto.walkAttributes = function walkAttributes(callback) {
+    var _this2 = this;
+    return this.walk(function (selector) {
+      if (selector.type === types.ATTRIBUTE) {
+        return callback.call(_this2, selector);
+      }
+    });
+  };
+  _proto.walkClasses = function walkClasses(callback) {
+    var _this3 = this;
+    return this.walk(function (selector) {
+      if (selector.type === types.CLASS) {
+        return callback.call(_this3, selector);
+      }
+    });
+  };
+  _proto.walkCombinators = function walkCombinators(callback) {
+    var _this4 = this;
+    return this.walk(function (selector) {
+      if (selector.type === types.COMBINATOR) {
+        return callback.call(_this4, selector);
+      }
+    });
+  };
+  _proto.walkComments = function walkComments(callback) {
+    var _this5 = this;
+    return this.walk(function (selector) {
+      if (selector.type === types.COMMENT) {
+        return callback.call(_this5, selector);
+      }
+    });
+  };
+  _proto.walkIds = function walkIds(callback) {
+    var _this6 = this;
+    return this.walk(function (selector) {
+      if (selector.type === types.ID) {
+        return callback.call(_this6, selector);
+      }
+    });
+  };
+  _proto.walkNesting = function walkNesting(callback) {
+    var _this7 = this;
+    return this.walk(function (selector) {
+      if (selector.type === types.NESTING) {
+        return callback.call(_this7, selector);
+      }
+    });
+  };
+  _proto.walkPseudos = function walkPseudos(callback) {
+    var _this8 = this;
+    return this.walk(function (selector) {
+      if (selector.type === types.PSEUDO) {
+        return callback.call(_this8, selector);
+      }
+    });
+  };
+  _proto.walkTags = function walkTags(callback) {
+    var _this9 = this;
+    return this.walk(function (selector) {
+      if (selector.type === types.TAG) {
+        return callback.call(_this9, selector);
+      }
+    });
+  };
+  _proto.walkUniversals = function walkUniversals(callback) {
+    var _this10 = this;
+    return this.walk(function (selector) {
+      if (selector.type === types.UNIVERSAL) {
+        return callback.call(_this10, selector);
+      }
+    });
+  };
+  _proto.split = function split(callback) {
+    var _this11 = this;
+    var current = [];
+    return this.reduce(function (memo, node, index) {
+      var split = callback.call(_this11, node);
+      current.push(node);
+      if (split) {
+        memo.push(current);
+        current = [];
+      } else if (index === _this11.length - 1) {
+        memo.push(current);
+      }
+      return memo;
+    }, []);
+  };
+  _proto.map = function map(callback) {
+    return this.nodes.map(callback);
+  };
+  _proto.reduce = function reduce(callback, memo) {
+    return this.nodes.reduce(callback, memo);
+  };
+  _proto.every = function every(callback) {
+    return this.nodes.every(callback);
+  };
+  _proto.some = function some(callback) {
+    return this.nodes.some(callback);
+  };
+  _proto.filter = function filter(callback) {
+    return this.nodes.filter(callback);
+  };
+  _proto.sort = function sort(callback) {
+    return this.nodes.sort(callback);
+  };
+  _proto.toString = function toString() {
+    return this.map(String).join('');
+  };
+  _createClass(Container, [{
+    key: "first",
+    get: function get() {
+      return this.at(0);
+    }
+  }, {
+    key: "last",
+    get: function get() {
+      return this.at(this.length - 1);
+    }
+  }, {
+    key: "length",
+    get: function get() {
+      return this.nodes.length;
+    }
+  }]);
+  return Container;
+}(_node["default"]);
+exports["default"] = Container;
+module.exports = exports.default;
\ No newline at end of file
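A sketch of the traversal helpers defined above. Note that `each()` records a per-iteration cursor in `this.indexes` and `removeChild` rewinds those cursors, which is what makes removing the current node during iteration safe. Package assumed installed; the selector string is illustrative.

```js
// Sketch: walking the AST and removing nodes mid-iteration.
const parser = require('postcss-selector-parser');

const cleaned = parser((selectors) => {
  // walk() visits nodes depth-first; returning false stops the traversal.
  selectors.walk((node) => {
    console.log(node.type, JSON.stringify(String(node)));
  });

  // Removal inside the typed walkers is safe: removeChild adjusts the
  // indexes bookkeeping so no sibling gets skipped.
  selectors.walkComments((comment) => comment.remove());
}).processSync('/* note */ a > b');

console.log(cleaned); // the comment is stripped from the output
```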
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/guards.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/guards.js
new file mode 100644
index 0000000000000000000000000000000000000000..f06161e97cb2655b5dbb6fb17cd106639aba6700
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/guards.js
@@ -0,0 +1,58 @@
+"use strict";
+
+exports.__esModule = true;
+exports.isComment = exports.isCombinator = exports.isClassName = exports.isAttribute = void 0;
+exports.isContainer = isContainer;
+exports.isIdentifier = void 0;
+exports.isNamespace = isNamespace;
+exports.isNesting = void 0;
+exports.isNode = isNode;
+exports.isPseudo = void 0;
+exports.isPseudoClass = isPseudoClass;
+exports.isPseudoElement = isPseudoElement;
+exports.isUniversal = exports.isTag = exports.isString = exports.isSelector = exports.isRoot = void 0;
+var _types = require("./types");
+var _IS_TYPE;
+var IS_TYPE = (_IS_TYPE = {}, _IS_TYPE[_types.ATTRIBUTE] = true, _IS_TYPE[_types.CLASS] = true, _IS_TYPE[_types.COMBINATOR] = true, _IS_TYPE[_types.COMMENT] = true, _IS_TYPE[_types.ID] = true, _IS_TYPE[_types.NESTING] = true, _IS_TYPE[_types.PSEUDO] = true, _IS_TYPE[_types.ROOT] = true, _IS_TYPE[_types.SELECTOR] = true, _IS_TYPE[_types.STRING] = true, _IS_TYPE[_types.TAG] = true, _IS_TYPE[_types.UNIVERSAL] = true, _IS_TYPE);
+function isNode(node) {
+  // typeof null === "object", so guard against null before the type lookup
+  return typeof node === "object" && node !== null && Boolean(IS_TYPE[node.type]);
+}
+function isNodeType(type, node) {
+  return isNode(node) && node.type === type;
+}
+var isAttribute = isNodeType.bind(null, _types.ATTRIBUTE);
+exports.isAttribute = isAttribute;
+var isClassName = isNodeType.bind(null, _types.CLASS);
+exports.isClassName = isClassName;
+var isCombinator = isNodeType.bind(null, _types.COMBINATOR);
+exports.isCombinator = isCombinator;
+var isComment = isNodeType.bind(null, _types.COMMENT);
+exports.isComment = isComment;
+var isIdentifier = isNodeType.bind(null, _types.ID);
+exports.isIdentifier = isIdentifier;
+var isNesting = isNodeType.bind(null, _types.NESTING);
+exports.isNesting = isNesting;
+var isPseudo = isNodeType.bind(null, _types.PSEUDO);
+exports.isPseudo = isPseudo;
+var isRoot = isNodeType.bind(null, _types.ROOT);
+exports.isRoot = isRoot;
+var isSelector = isNodeType.bind(null, _types.SELECTOR);
+exports.isSelector = isSelector;
+var isString = isNodeType.bind(null, _types.STRING);
+exports.isString = isString;
+var isTag = isNodeType.bind(null, _types.TAG);
+exports.isTag = isTag;
+var isUniversal = isNodeType.bind(null, _types.UNIVERSAL);
+exports.isUniversal = isUniversal;
+function isPseudoElement(node) {
+  return isPseudo(node) && node.value && (node.value.startsWith("::") || node.value.toLowerCase() === ":before" || node.value.toLowerCase() === ":after" || node.value.toLowerCase() === ":first-letter" || node.value.toLowerCase() === ":first-line");
+}
+function isPseudoClass(node) {
+  return isPseudo(node) && !isPseudoElement(node);
+}
+function isContainer(node) {
+  return !!(isNode(node) && node.walk);
+}
+function isNamespace(node) {
+  return isAttribute(node) || isTag(node);
+}
\ No newline at end of file
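A sketch of the guards in use. `isPseudoElement` treats `::`-prefixed values plus the four legacy single-colon forms as pseudo-elements; any other pseudo counts as a pseudo-class. This assumes the guards are re-exported on the main parser function, as the package's top-level index does.

```js
// Sketch: classifying pseudos with the type guards.
const parser = require('postcss-selector-parser');
const { isPseudoElement, isPseudoClass, isContainer } = parser;

parser((selectors) => {
  selectors.walkPseudos((pseudo) => {
    console.log(pseudo.value, isPseudoElement(pseudo), isPseudoClass(pseudo));
    // => :hover   false true
    // => ::before true  false
  });
  console.log(isContainer(selectors)); // true -- Root has a walk() method
}).processSync('a:hover::before');
```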
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/id.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/id.js
new file mode 100644
index 0000000000000000000000000000000000000000..8baef72860c9b6f0b32815d256e5198dca8beeb3
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/id.js
@@ -0,0 +1,25 @@
+"use strict";
+
+exports.__esModule = true;
+exports["default"] = void 0;
+var _node = _interopRequireDefault(require("./node"));
+var _types = require("./types");
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
+function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
+function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf ? Object.setPrototypeOf.bind() : function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
+var ID = /*#__PURE__*/function (_Node) {
+  _inheritsLoose(ID, _Node);
+  function ID(opts) {
+    var _this;
+    _this = _Node.call(this, opts) || this;
+    _this.type = _types.ID;
+    return _this;
+  }
+  var _proto = ID.prototype;
+  _proto.valueToString = function valueToString() {
+    return '#' + _Node.prototype.valueToString.call(this);
+  };
+  return ID;
+}(_node["default"]);
+exports["default"] = ID;
+module.exports = exports.default;
\ No newline at end of file
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..f1f6b7f5e63ca306567f9f64319e84f8794fc9b4
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/index.js
@@ -0,0 +1,21 @@
+"use strict";
+
+exports.__esModule = true;
+var _types = require("./types");
+Object.keys(_types).forEach(function (key) {
+  if (key === "default" || key === "__esModule") return;
+  if (key in exports && exports[key] === _types[key]) return;
+  exports[key] = _types[key];
+});
+var _constructors = require("./constructors");
+Object.keys(_constructors).forEach(function (key) {
+  if (key === "default" || key === "__esModule") return;
+  if (key in exports && exports[key] === _constructors[key]) return;
+  exports[key] = _constructors[key];
+});
+var _guards = require("./guards");
+Object.keys(_guards).forEach(function (key) {
+  if (key === "default" || key === "__esModule") return;
+  if (key in exports && exports[key] === _guards[key]) return;
+  exports[key] = _guards[key];
+});
\ No newline at end of file
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/namespace.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/namespace.js
new file mode 100644
index 0000000000000000000000000000000000000000..cc97647bd174b58a5e8c1f3fbc02f70412355195
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/namespace.js
@@ -0,0 +1,80 @@
+"use strict";
+
+exports.__esModule = true;
+exports["default"] = void 0;
+var _cssesc = _interopRequireDefault(require("cssesc"));
+var _util = require("../util");
+var _node = _interopRequireDefault(require("./node"));
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
+function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); Object.defineProperty(Constructor, "prototype", { writable: false }); return Constructor; }
+function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
+function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf ? Object.setPrototypeOf.bind() : function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
+var Namespace = /*#__PURE__*/function (_Node) {
+  _inheritsLoose(Namespace, _Node);
+  function Namespace() {
+    return _Node.apply(this, arguments) || this;
+  }
+  var _proto = Namespace.prototype;
+  _proto.qualifiedName = function qualifiedName(value) {
+    if (this.namespace) {
+      return this.namespaceString + "|" + value;
+    } else {
+      return value;
+    }
+  };
+  _proto.valueToString = function valueToString() {
+    return this.qualifiedName(_Node.prototype.valueToString.call(this));
+  };
+  _createClass(Namespace, [{
+    key: "namespace",
+    get: function get() {
+      return this._namespace;
+    },
+    set: function set(namespace) {
+      if (namespace === true || namespace === "*" || namespace === "&") {
+        this._namespace = namespace;
+        if (this.raws) {
+          delete this.raws.namespace;
+        }
+        return;
+      }
+      var escaped = (0, _cssesc["default"])(namespace, {
+        isIdentifier: true
+      });
+      this._namespace = namespace;
+      if (escaped !== namespace) {
+        (0, _util.ensureObject)(this, "raws");
+        this.raws.namespace = escaped;
+      } else if (this.raws) {
+        delete this.raws.namespace;
+      }
+    }
+  }, {
+    key: "ns",
+    get: function get() {
+      return this._namespace;
+    },
+    set: function set(namespace) {
+      this.namespace = namespace;
+    }
+  }, {
+    key: "namespaceString",
+    get: function get() {
+      if (this.namespace) {
+        var ns = this.stringifyProperty("namespace");
+        if (ns === true) {
+          return '';
+        } else {
+          return ns;
+        }
+      } else {
+        return '';
+      }
+    }
+  }]);
+  return Namespace;
+}(_node["default"]);
+exports["default"] = Namespace;
+module.exports = exports.default;
\ No newline at end of file
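`Namespace` is the shared base class for tags and attributes. A sketch of the namespace handling above, assuming the package is installed (the package's `Tag` extends this class):

```js
// Sketch: namespace prefixes on a Tag node.
const { tag } = require('postcss-selector-parser');

const t = tag({ value: 'rect' });
t.namespace = 'svg';
console.log(String(t)); // "svg|rect"

t.namespace = true;     // namespace present but empty -> bare pipe
console.log(String(t)); // "|rect"

t.namespace = '*';      // any-namespace wildcard
console.log(String(t)); // "*|rect"
```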
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/nesting.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/nesting.js
new file mode 100644
index 0000000000000000000000000000000000000000..218992875a60c9d30ee198e779c69f9f28a0f0c0
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/nesting.js
@@ -0,0 +1,22 @@
+"use strict";
+
+exports.__esModule = true;
+exports["default"] = void 0;
+var _node = _interopRequireDefault(require("./node"));
+var _types = require("./types");
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
+function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
+function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf ? Object.setPrototypeOf.bind() : function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
+var Nesting = /*#__PURE__*/function (_Node) {
+  _inheritsLoose(Nesting, _Node);
+  function Nesting(opts) {
+    var _this;
+    _this = _Node.call(this, opts) || this;
+    _this.type = _types.NESTING;
+    _this.value = '&';
+    return _this;
+  }
+  return Nesting;
+}(_node["default"]);
+exports["default"] = Nesting;
+module.exports = exports.default;
\ No newline at end of file
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/node.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/node.js
new file mode 100644
index 0000000000000000000000000000000000000000..9a8295101066ae32eef2f4a2eaf6a9b0ef11dc4a
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/node.js
@@ -0,0 +1,192 @@
+"use strict";
+
+exports.__esModule = true;
+exports["default"] = void 0;
+var _util = require("../util");
+function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); Object.defineProperty(Constructor, "prototype", { writable: false }); return Constructor; }
+var cloneNode = function cloneNode(obj, parent) {
+  if (typeof obj !== 'object' || obj === null) {
+    return obj;
+  }
+  var cloned = new obj.constructor();
+  for (var i in obj) {
+    if (!obj.hasOwnProperty(i)) {
+      continue;
+    }
+    var value = obj[i];
+    var type = typeof value;
+    if (i === 'parent' && type === 'object') {
+      if (parent) {
+        cloned[i] = parent;
+      }
+    } else if (value instanceof Array) {
+      cloned[i] = value.map(function (j) {
+        return cloneNode(j, cloned);
+      });
+    } else {
+      cloned[i] = cloneNode(value, cloned);
+    }
+  }
+  return cloned;
+};
+var Node = /*#__PURE__*/function () {
+  function Node(opts) {
+    if (opts === void 0) {
+      opts = {};
+    }
+    Object.assign(this, opts);
+    this.spaces = this.spaces || {};
+    this.spaces.before = this.spaces.before || '';
+    this.spaces.after = this.spaces.after || '';
+  }
+  var _proto = Node.prototype;
+  _proto.remove = function remove() {
+    if (this.parent) {
+      this.parent.removeChild(this);
+    }
+    this.parent = undefined;
+    return this;
+  };
+  _proto.replaceWith = function replaceWith() {
+    if (this.parent) {
+      for (var index in arguments) {
+        this.parent.insertBefore(this, arguments[index]);
+      }
+      this.remove();
+    }
+    return this;
+  };
+  _proto.next = function next() {
+    return this.parent.at(this.parent.index(this) + 1);
+  };
+  _proto.prev = function prev() {
+    return this.parent.at(this.parent.index(this) - 1);
+  };
+  _proto.clone = function clone(overrides) {
+    if (overrides === void 0) {
+      overrides = {};
+    }
+    var cloned = cloneNode(this);
+    for (var name in overrides) {
+      cloned[name] = overrides[name];
+    }
+    return cloned;
+  }
+
+  /**
+   * Some non-standard syntax doesn't follow normal escaping rules for css.
+   * This allows non-standard syntax to be appended to an existing property
+   * by specifying the escaped value directly. Because the escaped value is
+   * used verbatim, illegal characters can be inserted into the css output.
+   * @param {string} name the property to set
+   * @param {any} value the unescaped value of the property
+   * @param {string} valueEscaped optional. the escaped value of the property.
+   */;
+  _proto.appendToPropertyAndEscape = function appendToPropertyAndEscape(name, value, valueEscaped) {
+    if (!this.raws) {
+      this.raws = {};
+    }
+    var originalValue = this[name];
+    var originalEscaped = this.raws[name];
+    this[name] = originalValue + value; // this may trigger a setter that updates raws, so it has to be set first.
+    if (originalEscaped || valueEscaped !== value) {
+      this.raws[name] = (originalEscaped || originalValue) + valueEscaped;
+    } else {
+      delete this.raws[name]; // delete any escaped value that was created by the setter.
+    }
+  }
+
+  /**
+   * Some non-standard syntax doesn't follow normal escaping rules for css.
+   * This allows the escaped value to be specified directly, allowing illegal
+   * characters to be directly inserted into css output.
+   * @param {string} name the property to set
+   * @param {any} value the unescaped value of the property
+   * @param {string} valueEscaped the escaped value of the property.
+   */;
+  _proto.setPropertyAndEscape = function setPropertyAndEscape(name, value, valueEscaped) {
+    if (!this.raws) {
+      this.raws = {};
+    }
+    this[name] = value; // this may trigger a setter that updates raws, so it has to be set first.
+    this.raws[name] = valueEscaped;
+  }
+
+  /**
+   * Use this when you want a value to be passed through to CSS directly.
+   * This method deletes the corresponding raw value, causing the stringifier
+   * to fall back to the unescaped value.
+   * @param {string} name the property to set.
+   * @param {any} value The value that is both escaped and unescaped.
+   */;
+  _proto.setPropertyWithoutEscape = function setPropertyWithoutEscape(name, value) {
+    this[name] = value; // this may trigger a setter that updates raws, so it has to be set first.
+    if (this.raws) {
+      delete this.raws[name];
+    }
+  }
+
+  /**
+   * Checks whether this node's source range contains the given position.
+   * @param {number} line The line number (starting with 1)
+   * @param {number} column The column number (starting with 1)
+   */;
+  _proto.isAtPosition = function isAtPosition(line, column) {
+    if (this.source && this.source.start && this.source.end) {
+      if (this.source.start.line > line) {
+        return false;
+      }
+      if (this.source.end.line < line) {
+        return false;
+      }
+      if (this.source.start.line === line && this.source.start.column > column) {
+        return false;
+      }
+      if (this.source.end.line === line && this.source.end.column < column) {
+        return false;
+      }
+      return true;
+    }
+    return undefined;
+  };
+  _proto.stringifyProperty = function stringifyProperty(name) {
+    return this.raws && this.raws[name] || this[name];
+  };
+  _proto.valueToString = function valueToString() {
+    return String(this.stringifyProperty("value"));
+  };
+  _proto.toString = function toString() {
+    return [this.rawSpaceBefore, this.valueToString(), this.rawSpaceAfter].join('');
+  };
+  _createClass(Node, [{
+    key: "rawSpaceBefore",
+    get: function get() {
+      var rawSpace = this.raws && this.raws.spaces && this.raws.spaces.before;
+      if (rawSpace === undefined) {
+        rawSpace = this.spaces && this.spaces.before;
+      }
+      return rawSpace || "";
+    },
+    set: function set(raw) {
+      (0, _util.ensureObject)(this, "raws", "spaces");
+      this.raws.spaces.before = raw;
+    }
+  }, {
+    key: "rawSpaceAfter",
+    get: function get() {
+      var rawSpace = this.raws && this.raws.spaces && this.raws.spaces.after;
+      if (rawSpace === undefined) {
+        rawSpace = this.spaces.after;
+      }
+      return rawSpace || "";
+    },
+    set: function set(raw) {
+      (0, _util.ensureObject)(this, "raws", "spaces");
+      this.raws.spaces.after = raw;
+    }
+  }]);
+  return Node;
+}();
+exports["default"] = Node;
+module.exports = exports.default;
\ No newline at end of file
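+
+// Illustrative sketch, not part of the vendored file: how the raws machinery
+// above interacts with stringification. The API shape follows
+// postcss-selector-parser; the selector and values are made up.
+//
+//   const parser = require('postcss-selector-parser')
+//   parser(selectors => {
+//     const tag = selectors.first.first // the `div` tag node
+//     tag.setPropertyAndEscape('value', 'a b', 'a\\ b')
+//     tag.value     // 'a b'  (unescaped, good for comparisons)
+//     String(tag)   // 'a\ b' (stringifyProperty prefers this.raws.value)
+//     tag.setPropertyWithoutEscape('value', 'span')
+//     String(tag)   // 'span' (raw deleted, falls back to the plain value)
+//   }).processSync('div')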
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/pseudo.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/pseudo.js
new file mode 100644
index 0000000000000000000000000000000000000000..4371e5900f5c7fd2d4f54049174937ff9fcec944
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/pseudo.js
@@ -0,0 +1,26 @@
+"use strict";
+
+exports.__esModule = true;
+exports["default"] = void 0;
+var _container = _interopRequireDefault(require("./container"));
+var _types = require("./types");
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
+function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
+function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf ? Object.setPrototypeOf.bind() : function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
+var Pseudo = /*#__PURE__*/function (_Container) {
+  _inheritsLoose(Pseudo, _Container);
+  function Pseudo(opts) {
+    var _this;
+    _this = _Container.call(this, opts) || this;
+    _this.type = _types.PSEUDO;
+    return _this;
+  }
+  var _proto = Pseudo.prototype;
+  _proto.toString = function toString() {
+    var params = this.length ? '(' + this.map(String).join(',') + ')' : '';
+    return [this.rawSpaceBefore, this.stringifyProperty("value"), params, this.rawSpaceAfter].join('');
+  };
+  return Pseudo;
+}(_container["default"]);
+exports["default"] = Pseudo;
+module.exports = exports.default;
\ No newline at end of file
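+
+// Quick sketch (assumed usage): Pseudo#toString above joins child selectors
+// with commas inside parentheses, so a parsed `:not(.a, .b)` round-trips.
+//
+//   const parser = require('postcss-selector-parser')
+//   parser(selectors => {
+//     const pseudo = selectors.first.first
+//     pseudo.type      // 'pseudo'
+//     String(pseudo)   // ':not(.a, .b)'
+//   }).processSync(':not(.a, .b)')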
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/root.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/root.js
new file mode 100644
index 0000000000000000000000000000000000000000..8c599d15809f555bdcadcc25758e74b35ccf9387
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/root.js
@@ -0,0 +1,44 @@
+"use strict";
+
+exports.__esModule = true;
+exports["default"] = void 0;
+var _container = _interopRequireDefault(require("./container"));
+var _types = require("./types");
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
+function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } }
+function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); Object.defineProperty(Constructor, "prototype", { writable: false }); return Constructor; }
+function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
+function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf ? Object.setPrototypeOf.bind() : function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
+var Root = /*#__PURE__*/function (_Container) {
+  _inheritsLoose(Root, _Container);
+  function Root(opts) {
+    var _this;
+    _this = _Container.call(this, opts) || this;
+    _this.type = _types.ROOT;
+    return _this;
+  }
+  var _proto = Root.prototype;
+  _proto.toString = function toString() {
+    var str = this.reduce(function (memo, selector) {
+      memo.push(String(selector));
+      return memo;
+    }, []).join(',');
+    return this.trailingComma ? str + ',' : str;
+  };
+  _proto.error = function error(message, options) {
+    if (this._error) {
+      return this._error(message, options);
+    } else {
+      return new Error(message);
+    }
+  };
+  _createClass(Root, [{
+    key: "errorGenerator",
+    set: function set(handler) {
+      this._error = handler;
+    }
+  }]);
+  return Root;
+}(_container["default"]);
+exports["default"] = Root;
+module.exports = exports.default;
\ No newline at end of file
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/selector.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/selector.js
new file mode 100644
index 0000000000000000000000000000000000000000..8cc4bc1cddd75c9e144aebf8996e027aa1fc24b3
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/selector.js
@@ -0,0 +1,21 @@
+"use strict";
+
+exports.__esModule = true;
+exports["default"] = void 0;
+var _container = _interopRequireDefault(require("./container"));
+var _types = require("./types");
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
+function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
+function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf ? Object.setPrototypeOf.bind() : function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
+var Selector = /*#__PURE__*/function (_Container) {
+  _inheritsLoose(Selector, _Container);
+  function Selector(opts) {
+    var _this;
+    _this = _Container.call(this, opts) || this;
+    _this.type = _types.SELECTOR;
+    return _this;
+  }
+  return Selector;
+}(_container["default"]);
+exports["default"] = Selector;
+module.exports = exports.default;
\ No newline at end of file
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/string.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/string.js
new file mode 100644
index 0000000000000000000000000000000000000000..4749791416b579a3b38308cdcc1f4cde7e61ea41
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/string.js
@@ -0,0 +1,21 @@
+"use strict";
+
+exports.__esModule = true;
+exports["default"] = void 0;
+var _node = _interopRequireDefault(require("./node"));
+var _types = require("./types");
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
+function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
+function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf ? Object.setPrototypeOf.bind() : function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
+var String = /*#__PURE__*/function (_Node) {
+  _inheritsLoose(String, _Node);
+  function String(opts) {
+    var _this;
+    _this = _Node.call(this, opts) || this;
+    _this.type = _types.STRING;
+    return _this;
+  }
+  return String;
+}(_node["default"]);
+exports["default"] = String;
+module.exports = exports.default;
\ No newline at end of file
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/tag.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/tag.js
new file mode 100644
index 0000000000000000000000000000000000000000..224e74de4a2f77002614888fe4b4b00e7062b2fc
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/tag.js
@@ -0,0 +1,21 @@
+"use strict";
+
+exports.__esModule = true;
+exports["default"] = void 0;
+var _namespace = _interopRequireDefault(require("./namespace"));
+var _types = require("./types");
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
+function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
+function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf ? Object.setPrototypeOf.bind() : function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
+var Tag = /*#__PURE__*/function (_Namespace) {
+  _inheritsLoose(Tag, _Namespace);
+  function Tag(opts) {
+    var _this;
+    _this = _Namespace.call(this, opts) || this;
+    _this.type = _types.TAG;
+    return _this;
+  }
+  return Tag;
+}(_namespace["default"]);
+exports["default"] = Tag;
+module.exports = exports.default;
\ No newline at end of file
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/types.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/types.js
new file mode 100644
index 0000000000000000000000000000000000000000..824cc0c73894505a75d5dc8c71fad92b24c1fce7
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/types.js
@@ -0,0 +1,28 @@
+"use strict";
+
+exports.__esModule = true;
+exports.UNIVERSAL = exports.TAG = exports.STRING = exports.SELECTOR = exports.ROOT = exports.PSEUDO = exports.NESTING = exports.ID = exports.COMMENT = exports.COMBINATOR = exports.CLASS = exports.ATTRIBUTE = void 0;
+var TAG = 'tag';
+exports.TAG = TAG;
+var STRING = 'string';
+exports.STRING = STRING;
+var SELECTOR = 'selector';
+exports.SELECTOR = SELECTOR;
+var ROOT = 'root';
+exports.ROOT = ROOT;
+var PSEUDO = 'pseudo';
+exports.PSEUDO = PSEUDO;
+var NESTING = 'nesting';
+exports.NESTING = NESTING;
+var ID = 'id';
+exports.ID = ID;
+var COMMENT = 'comment';
+exports.COMMENT = COMMENT;
+var COMBINATOR = 'combinator';
+exports.COMBINATOR = COMBINATOR;
+var CLASS = 'class';
+exports.CLASS = CLASS;
+var ATTRIBUTE = 'attribute';
+exports.ATTRIBUTE = ATTRIBUTE;
+var UNIVERSAL = 'universal';
+exports.UNIVERSAL = UNIVERSAL;
\ No newline at end of file
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/universal.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/universal.js
new file mode 100644
index 0000000000000000000000000000000000000000..5b5874380b8e9422c0acc283e974fd0b3d8e78f0
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/selectors/universal.js
@@ -0,0 +1,22 @@
+"use strict";
+
+exports.__esModule = true;
+exports["default"] = void 0;
+var _namespace = _interopRequireDefault(require("./namespace"));
+var _types = require("./types");
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
+function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; _setPrototypeOf(subClass, superClass); }
+function _setPrototypeOf(o, p) { _setPrototypeOf = Object.setPrototypeOf ? Object.setPrototypeOf.bind() : function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); }
+var Universal = /*#__PURE__*/function (_Namespace) {
+  _inheritsLoose(Universal, _Namespace);
+  function Universal(opts) {
+    var _this;
+    _this = _Namespace.call(this, opts) || this;
+    _this.type = _types.UNIVERSAL;
+    _this.value = '*';
+    return _this;
+  }
+  return Universal;
+}(_namespace["default"]);
+exports["default"] = Universal;
+module.exports = exports.default;
\ No newline at end of file
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/sortAscending.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/sortAscending.js
new file mode 100644
index 0000000000000000000000000000000000000000..5666d5dc99f89c1f92f1b57762ff575a08a53920
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/sortAscending.js
@@ -0,0 +1,11 @@
+"use strict";
+
+exports.__esModule = true;
+exports["default"] = sortAscending;
+function sortAscending(list) {
+  return list.sort(function (a, b) {
+    return a - b;
+  });
+}
+;
+module.exports = exports.default;
\ No newline at end of file
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/tokenTypes.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/tokenTypes.js
new file mode 100644
index 0000000000000000000000000000000000000000..59d8e6c6bf4cf7e822a0edc32dbb31443a5e0fe0
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/tokenTypes.js
@@ -0,0 +1,70 @@
+"use strict";
+
+exports.__esModule = true;
+exports.word = exports.tilde = exports.tab = exports.str = exports.space = exports.slash = exports.singleQuote = exports.semicolon = exports.plus = exports.pipe = exports.openSquare = exports.openParenthesis = exports.newline = exports.greaterThan = exports.feed = exports.equals = exports.doubleQuote = exports.dollar = exports.cr = exports.comment = exports.comma = exports.combinator = exports.colon = exports.closeSquare = exports.closeParenthesis = exports.caret = exports.bang = exports.backslash = exports.at = exports.asterisk = exports.ampersand = void 0;
+var ampersand = 38; // `&`.charCodeAt(0);
+exports.ampersand = ampersand;
+var asterisk = 42; // `*`.charCodeAt(0);
+exports.asterisk = asterisk;
+var at = 64; // `@`.charCodeAt(0);
+exports.at = at;
+var comma = 44; // `,`.charCodeAt(0);
+exports.comma = comma;
+var colon = 58; // `:`.charCodeAt(0);
+exports.colon = colon;
+var semicolon = 59; // `;`.charCodeAt(0);
+exports.semicolon = semicolon;
+var openParenthesis = 40; // `(`.charCodeAt(0);
+exports.openParenthesis = openParenthesis;
+var closeParenthesis = 41; // `)`.charCodeAt(0);
+exports.closeParenthesis = closeParenthesis;
+var openSquare = 91; // `[`.charCodeAt(0);
+exports.openSquare = openSquare;
+var closeSquare = 93; // `]`.charCodeAt(0);
+exports.closeSquare = closeSquare;
+var dollar = 36; // `$`.charCodeAt(0);
+exports.dollar = dollar;
+var tilde = 126; // `~`.charCodeAt(0);
+exports.tilde = tilde;
+var caret = 94; // `^`.charCodeAt(0);
+exports.caret = caret;
+var plus = 43; // `+`.charCodeAt(0);
+exports.plus = plus;
+var equals = 61; // `=`.charCodeAt(0);
+exports.equals = equals;
+var pipe = 124; // `|`.charCodeAt(0);
+exports.pipe = pipe;
+var greaterThan = 62; // `>`.charCodeAt(0);
+exports.greaterThan = greaterThan;
+var space = 32; // ` `.charCodeAt(0);
+exports.space = space;
+var singleQuote = 39; // `'`.charCodeAt(0);
+exports.singleQuote = singleQuote;
+var doubleQuote = 34; // `"`.charCodeAt(0);
+exports.doubleQuote = doubleQuote;
+var slash = 47; // `/`.charCodeAt(0);
+exports.slash = slash;
+var bang = 33; // `!`.charCodeAt(0);
+exports.bang = bang;
+var backslash = 92; // '\\'.charCodeAt(0);
+exports.backslash = backslash;
+var cr = 13; // '\r'.charCodeAt(0);
+exports.cr = cr;
+var feed = 12; // '\f'.charCodeAt(0);
+exports.feed = feed;
+var newline = 10; // '\n'.charCodeAt(0);
+exports.newline = newline;
+var tab = 9; // '\t'.charCodeAt(0);
+exports.tab = tab;
+
+// Expose aliases primarily for readability.
+var str = singleQuote;
+exports.str = str;
+
+// No good single character representation!
+var comment = -1;
+exports.comment = comment;
+var word = -2;
+exports.word = word;
+var combinator = -3;
+exports.combinator = combinator;
\ No newline at end of file
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/tokenize.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/tokenize.js
new file mode 100644
index 0000000000000000000000000000000000000000..bf61d261b5cd734772be1098daac182d12dc7f44
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/tokenize.js
@@ -0,0 +1,239 @@
+"use strict";
+
+exports.__esModule = true;
+exports.FIELDS = void 0;
+exports["default"] = tokenize;
+var t = _interopRequireWildcard(require("./tokenTypes"));
+var _unescapable, _wordDelimiters;
+function _getRequireWildcardCache(nodeInterop) { if (typeof WeakMap !== "function") return null; var cacheBabelInterop = new WeakMap(); var cacheNodeInterop = new WeakMap(); return (_getRequireWildcardCache = function _getRequireWildcardCache(nodeInterop) { return nodeInterop ? cacheNodeInterop : cacheBabelInterop; })(nodeInterop); }
+function _interopRequireWildcard(obj, nodeInterop) { if (!nodeInterop && obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { "default": obj }; } var cache = _getRequireWildcardCache(nodeInterop); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj["default"] = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
+var unescapable = (_unescapable = {}, _unescapable[t.tab] = true, _unescapable[t.newline] = true, _unescapable[t.cr] = true, _unescapable[t.feed] = true, _unescapable);
+var wordDelimiters = (_wordDelimiters = {}, _wordDelimiters[t.space] = true, _wordDelimiters[t.tab] = true, _wordDelimiters[t.newline] = true, _wordDelimiters[t.cr] = true, _wordDelimiters[t.feed] = true, _wordDelimiters[t.ampersand] = true, _wordDelimiters[t.asterisk] = true, _wordDelimiters[t.bang] = true, _wordDelimiters[t.comma] = true, _wordDelimiters[t.colon] = true, _wordDelimiters[t.semicolon] = true, _wordDelimiters[t.openParenthesis] = true, _wordDelimiters[t.closeParenthesis] = true, _wordDelimiters[t.openSquare] = true, _wordDelimiters[t.closeSquare] = true, _wordDelimiters[t.singleQuote] = true, _wordDelimiters[t.doubleQuote] = true, _wordDelimiters[t.plus] = true, _wordDelimiters[t.pipe] = true, _wordDelimiters[t.tilde] = true, _wordDelimiters[t.greaterThan] = true, _wordDelimiters[t.equals] = true, _wordDelimiters[t.dollar] = true, _wordDelimiters[t.caret] = true, _wordDelimiters[t.slash] = true, _wordDelimiters);
+var hex = {};
+var hexChars = "0123456789abcdefABCDEF";
+for (var i = 0; i < hexChars.length; i++) {
+  hex[hexChars.charCodeAt(i)] = true;
+}
+
+/**
+ *  Returns the last index of the current css word
+ * @param {string} css The string in which the word begins
+ * @param {number} start The index into the string where the word's first letter occurs
+ */
+function consumeWord(css, start) {
+  var next = start;
+  var code;
+  do {
+    code = css.charCodeAt(next);
+    if (wordDelimiters[code]) {
+      return next - 1;
+    } else if (code === t.backslash) {
+      next = consumeEscape(css, next) + 1;
+    } else {
+      // All other characters are part of the word
+      next++;
+    }
+  } while (next < css.length);
+  return next - 1;
+}
+
+/**
+ *  Returns the last index of the escape sequence
+ * @param {string} css The string in which the sequence begins
+ * @param {number} start The index into the string where the escape character (`\`) occurs.
+ */
+function consumeEscape(css, start) {
+  var next = start;
+  var code = css.charCodeAt(next + 1);
+  if (unescapable[code]) {
+    // just consume the escape char
+  } else if (hex[code]) {
+    var hexDigits = 0;
+    // consume up to 6 hex chars
+    do {
+      next++;
+      hexDigits++;
+      code = css.charCodeAt(next + 1);
+    } while (hex[code] && hexDigits < 6);
+    // if fewer than 6 hex chars, a trailing space ends the escape
+    if (hexDigits < 6 && code === t.space) {
+      next++;
+    }
+  } else {
+    // the next char is part of the current word
+    next++;
+  }
+  return next;
+}
+var FIELDS = {
+  TYPE: 0,
+  START_LINE: 1,
+  START_COL: 2,
+  END_LINE: 3,
+  END_COL: 4,
+  START_POS: 5,
+  END_POS: 6
+};
+exports.FIELDS = FIELDS;
+function tokenize(input) {
+  var tokens = [];
+  var css = input.css.valueOf();
+  var _css = css,
+    length = _css.length;
+  var offset = -1;
+  var line = 1;
+  var start = 0;
+  var end = 0;
+  var code, content, endColumn, endLine, escaped, escapePos, last, lines, next, nextLine, nextOffset, quote, tokenType;
+  function unclosed(what, fix) {
+    if (input.safe) {
+      // fyi: this is never set to true.
+      css += fix;
+      next = css.length - 1;
+    } else {
+      throw input.error('Unclosed ' + what, line, start - offset, start);
+    }
+  }
+  while (start < length) {
+    code = css.charCodeAt(start);
+    if (code === t.newline) {
+      offset = start;
+      line += 1;
+    }
+    switch (code) {
+      case t.space:
+      case t.tab:
+      case t.newline:
+      case t.cr:
+      case t.feed:
+        next = start;
+        do {
+          next += 1;
+          code = css.charCodeAt(next);
+          if (code === t.newline) {
+            offset = next;
+            line += 1;
+          }
+        } while (code === t.space || code === t.newline || code === t.tab || code === t.cr || code === t.feed);
+        tokenType = t.space;
+        endLine = line;
+        endColumn = next - offset - 1;
+        end = next;
+        break;
+      case t.plus:
+      case t.greaterThan:
+      case t.tilde:
+      case t.pipe:
+        next = start;
+        do {
+          next += 1;
+          code = css.charCodeAt(next);
+        } while (code === t.plus || code === t.greaterThan || code === t.tilde || code === t.pipe);
+        tokenType = t.combinator;
+        endLine = line;
+        endColumn = start - offset;
+        end = next;
+        break;
+
+      // Consume these characters as single tokens.
+      case t.asterisk:
+      case t.ampersand:
+      case t.bang:
+      case t.comma:
+      case t.equals:
+      case t.dollar:
+      case t.caret:
+      case t.openSquare:
+      case t.closeSquare:
+      case t.colon:
+      case t.semicolon:
+      case t.openParenthesis:
+      case t.closeParenthesis:
+        next = start;
+        tokenType = code;
+        endLine = line;
+        endColumn = start - offset;
+        end = next + 1;
+        break;
+      case t.singleQuote:
+      case t.doubleQuote:
+        quote = code === t.singleQuote ? "'" : '"';
+        next = start;
+        do {
+          escaped = false;
+          next = css.indexOf(quote, next + 1);
+          if (next === -1) {
+            unclosed('quote', quote);
+          }
+          escapePos = next;
+          while (css.charCodeAt(escapePos - 1) === t.backslash) {
+            escapePos -= 1;
+            escaped = !escaped;
+          }
+        } while (escaped);
+        tokenType = t.str;
+        endLine = line;
+        endColumn = start - offset;
+        end = next + 1;
+        break;
+      default:
+        if (code === t.slash && css.charCodeAt(start + 1) === t.asterisk) {
+          next = css.indexOf('*/', start + 2) + 1;
+          if (next === 0) {
+            unclosed('comment', '*/');
+          }
+          content = css.slice(start, next + 1);
+          lines = content.split('\n');
+          last = lines.length - 1;
+          if (last > 0) {
+            nextLine = line + last;
+            nextOffset = next - lines[last].length;
+          } else {
+            nextLine = line;
+            nextOffset = offset;
+          }
+          tokenType = t.comment;
+          line = nextLine;
+          endLine = nextLine;
+          endColumn = next - nextOffset;
+        } else if (code === t.slash) {
+          next = start;
+          tokenType = code;
+          endLine = line;
+          endColumn = start - offset;
+          end = next + 1;
+        } else {
+          next = consumeWord(css, start);
+          tokenType = t.word;
+          endLine = line;
+          endColumn = next - offset;
+        }
+        end = next + 1;
+        break;
+    }
+
+    // Record the token as a 7-tuple; field order matches FIELDS above
+    tokens.push([tokenType,
+    // [0] Token type
+    line,
+    // [1] Starting line
+    start - offset,
+    // [2] Starting column
+    endLine,
+    // [3] Ending line
+    endColumn,
+    // [4] Ending column
+    start,
+    // [5] Start position / Source index
+    end // [6] End position
+    ]);
+
+    // Reset offset for the next token
+    if (nextOffset) {
+      offset = nextOffset;
+      nextOffset = null;
+    }
+    start = end;
+  }
+  return tokens;
+}
\ No newline at end of file
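+
+// Rough sketch of the token shape (inputs are made up): every token is a
+// 7-tuple indexed by FIELDS. Note that `.` is not a word delimiter, so `.a`
+// tokenizes as a single word.
+//
+//   const { FIELDS, default: tokenize } = require('postcss-selector-parser/dist/tokenize')
+//   const tokens = tokenize({
+//     css: '.a > b',
+//     error: (msg, line, col) => new Error(`${msg} (${line}:${col})`),
+//   })
+//   tokens.length            // 5: word, space, combinator, space, word
+//   tokens[2][FIELDS.TYPE]   // -3, i.e. t.combinator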
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/util/ensureObject.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/util/ensureObject.js
new file mode 100644
index 0000000000000000000000000000000000000000..494941adaf212a08887a1bba245e5213c3df5e7f
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/util/ensureObject.js
@@ -0,0 +1,17 @@
+"use strict";
+
+exports.__esModule = true;
+exports["default"] = ensureObject;
+function ensureObject(obj) {
+  for (var _len = arguments.length, props = new Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) {
+    props[_key - 1] = arguments[_key];
+  }
+  while (props.length > 0) {
+    var prop = props.shift();
+    if (!obj[prop]) {
+      obj[prop] = {};
+    }
+    obj = obj[prop];
+  }
+}
+module.exports = exports.default;
\ No newline at end of file
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/util/getProp.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/util/getProp.js
new file mode 100644
index 0000000000000000000000000000000000000000..a2b7a07307b0eca44a5e2f2c8d83ffe8b5d9b8b2
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/util/getProp.js
@@ -0,0 +1,18 @@
+"use strict";
+
+exports.__esModule = true;
+exports["default"] = getProp;
+function getProp(obj) {
+  for (var _len = arguments.length, props = new Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) {
+    props[_key - 1] = arguments[_key];
+  }
+  while (props.length > 0) {
+    var prop = props.shift();
+    if (!obj[prop]) {
+      return undefined;
+    }
+    obj = obj[prop];
+  }
+  return obj;
+}
+module.exports = exports.default;
\ No newline at end of file
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/util/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/util/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..f96ec11b6d47636ecc470506f954c164ebbe63b5
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/util/index.js
@@ -0,0 +1,13 @@
+"use strict";
+
+exports.__esModule = true;
+exports.unesc = exports.stripComments = exports.getProp = exports.ensureObject = void 0;
+var _unesc = _interopRequireDefault(require("./unesc"));
+exports.unesc = _unesc["default"];
+var _getProp = _interopRequireDefault(require("./getProp"));
+exports.getProp = _getProp["default"];
+var _ensureObject = _interopRequireDefault(require("./ensureObject"));
+exports.ensureObject = _ensureObject["default"];
+var _stripComments = _interopRequireDefault(require("./stripComments"));
+exports.stripComments = _stripComments["default"];
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
\ No newline at end of file
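+
+// Tiny sketch of the util helpers re-exported above (example data is made up):
+//
+//   const { ensureObject, getProp, stripComments } = require('postcss-selector-parser/dist/util')
+//   const node = {}
+//   getProp(node, 'raws', 'spaces', 'before')   // undefined, never throws
+//   ensureObject(node, 'raws', 'spaces')        // creates node.raws.spaces = {}
+//   node.raws.spaces.before = '  '
+//   getProp(node, 'raws', 'spaces', 'before')   // '  '
+//   stripComments('a /* note */ b')             // 'a  b'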
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/util/stripComments.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/util/stripComments.js
new file mode 100644
index 0000000000000000000000000000000000000000..0baa0e07eb0b940655f1ff98b2c975b3e6674799
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/util/stripComments.js
@@ -0,0 +1,21 @@
+"use strict";
+
+exports.__esModule = true;
+exports["default"] = stripComments;
+function stripComments(str) {
+  var s = "";
+  var commentStart = str.indexOf("/*");
+  var lastEnd = 0;
+  while (commentStart >= 0) {
+    s = s + str.slice(lastEnd, commentStart);
+    var commentEnd = str.indexOf("*/", commentStart + 2);
+    if (commentEnd < 0) {
+      return s;
+    }
+    lastEnd = commentEnd + 2;
+    commentStart = str.indexOf("/*", lastEnd);
+  }
+  s = s + str.slice(lastEnd);
+  return s;
+}
+module.exports = exports.default;
\ No newline at end of file
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/util/unesc.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/util/unesc.js
new file mode 100644
index 0000000000000000000000000000000000000000..d5d960448b155ba2cec5853bcf77e26b8a6ed2d5
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/postcss-selector-parser/dist/util/unesc.js
@@ -0,0 +1,76 @@
+"use strict";
+
+exports.__esModule = true;
+exports["default"] = unesc;
+// Many thanks for this post which made this migration much easier.
+// https://mathiasbynens.be/notes/css-escapes
+
+/**
+ *
+ * @param {string} str
+ * @returns {[string, number]|undefined}
+ */
+function gobbleHex(str) {
+  var lower = str.toLowerCase();
+  var hex = '';
+  var spaceTerminated = false;
+  for (var i = 0; i < 6 && lower[i] !== undefined; i++) {
+    var code = lower.charCodeAt(i);
+    // check to see if we are dealing with a valid hex char [0-9a-f]
+    var valid = code >= 97 && code <= 102 || code >= 48 && code <= 57;
+    // https://drafts.csswg.org/css-syntax/#consume-escaped-code-point
+    spaceTerminated = code === 32;
+    if (!valid) {
+      break;
+    }
+    hex += lower[i];
+  }
+  if (hex.length === 0) {
+    return undefined;
+  }
+  var codePoint = parseInt(hex, 16);
+  var isSurrogate = codePoint >= 0xD800 && codePoint <= 0xDFFF;
+  // Add special case for
+  // "If this number is zero, or is for a surrogate, or is greater than the maximum allowed code point"
+  // https://drafts.csswg.org/css-syntax/#maximum-allowed-code-point
+  if (isSurrogate || codePoint === 0x0000 || codePoint > 0x10FFFF) {
+    return ["\uFFFD", hex.length + (spaceTerminated ? 1 : 0)];
+  }
+  return [String.fromCodePoint(codePoint), hex.length + (spaceTerminated ? 1 : 0)];
+}
+var CONTAINS_ESCAPE = /\\/;
+function unesc(str) {
+  var needToProcess = CONTAINS_ESCAPE.test(str);
+  if (!needToProcess) {
+    return str;
+  }
+  var ret = "";
+  for (var i = 0; i < str.length; i++) {
+    if (str[i] === "\\") {
+      var gobbled = gobbleHex(str.slice(i + 1, i + 7));
+      if (gobbled !== undefined) {
+        ret += gobbled[0];
+        i += gobbled[1];
+        continue;
+      }
+
+      // Retain a pair of \\ if double escaped `\\\\`
+      // https://github.com/postcss/postcss-selector-parser/commit/268c9a7656fb53f543dc620aa5b73a30ec3ff20e
+      if (str[i + 1] === "\\") {
+        ret += "\\";
+        i++;
+        continue;
+      }
+
+      // if \\ is at the end of the string retain it
+      // https://github.com/postcss/postcss-selector-parser/commit/01a6b346e3612ce1ab20219acc26abdc259ccefb
+      if (str.length === i + 1) {
+        ret += str[i];
+      }
+      continue;
+    }
+    ret += str[i];
+  }
+  return ret;
+}
+module.exports = exports.default;
\ No newline at end of file
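+
+// Behaviour sketch for unesc (illustrative inputs):
+//
+//   const unesc = require('postcss-selector-parser/dist/util/unesc')
+//   unesc('a\\.b')           // 'a.b'    - single-character escape
+//   unesc('foo\\2764 bar')   // 'foo\u2764bar' - hex escape consumes one trailing space
+//   unesc('\\0')             // '\uFFFD' - NUL maps to the replacement character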
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/proc-log/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/proc-log/lib/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..86d90861078dab660bb1a9a4d5986753805fd740
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/proc-log/lib/index.js
@@ -0,0 +1,153 @@
+const META = Symbol('proc-log.meta')
+module.exports = {
+  META: META,
+  output: {
+    LEVELS: [
+      'standard',
+      'error',
+      'buffer',
+      'flush',
+    ],
+    KEYS: {
+      standard: 'standard',
+      error: 'error',
+      buffer: 'buffer',
+      flush: 'flush',
+    },
+    standard: function (...args) {
+      return process.emit('output', 'standard', ...args)
+    },
+    error: function (...args) {
+      return process.emit('output', 'error', ...args)
+    },
+    buffer: function (...args) {
+      return process.emit('output', 'buffer', ...args)
+    },
+    flush: function (...args) {
+      return process.emit('output', 'flush', ...args)
+    },
+  },
+  log: {
+    LEVELS: [
+      'notice',
+      'error',
+      'warn',
+      'info',
+      'verbose',
+      'http',
+      'silly',
+      'timing',
+      'pause',
+      'resume',
+    ],
+    KEYS: {
+      notice: 'notice',
+      error: 'error',
+      warn: 'warn',
+      info: 'info',
+      verbose: 'verbose',
+      http: 'http',
+      silly: 'silly',
+      timing: 'timing',
+      pause: 'pause',
+      resume: 'resume',
+    },
+    error: function (...args) {
+      return process.emit('log', 'error', ...args)
+    },
+    notice: function (...args) {
+      return process.emit('log', 'notice', ...args)
+    },
+    warn: function (...args) {
+      return process.emit('log', 'warn', ...args)
+    },
+    info: function (...args) {
+      return process.emit('log', 'info', ...args)
+    },
+    verbose: function (...args) {
+      return process.emit('log', 'verbose', ...args)
+    },
+    http: function (...args) {
+      return process.emit('log', 'http', ...args)
+    },
+    silly: function (...args) {
+      return process.emit('log', 'silly', ...args)
+    },
+    timing: function (...args) {
+      return process.emit('log', 'timing', ...args)
+    },
+    pause: function () {
+      return process.emit('log', 'pause')
+    },
+    resume: function () {
+      return process.emit('log', 'resume')
+    },
+  },
+  time: {
+    LEVELS: [
+      'start',
+      'end',
+    ],
+    KEYS: {
+      start: 'start',
+      end: 'end',
+    },
+    start: function (name, fn) {
+      process.emit('time', 'start', name)
+      function end () {
+        return process.emit('time', 'end', name)
+      }
+      if (typeof fn === 'function') {
+        const res = fn()
+        if (res && res.finally) {
+          return res.finally(end)
+        }
+        end()
+        return res
+      }
+      return end
+    },
+    end: function (name) {
+      return process.emit('time', 'end', name)
+    },
+  },
+  input: {
+    LEVELS: [
+      'start',
+      'end',
+      'read',
+    ],
+    KEYS: {
+      start: 'start',
+      end: 'end',
+      read: 'read',
+    },
+    start: function (fn) {
+      process.emit('input', 'start')
+      function end () {
+        return process.emit('input', 'end')
+      }
+      if (typeof fn === 'function') {
+        const res = fn()
+        if (res && res.finally) {
+          return res.finally(end)
+        }
+        end()
+        return res
+      }
+      return end
+    },
+    end: function () {
+      return process.emit('input', 'end')
+    },
+    read: function (...args) {
+      let resolve, reject
+      const promise = new Promise((_resolve, _reject) => {
+        resolve = _resolve
+        reject = _reject
+      })
+      process.emit('input', 'read', resolve, reject, ...args)
+      return promise
+    },
+  },
+}
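+
+// Minimal wiring sketch (the consumer side is assumed, not part of this
+// module): proc-log only emits events on `process`; a logger must listen.
+//
+//   const { log, time } = require('proc-log')
+//   process.on('log', (level, ...args) => console.error(`npm ${level}`, ...args))
+//   log.warn('config', 'ignoring unknown key')
+//   const end = time.start('install')   // emits ('time', 'start', 'install')
+//   end()                               // emits ('time', 'end', 'install')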
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/proggy/lib/client.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/proggy/lib/client.js
new file mode 100644
index 0000000000000000000000000000000000000000..2eafb9c75addc231e82cf8efec88d691f30753a8
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/proggy/lib/client.js
@@ -0,0 +1,114 @@
+const EE = require('events')
+const onProgress = Symbol('onProgress')
+const bars = Symbol('bars')
+const listener = Symbol('listener')
+const normData = Symbol('normData')
+class Client extends EE {
+  constructor ({ normalize = false, stopOnDone = false } = {}) {
+    super()
+    this.normalize = !!normalize
+    this.stopOnDone = !!stopOnDone
+    this[bars] = new Map()
+    this[listener] = null
+  }
+
+  get size () {
+    return this[bars].size
+  }
+
+  get listening () {
+    return !!this[listener]
+  }
+
+  addListener (...args) {
+    return this.on(...args)
+  }
+
+  on (ev, ...args) {
+    if (ev === 'progress' && !this[listener]) {
+      this.start()
+    }
+    return super.on(ev, ...args)
+  }
+
+  off (ev, ...args) {
+    return this.removeListener(ev, ...args)
+  }
+
+  removeListener (ev, ...args) {
+    const ret = super.removeListener(ev, ...args)
+    if (ev === 'progress' && this.listeners(ev).length === 0) {
+      this.stop()
+    }
+    return ret
+  }
+
+  stop () {
+    if (this[listener]) {
+      process.removeListener('progress', this[listener])
+      this[listener] = null
+    }
+  }
+
+  start () {
+    if (!this[listener]) {
+      this[listener] = (...args) => this[onProgress](...args)
+      process.on('progress', this[listener])
+    }
+  }
+
+  [onProgress] (key, data) {
+    data = this[normData](key, data)
+    if (!this[bars].has(key)) {
+      this.emit('bar', key, data)
+    }
+    this[bars].set(key, data)
+    this.emit('progress', key, data)
+    if (data.done) {
+      this[bars].delete(key)
+      this.emit('barDone', key, data)
+      if (this.size === 0) {
+        if (this.stopOnDone) {
+          this.stop()
+        }
+        this.emit('done')
+      }
+    }
+  }
+
+  [normData] (key, data) {
+    const actualValue = data.value
+    const actualTotal = data.total
+    let value = actualValue
+    let total = actualTotal
+    const done = data.done || value >= total
+    if (this.normalize) {
+      const bar = this[bars].get(key)
+      total = 100
+      if (done) {
+        value = 100
+      } else {
+        // show value as a portion of 100
+        const pct = 100 * actualValue / actualTotal
+        if (bar) {
+          // don't ever go backwards, and don't stand still
+          // move at least 1% of the remaining value if it wouldn't move.
+          value = (pct > bar.value) ? pct
+            : (100 - bar.value) / 100 + bar.value
+        }
+      }
+    }
+    // include the key
+    return {
+      ...data,
+      key,
+      name: data.name || key,
+      value,
+      total,
+      actualValue,
+      actualTotal,
+      done,
+    }
+  }
+}
+module.exports = Client
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/proggy/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/proggy/lib/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..834948b4ff8603091f2ff6020d31388d64af1d10
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/proggy/lib/index.js
@@ -0,0 +1,15 @@
+exports.Client = require('./client.js')
+exports.Tracker = require('./tracker.js')
+
+const trackers = new Map()
+exports.createTracker = (name, key, total) => {
+  const tracker = new exports.Tracker(name, key, total)
+  if (trackers.has(tracker.key)) {
+    const msg = `proggy: duplicate progress id ${JSON.stringify(tracker.key)}`
+    throw new Error(msg)
+  }
+  trackers.set(tracker.key, tracker)
+  tracker.on('done', () => trackers.delete(tracker.key))
+  return tracker
+}
+exports.createClient = (options = {}) => new exports.Client(options)
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/proggy/lib/tracker.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/proggy/lib/tracker.js
new file mode 100644
index 0000000000000000000000000000000000000000..56c78d9434dc7cfc946259280be36b31674a9c2c
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/proggy/lib/tracker.js
@@ -0,0 +1,68 @@
+// The tracker class is intentionally as naive as possible. It is just
+// an ergonomic wrapper around process.emit('progress', ...)
+const EE = require('events')
+class Tracker extends EE {
+  constructor (name, key, total) {
+    super()
+    if (!name) {
+      throw new Error('proggy: Tracker needs a name')
+    }
+
+    if (typeof key === 'number' && !total) {
+      total = key
+      key = null
+    }
+
+    if (!total) {
+      total = 100
+    }
+
+    if (!key) {
+      key = name
+    }
+
+    this.done = false
+    this.name = name
+    this.key = key
+    this.value = 0
+    this.total = total
+  }
+
+  finish (metadata = {}) {
+    this.update(this.total, this.total, metadata)
+  }
+
+  update (value, total, metadata) {
+    if (!metadata) {
+      if (total && typeof total === 'object') {
+        metadata = total
+      } else {
+        metadata = {}
+      }
+    }
+    if (typeof total !== 'number') {
+      total = this.total
+    }
+
+    if (this.done) {
+      const msg = `proggy: updating completed tracker: ${JSON.stringify(this.key)}`
+      throw new Error(msg)
+    }
+    this.value = value
+    this.total = total
+    const done = this.value >= this.total
+    process.emit('progress', this.key, {
+      ...metadata,
+      name: this.name,
+      key: this.key,
+      value,
+      total,
+      done,
+    })
+    if (done) {
+      this.done = true
+      this.emit('done')
+    }
+  }
+}
+module.exports = Tracker
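+
+// Flow sketch (assumed usage through the package index): a Tracker emits
+// process-level 'progress' events and a Client aggregates them.
+//
+//   const { createClient, createTracker } = require('proggy')
+//   const client = createClient({ normalize: true })
+//   client.on('progress', (key, { value, total }) => {
+//     // with normalize: true, value is scaled against a total of 100
+//   })
+//   const tracker = createTracker('extract', 3)   // key defaults to the name
+//   tracker.update(1)
+//   tracker.update(2)
+//   tracker.finish()   // done: the client drops the bar and emits 'done'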
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/promise-call-limit/dist/commonjs/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/promise-call-limit/dist/commonjs/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..b32a85bb11aa397ab5ecc810a5b90b87e21db8fa
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/promise-call-limit/dist/commonjs/index.js
@@ -0,0 +1,87 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.callLimit = void 0;
+const os = __importStar(require("node:os"));
+// availableParallelism is only available since node v19; for older versions
+// use cpus(). cpus() can return an empty list if /proc is not mounted, so
+// fall back to 1 in that case.
+/* c8 ignore start */
+const defLimit = 'availableParallelism' in os ?
+    Math.max(1, os.availableParallelism() - 1)
+    : Math.max(1, os.cpus().length - 1);
+const callLimit = (queue, { limit = defLimit, rejectLate } = {}) => new Promise((res, rej) => {
+    let active = 0;
+    let current = 0;
+    const results = [];
+    // Whether or not we rejected; tracked separately from the rejection
+    // value in case the rejection itself is falsey.
+    let rejected = false;
+    let rejection;
+    const reject = (er) => {
+        if (rejected)
+            return;
+        rejected = true;
+        rejection ??= er;
+        if (!rejectLate)
+            rej(rejection);
+    };
+    let resolved = false;
+    const resolve = () => {
+        if (resolved || active > 0)
+            return;
+        resolved = true;
+        res(results);
+    };
+    const run = () => {
+        const c = current++;
+        if (c >= queue.length)
+            return rejected ? reject() : resolve();
+        active++;
+        const step = queue[c];
+        /* c8 ignore start */
+        if (!step)
+            throw new Error('walked off queue');
+        /* c8 ignore stop */
+        results[c] = step()
+            .then(result => {
+            active--;
+            results[c] = result;
+            return result;
+        }, er => {
+            active--;
+            reject(er);
+        })
+            .then(result => {
+            if (rejected && active === 0)
+                return rej(rejection);
+            run();
+            return result;
+        });
+    };
+    for (let i = 0; i < limit; i++)
+        run();
+});
+exports.callLimit = callLimit;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
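+
+// Usage sketch (made-up jobs): the queue holds functions returning promises;
+// results come back in queue order regardless of completion order.
+//
+//   const { callLimit } = require('promise-call-limit')
+//   const queue = [500, 100, 300].map(ms => () =>
+//     new Promise(res => setTimeout(() => res(ms), ms)))
+//   const results = await callLimit(queue, { limit: 2 })
+//   // results => [500, 100, 300]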
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/promise-call-limit/dist/commonjs/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/promise-call-limit/dist/commonjs/package.json
new file mode 100644
index 0000000000000000000000000000000000000000..5bbefffbabee392d1855491b84dc0a716b6a3bf2
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/promise-call-limit/dist/commonjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/promise-call-limit/dist/esm/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/promise-call-limit/dist/esm/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..fe709db7fc04cc4c24b7cf6e2b98c3f8bd08d076
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/promise-call-limit/dist/esm/index.js
@@ -0,0 +1,60 @@
+import * as os from 'node:os';
+// availableParallelism is only available since node v19; for older versions
+// use cpus(). cpus() can return an empty list if /proc is not mounted, so
+// fall back to 1 in that case.
+/* c8 ignore start */
+const defLimit = 'availableParallelism' in os ?
+    Math.max(1, os.availableParallelism() - 1)
+    : Math.max(1, os.cpus().length - 1);
+export const callLimit = (queue, { limit = defLimit, rejectLate } = {}) => new Promise((res, rej) => {
+    let active = 0;
+    let current = 0;
+    const results = [];
+    // Whether or not we rejected; tracked separately from the rejection
+    // value in case the rejection itself is falsey.
+    let rejected = false;
+    let rejection;
+    const reject = (er) => {
+        if (rejected)
+            return;
+        rejected = true;
+        rejection ??= er;
+        if (!rejectLate)
+            rej(rejection);
+    };
+    let resolved = false;
+    const resolve = () => {
+        if (resolved || active > 0)
+            return;
+        resolved = true;
+        res(results);
+    };
+    const run = () => {
+        const c = current++;
+        if (c >= queue.length)
+            return rejected ? reject() : resolve();
+        active++;
+        const step = queue[c];
+        /* c8 ignore start */
+        if (!step)
+            throw new Error('walked off queue');
+        /* c8 ignore stop */
+        results[c] = step()
+            .then(result => {
+            active--;
+            results[c] = result;
+            return result;
+        }, er => {
+            active--;
+            reject(er);
+        })
+            .then(result => {
+            if (rejected && active === 0)
+                return rej(rejection);
+            run();
+            return result;
+        });
+    };
+    for (let i = 0; i < limit; i++)
+        run();
+});
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/promise-call-limit/dist/esm/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/promise-call-limit/dist/esm/package.json
new file mode 100644
index 0000000000000000000000000000000000000000..3dbc1ca591c0557e35b6004aeba250e6a70b56e3
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/promise-call-limit/dist/esm/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/read-cmd-shim/lib/index.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/read-cmd-shim/lib/index.js
new file mode 100644
index 0000000000000000000000000000000000000000..dafc874463f63efb227a6a6dcde5020fdd40453e
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/read-cmd-shim/lib/index.js
@@ -0,0 +1,75 @@
+const fs = require('fs')
+const { promisify } = require('util')
+const { readFileSync } = fs
+const readFile = promisify(fs.readFile)
+
+const extractPath = (path, cmdshimContents) => {
+  if (/[.]cmd$/.test(path)) {
+    return extractPathFromCmd(cmdshimContents)
+  } else if (/[.]ps1$/.test(path)) {
+    return extractPathFromPowershell(cmdshimContents)
+  } else {
+    return extractPathFromCygwin(cmdshimContents)
+  }
+}
+
+const extractPathFromPowershell = cmdshimContents => {
+  const matches = cmdshimContents.match(/"[$]basedir[/]([^"]+?)"\s+[$]args/)
+  return matches && matches[1]
+}
+
+const extractPathFromCmd = cmdshimContents => {
+  const matches = cmdshimContents.match(/"%(?:~dp0|dp0%)\\([^"]+?)"\s+%[*]/)
+  return matches && matches[1]
+}
+
+const extractPathFromCygwin = cmdshimContents => {
+  const matches = cmdshimContents.match(/"[$]basedir[/]([^"]+?)"\s+"[$]@"/)
+  return matches && matches[1]
+}
+
+const wrapError = (thrown, newError) => {
+  newError.message = thrown.message
+  newError.code = thrown.code
+  newError.path = thrown.path
+  return newError
+}
+
+const notaShim = (path, er) => {
+  if (!er) {
+    er = new Error()
+    Error.captureStackTrace(er, notaShim)
+  }
+  er.code = 'ENOTASHIM'
+  er.message = `Can't read shim path from '${path}', ` +
+    `it doesn't appear to be a cmd-shim`
+  return er
+}
+
+const readCmdShim = path => {
+  // create a new error to capture the stack trace from this point,
+  // instead of getting some opaque stack into node's internals
+  const er = new Error()
+  Error.captureStackTrace(er, readCmdShim)
+  return readFile(path).then(contents => {
+    const destination = extractPath(path, contents.toString())
+    if (destination) {
+      return destination
+    }
+    throw notaShim(path, er)
+  }, readFileEr => {
+    throw wrapError(readFileEr, er)
+  })
+}
+
+const readCmdShimSync = path => {
+  const contents = readFileSync(path)
+  const destination = extractPath(path, contents.toString())
+  if (!destination) {
+    throw notaShim(path)
+  }
+  return destination
+}
+
+readCmdShim.sync = readCmdShimSync
+module.exports = readCmdShim
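+
+// Sketch with a hypothetical shim: for a .cmd wrapper containing
+// `"%~dp0\..\foo\cli.js" %*`, the regexes above recover the relative target.
+//
+//   const readCmdShim = require('read-cmd-shim')
+//   await readCmdShim('node_modules/.bin/foo.cmd')   // '..\\foo\\cli.js'
+//   readCmdShim.sync('node_modules/.bin/foo')        // reads the cygwin shim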
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/read/dist/commonjs/package.json b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/read/dist/commonjs/package.json
new file mode 100644
index 0000000000000000000000000000000000000000..5bbefffbabee392d1855491b84dc0a716b6a3bf2
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/read/dist/commonjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/read/dist/commonjs/read.js b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/read/dist/commonjs/read.js
new file mode 100644
index 0000000000000000000000000000000000000000..744a5f3bf4baf87b5dc1ffaf89b11ff1667db0fe
--- /dev/null
+++ b/data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/read/dist/commonjs/read.js
@@ -0,0 +1,94 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.read = read;
+const mute_stream_1 = __importDefault(require("mute-stream"));
+const readline_1 = require("readline");
+async function read({ default: def, input = process.stdin, output = process.stdout, completer, prompt = '', silent, timeout, edit, terminal, replace, history, }) {
+    if (typeof def !== 'undefined' &&
+        typeof def !== 'string' &&
+        typeof def !== 'number') {
+        throw new Error('default value must be string or number');
+    }
+    let editDef = false;
+    const defString = def?.toString();
+    prompt = prompt.trim() + ' ';
+    terminal = !!(terminal || output.isTTY);
+    if (defString) {
+        if (silent) {
+            prompt += '(