CSDDSFSFSAFSAF commited on
Commit
10dd5ba
·
verified ·
1 Parent(s): e7c9012

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/man/man1/npm-audit.1 +447 -0
  2. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/man/man1/npm-help.1 +47 -0
  3. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/man/man1/npm-link.1 +356 -0
  4. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/man/man1/npm-search.1 +156 -0
  5. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/man/man1/npm.1 +131 -0
  6. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/agent-base/dist/helpers.js +66 -0
  7. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/ansi-regex/index.js +10 -0
  8. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/ansi-regex/license +9 -0
  9. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/ansi-regex/package.json +55 -0
  10. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/aproba/LICENSE +14 -0
  11. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/aproba/index.js +105 -0
  12. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/aproba/package.json +35 -0
  13. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/archy/LICENSE +18 -0
  14. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/archy/index.js +35 -0
  15. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/archy/package.json +40 -0
  16. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/balanced-match/LICENSE.md +21 -0
  17. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/balanced-match/index.js +62 -0
  18. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/balanced-match/package.json +48 -0
  19. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/LICENSE +15 -0
  20. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/package.json +61 -0
  21. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/binary-extensions/binary-extensions.json +264 -0
  22. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/binary-extensions/index.js +3 -0
  23. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/binary-extensions/license +10 -0
  24. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/binary-extensions/package.json +45 -0
  25. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cacache/LICENSE.md +16 -0
  26. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cacache/lib/content/path.js +29 -0
  27. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cacache/lib/content/read.js +165 -0
  28. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cacache/lib/content/rm.js +18 -0
  29. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cacache/lib/content/write.js +206 -0
  30. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cacache/lib/entry-index.js +336 -0
  31. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cacache/lib/get.js +170 -0
  32. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cacache/lib/index.js +42 -0
  33. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cacache/lib/memoization.js +72 -0
  34. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cacache/lib/put.js +80 -0
  35. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cacache/lib/rm.js +31 -0
  36. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cacache/lib/util/glob.js +7 -0
  37. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cacache/lib/util/hash-to-segments.js +7 -0
  38. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cacache/lib/util/tmp.js +26 -0
  39. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cacache/lib/verify.js +258 -0
  40. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cacache/package.json +82 -0
  41. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/chalk/license +9 -0
  42. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/chalk/package.json +83 -0
  43. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/chownr/LICENSE.md +63 -0
  44. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/chownr/package.json +69 -0
  45. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/ci-info/LICENSE +21 -0
  46. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/ci-info/index.js +104 -0
  47. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/ci-info/package.json +54 -0
  48. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/ci-info/vendors.json +358 -0
  49. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cidr-regex/LICENSE +22 -0
  50. data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cidr-regex/package.json +38 -0
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/man/man1/npm-audit.1 ADDED
@@ -0,0 +1,447 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ .TH "NPM-AUDIT" "1" "October 2025" "NPM@11.6.2" ""
2
+ .SH "NAME"
3
+ \fBnpm-audit\fR - Run a security audit
4
+ .SS "Synopsis"
5
+ .P
6
+ .RS 2
7
+ .nf
8
+ npm audit \[lB]fix|signatures\[rB]
9
+ .fi
10
+ .RE
11
+ .SS "Description"
12
+ .P
13
+ The audit command submits a description of the dependencies configured in your project to your default registry and asks for a report of known vulnerabilities. If any vulnerabilities are found, then the impact and appropriate remediation will be calculated. If the \fBfix\fR argument is provided, then remediations will be applied to the package tree.
14
+ .P
15
+ The command will exit with a 0 exit code if no vulnerabilities were found.
16
+ .P
17
+ Note that some vulnerabilities cannot be fixed automatically and will require manual intervention or review. Also note that since \fBnpm audit fix\fR runs a full-fledged \fBnpm install\fR under the hood, all configs that apply to the installer will also apply to \fBnpm install\fR -- so things like \fBnpm audit fix --package-lock-only\fR will work as expected.
18
+ .P
19
+ By default, the audit command will exit with a non-zero code if any vulnerability is found. It may be useful in CI environments to include the \fB--audit-level\fR parameter to specify the minimum vulnerability level that will cause the command to fail. This option does not filter the report output, it simply changes the command's failure threshold.
20
+ .SS "Package lock"
21
+ .P
22
+ By default npm requires a package-lock or shrinkwrap in order to run the audit. You can bypass the package lock with \fB--no-package-lock\fR but be aware the results may be different with every run, since npm will re-build the dependency tree each time.
23
+ .SS "Audit Signatures"
24
+ .P
25
+ To ensure the integrity of packages you download from the public npm registry, or any registry that supports signatures, you can verify the registry signatures of downloaded packages using the npm CLI.
26
+ .P
27
+ Registry signatures can be verified using the following \fBaudit\fR command:
28
+ .P
29
+ .RS 2
30
+ .nf
31
+ $ npm audit signatures
32
+ .fi
33
+ .RE
34
+ .P
35
+ The \fBaudit signatures\fR command will also verify the provenance attestations of downloaded packages. Because provenance attestations are such a new feature, security features may be added to (or changed in) the attestation format over time. To ensure that you're always able to verify attestation signatures check that you're running the latest version of the npm CLI. Please note this often means updating npm beyond the version that ships with Node.js.
36
+ .P
37
+ The npm CLI supports registry signatures and signing keys provided by any registry if the following conventions are followed:
38
+ .RS 0
39
+ .IP 1. 4
40
+ Signatures are provided in the package's \fBpackument\fR in each published version within the \fBdist\fR object:
41
+ .RE 0
42
+
43
+ .P
44
+ .RS 2
45
+ .nf
46
+ "dist":{
47
+ "..omitted..": "..omitted..",
48
+ "signatures": \[lB]{
49
+ "keyid": "SHA256:{{SHA256_PUBLIC_KEY}}",
50
+ "sig": "a312b9c3cb4a1b693e8ebac5ee1ca9cc01f2661c14391917dcb111517f72370809..."
51
+ }\[rB]
52
+ }
53
+ .fi
54
+ .RE
55
+ .P
56
+ See this \fBexample\fR \fI\(lahttps://registry.npmjs.org/light-cycle/1.4.3\(ra\fR of a signed package from the public npm registry.
57
+ .P
58
+ The \fBsig\fR is generated using the following template: \fB${package.name}@${package.version}:${package.dist.integrity}\fR and the \fBkeyid\fR has to match one of the public signing keys below.
59
+ .RS 0
60
+ .IP 2. 4
61
+ Public signing keys are provided at \fBregistry-host.tld/-/npm/v1/keys\fR in the following format:
62
+ .RE 0
63
+
64
+ .P
65
+ .RS 2
66
+ .nf
67
+ {
68
+ "keys": \[lB]{
69
+ "expires": null,
70
+ "keyid": "SHA256:{{SHA256_PUBLIC_KEY}}",
71
+ "keytype": "ecdsa-sha2-nistp256",
72
+ "scheme": "ecdsa-sha2-nistp256",
73
+ "key": "{{B64_PUBLIC_KEY}}"
74
+ }\[rB]
75
+ }
76
+ .fi
77
+ .RE
78
+ .P
79
+ Keys response:
80
+ .RS 0
81
+ .IP \(bu 4
82
+ \fBexpires\fR: null or a simplified extended \fBISO 8601 format\fR \fI\(lahttps://en.wikipedia.org/wiki/ISO_8601\(ra\fR: \fBYYYY-MM-DDTHH:mm:ss.sssZ\fR
83
+ .IP \(bu 4
84
+ \fBkeyid\fR: sha256 fingerprint of the public key
85
+ .IP \(bu 4
86
+ \fBkeytype\fR: only \fBecdsa-sha2-nistp256\fR is currently supported by the npm CLI
87
+ .IP \(bu 4
88
+ \fBscheme\fR: only \fBecdsa-sha2-nistp256\fR is currently supported by the npm CLI
89
+ .IP \(bu 4
90
+ \fBkey\fR: base64 encoded public key
91
+ .RE 0
92
+
93
+ .P
94
+ See this \fBexample key's response from the public npm registry\fR \fI\(lahttps://registry.npmjs.org/-/npm/v1/keys\(ra\fR.
95
+ .SS "Audit Endpoints"
96
+ .P
97
+ There are two audit endpoints that npm may use to fetch vulnerability information: the \fBBulk Advisory\fR endpoint and the \fBQuick Audit\fR endpoint.
98
+ .SS "Bulk Advisory Endpoint"
99
+ .P
100
+ As of version 7, npm uses the much faster \fBBulk Advisory\fR endpoint to optimize the speed of calculating audit results.
101
+ .P
102
+ npm will generate a JSON payload with the name and list of versions of each package in the tree, and POST it to the default configured registry at the path \fB/-/npm/v1/security/advisories/bulk\fR.
103
+ .P
104
+ Any packages in the tree that do not have a \fBversion\fR field in their package.json file will be ignored. If any \fB--omit\fR options are specified (either via the \fB\[rs]fB--omit\[rs]fR config\fR \fI\(la/using-npm/config#omit\(ra\fR, or one of the shorthands such as \fB--production\fR, \fB--only=dev\fR, and so on), then packages will be omitted from the submitted payload as appropriate.
105
+ .P
106
+ If the registry responds with an error, or with an invalid response, then npm will attempt to load advisory data from the \fBQuick Audit\fR endpoint.
107
+ .P
108
+ The expected result will contain a set of advisory objects for each dependency that matches the advisory range. Each advisory object contains a \fBname\fR, \fBurl\fR, \fBid\fR, \fBseverity\fR, \fBvulnerable_versions\fR, and \fBtitle\fR.
109
+ .P
110
+ npm then uses these advisory objects to calculate vulnerabilities and meta-vulnerabilities of the dependencies within the tree.
111
+ .SS "Quick Audit Endpoint"
112
+ .P
113
+ If the \fBBulk Advisory\fR endpoint returns an error, or invalid data, npm will attempt to load advisory data from the \fBQuick Audit\fR endpoint, which is considerably slower in most cases.
114
+ .P
115
+ The full package tree as found in \fBpackage-lock.json\fR is submitted, along with the following pieces of additional metadata:
116
+ .RS 0
117
+ .IP \(bu 4
118
+ \fBnpm_version\fR
119
+ .IP \(bu 4
120
+ \fBnode_version\fR
121
+ .IP \(bu 4
122
+ \fBplatform\fR
123
+ .IP \(bu 4
124
+ \fBarch\fR
125
+ .IP \(bu 4
126
+ \fBnode_env\fR
127
+ .RE 0
128
+
129
+ .P
130
+ All packages in the tree are submitted to the Quick Audit endpoint. Omitted dependency types are skipped when generating the report.
131
+ .SS "Scrubbing"
132
+ .P
133
+ Out of an abundance of caution, npm versions 5 and 6 would "scrub" any packages from the submitted report if their name contained a \fB/\fR character, so as to avoid leaking the names of potentially private packages or git URLs.
134
+ .P
135
+ However, in practice, this resulted in audits often failing to properly detect meta-vulnerabilities, because the tree would appear to be invalid due to missing dependencies, and prevented the detection of vulnerabilities in package trees that used git dependencies or private modules.
136
+ .P
137
+ This scrubbing has been removed from npm as of version 7.
138
+ .SS "Calculating Meta-Vulnerabilities and Remediations"
139
+ .P
140
+ npm uses the \fB\[rs]fB@npmcli/metavuln-calculator\[rs]fR\fR \fI\(lahttp://npm.im/@npmcli/metavuln-calculator\(ra\fR module to turn a set of security advisories into a set of "vulnerability" objects. A "meta-vulnerability" is a dependency that is vulnerable by virtue of dependence on vulnerable versions of a vulnerable package.
141
+ .P
142
+ For example, if the package \fBfoo\fR is vulnerable in the range \fB>=1.0.2 <2.0.0\fR, and the package \fBbar\fR depends on \fBfoo@^1.1.0\fR, then that version of \fBbar\fR can only be installed by installing a vulnerable version of \fBfoo\fR. In this case, \fBbar\fR is a "metavulnerability".
143
+ .P
144
+ Once metavulnerabilities for a given package are calculated, they are cached in the \fB~/.npm\fR folder and only re-evaluated if the advisory range changes, or a new version of the package is published (in which case, the new version is checked for metavulnerable status as well).
145
+ .P
146
+ If the chain of metavulnerabilities extends all the way to the root project, and it cannot be updated without changing its dependency ranges, then \fBnpm audit fix\fR will require the \fB--force\fR option to apply the remediation. If remediations do not require changes to the dependency ranges, then all vulnerable packages will be updated to a version that does not have an advisory or metavulnerability posted against it.
147
+ .SS "Exit Code"
148
+ .P
149
+ The \fBnpm audit\fR command will exit with a 0 exit code if no vulnerabilities were found. The \fBnpm audit fix\fR command will exit with 0 exit code if no vulnerabilities are found \fIor\fR if the remediation is able to successfully fix all vulnerabilities.
150
+ .P
151
+ If vulnerabilities were found the exit code will depend on the \fB\[rs]fBaudit-level\[rs]fR config\fR \fI\(la/using-npm/config#audit-level\(ra\fR.
152
+ .SS "Examples"
153
+ .P
154
+ Scan your project for vulnerabilities and automatically install any compatible updates to vulnerable dependencies:
155
+ .P
156
+ .RS 2
157
+ .nf
158
+ $ npm audit fix
159
+ .fi
160
+ .RE
161
+ .P
162
+ Run \fBaudit fix\fR without modifying \fBnode_modules\fR, but still updating the pkglock:
163
+ .P
164
+ .RS 2
165
+ .nf
166
+ $ npm audit fix --package-lock-only
167
+ .fi
168
+ .RE
169
+ .P
170
+ Skip updating \fBdevDependencies\fR:
171
+ .P
172
+ .RS 2
173
+ .nf
174
+ $ npm audit fix --only=prod
175
+ .fi
176
+ .RE
177
+ .P
178
+ Have \fBaudit fix\fR install SemVer-major updates to toplevel dependencies, not just SemVer-compatible ones:
179
+ .P
180
+ .RS 2
181
+ .nf
182
+ $ npm audit fix --force
183
+ .fi
184
+ .RE
185
+ .P
186
+ Do a dry run to get an idea of what \fBaudit fix\fR will do, and \fIalso\fR output install information in JSON format:
187
+ .P
188
+ .RS 2
189
+ .nf
190
+ $ npm audit fix --dry-run --json
191
+ .fi
192
+ .RE
193
+ .P
194
+ Scan your project for vulnerabilities and just show the details, without fixing anything:
195
+ .P
196
+ .RS 2
197
+ .nf
198
+ $ npm audit
199
+ .fi
200
+ .RE
201
+ .P
202
+ Get the detailed audit report in JSON format:
203
+ .P
204
+ .RS 2
205
+ .nf
206
+ $ npm audit --json
207
+ .fi
208
+ .RE
209
+ .P
210
+ Fail an audit only if the results include a vulnerability with a level of moderate or higher:
211
+ .P
212
+ .RS 2
213
+ .nf
214
+ $ npm audit --audit-level=moderate
215
+ .fi
216
+ .RE
217
+ .SS "Configuration"
218
+ .SS "\fBaudit-level\fR"
219
+ .RS 0
220
+ .IP \(bu 4
221
+ Default: null
222
+ .IP \(bu 4
223
+ Type: null, "info", "low", "moderate", "high", "critical", or "none"
224
+ .RE 0
225
+
226
+ .P
227
+ The minimum level of vulnerability for \fBnpm audit\fR to exit with a non-zero exit code.
228
+ .SS "\fBdry-run\fR"
229
+ .RS 0
230
+ .IP \(bu 4
231
+ Default: false
232
+ .IP \(bu 4
233
+ Type: Boolean
234
+ .RE 0
235
+
236
+ .P
237
+ Indicates that you don't want npm to make any changes and that it should only report what it would have done. This can be passed into any of the commands that modify your local installation, eg, \fBinstall\fR, \fBupdate\fR, \fBdedupe\fR, \fBuninstall\fR, as well as \fBpack\fR and \fBpublish\fR.
238
+ .P
239
+ Note: This is NOT honored by other network related commands, eg \fBdist-tags\fR, \fBowner\fR, etc.
240
+ .SS "\fBforce\fR"
241
+ .RS 0
242
+ .IP \(bu 4
243
+ Default: false
244
+ .IP \(bu 4
245
+ Type: Boolean
246
+ .RE 0
247
+
248
+ .P
249
+ Removes various protections against unfortunate side effects, common mistakes, unnecessary performance degradation, and malicious input.
250
+ .RS 0
251
+ .IP \(bu 4
252
+ Allow clobbering non-npm files in global installs.
253
+ .IP \(bu 4
254
+ Allow the \fBnpm version\fR command to work on an unclean git repository.
255
+ .IP \(bu 4
256
+ Allow deleting the cache folder with \fBnpm cache clean\fR.
257
+ .IP \(bu 4
258
+ Allow installing packages that have an \fBengines\fR declaration requiring a different version of npm.
259
+ .IP \(bu 4
260
+ Allow installing packages that have an \fBengines\fR declaration requiring a different version of \fBnode\fR, even if \fB--engine-strict\fR is enabled.
261
+ .IP \(bu 4
262
+ Allow \fBnpm audit fix\fR to install modules outside your stated dependency range (including SemVer-major changes).
263
+ .IP \(bu 4
264
+ Allow unpublishing all versions of a published package.
265
+ .IP \(bu 4
266
+ Allow conflicting peerDependencies to be installed in the root project.
267
+ .IP \(bu 4
268
+ Implicitly set \fB--yes\fR during \fBnpm init\fR.
269
+ .IP \(bu 4
270
+ Allow clobbering existing values in \fBnpm pkg\fR
271
+ .IP \(bu 4
272
+ Allow unpublishing of entire packages (not just a single version).
273
+ .RE 0
274
+
275
+ .P
276
+ If you don't have a clear idea of what you want to do, it is strongly recommended that you do not use this option!
277
+ .SS "\fBjson\fR"
278
+ .RS 0
279
+ .IP \(bu 4
280
+ Default: false
281
+ .IP \(bu 4
282
+ Type: Boolean
283
+ .RE 0
284
+
285
+ .P
286
+ Whether or not to output JSON data, rather than the normal output.
287
+ .RS 0
288
+ .IP \(bu 4
289
+ In \fBnpm pkg set\fR it enables parsing set values with JSON.parse() before saving them to your \fBpackage.json\fR.
290
+ .RE 0
291
+
292
+ .P
293
+ Not supported by all npm commands.
294
+ .SS "\fBpackage-lock-only\fR"
295
+ .RS 0
296
+ .IP \(bu 4
297
+ Default: false
298
+ .IP \(bu 4
299
+ Type: Boolean
300
+ .RE 0
301
+
302
+ .P
303
+ If set to true, the current operation will only use the \fBpackage-lock.json\fR, ignoring \fBnode_modules\fR.
304
+ .P
305
+ For \fBupdate\fR this means only the \fBpackage-lock.json\fR will be updated, instead of checking \fBnode_modules\fR and downloading dependencies.
306
+ .P
307
+ For \fBlist\fR this means the output will be based on the tree described by the \fBpackage-lock.json\fR, rather than the contents of \fBnode_modules\fR.
308
+ .SS "\fBpackage-lock\fR"
309
+ .RS 0
310
+ .IP \(bu 4
311
+ Default: true
312
+ .IP \(bu 4
313
+ Type: Boolean
314
+ .RE 0
315
+
316
+ .P
317
+ If set to false, then ignore \fBpackage-lock.json\fR files when installing. This will also prevent \fIwriting\fR \fBpackage-lock.json\fR if \fBsave\fR is true.
318
+ .SS "\fBomit\fR"
319
+ .RS 0
320
+ .IP \(bu 4
321
+ Default: 'dev' if the \fBNODE_ENV\fR environment variable is set to 'production'; otherwise, empty.
322
+ .IP \(bu 4
323
+ Type: "dev", "optional", or "peer" (can be set multiple times)
324
+ .RE 0
325
+
326
+ .P
327
+ Dependency types to omit from the installation tree on disk.
328
+ .P
329
+ Note that these dependencies \fIare\fR still resolved and added to the \fBpackage-lock.json\fR or \fBnpm-shrinkwrap.json\fR file. They are just not physically installed on disk.
330
+ .P
331
+ If a package type appears in both the \fB--include\fR and \fB--omit\fR lists, then it will be included.
332
+ .P
333
+ If the resulting omit list includes \fB'dev'\fR, then the \fBNODE_ENV\fR environment variable will be set to \fB'production'\fR for all lifecycle scripts.
334
+ .SS "\fBinclude\fR"
335
+ .RS 0
336
+ .IP \(bu 4
337
+ Default:
338
+ .IP \(bu 4
339
+ Type: "prod", "dev", "optional", or "peer" (can be set multiple times)
340
+ .RE 0
341
+
342
+ .P
343
+ Option that allows for defining which types of dependencies to install.
344
+ .P
345
+ This is the inverse of \fB--omit=<type>\fR.
346
+ .P
347
+ Dependency types specified in \fB--include\fR will not be omitted, regardless of the order in which omit/include are specified on the command-line.
348
+ .SS "\fBforeground-scripts\fR"
349
+ .RS 0
350
+ .IP \(bu 4
351
+ Default: \fBfalse\fR unless when using \fBnpm pack\fR or \fBnpm publish\fR where it defaults to \fBtrue\fR
352
+ .IP \(bu 4
353
+ Type: Boolean
354
+ .RE 0
355
+
356
+ .P
357
+ Run all build scripts (ie, \fBpreinstall\fR, \fBinstall\fR, and \fBpostinstall\fR) scripts for installed packages in the foreground process, sharing standard input, output, and error with the main npm process.
358
+ .P
359
+ Note that this will generally make installs run slower, and be much noisier, but can be useful for debugging.
360
+ .SS "\fBignore-scripts\fR"
361
+ .RS 0
362
+ .IP \(bu 4
363
+ Default: false
364
+ .IP \(bu 4
365
+ Type: Boolean
366
+ .RE 0
367
+
368
+ .P
369
+ If true, npm does not run scripts specified in package.json files.
370
+ .P
371
+ Note that commands explicitly intended to run a particular script, such as \fBnpm start\fR, \fBnpm stop\fR, \fBnpm restart\fR, \fBnpm test\fR, and \fBnpm
372
+ run\fR will still run their intended script if \fBignore-scripts\fR is set, but they will \fInot\fR run any pre- or post-scripts.
373
+ .SS "\fBworkspace\fR"
374
+ .RS 0
375
+ .IP \(bu 4
376
+ Default:
377
+ .IP \(bu 4
378
+ Type: String (can be set multiple times)
379
+ .RE 0
380
+
381
+ .P
382
+ Enable running a command in the context of the configured workspaces of the current project while filtering by running only the workspaces defined by this configuration option.
383
+ .P
384
+ Valid values for the \fBworkspace\fR config are either:
385
+ .RS 0
386
+ .IP \(bu 4
387
+ Workspace names
388
+ .IP \(bu 4
389
+ Path to a workspace directory
390
+ .IP \(bu 4
391
+ Path to a parent workspace directory (will result in selecting all workspaces within that folder)
392
+ .RE 0
393
+
394
+ .P
395
+ When set for the \fBnpm init\fR command, this may be set to the folder of a workspace which does not yet exist, to create the folder and set it up as a brand new workspace within the project.
396
+ .P
397
+ This value is not exported to the environment for child processes.
398
+ .SS "\fBworkspaces\fR"
399
+ .RS 0
400
+ .IP \(bu 4
401
+ Default: null
402
+ .IP \(bu 4
403
+ Type: null or Boolean
404
+ .RE 0
405
+
406
+ .P
407
+ Set to true to run the command in the context of \fBall\fR configured workspaces.
408
+ .P
409
+ Explicitly setting this to false will cause commands like \fBinstall\fR to ignore workspaces altogether. When not set explicitly:
410
+ .RS 0
411
+ .IP \(bu 4
412
+ Commands that operate on the \fBnode_modules\fR tree (install, update, etc.) will link workspaces into the \fBnode_modules\fR folder. - Commands that do other things (test, exec, publish, etc.) will operate on the root project, \fIunless\fR one or more workspaces are specified in the \fBworkspace\fR config.
413
+ .RE 0
414
+
415
+ .P
416
+ This value is not exported to the environment for child processes.
417
+ .SS "\fBinclude-workspace-root\fR"
418
+ .RS 0
419
+ .IP \(bu 4
420
+ Default: false
421
+ .IP \(bu 4
422
+ Type: Boolean
423
+ .RE 0
424
+
425
+ .P
426
+ Include the workspace root when workspaces are enabled for a command.
427
+ .P
428
+ When false, specifying individual workspaces via the \fBworkspace\fR config, or all workspaces via the \fBworkspaces\fR flag, will cause npm to operate only on the specified workspaces, and not on the root project.
429
+ .P
430
+ This value is not exported to the environment for child processes.
431
+ .SS "\fBinstall-links\fR"
432
+ .RS 0
433
+ .IP \(bu 4
434
+ Default: false
435
+ .IP \(bu 4
436
+ Type: Boolean
437
+ .RE 0
438
+
439
+ .P
440
+ When set file: protocol dependencies will be packed and installed as regular dependencies instead of creating a symlink. This option has no effect on workspaces.
441
+ .SS "See Also"
442
+ .RS 0
443
+ .IP \(bu 4
444
+ npm help install
445
+ .IP \(bu 4
446
+ npm help config
447
+ .RE 0
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/man/man1/npm-help.1 ADDED
@@ -0,0 +1,47 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ .TH "NPM-HELP" "1" "October 2025" "NPM@11.6.2" ""
2
+ .SH "NAME"
3
+ \fBnpm-help\fR - Get help on npm
4
+ .SS "Synopsis"
5
+ .P
6
+ .RS 2
7
+ .nf
8
+ npm help <term> \[lB]<terms..>\[rB]
9
+
10
+ alias: hlep
11
+ .fi
12
+ .RE
13
+ .P
14
+ Note: This command is unaware of workspaces.
15
+ .SS "Description"
16
+ .P
17
+ If supplied a topic, then show the appropriate documentation page.
18
+ .P
19
+ If the topic does not exist, or if multiple terms are provided, then npm will run the \fBhelp-search\fR command to find a match. Note that, if \fBhelp-search\fR finds a single subject, then it will run \fBhelp\fR on that topic, so unique matches are equivalent to specifying a topic name.
20
+ .SS "Configuration"
21
+ .SS "\fBviewer\fR"
22
+ .RS 0
23
+ .IP \(bu 4
24
+ Default: "man" on Posix, "browser" on Windows
25
+ .IP \(bu 4
26
+ Type: String
27
+ .RE 0
28
+
29
+ .P
30
+ The program to use to view help content.
31
+ .P
32
+ Set to \fB"browser"\fR to view html help content in the default web browser.
33
+ .SS "See Also"
34
+ .RS 0
35
+ .IP \(bu 4
36
+ npm help npm
37
+ .IP \(bu 4
38
+ npm help folders
39
+ .IP \(bu 4
40
+ npm help config
41
+ .IP \(bu 4
42
+ npm help npmrc
43
+ .IP \(bu 4
44
+ \fBpackage.json\fR \fI\(la/configuring-npm/package-json\(ra\fR
45
+ .IP \(bu 4
46
+ npm help help-search
47
+ .RE 0
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/man/man1/npm-link.1 ADDED
@@ -0,0 +1,356 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ .TH "NPM-LINK" "1" "October 2025" "NPM@11.6.2" ""
2
+ .SH "NAME"
3
+ \fBnpm-link\fR - Symlink a package folder
4
+ .SS "Synopsis"
5
+ .P
6
+ .RS 2
7
+ .nf
8
+ npm link \[lB]<package-spec>\[rB]
9
+
10
+ alias: ln
11
+ .fi
12
+ .RE
13
+ .SS "Description"
14
+ .P
15
+ This is handy for installing your own stuff, so that you can work on it and test iteratively without having to continually rebuild.
16
+ .P
17
+ Package linking is a two-step process.
18
+ .P
19
+ First, \fBnpm link\fR in a package folder with no arguments will create a symlink in the global folder \fB{prefix}/lib/node_modules/<package>\fR that links to the package where the \fBnpm link\fR command was executed. It will also link any bins in the package to \fB{prefix}/bin/{name}\fR. Note that \fBnpm link\fR uses the global prefix (see \fBnpm prefix -g\fR for its value).
20
+ .P
21
+ Next, in some other location, \fBnpm link package-name\fR will create a symbolic link from globally-installed \fBpackage-name\fR to \fBnode_modules/\fR of the current folder.
22
+ .P
23
+ Note that \fBpackage-name\fR is taken from \fBpackage.json\fR, \fInot\fR from the directory name.
24
+ .P
25
+ The package name can be optionally prefixed with a scope. See npm help scope. The scope must be preceded by an @-symbol and followed by a slash.
26
+ .P
27
+ When creating tarballs for \fBnpm publish\fR, the linked packages are "snapshotted" to their current state by resolving the symbolic links, if they are included in \fBbundleDependencies\fR.
28
+ .P
29
+ For example:
30
+ .P
31
+ .RS 2
32
+ .nf
33
+ cd ~/projects/node-redis # go into the package directory
34
+ npm link # creates global link
35
+ cd ~/projects/node-bloggy # go into some other package directory.
36
+ npm link redis # link-install the package
37
+ .fi
38
+ .RE
39
+ .P
40
+ Now, any changes to \fB~/projects/node-redis\fR will be reflected in \fB~/projects/node-bloggy/node_modules/node-redis/\fR. Note that the link should be to the package name, not the directory name for that package.
41
+ .P
42
+ You may also shortcut the two steps in one. For example, to do the above use-case in a shorter way:
43
+ .P
44
+ .RS 2
45
+ .nf
46
+ cd ~/projects/node-bloggy # go into the dir of your main project
47
+ npm link ../node-redis # link the dir of your dependency
48
+ .fi
49
+ .RE
50
+ .P
51
+ The second line is the equivalent of doing:
52
+ .P
53
+ .RS 2
54
+ .nf
55
+ (cd ../node-redis; npm link)
56
+ npm link redis
57
+ .fi
58
+ .RE
59
+ .P
60
+ That is, it first creates a global link, and then links the global installation target into your project's \fBnode_modules\fR folder.
61
+ .P
62
+ Note that in this case, you are referring to the directory name, \fBnode-redis\fR, rather than the package name \fBredis\fR.
63
+ .P
64
+ If your linked package is scoped (see npm help scope) your link command must include that scope, e.g.
65
+ .P
66
+ .RS 2
67
+ .nf
68
+ npm link @myorg/privatepackage
69
+ .fi
70
+ .RE
71
+ .SS "Caveat"
72
+ .P
73
+ Note that package dependencies linked in this way are \fInot\fR saved to \fBpackage.json\fR by default, on the assumption that the intention is to have a link stand in for a regular non-link dependency. Otherwise, for example, if you depend on \fBredis@^3.0.1\fR, and ran \fBnpm link redis\fR, it would replace the \fB^3.0.1\fR dependency with \fBfile:../path/to/node-redis\fR, which you probably don't want! Additionally, other users or developers on your project would run into issues if they do not have their folders set up exactly the same as yours.
74
+ .P
75
+ If you are adding a \fInew\fR dependency as a link, you should add it to the relevant metadata by running \fBnpm install <dep> --package-lock-only\fR.
76
+ .P
77
+ If you \fIwant\fR to save the \fBfile:\fR reference in your \fBpackage.json\fR and \fBpackage-lock.json\fR files, you can use \fBnpm link <dep> --save\fR to do so.
78
+ .SS "Workspace Usage"
79
+ .P
80
+ \fBnpm link <pkg> --workspace <name>\fR will link the relevant package as a dependency of the specified workspace(s). Note that it may actually be linked into the parent project's \fBnode_modules\fR folder, if there are no conflicting dependencies.
81
+ .P
82
+ \fBnpm link --workspace <name>\fR will create a global link to the specified workspace(s).
83
+ .SS "Configuration"
84
+ .SS "\fBsave\fR"
85
+ .RS 0
86
+ .IP \(bu 4
87
+ Default: \fBtrue\fR unless when using \fBnpm update\fR where it defaults to \fBfalse\fR
88
+ .IP \(bu 4
89
+ Type: Boolean
90
+ .RE 0
91
+
92
+ .P
93
+ Save installed packages to a \fBpackage.json\fR file as dependencies.
94
+ .P
95
+ When used with the \fBnpm rm\fR command, removes the dependency from \fBpackage.json\fR.
96
+ .P
97
+ Will also prevent writing to \fBpackage-lock.json\fR if set to \fBfalse\fR.
98
+ .SS "\fBsave-exact\fR"
99
+ .RS 0
100
+ .IP \(bu 4
101
+ Default: false
102
+ .IP \(bu 4
103
+ Type: Boolean
104
+ .RE 0
105
+
106
+ .P
107
+ Dependencies saved to package.json will be configured with an exact version rather than using npm's default semver range operator.
108
+ .SS "\fBglobal\fR"
109
+ .RS 0
110
+ .IP \(bu 4
111
+ Default: false
112
+ .IP \(bu 4
113
+ Type: Boolean
114
+ .RE 0
115
+
116
+ .P
117
+ Operates in "global" mode, so that packages are installed into the \fBprefix\fR folder instead of the current working directory. See npm help folders for more on the differences in behavior.
118
+ .RS 0
119
+ .IP \(bu 4
120
+ packages are installed into the \fB{prefix}/lib/node_modules\fR folder, instead of the current working directory.
121
+ .IP \(bu 4
122
+ bin files are linked to \fB{prefix}/bin\fR
123
+ .IP \(bu 4
124
+ man pages are linked to \fB{prefix}/share/man\fR
125
+ .RE 0
126
+
127
+ .SS "\fBinstall-strategy\fR"
128
+ .RS 0
129
+ .IP \(bu 4
130
+ Default: "hoisted"
131
+ .IP \(bu 4
132
+ Type: "hoisted", "nested", "shallow", or "linked"
133
+ .RE 0
134
+
135
+ .P
136
+ Sets the strategy for installing packages in node_modules. hoisted (default): Install non-duplicated in top-level, and duplicated as necessary within directory structure. nested: (formerly --legacy-bundling) install in place, no hoisting. shallow (formerly --global-style) only install direct deps at top-level. linked: (experimental) install in node_modules/.store, link in place, unhoisted.
137
+ .SS "\fBlegacy-bundling\fR"
138
+ .RS 0
139
+ .IP \(bu 4
140
+ Default: false
141
+ .IP \(bu 4
142
+ Type: Boolean
143
+ .IP \(bu 4
144
+ DEPRECATED: This option has been deprecated in favor of \fB--install-strategy=nested\fR
145
+ .RE 0
146
+
147
+ .P
148
+ Instead of hoisting package installs in \fBnode_modules\fR, install packages in the same manner that they are depended on. This may cause very deep directory structures and duplicate package installs as there is no de-duplicating. Sets \fB--install-strategy=nested\fR.
149
+ .SS "\fBglobal-style\fR"
150
+ .RS 0
151
+ .IP \(bu 4
152
+ Default: false
153
+ .IP \(bu 4
154
+ Type: Boolean
155
+ .IP \(bu 4
156
+ DEPRECATED: This option has been deprecated in favor of \fB--install-strategy=shallow\fR
157
+ .RE 0
158
+
159
+ .P
160
+ Only install direct dependencies in the top level \fBnode_modules\fR, but hoist on deeper dependencies. Sets \fB--install-strategy=shallow\fR.
161
+ .SS "\fBstrict-peer-deps\fR"
162
+ .RS 0
163
+ .IP \(bu 4
164
+ Default: false
165
+ .IP \(bu 4
166
+ Type: Boolean
167
+ .RE 0
168
+
169
+ .P
170
+ If set to \fBtrue\fR, and \fB--legacy-peer-deps\fR is not set, then \fIany\fR conflicting \fBpeerDependencies\fR will be treated as an install failure, even if npm could reasonably guess the appropriate resolution based on non-peer dependency relationships.
171
+ .P
172
+ By default, conflicting \fBpeerDependencies\fR deep in the dependency graph will be resolved using the nearest non-peer dependency specification, even if doing so will result in some packages receiving a peer dependency outside the range set in their package's \fBpeerDependencies\fR object.
173
+ .P
174
+ When such an override is performed, a warning is printed, explaining the conflict and the packages involved. If \fB--strict-peer-deps\fR is set, then this warning is treated as a failure.
175
+ .SS "\fBpackage-lock\fR"
176
+ .RS 0
177
+ .IP \(bu 4
178
+ Default: true
179
+ .IP \(bu 4
180
+ Type: Boolean
181
+ .RE 0
182
+
183
+ .P
184
+ If set to false, then ignore \fBpackage-lock.json\fR files when installing. This will also prevent \fIwriting\fR \fBpackage-lock.json\fR if \fBsave\fR is true.
185
+ .SS "\fBomit\fR"
186
+ .RS 0
187
+ .IP \(bu 4
188
+ Default: 'dev' if the \fBNODE_ENV\fR environment variable is set to 'production'; otherwise, empty.
189
+ .IP \(bu 4
190
+ Type: "dev", "optional", or "peer" (can be set multiple times)
191
+ .RE 0
192
+
193
+ .P
194
+ Dependency types to omit from the installation tree on disk.
195
+ .P
196
+ Note that these dependencies \fIare\fR still resolved and added to the \fBpackage-lock.json\fR or \fBnpm-shrinkwrap.json\fR file. They are just not physically installed on disk.
197
+ .P
198
+ If a package type appears in both the \fB--include\fR and \fB--omit\fR lists, then it will be included.
199
+ .P
200
+ If the resulting omit list includes \fB'dev'\fR, then the \fBNODE_ENV\fR environment variable will be set to \fB'production'\fR for all lifecycle scripts.
201
+ .SS "\fBinclude\fR"
202
+ .RS 0
203
+ .IP \(bu 4
204
+ Default:
205
+ .IP \(bu 4
206
+ Type: "prod", "dev", "optional", or "peer" (can be set multiple times)
207
+ .RE 0
208
+
209
+ .P
210
+ Option that allows for defining which types of dependencies to install.
211
+ .P
212
+ This is the inverse of \fB--omit=<type>\fR.
213
+ .P
214
+ Dependency types specified in \fB--include\fR will not be omitted, regardless of the order in which omit/include are specified on the command-line.
215
+ .SS "\fBignore-scripts\fR"
216
+ .RS 0
217
+ .IP \(bu 4
218
+ Default: false
219
+ .IP \(bu 4
220
+ Type: Boolean
221
+ .RE 0
222
+
223
+ .P
224
+ If true, npm does not run scripts specified in package.json files.
225
+ .P
226
+ Note that commands explicitly intended to run a particular script, such as \fBnpm start\fR, \fBnpm stop\fR, \fBnpm restart\fR, \fBnpm test\fR, and \fBnpm
227
+ run\fR will still run their intended script if \fBignore-scripts\fR is set, but they will \fInot\fR run any pre- or post-scripts.
228
+ .SS "\fBaudit\fR"
229
+ .RS 0
230
+ .IP \(bu 4
231
+ Default: true
232
+ .IP \(bu 4
233
+ Type: Boolean
234
+ .RE 0
235
+
236
+ .P
237
+ When "true" submit audit reports alongside the current npm command to the default registry and all registries configured for scopes. See the documentation for npm help audit for details on what is submitted.
238
+ .SS "\fBbin-links\fR"
239
+ .RS 0
240
+ .IP \(bu 4
241
+ Default: true
242
+ .IP \(bu 4
243
+ Type: Boolean
244
+ .RE 0
245
+
246
+ .P
247
+ Tells npm to create symlinks (or \fB.cmd\fR shims on Windows) for package executables.
248
+ .P
249
+ Set to false to have it not do this. This can be used to work around the fact that some file systems don't support symlinks, even on ostensibly Unix systems.
250
+ .SS "\fBfund\fR"
251
+ .RS 0
252
+ .IP \(bu 4
253
+ Default: true
254
+ .IP \(bu 4
255
+ Type: Boolean
256
+ .RE 0
257
+
258
+ .P
259
+ When "true" displays the message at the end of each \fBnpm install\fR acknowledging the number of dependencies looking for funding. See npm help fund for details.
260
+ .SS "\fBdry-run\fR"
261
+ .RS 0
262
+ .IP \(bu 4
263
+ Default: false
264
+ .IP \(bu 4
265
+ Type: Boolean
266
+ .RE 0
267
+
268
+ .P
269
+ Indicates that you don't want npm to make any changes and that it should only report what it would have done. This can be passed into any of the commands that modify your local installation, eg, \fBinstall\fR, \fBupdate\fR, \fBdedupe\fR, \fBuninstall\fR, as well as \fBpack\fR and \fBpublish\fR.
270
+ .P
271
+ Note: This is NOT honored by other network related commands, eg \fBdist-tags\fR, \fBowner\fR, etc.
272
+ .SS "\fBworkspace\fR"
273
+ .RS 0
274
+ .IP \(bu 4
275
+ Default:
276
+ .IP \(bu 4
277
+ Type: String (can be set multiple times)
278
+ .RE 0
279
+
280
+ .P
281
+ Enable running a command in the context of the configured workspaces of the current project while filtering by running only the workspaces defined by this configuration option.
282
+ .P
283
+ Valid values for the \fBworkspace\fR config are either:
284
+ .RS 0
285
+ .IP \(bu 4
286
+ Workspace names
287
+ .IP \(bu 4
288
+ Path to a workspace directory
289
+ .IP \(bu 4
290
+ Path to a parent workspace directory (will result in selecting all workspaces within that folder)
291
+ .RE 0
292
+
293
+ .P
294
+ When set for the \fBnpm init\fR command, this may be set to the folder of a workspace which does not yet exist, to create the folder and set it up as a brand new workspace within the project.
295
+ .P
296
+ This value is not exported to the environment for child processes.
297
+ .SS "\fBworkspaces\fR"
298
+ .RS 0
299
+ .IP \(bu 4
300
+ Default: null
301
+ .IP \(bu 4
302
+ Type: null or Boolean
303
+ .RE 0
304
+
305
+ .P
306
+ Set to true to run the command in the context of \fBall\fR configured workspaces.
307
+ .P
308
+ Explicitly setting this to false will cause commands like \fBinstall\fR to ignore workspaces altogether. When not set explicitly:
309
+ .RS 0
310
+ .IP \(bu 4
311
+ Commands that operate on the \fBnode_modules\fR tree (install, update, etc.) will link workspaces into the \fBnode_modules\fR folder. - Commands that do other things (test, exec, publish, etc.) will operate on the root project, \fIunless\fR one or more workspaces are specified in the \fBworkspace\fR config.
312
+ .RE 0
313
+
314
+ .P
315
+ This value is not exported to the environment for child processes.
316
+ .SS "\fBinclude-workspace-root\fR"
317
+ .RS 0
318
+ .IP \(bu 4
319
+ Default: false
320
+ .IP \(bu 4
321
+ Type: Boolean
322
+ .RE 0
323
+
324
+ .P
325
+ Include the workspace root when workspaces are enabled for a command.
326
+ .P
327
+ When false, specifying individual workspaces via the \fBworkspace\fR config, or all workspaces via the \fBworkspaces\fR flag, will cause npm to operate only on the specified workspaces, and not on the root project.
328
+ .P
329
+ This value is not exported to the environment for child processes.
330
+ .SS "\fBinstall-links\fR"
331
+ .RS 0
332
+ .IP \(bu 4
333
+ Default: false
334
+ .IP \(bu 4
335
+ Type: Boolean
336
+ .RE 0
337
+
338
+ .P
339
+ When set file: protocol dependencies will be packed and installed as regular dependencies instead of creating a symlink. This option has no effect on workspaces.
340
+ .SS "See Also"
341
+ .RS 0
342
+ .IP \(bu 4
343
+ npm help "package spec"
344
+ .IP \(bu 4
345
+ npm help developers
346
+ .IP \(bu 4
347
+ \fBpackage.json\fR \fI\(la/configuring-npm/package-json\(ra\fR
348
+ .IP \(bu 4
349
+ npm help install
350
+ .IP \(bu 4
351
+ npm help folders
352
+ .IP \(bu 4
353
+ npm help config
354
+ .IP \(bu 4
355
+ npm help npmrc
356
+ .RE 0
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/man/man1/npm-search.1 ADDED
@@ -0,0 +1,156 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ .TH "NPM-SEARCH" "1" "October 2025" "NPM@11.6.2" ""
2
+ .SH "NAME"
3
+ \fBnpm-search\fR - Search for packages
4
+ .SS "Synopsis"
5
+ .P
6
+ .RS 2
7
+ .nf
8
+ npm search <search term> \[lB]<search term> ...\[rB]
9
+
10
+ aliases: find, s, se
11
+ .fi
12
+ .RE
13
+ .P
14
+ Note: This command is unaware of workspaces.
15
+ .SS "Description"
16
+ .P
17
+ Search the registry for packages matching the search terms. \fBnpm search\fR performs a linear, incremental, lexically-ordered search through package metadata for all files in the registry. If your terminal has color support, it will further highlight the matches in the results. This can be disabled with the config item \fBcolor\fR
18
+ .P
19
+ Additionally, using the \fB--searchopts\fR and \fB--searchexclude\fR options paired with more search terms will include and exclude further patterns. The main difference between \fB--searchopts\fR and the standard search terms is that the former does not highlight results in the output and you can use them for more fine-grained filtering. Additionally, you can add both of these to your config to change default search filtering behavior.
20
+ .P
21
+ Search also allows targeting of maintainers in search results, by prefixing their npm username with \fB=\fR.
22
+ .P
23
+ If a term starts with \fB/\fR, then it's interpreted as a regular expression and supports standard JavaScript RegExp syntax. In this case search will ignore a trailing \fB/\fR . (Note you must escape or quote many regular expression characters in most shells.)
24
+ .SS "Configuration"
25
+ .SS "\fBjson\fR"
26
+ .RS 0
27
+ .IP \(bu 4
28
+ Default: false
29
+ .IP \(bu 4
30
+ Type: Boolean
31
+ .RE 0
32
+
33
+ .P
34
+ Whether or not to output JSON data, rather than the normal output.
35
+ .RS 0
36
+ .IP \(bu 4
37
+ In \fBnpm pkg set\fR it enables parsing set values with JSON.parse() before saving them to your \fBpackage.json\fR.
38
+ .RE 0
39
+
40
+ .P
41
+ Not supported by all npm commands.
42
+ .SS "\fBcolor\fR"
43
+ .RS 0
44
+ .IP \(bu 4
45
+ Default: true unless the NO_COLOR environ is set to something other than '0'
46
+ .IP \(bu 4
47
+ Type: "always" or Boolean
48
+ .RE 0
49
+
50
+ .P
51
+ If false, never shows colors. If \fB"always"\fR then always shows colors. If true, then only prints color codes for tty file descriptors.
52
+ .SS "\fBparseable\fR"
53
+ .RS 0
54
+ .IP \(bu 4
55
+ Default: false
56
+ .IP \(bu 4
57
+ Type: Boolean
58
+ .RE 0
59
+
60
+ .P
61
+ Output parseable results from commands that write to standard output. For \fBnpm search\fR, this will be tab-separated table format.
62
+ .SS "\fBdescription\fR"
63
+ .RS 0
64
+ .IP \(bu 4
65
+ Default: true
66
+ .IP \(bu 4
67
+ Type: Boolean
68
+ .RE 0
69
+
70
+ .P
71
+ Show the description in \fBnpm search\fR
72
+ .SS "\fBsearchlimit\fR"
73
+ .RS 0
74
+ .IP \(bu 4
75
+ Default: 20
76
+ .IP \(bu 4
77
+ Type: Number
78
+ .RE 0
79
+
80
+ .P
81
+ Number of items to limit search results to. Will not apply at all to legacy searches.
82
+ .SS "\fBsearchopts\fR"
83
+ .RS 0
84
+ .IP \(bu 4
85
+ Default: ""
86
+ .IP \(bu 4
87
+ Type: String
88
+ .RE 0
89
+
90
+ .P
91
+ Space-separated options that are always passed to search.
92
+ .SS "\fBsearchexclude\fR"
93
+ .RS 0
94
+ .IP \(bu 4
95
+ Default: ""
96
+ .IP \(bu 4
97
+ Type: String
98
+ .RE 0
99
+
100
+ .P
101
+ Space-separated options that limit the results from search.
102
+ .SS "\fBregistry\fR"
103
+ .RS 0
104
+ .IP \(bu 4
105
+ Default: "https://registry.npmjs.org/"
106
+ .IP \(bu 4
107
+ Type: URL
108
+ .RE 0
109
+
110
+ .P
111
+ The base URL of the npm registry.
112
+ .SS "\fBprefer-online\fR"
113
+ .RS 0
114
+ .IP \(bu 4
115
+ Default: false
116
+ .IP \(bu 4
117
+ Type: Boolean
118
+ .RE 0
119
+
120
+ .P
121
+ If true, staleness checks for cached data will be forced, making the CLI look for updates immediately even for fresh package data.
122
+ .SS "\fBprefer-offline\fR"
123
+ .RS 0
124
+ .IP \(bu 4
125
+ Default: false
126
+ .IP \(bu 4
127
+ Type: Boolean
128
+ .RE 0
129
+
130
+ .P
131
+ If true, staleness checks for cached data will be bypassed, but missing data will be requested from the server. To force full offline mode, use \fB--offline\fR.
132
+ .SS "\fBoffline\fR"
133
+ .RS 0
134
+ .IP \(bu 4
135
+ Default: false
136
+ .IP \(bu 4
137
+ Type: Boolean
138
+ .RE 0
139
+
140
+ .P
141
+ Force offline mode: no network requests will be done during install. To allow the CLI to fill in missing cache data, see \fB--prefer-offline\fR.
142
+ .SS "See Also"
143
+ .RS 0
144
+ .IP \(bu 4
145
+ npm help registry
146
+ .IP \(bu 4
147
+ npm help config
148
+ .IP \(bu 4
149
+ npm help npmrc
150
+ .IP \(bu 4
151
+ npm help view
152
+ .IP \(bu 4
153
+ npm help cache
154
+ .IP \(bu 4
155
+ https://npm.im/npm-registry-fetch
156
+ .RE 0
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/man/man1/npm.1 ADDED
@@ -0,0 +1,131 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ .TH "NPM" "1" "October 2025" "NPM@11.6.2" ""
2
+ .SH "NAME"
3
+ \fBnpm\fR - javascript package manager
4
+ .SS "Synopsis"
5
+ .P
6
+ .RS 2
7
+ .nf
8
+ npm
9
+ .fi
10
+ .RE
11
+ .P
12
+ Note: This command is unaware of workspaces.
13
+ .SS "Version"
14
+ .P
15
+ 11.6.2
16
+ .SS "Description"
17
+ .P
18
+ npm is the package manager for the Node JavaScript platform. It puts modules in place so that node can find them, and manages dependency conflicts intelligently.
19
+ .P
20
+ It is extremely configurable to support a variety of use cases. Most commonly, you use it to publish, discover, install, and develop node programs.
21
+ .P
22
+ Run \fBnpm help\fR to get a list of available commands.
23
+ .SS "Important"
24
+ .P
25
+ npm comes preconfigured to use npm's public registry at https://registry.npmjs.org by default. Use of the npm public registry is subject to terms of use available at https://docs.npmjs.com/policies/terms.
26
+ .P
27
+ You can configure npm to use any compatible registry you like, and even run your own registry. Use of someone else's registry is governed by their terms of use.
28
+ .SS "Introduction"
29
+ .P
30
+ You probably got npm because you want to install stuff.
31
+ .P
32
+ The very first thing you will most likely want to run in any node program is \fBnpm install\fR to install its dependencies.
33
+ .P
34
+ You can also run \fBnpm install blerg\fR to install the latest version of "blerg". Check out npm help install for more info. It can do a lot of stuff.
35
+ .P
36
+ Use the \fBnpm search\fR command to show everything that's available in the public registry. Use \fBnpm ls\fR to show everything you've installed.
37
+ .SS "Dependencies"
38
+ .P
39
+ If a package lists a dependency using a git URL, npm will install that dependency using the \fBgit\fR \fI\(lahttps://github.com/git-guides/install-git\(ra\fR command and will generate an error if it is not installed.
40
+ .P
41
+ If one of the packages npm tries to install is a native node module and requires compiling of C++ Code, npm will use \fBnode-gyp\fR \fI\(lahttps://github.com/nodejs/node-gyp\(ra\fR for that task. For a Unix system, \fBnode-gyp\fR \fI\(lahttps://github.com/nodejs/node-gyp\(ra\fR needs Python, make and a buildchain like GCC. On Windows, Python and Microsoft Visual Studio C++ are needed. For more information visit \fBthe node-gyp repository\fR \fI\(lahttps://github.com/nodejs/node-gyp\(ra\fR and the \fBnode-gyp Wiki\fR \fI\(lahttps://github.com/nodejs/node-gyp/wiki\(ra\fR.
42
+ .SS "Directories"
43
+ .P
44
+ See npm help folders to learn about where npm puts stuff.
45
+ .P
46
+ In particular, npm has two modes of operation:
47
+ .RS 0
48
+ .IP \(bu 4
49
+ local mode: npm installs packages into the current project directory, which defaults to the current working directory. Packages install to \fB./node_modules\fR, and bins to \fB./node_modules/.bin\fR.
50
+ .IP \(bu 4
51
+ global mode: npm installs packages into the install prefix at \fB$npm_config_prefix/lib/node_modules\fR and bins to \fB$npm_config_prefix/bin\fR.
52
+ .RE 0
53
+
54
+ .P
55
+ Local mode is the default. Use \fB-g\fR or \fB--global\fR on any command to run in global mode instead.
56
+ .SS "Developer Usage"
57
+ .P
58
+ If you're using npm to develop and publish your code, check out the following help topics:
59
+ .RS 0
60
+ .IP \(bu 4
61
+ json: Make a package.json file. See \fBpackage.json\fR \fI\(la/configuring-npm/package-json\(ra\fR.
62
+ .IP \(bu 4
63
+ link: Links your current working code into Node's path, so that you don't have to reinstall every time you make a change. Use npm help link to do this.
64
+ .IP \(bu 4
65
+ install: It's a good idea to install things if you don't need the symbolic link. Especially, installing other peoples code from the registry is done via npm help install
66
+ .IP \(bu 4
67
+ adduser: Create an account or log in. When you do this, npm will store credentials in the user config file.
68
+ .IP \(bu 4
69
+ publish: Use the npm help publish command to upload your code to the registry.
70
+ .RE 0
71
+
72
+ .SS "Configuration"
73
+ .P
74
+ npm is extremely configurable. It reads its configuration options from 5 places.
75
+ .RS 0
76
+ .IP \(bu 4
77
+ Command line switches: Set a config with \fB--key val\fR. All keys take a value, even if they are booleans (the config parser doesn't know what the options are at the time of parsing). If you do not provide a value (\fB--key\fR) then the option is set to boolean \fBtrue\fR.
78
+ .IP \(bu 4
79
+ Environment Variables: Set any config by prefixing the name in an environment variable with \fBnpm_config_\fR. For example, \fBexport npm_config_key=val\fR.
80
+ .IP \(bu 4
81
+ User Configs: The file at \fB$HOME/.npmrc\fR is an ini-formatted list of configs. If present, it is parsed. If the \fBuserconfig\fR option is set in the cli or env, that file will be used instead.
82
+ .IP \(bu 4
83
+ Global Configs: The file found at \fB./etc/npmrc\fR (relative to the global prefix) will be parsed if it is found. See npm help prefix for more info on the global prefix. If the \fBglobalconfig\fR option is set in the cli, env, or user config, then that file is parsed instead.
84
+ .IP \(bu 4
85
+ Defaults: npm's default configuration options are defined in \fBlib/utils/config/definitions.js\fR. These must not be changed.
86
+ .RE 0
87
+
88
+ .P
89
+ See npm help config for much, much, more information.
90
+ .SS "Contributions"
91
+ .P
92
+ Patches welcome!
93
+ .P
94
+ If you would like to help, but don't know what to work on, read the \fBcontributing guidelines\fR \fI\(lahttps://github.com/npm/cli/blob/latest/CONTRIBUTING.md\(ra\fR and check the issues list.
95
+ .SS "Bugs"
96
+ .P
97
+ When you find issues, please report them: \fI\(lahttps://github.com/npm/cli/issues\(ra\fR
98
+ .P
99
+ Please be sure to follow the template and bug reporting guidelines.
100
+ .SS "Feature Requests"
101
+ .P
102
+ Discuss new feature ideas on our discussion forum:
103
+ .RS 0
104
+ .IP \(bu 4
105
+ \fI\(lahttps://github.com/orgs/community/discussions/categories/npm\(ra\fR
106
+ .RE 0
107
+
108
+ .P
109
+ Or suggest formal RFC proposals:
110
+ .RS 0
111
+ .IP \(bu 4
112
+ \fI\(lahttps://github.com/npm/rfcs\(ra\fR
113
+ .RE 0
114
+
115
+ .SS "See Also"
116
+ .RS 0
117
+ .IP \(bu 4
118
+ npm help help
119
+ .IP \(bu 4
120
+ \fBpackage.json\fR \fI\(la/configuring-npm/package-json\(ra\fR
121
+ .IP \(bu 4
122
+ npm help npmrc
123
+ .IP \(bu 4
124
+ npm help config
125
+ .IP \(bu 4
126
+ npm help install
127
+ .IP \(bu 4
128
+ npm help prefix
129
+ .IP \(bu 4
130
+ npm help publish
131
+ .RE 0
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/agent-base/dist/helpers.js ADDED
@@ -0,0 +1,66 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ "use strict";
2
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
3
+ if (k2 === undefined) k2 = k;
4
+ var desc = Object.getOwnPropertyDescriptor(m, k);
5
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
6
+ desc = { enumerable: true, get: function() { return m[k]; } };
7
+ }
8
+ Object.defineProperty(o, k2, desc);
9
+ }) : (function(o, m, k, k2) {
10
+ if (k2 === undefined) k2 = k;
11
+ o[k2] = m[k];
12
+ }));
13
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
14
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
15
+ }) : function(o, v) {
16
+ o["default"] = v;
17
+ });
18
+ var __importStar = (this && this.__importStar) || function (mod) {
19
+ if (mod && mod.__esModule) return mod;
20
+ var result = {};
21
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
22
+ __setModuleDefault(result, mod);
23
+ return result;
24
+ };
25
+ Object.defineProperty(exports, "__esModule", { value: true });
26
+ exports.req = exports.json = exports.toBuffer = void 0;
27
+ const http = __importStar(require("http"));
28
+ const https = __importStar(require("https"));
29
+ async function toBuffer(stream) {
30
+ let length = 0;
31
+ const chunks = [];
32
+ for await (const chunk of stream) {
33
+ length += chunk.length;
34
+ chunks.push(chunk);
35
+ }
36
+ return Buffer.concat(chunks, length);
37
+ }
38
+ exports.toBuffer = toBuffer;
39
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
40
+ async function json(stream) {
41
+ const buf = await toBuffer(stream);
42
+ const str = buf.toString('utf8');
43
+ try {
44
+ return JSON.parse(str);
45
+ }
46
+ catch (_err) {
47
+ const err = _err;
48
+ err.message += ` (input: ${str})`;
49
+ throw err;
50
+ }
51
+ }
52
+ exports.json = json;
53
+ function req(url, opts = {}) {
54
+ const href = typeof url === 'string' ? url : url.href;
55
+ const req = (href.startsWith('https:') ? https : http).request(url, opts);
56
+ const promise = new Promise((resolve, reject) => {
57
+ req
58
+ .once('response', resolve)
59
+ .once('error', reject)
60
+ .end();
61
+ });
62
+ req.then = promise.then.bind(promise);
63
+ return req;
64
+ }
65
+ exports.req = req;
66
+ //# sourceMappingURL=helpers.js.map
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/ansi-regex/index.js ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ 'use strict';
2
+
3
+ module.exports = ({onlyFirst = false} = {}) => {
4
+ const pattern = [
5
+ '[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)',
6
+ '(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))'
7
+ ].join('|');
8
+
9
+ return new RegExp(pattern, onlyFirst ? undefined : 'g');
10
+ };
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/ansi-regex/license ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ MIT License
2
+
3
+ Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
6
+
7
+ The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
8
+
9
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/ansi-regex/package.json ADDED
@@ -0,0 +1,55 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "name": "ansi-regex",
3
+ "version": "5.0.1",
4
+ "description": "Regular expression for matching ANSI escape codes",
5
+ "license": "MIT",
6
+ "repository": "chalk/ansi-regex",
7
+ "author": {
8
+ "name": "Sindre Sorhus",
9
+ "email": "sindresorhus@gmail.com",
10
+ "url": "sindresorhus.com"
11
+ },
12
+ "engines": {
13
+ "node": ">=8"
14
+ },
15
+ "scripts": {
16
+ "test": "xo && ava && tsd",
17
+ "view-supported": "node fixtures/view-codes.js"
18
+ },
19
+ "files": [
20
+ "index.js",
21
+ "index.d.ts"
22
+ ],
23
+ "keywords": [
24
+ "ansi",
25
+ "styles",
26
+ "color",
27
+ "colour",
28
+ "colors",
29
+ "terminal",
30
+ "console",
31
+ "cli",
32
+ "string",
33
+ "tty",
34
+ "escape",
35
+ "formatting",
36
+ "rgb",
37
+ "256",
38
+ "shell",
39
+ "xterm",
40
+ "command-line",
41
+ "text",
42
+ "regex",
43
+ "regexp",
44
+ "re",
45
+ "match",
46
+ "test",
47
+ "find",
48
+ "pattern"
49
+ ],
50
+ "devDependencies": {
51
+ "ava": "^2.4.0",
52
+ "tsd": "^0.9.0",
53
+ "xo": "^0.25.3"
54
+ }
55
+ }
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/aproba/LICENSE ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Copyright (c) 2015, Rebecca Turner <me@re-becca.org>
2
+
3
+ Permission to use, copy, modify, and/or distribute this software for any
4
+ purpose with or without fee is hereby granted, provided that the above
5
+ copyright notice and this permission notice appear in all copies.
6
+
7
+ THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
8
+ WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
9
+ MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
10
+ ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
11
+ WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
12
+ ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
13
+ OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
14
+
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/aproba/index.js ADDED
@@ -0,0 +1,105 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 'use strict'
2
+ module.exports = validate
3
+
4
// True for `arguments` objects: non-null objects carrying an own `callee`.
function isArguments (thingy) {
  if (thingy == null) return false
  if (typeof thingy !== 'object') return false
  return thingy.hasOwnProperty('callee')
}

// Single-letter schema codes mapped to a human label and a predicate.
const types = {
  '*': { label: 'any', check: () => true },
  A: { label: 'array', check: (thingy) => Array.isArray(thingy) || isArguments(thingy) },
  S: { label: 'string', check: (thingy) => typeof thingy === 'string' },
  N: { label: 'number', check: (thingy) => typeof thingy === 'number' },
  F: { label: 'function', check: (thingy) => typeof thingy === 'function' },
  O: {
    label: 'object',
    // Plain objects only: excludes null, arrays/arguments and Errors.
    check: (thingy) => typeof thingy === 'object' && thingy != null &&
      !types.A.check(thingy) && !types.E.check(thingy)
  },
  B: { label: 'boolean', check: (thingy) => typeof thingy === 'boolean' },
  E: { label: 'error', check: (thingy) => thingy instanceof Error },
  Z: { label: 'null', check: (thingy) => thingy == null }
}
19
+
20
// Bucket `schema` under its length in `arity`, skipping duplicates.
function addSchema (schema, arity) {
  const bucket = arity[schema.length] || (arity[schema.length] = [])
  if (!bucket.includes(schema)) bucket.push(schema)
}
24
+
25
/**
 * Validate `args` against a '|'-separated list of type-code schemas
 * (e.g. 'SN|SO'). Throws a TypeError with a `code` property when the
 * arguments do not match any schema; returns undefined on success.
 */
function validate (rawSchemas, args) {
  if (arguments.length !== 2) throw wrongNumberOfArgs(['SA'], arguments.length)
  if (!rawSchemas) throw missingRequiredArg(0, 'rawSchemas')
  if (!args) throw missingRequiredArg(1, 'args')
  if (!types.S.check(rawSchemas)) throw invalidType(0, ['string'], rawSchemas)
  if (!types.A.check(args)) throw invalidType(1, ['array'], args)

  const schemaList = rawSchemas.split('|')
  // Candidate schemas bucketed by arity (argument count).
  const byArity = {}

  for (const schema of schemaList) {
    for (let pos = 0; pos < schema.length; ++pos) {
      if (!types[schema[pos]]) throw unknownType(pos, schema[pos])
    }
    if (/E.*E/.test(schema)) throw moreThanOneError(schema)
    addSchema(schema, byArity)
    if (/E/.test(schema)) {
      // An error argument may arrive alone (truncating what follows) or be
      // null alongside the remaining arguments.
      addSchema(schema.replace(/E.*$/, 'E'), byArity)
      addSchema(schema.replace(/E/, 'Z'), byArity)
      if (schema.length === 1) addSchema('', byArity)
    }
  }

  let candidates = byArity[args.length]
  if (!candidates) {
    throw wrongNumberOfArgs(Object.keys(byArity), args.length)
  }
  // Narrow the candidate set one argument position at a time.
  for (let pos = 0; pos < args.length; ++pos) {
    const stillMatching = candidates.filter(schema => {
      const check = types[schema[pos]].check
      return check(args[pos])
    })
    if (!stillMatching.length) {
      const labels = candidates.map(schema => types[schema[pos]].label).filter(label => label != null)
      throw invalidType(pos, labels, args[pos])
    }
    candidates = stillMatching
  }
}
64
+
65
// --- error constructors; each returns (not throws) a coded TypeError ---

function missingRequiredArg (num) {
  return newException('EMISSINGARG', 'Missing required argument #' + (num + 1))
}

function unknownType (num, type) {
  return newException('EUNKNOWNTYPE', 'Unknown type ' + type + ' in argument #' + (num + 1))
}

function invalidType (num, expectedTypes, value) {
  // Label the actual value with the *last* matching type code, so the more
  // specific labels win over the catch-all 'any'.
  let valueType
  for (const code of Object.keys(types)) {
    if (types[code].check(value)) valueType = types[code].label
  }
  return newException('EINVALIDTYPE', 'Argument #' + (num + 1) + ': Expected ' +
    englishList(expectedTypes) + ' but got ' + valueType)
}

// 'a, b, c' -> 'a, b or c'
function englishList (list) {
  return list.join(', ').replace(/, ([^,]+)$/, ' or $1')
}

function wrongNumberOfArgs (expected, got) {
  const english = englishList(expected)
  const noun = expected.every(schema => schema.length === 1)
    ? 'argument'
    : 'arguments'
  return newException('EWRONGARGCOUNT', 'Expected ' + english + ' ' + noun + ' but got ' + got)
}

function moreThanOneError (schema) {
  return newException('ETOOMANYERRORTYPES',
    'Only one error type per argument signature is allowed, more than one found in "' + schema + '"')
}

// Attach `code` and trim `validate` itself off the stack trace (V8 only).
function newException (code, msg) {
  const err = new TypeError(msg)
  err.code = code
  /* istanbul ignore else */
  if (Error.captureStackTrace) Error.captureStackTrace(err, validate)
  return err
}
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/aproba/package.json ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "name": "aproba",
3
+ "version": "2.1.0",
4
+ "description": "A ridiculously light-weight argument validator (now browser friendly)",
5
+ "main": "index.js",
6
+ "directories": {
7
+ "test": "test"
8
+ },
9
+ "dependencies": {},
10
+ "devDependencies": {
11
+ "standard": "^11.0.1",
12
+ "tap": "^12.0.1"
13
+ },
14
+ "files": [
15
+ "index.js"
16
+ ],
17
+ "scripts": {
18
+ "pretest": "standard",
19
+ "test": "tap --100 -J test/*.js"
20
+ },
21
+ "repository": {
22
+ "type": "git",
23
+ "url": "https://github.com/iarna/aproba"
24
+ },
25
+ "keywords": [
26
+ "argument",
27
+ "validate"
28
+ ],
29
+ "author": "Rebecca Turner <me@re-becca.org>",
30
+ "license": "ISC",
31
+ "bugs": {
32
+ "url": "https://github.com/iarna/aproba/issues"
33
+ },
34
+ "homepage": "https://github.com/iarna/aproba"
35
+ }
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/archy/LICENSE ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ This software is released under the MIT license:
2
+
3
+ Permission is hereby granted, free of charge, to any person obtaining a copy of
4
+ this software and associated documentation files (the "Software"), to deal in
5
+ the Software without restriction, including without limitation the rights to
6
+ use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
7
+ the Software, and to permit persons to whom the Software is furnished to do so,
8
+ subject to the following conditions:
9
+
10
+ The above copyright notice and this permission notice shall be included in all
11
+ copies or substantial portions of the Software.
12
+
13
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
14
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
15
+ FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
16
+ COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
17
+ IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
18
+ CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/archy/index.js ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ module.exports = function archy (obj, prefix, opts) {
2
+ if (prefix === undefined) prefix = '';
3
+ if (!opts) opts = {};
4
+ var chr = function (s) {
5
+ var chars = {
6
+ '│' : '|',
7
+ '└' : '`',
8
+ '├' : '+',
9
+ '─' : '-',
10
+ '┬' : '-'
11
+ };
12
+ return opts.unicode === false ? chars[s] : s;
13
+ };
14
+
15
+ if (typeof obj === 'string') obj = { label : obj };
16
+
17
+ var nodes = obj.nodes || [];
18
+ var lines = (obj.label || '').split('\n');
19
+ var splitter = '\n' + prefix + (nodes.length ? chr('│') : ' ') + ' ';
20
+
21
+ return prefix
22
+ + lines.join(splitter) + '\n'
23
+ + nodes.map(function (node, ix) {
24
+ var last = ix === nodes.length - 1;
25
+ var more = node.nodes && node.nodes.length;
26
+ var prefix_ = prefix + (last ? ' ' : chr('│')) + ' ';
27
+
28
+ return prefix
29
+ + (last ? chr('└') : chr('├')) + chr('─')
30
+ + (more ? chr('┬') : chr('─')) + ' '
31
+ + archy(node, prefix_, opts).slice(prefix.length + 2)
32
+ ;
33
+ }).join('')
34
+ ;
35
+ };
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/archy/package.json ADDED
@@ -0,0 +1,40 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "name" : "archy",
3
+ "version" : "1.0.0",
4
+ "description" : "render nested hierarchies `npm ls` style with unicode pipes",
5
+ "main" : "index.js",
6
+ "devDependencies" : {
7
+ "tap" : "~0.3.3",
8
+ "tape" : "~0.1.1"
9
+ },
10
+ "scripts" : {
11
+ "test" : "tap test"
12
+ },
13
+ "testling" : {
14
+ "files" : "test/*.js",
15
+ "browsers" : {
16
+ "iexplore" : [ "6.0", "7.0", "8.0", "9.0" ],
17
+ "chrome" : [ "20.0" ],
18
+ "firefox" : [ "10.0", "15.0" ],
19
+ "safari" : [ "5.1" ],
20
+ "opera" : [ "12.0" ]
21
+ }
22
+ },
23
+ "repository" : {
24
+ "type" : "git",
25
+ "url" : "http://github.com/substack/node-archy.git"
26
+ },
27
+ "keywords" : [
28
+ "hierarchy",
29
+ "npm ls",
30
+ "unicode",
31
+ "pretty",
32
+ "print"
33
+ ],
34
+ "author" : {
35
+ "name" : "James Halliday",
36
+ "email" : "mail@substack.net",
37
+ "url" : "http://substack.net"
38
+ },
39
+ "license" : "MIT"
40
+ }
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/balanced-match/LICENSE.md ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ (MIT)
2
+
3
+ Copyright (c) 2013 Julian Gruber <julian@juliangruber.com>
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy of
6
+ this software and associated documentation files (the "Software"), to deal in
7
+ the Software without restriction, including without limitation the rights to
8
+ use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
9
+ of the Software, and to permit persons to whom the Software is furnished to do
10
+ so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/balanced-match/index.js ADDED
@@ -0,0 +1,62 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 'use strict';
2
+ module.exports = balanced;
3
/**
 * Find the first balanced `a`…`b` pair in `str`.
 * Returns `{start, end, pre, body, post}`, or undefined when no pair exists.
 * RegExp arguments are resolved to their first match in `str` up front.
 */
function balanced(a, b, str) {
  if (a instanceof RegExp) a = maybeMatch(a, str);
  if (b instanceof RegExp) b = maybeMatch(b, str);

  var r = range(a, b, str);
  if (!r) return r;

  return {
    start: r[0],
    end: r[1],
    pre: str.slice(0, r[0]),
    body: str.slice(r[0] + a.length, r[1]),
    post: str.slice(r[1] + b.length)
  };
}

// First match of `reg` in `str`, or null.
function maybeMatch(reg, str) {
  var found = str.match(reg);
  return found === null ? null : found[0];
}

balanced.range = range;
/**
 * Index pair `[start, end]` of the first balanced occurrence of `a`/`b`
 * in `str`, or undefined. Exposed as `balanced.range`.
 */
function range(a, b, str) {
  var result;
  var openIdx = str.indexOf(a);
  var closeIdx = str.indexOf(b, openIdx + 1);

  if (openIdx < 0 || closeIdx <= 0) return result;
  // Identical delimiters cannot nest: first pair wins.
  if (a === b) return [openIdx, closeIdx];

  var pending = [];       // stack of unmatched opener indexes
  var leftmost = str.length;
  var rightmost;
  var i = openIdx;

  while (i >= 0 && !result) {
    if (i == openIdx) {
      pending.push(i);
      openIdx = str.indexOf(a, i + 1);
    } else if (pending.length == 1) {
      result = [pending.pop(), closeIdx];
    } else {
      var candidate = pending.pop();
      if (candidate < leftmost) {
        leftmost = candidate;
        rightmost = closeIdx;
      }
      closeIdx = str.indexOf(b, i + 1);
    }

    // Advance to whichever delimiter comes next.
    i = openIdx < closeIdx && openIdx >= 0 ? openIdx : closeIdx;
  }

  if (pending.length) {
    result = [leftmost, rightmost];
  }

  return result;
}
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/balanced-match/package.json ADDED
@@ -0,0 +1,48 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "name": "balanced-match",
3
+ "description": "Match balanced character pairs, like \"{\" and \"}\"",
4
+ "version": "1.0.2",
5
+ "repository": {
6
+ "type": "git",
7
+ "url": "git://github.com/juliangruber/balanced-match.git"
8
+ },
9
+ "homepage": "https://github.com/juliangruber/balanced-match",
10
+ "main": "index.js",
11
+ "scripts": {
12
+ "test": "tape test/test.js",
13
+ "bench": "matcha test/bench.js"
14
+ },
15
+ "devDependencies": {
16
+ "matcha": "^0.7.0",
17
+ "tape": "^4.6.0"
18
+ },
19
+ "keywords": [
20
+ "match",
21
+ "regexp",
22
+ "test",
23
+ "balanced",
24
+ "parse"
25
+ ],
26
+ "author": {
27
+ "name": "Julian Gruber",
28
+ "email": "mail@juliangruber.com",
29
+ "url": "http://juliangruber.com"
30
+ },
31
+ "license": "MIT",
32
+ "testling": {
33
+ "files": "test/*.js",
34
+ "browsers": [
35
+ "ie/8..latest",
36
+ "firefox/20..latest",
37
+ "firefox/nightly",
38
+ "chrome/25..latest",
39
+ "chrome/canary",
40
+ "opera/12..latest",
41
+ "opera/next",
42
+ "safari/5.1..latest",
43
+ "ipad/6.0..latest",
44
+ "iphone/6.0..latest",
45
+ "android-browser/4.2..latest"
46
+ ]
47
+ }
48
+ }
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/LICENSE ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ The ISC License
2
+
3
+ Copyright (c) npm, Inc.
4
+
5
+ Permission to use, copy, modify, and/or distribute this software for any
6
+ purpose with or without fee is hereby granted, provided that the above
7
+ copyright notice and this permission notice appear in all copies.
8
+
9
+ THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
10
+ WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
11
+ MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
12
+ ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
13
+ WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
14
+ ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
15
+ IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/bin-links/package.json ADDED
@@ -0,0 +1,61 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "name": "bin-links",
3
+ "version": "5.0.0",
4
+ "description": "JavaScript package binary linker",
5
+ "main": "./lib/index.js",
6
+ "scripts": {
7
+ "snap": "tap",
8
+ "test": "tap",
9
+ "lint": "npm run eslint",
10
+ "postlint": "template-oss-check",
11
+ "lintfix": "npm run eslint -- --fix",
12
+ "posttest": "npm run lint",
13
+ "template-oss-apply": "template-oss-apply --force",
14
+ "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
15
+ },
16
+ "repository": {
17
+ "type": "git",
18
+ "url": "git+https://github.com/npm/bin-links.git"
19
+ },
20
+ "keywords": [
21
+ "npm",
22
+ "link",
23
+ "bins"
24
+ ],
25
+ "license": "ISC",
26
+ "dependencies": {
27
+ "cmd-shim": "^7.0.0",
28
+ "npm-normalize-package-bin": "^4.0.0",
29
+ "proc-log": "^5.0.0",
30
+ "read-cmd-shim": "^5.0.0",
31
+ "write-file-atomic": "^6.0.0"
32
+ },
33
+ "devDependencies": {
34
+ "@npmcli/eslint-config": "^5.0.0",
35
+ "@npmcli/template-oss": "4.23.3",
36
+ "require-inject": "^1.4.4",
37
+ "tap": "^16.0.1"
38
+ },
39
+ "tap": {
40
+ "check-coverage": true,
41
+ "coverage-map": "map.js",
42
+ "nyc-arg": [
43
+ "--exclude",
44
+ "tap-snapshots/**"
45
+ ]
46
+ },
47
+ "files": [
48
+ "bin/",
49
+ "lib/"
50
+ ],
51
+ "engines": {
52
+ "node": "^18.17.0 || >=20.5.0"
53
+ },
54
+ "author": "GitHub Inc.",
55
+ "templateOSS": {
56
+ "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
57
+ "windowsCI": false,
58
+ "version": "4.23.3",
59
+ "publish": true
60
+ }
61
+ }
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/binary-extensions/binary-extensions.json ADDED
@@ -0,0 +1,264 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [
2
+ "3dm",
3
+ "3ds",
4
+ "3g2",
5
+ "3gp",
6
+ "7z",
7
+ "a",
8
+ "aac",
9
+ "adp",
10
+ "afdesign",
11
+ "afphoto",
12
+ "afpub",
13
+ "ai",
14
+ "aif",
15
+ "aiff",
16
+ "alz",
17
+ "ape",
18
+ "apk",
19
+ "appimage",
20
+ "ar",
21
+ "arj",
22
+ "asf",
23
+ "au",
24
+ "avi",
25
+ "bak",
26
+ "baml",
27
+ "bh",
28
+ "bin",
29
+ "bk",
30
+ "bmp",
31
+ "btif",
32
+ "bz2",
33
+ "bzip2",
34
+ "cab",
35
+ "caf",
36
+ "cgm",
37
+ "class",
38
+ "cmx",
39
+ "cpio",
40
+ "cr2",
41
+ "cr3",
42
+ "cur",
43
+ "dat",
44
+ "dcm",
45
+ "deb",
46
+ "dex",
47
+ "djvu",
48
+ "dll",
49
+ "dmg",
50
+ "dng",
51
+ "doc",
52
+ "docm",
53
+ "docx",
54
+ "dot",
55
+ "dotm",
56
+ "dra",
57
+ "DS_Store",
58
+ "dsk",
59
+ "dts",
60
+ "dtshd",
61
+ "dvb",
62
+ "dwg",
63
+ "dxf",
64
+ "ecelp4800",
65
+ "ecelp7470",
66
+ "ecelp9600",
67
+ "egg",
68
+ "eol",
69
+ "eot",
70
+ "epub",
71
+ "exe",
72
+ "f4v",
73
+ "fbs",
74
+ "fh",
75
+ "fla",
76
+ "flac",
77
+ "flatpak",
78
+ "fli",
79
+ "flv",
80
+ "fpx",
81
+ "fst",
82
+ "fvt",
83
+ "g3",
84
+ "gh",
85
+ "gif",
86
+ "graffle",
87
+ "gz",
88
+ "gzip",
89
+ "h261",
90
+ "h263",
91
+ "h264",
92
+ "icns",
93
+ "ico",
94
+ "ief",
95
+ "img",
96
+ "ipa",
97
+ "iso",
98
+ "jar",
99
+ "jpeg",
100
+ "jpg",
101
+ "jpgv",
102
+ "jpm",
103
+ "jxr",
104
+ "key",
105
+ "ktx",
106
+ "lha",
107
+ "lib",
108
+ "lvp",
109
+ "lz",
110
+ "lzh",
111
+ "lzma",
112
+ "lzo",
113
+ "m3u",
114
+ "m4a",
115
+ "m4v",
116
+ "mar",
117
+ "mdi",
118
+ "mht",
119
+ "mid",
120
+ "midi",
121
+ "mj2",
122
+ "mka",
123
+ "mkv",
124
+ "mmr",
125
+ "mng",
126
+ "mobi",
127
+ "mov",
128
+ "movie",
129
+ "mp3",
130
+ "mp4",
131
+ "mp4a",
132
+ "mpeg",
133
+ "mpg",
134
+ "mpga",
135
+ "mxu",
136
+ "nef",
137
+ "npx",
138
+ "numbers",
139
+ "nupkg",
140
+ "o",
141
+ "odp",
142
+ "ods",
143
+ "odt",
144
+ "oga",
145
+ "ogg",
146
+ "ogv",
147
+ "otf",
148
+ "ott",
149
+ "pages",
150
+ "pbm",
151
+ "pcx",
152
+ "pdb",
153
+ "pdf",
154
+ "pea",
155
+ "pgm",
156
+ "pic",
157
+ "png",
158
+ "pnm",
159
+ "pot",
160
+ "potm",
161
+ "potx",
162
+ "ppa",
163
+ "ppam",
164
+ "ppm",
165
+ "pps",
166
+ "ppsm",
167
+ "ppsx",
168
+ "ppt",
169
+ "pptm",
170
+ "pptx",
171
+ "psd",
172
+ "pya",
173
+ "pyc",
174
+ "pyo",
175
+ "pyv",
176
+ "qt",
177
+ "rar",
178
+ "ras",
179
+ "raw",
180
+ "resources",
181
+ "rgb",
182
+ "rip",
183
+ "rlc",
184
+ "rmf",
185
+ "rmvb",
186
+ "rpm",
187
+ "rtf",
188
+ "rz",
189
+ "s3m",
190
+ "s7z",
191
+ "scpt",
192
+ "sgi",
193
+ "shar",
194
+ "snap",
195
+ "sil",
196
+ "sketch",
197
+ "slk",
198
+ "smv",
199
+ "snk",
200
+ "so",
201
+ "stl",
202
+ "suo",
203
+ "sub",
204
+ "swf",
205
+ "tar",
206
+ "tbz",
207
+ "tbz2",
208
+ "tga",
209
+ "tgz",
210
+ "thmx",
211
+ "tif",
212
+ "tiff",
213
+ "tlz",
214
+ "ttc",
215
+ "ttf",
216
+ "txz",
217
+ "udf",
218
+ "uvh",
219
+ "uvi",
220
+ "uvm",
221
+ "uvp",
222
+ "uvs",
223
+ "uvu",
224
+ "viv",
225
+ "vob",
226
+ "war",
227
+ "wav",
228
+ "wax",
229
+ "wbmp",
230
+ "wdp",
231
+ "weba",
232
+ "webm",
233
+ "webp",
234
+ "whl",
235
+ "wim",
236
+ "wm",
237
+ "wma",
238
+ "wmv",
239
+ "wmx",
240
+ "woff",
241
+ "woff2",
242
+ "wrm",
243
+ "wvx",
244
+ "xbm",
245
+ "xif",
246
+ "xla",
247
+ "xlam",
248
+ "xls",
249
+ "xlsb",
250
+ "xlsm",
251
+ "xlsx",
252
+ "xlt",
253
+ "xltm",
254
+ "xltx",
255
+ "xm",
256
+ "xmind",
257
+ "xpi",
258
+ "xpm",
259
+ "xwd",
260
+ "xz",
261
+ "z",
262
+ "zip",
263
+ "zipx"
264
+ ]
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/binary-extensions/index.js ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
// Re-export the bundled JSON array of known binary file extensions.
import extensionList from './binary-extensions.json' with {type: 'json'};

export default extensionList;
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/binary-extensions/license ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ MIT License
2
+
3
+ Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
4
+ Copyright (c) Paul Miller (https://paulmillr.com)
5
+
6
+ Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
7
+
8
+ The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
9
+
10
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/binary-extensions/package.json ADDED
@@ -0,0 +1,45 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "name": "binary-extensions",
3
+ "version": "3.1.0",
4
+ "description": "List of binary file extensions",
5
+ "license": "MIT",
6
+ "repository": "sindresorhus/binary-extensions",
7
+ "funding": "https://github.com/sponsors/sindresorhus",
8
+ "author": {
9
+ "name": "Sindre Sorhus",
10
+ "email": "sindresorhus@gmail.com",
11
+ "url": "https://sindresorhus.com"
12
+ },
13
+ "type": "module",
14
+ "exports": {
15
+ "types": "./index.d.ts",
16
+ "default": "./index.js"
17
+ },
18
+ "sideEffects": false,
19
+ "engines": {
20
+ "node": ">=18.20"
21
+ },
22
+ "scripts": {
23
+ "//test": "xo && ava && tsd",
24
+ "test": "ava && tsd"
25
+ },
26
+ "files": [
27
+ "index.js",
28
+ "index.d.ts",
29
+ "binary-extensions.json"
30
+ ],
31
+ "keywords": [
32
+ "binary",
33
+ "extensions",
34
+ "extension",
35
+ "file",
36
+ "json",
37
+ "list",
38
+ "array"
39
+ ],
40
+ "devDependencies": {
41
+ "ava": "^6.1.2",
42
+ "tsd": "^0.31.0",
43
+ "xo": "^0.58.0"
44
+ }
45
+ }
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cacache/LICENSE.md ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ISC License
2
+
3
+ Copyright (c) npm, Inc.
4
+
5
+ Permission to use, copy, modify, and/or distribute this software for
6
+ any purpose with or without fee is hereby granted, provided that the
7
+ above copyright notice and this permission notice appear in all copies.
8
+
9
+ THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
10
+ ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
11
+ WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
12
+ COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
13
+ CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
14
+ OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
15
+ OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
16
+ USE OR PERFORMANCE OF THIS SOFTWARE.
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cacache/lib/content/path.js ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
'use strict'

const contentVer = require('../../package.json')['cache-version'].content
const hashToSegments = require('../util/hash-to-segments')
const path = require('path')
const ssri = require('ssri')

// Current format of content file path:
//
// sha512-BaSE64Hex= ->
// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee
//
module.exports = contentPath

/**
 * Resolve the on-disk path for a piece of content-addressed data,
 * keyed by the *strongest* algorithm present in `integrity`.
 */
function contentPath (cache, integrity) {
  const sri = ssri.parse(integrity, { single: true })
  const segments = hashToSegments(sri.hexDigest())
  return path.join(contentDir(cache), sri.algorithm, ...segments)
}

module.exports.contentDir = contentDir

// Versioned root directory for all content in this cache.
function contentDir (cache) {
  return path.join(cache, `content-v${contentVer}`)
}
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cacache/lib/content/read.js ADDED
@@ -0,0 +1,165 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 'use strict'
2
+
3
+ const fs = require('fs/promises')
4
+ const fsm = require('fs-minipass')
5
+ const ssri = require('ssri')
6
+ const contentPath = require('./path')
7
+ const Pipeline = require('minipass-pipeline')
8
+
9
module.exports = read

// Content larger than this is streamed instead of read in a single buffer.
const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024

/**
 * Read the cached content addressed by `integrity` and return it as a
 * Buffer, verifying both size and integrity. `opts.size`, when given,
 * skips the stat and is trusted as the expected byte length.
 */
async function read (cache, integrity, opts = {}) {
  const { size } = opts
  const found = await withContentSri(cache, integrity, async (cpath, sri) => {
    const stat = size ? { size } : await fs.stat(cpath)
    return { stat, cpath, sri }
  })
  const { stat, cpath, sri } = found

  // Large payloads go through the streaming pipeline, collected at the end.
  if (stat.size > MAX_SINGLE_READ_SIZE) {
    return readPipeline(cpath, stat.size, sri, new Pipeline()).concat()
  }

  const data = await fs.readFile(cpath, { encoding: null })

  if (stat.size !== data.length) {
    throw sizeError(stat.size, data.length)
  }
  if (!ssri.checkData(data, sri)) {
    throw integrityError(sri, cpath)
  }

  return data
}
36
+
37
// Wire a file-read stage and an integrity-checking stage into `stream`
// and hand the pipeline back to the caller.
const readPipeline = (cpath, size, sri, stream) => {
  const source = new fsm.ReadStream(cpath, {
    size,
    readSize: MAX_SINGLE_READ_SIZE,
  })
  const checker = ssri.integrityStream({
    integrity: sri,
    size,
  })
  stream.push(source, checker)
  return stream
}
50
+
51
module.exports.stream = readStream
module.exports.readStream = readStream

/**
 * Streaming variant of `read`: returns a Pipeline immediately and wires
 * the file + integrity stages in asynchronously. Failures surface as
 * 'error' events on the returned stream.
 */
function readStream (cache, integrity, opts = {}) {
  const { size } = opts
  const stream = new Pipeline()

  Promise.resolve()
    .then(async () => {
      const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
        const stat = size ? { size } : await fs.stat(cpath)
        return { stat, cpath, sri }
      })
      return readPipeline(cpath, stat.size, sri, stream)
    })
    .catch(err => stream.emit('error', err))

  return stream
}
70
+
71
module.exports.copy = copy

// Copy the cached content addressed by `integrity` to `dest`.
function copy (cache, integrity, dest) {
  return withContentSri(cache, integrity, (cpath) => fs.copyFile(cpath, dest))
}
78
+
79
module.exports.hasContent = hasContent

/**
 * Check whether content for `integrity` exists locally.
 * Resolves `{ size, sri, stat }` when present, false when missing
 * (or inaccessible on Windows).
 */
async function hasContent (cache, integrity) {
  if (!integrity) {
    return false
  }

  try {
    return await withContentSri(cache, integrity, async (cpath, sri) => {
      const stat = await fs.stat(cpath)
      return { size: stat.size, sri, stat }
    })
  } catch (err) {
    if (err.code === 'ENOENT') {
      return false
    }

    if (err.code === 'EPERM') {
      // Windows reports EPERM for some missing/locked paths; treat as absent
      // there, rethrow everywhere else.
      if (process.platform === 'win32') {
        return false
      }
      throw err
    }
  }
}
106
+
107
/**
 * Run `fn(contentPath, digest)` against the local content for `integrity`.
 * When the integrity string carries multiple digests for its strongest
 * algorithm, every digest is tried and the first non-error result wins.
 */
async function withContentSri (cache, integrity, fn) {
  const sri = ssri.parse(integrity)
  const algo = sri.pickAlgorithm()
  const digests = sri[algo]

  if (digests.length <= 1) {
    const cpath = contentPath(cache, digests[0])
    return fn(cpath, digests[0])
  }

  // Can't use Promise.race: an unrelated error could settle first, masking
  // both a later ENOENT and a later valid result. Collect everything.
  const results = await Promise.all(digests.map(async (meta) => {
    try {
      return await withContentSri(cache, meta, fn)
    } catch (err) {
      if (err.code === 'ENOENT') {
        return Object.assign(
          new Error('No matching content found for ' + sri.toString()),
          { code: 'ENOENT' }
        )
      }
      return err
    }
  }))

  // Prefer the first successful (non-Error) outcome.
  const ok = results.find((r) => !(r instanceof Error))
  if (ok) {
    return ok
  }

  // Otherwise surface ENOENT ("no matching content") ahead of generic errors.
  const enoent = results.find((r) => r.code === 'ENOENT')
  if (enoent) {
    throw enoent
  }

  throw results.find((r) => r instanceof Error)
}
149
+
150
// Build an EBADSIZE error carrying both the expected and actual lengths.
function sizeError (expected, found) {
  /* eslint-disable-next-line max-len */
  const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
  return Object.assign(err, {
    expected,
    found,
    code: 'EBADSIZE',
  })
}
158
+
159
// Build an EINTEGRITY error identifying the failing sri and file path.
function integrityError (sri, path) {
  const err = new Error(`Integrity verification failed for ${sri} (${path})`)
  return Object.assign(err, {
    code: 'EINTEGRITY',
    sri,
    path,
  })
}
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cacache/lib/content/rm.js ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 'use strict'
2
+
3
+ const fs = require('fs/promises')
4
+ const contentPath = require('./path')
5
+ const { hasContent } = require('./read')
6
+
7
+ module.exports = rm
8
+
9
// Remove the content addressed by `integrity` from the cache.
// Resolves to true when matching content existed and was deleted,
// false when there was nothing to remove.
async function rm (cache, integrity) {
  const content = await hasContent(cache, integrity)
  // ~pretty~ sure we can't end up with a content lacking sri, but be safe
  if (!content || !content.sri) {
    return false
  }
  await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true })
  return true
}
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cacache/lib/content/write.js ADDED
@@ -0,0 +1,206 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 'use strict'
2
+
3
+ const events = require('events')
4
+
5
+ const contentPath = require('./path')
6
+ const fs = require('fs/promises')
7
+ const { moveFile } = require('@npmcli/fs')
8
+ const { Minipass } = require('minipass')
9
+ const Pipeline = require('minipass-pipeline')
10
+ const Flush = require('minipass-flush')
11
+ const path = require('path')
12
+ const ssri = require('ssri')
13
+ const uniqueFilename = require('unique-filename')
14
+ const fsm = require('fs-minipass')
15
+
16
+ module.exports = write
17
+
18
+ // Cache of move operations in process so we don't duplicate
19
+ const moveOperations = new Map()
20
+
21
// Write a complete in-memory buffer into the cache, verifying any declared
// size and integrity first. Returns { integrity, size }.
async function write (cache, data, opts = {}) {
  const { algorithms, size, integrity } = opts

  // Reject early when the caller declared a size that does not match.
  if (typeof size === 'number' && data.length !== size) {
    throw sizeError(size, data.length)
  }

  const sri = ssri.fromData(data, algorithms ? { algorithms } : {})
  if (integrity && !ssri.checkData(data, integrity, opts)) {
    throw checksumError(integrity, sri)
  }

  // One content file per computed algorithm, each written to a tmp file
  // first and then moved into its content-addressed location.
  for (const algo in sri) {
    const tmp = await makeTmp(cache, opts)
    const digest = sri[algo].toString()
    try {
      await fs.writeFile(tmp.target, data, { flag: 'wx' })
      await moveToDestination(tmp, cache, digest, opts)
    } finally {
      // Clean up the tmp file unless the move committed it.
      if (!tmp.moved) {
        await fs.rm(tmp.target, { recursive: true, force: true })
      }
    }
  }
  return { integrity: sri, size: data.length }
}
47
+
48
+ module.exports.stream = writeStream
49
+
50
+ // writes proxied to the 'inputStream' that is passed to the Promise
51
+ // 'end' is deferred until content is handled.
52
// Write-side stream adapter: chunks written here are proxied to an internal
// Minipass, and flush (stream end) is deferred until handleContent has
// committed the bytes to the cache.
class CacacheWriteStream extends Flush {
  constructor (cache, opts) {
    super()
    this.opts = opts
    this.cache = cache
    this.inputStream = new Minipass()
    this.inputStream.on('error', er => this.emit('error', er))
    this.inputStream.on('drain', () => this.emit('drain'))
    // Lazily-started promise for the cache commit; null until first write.
    this.handleContentP = null
  }

  write (chunk, encoding, cb) {
    if (this.handleContentP === null) {
      // First chunk: start piping the input stream into the cache.
      this.handleContentP = handleContent(
        this.inputStream,
        this.cache,
        this.opts
      )
      this.handleContentP.catch(error => this.emit('error', error))
    }
    return this.inputStream.write(chunk, encoding, cb)
  }

  flush (cb) {
    this.inputStream.end(() => {
      if (this.handleContentP === null) {
        const e = new Error('Cache input stream was empty')
        e.code = 'ENODATA'
        // empty streams are probably emitting end right away.
        // defer this one tick by rejecting a promise on it.
        return Promise.reject(e).catch(cb)
      }
      // eslint-disable-next-line promise/catch-or-return
      this.handleContentP.then(
        (res) => {
          if (res.integrity) {
            this.emit('integrity', res.integrity)
          }
          // eslint-disable-next-line promise/always-return
          if (res.size !== null) {
            this.emit('size', res.size)
          }
          cb()
        },
        (er) => cb(er)
      )
    })
  }
}
97
+
98
// Public streaming entry point: returns a writable stream whose content is
// committed to the cache once the stream is flushed.
function writeStream (cache, opts = {}) {
  return new CacacheWriteStream(cache, opts)
}
101
+
102
// Drain `inputStream` into a tmp file, then move the result into its
// content-addressed destination. Always removes the tmp file when the move
// did not commit it.
async function handleContent (inputStream, cache, opts) {
  const tmp = await makeTmp(cache, opts)
  try {
    const res = await pipeToTmp(inputStream, cache, tmp.target, opts)
    await moveToDestination(tmp, cache, res.integrity, opts)
    return res
  } finally {
    if (!tmp.moved) {
      await fs.rm(tmp.target, { recursive: true, force: true })
    }
  }
}
119
+
120
// Stream `inputStream` into `tmpTarget`, computing integrity and size along
// the way — either from opts.integrityEmitter (a stream upstream already
// hashing) or via a local ssri integrity stream.
async function pipeToTmp (inputStream, cache, tmpTarget, opts) {
  const outStream = new fsm.WriteStream(tmpTarget, {
    flags: 'wx',
  })

  if (opts.integrityEmitter) {
    // we need to create these all simultaneously since they can fire in any order
    const [integrity, size] = await Promise.all([
      events.once(opts.integrityEmitter, 'integrity').then(res => res[0]),
      events.once(opts.integrityEmitter, 'size').then(res => res[0]),
      new Pipeline(inputStream, outStream).promise(),
    ])
    return { integrity, size }
  }

  let integrity
  let size
  const hashStream = ssri.integrityStream({
    integrity: opts.integrity,
    algorithms: opts.algorithms,
    size: opts.size,
  })
  hashStream.on('integrity', (i) => {
    integrity = i
  })
  hashStream.on('size', (s) => {
    size = s
  })

  await new Pipeline(inputStream, hashStream, outStream).promise()
  return { integrity, size }
}
153
+
154
// Reserve a unique tmp path under <cache>/tmp. The returned `moved` flag
// tracks whether the caller committed the file (and so whether cleanup is
// still needed).
async function makeTmp (cache, opts) {
  const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
  await fs.mkdir(path.dirname(target), { recursive: true })
  return {
    target,
    moved: false,
  }
}
162
+
163
// Move freshly-written tmp content into its content-addressed destination.
// Concurrent moves toward the same destination are coalesced through the
// module-level moveOperations map.
async function moveToDestination (tmp, cache, sri) {
  const destination = contentPath(cache, sri)
  const destDir = path.dirname(destination)
  if (moveOperations.has(destination)) {
    return moveOperations.get(destination)
  }
  moveOperations.set(
    destination,
    fs.mkdir(destDir, { recursive: true })
      .then(async () => {
        await moveFile(tmp.target, destination, { overwrite: false })
        tmp.moved = true
        return tmp.moved
      })
      .catch(err => {
        // NOTE(review): a "destination file exists" failure means identical
        // content is already cached and is deliberately swallowed; every
        // other failure is rethrown tagged EEXIST — confirm that tag is
        // intentional upstream.
        if (!err.message.startsWith('The destination file exists')) {
          throw Object.assign(err, { code: 'EEXIST' })
        }
      }).finally(() => {
        moveOperations.delete(destination)
      })
  )
  return moveOperations.get(destination)
}
188
+
189
// Build the EBADSIZE error reported when written data does not match the
// size the caller declared up front.
function sizeError (expected, found) {
  /* eslint-disable-next-line max-len */
  const message = `Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`
  return Object.assign(new Error(message), {
    expected,
    found,
    code: 'EBADSIZE',
  })
}
197
+
198
// Build the EINTEGRITY error raised when incoming data fails the integrity
// check declared by the caller.
function checksumError (expected, found) {
  const err = new Error(`Integrity check failed:
  Wanted: ${expected}
   Found: ${found}`)
  err.code = 'EINTEGRITY'
  err.expected = expected
  err.found = found
  return err
}
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cacache/lib/entry-index.js ADDED
@@ -0,0 +1,336 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 'use strict'
2
+
3
+ const crypto = require('crypto')
4
+ const {
5
+ appendFile,
6
+ mkdir,
7
+ readFile,
8
+ readdir,
9
+ rm,
10
+ writeFile,
11
+ } = require('fs/promises')
12
+ const { Minipass } = require('minipass')
13
+ const path = require('path')
14
+ const ssri = require('ssri')
15
+ const uniqueFilename = require('unique-filename')
16
+
17
+ const contentPath = require('./content/path')
18
+ const hashToSegments = require('./util/hash-to-segments')
19
+ const indexV = require('../package.json')['cache-version'].index
20
+ const { moveFile } = require('@npmcli/fs')
21
+
22
+ const lsStreamConcurrency = 5
23
+
24
// Raised when `key` has no index entry in `cache`. Uses the same ENOENT
// code as fs-level misses so callers can treat both uniformly.
module.exports.NotFoundError = class NotFoundError extends Error {
  constructor (cache, key) {
    super(`No cache entry for ${key} found in ${cache}`)
    this.code = 'ENOENT'
    this.cache = cache
    this.key = key
  }
}
32
+
33
+ module.exports.compact = compact
34
+
35
// Rewrite the index bucket for `key`, keeping only the newest entry for
// each distinct match (per `matchFn`) and dropping superseded or invalid
// rows. Returns the surviving entries, newest first, in formatted form.
async function compact (cache, key, matchFn, opts = {}) {
  const bucket = bucketPath(cache, key)
  const entries = await bucketEntries(bucket)
  const retained = []

  // Walk newest-to-oldest: appendFile puts the most recent entry last.
  for (let i = entries.length - 1; i >= 0; --i) {
    const entry = entries[i]
    // A null integrity is either an appended delete or an index entry that
    // maps to no content. Without an opts.validateEntry callback to decide,
    // treat it as a tombstone shadowing everything older and stop.
    if (entry.integrity === null && !opts.validateEntry) {
      break
    }

    // Keep the entry when it validates (or no validator was supplied) and
    // no newer equivalent — per matchFn — has been kept already.
    const isValid = !opts.validateEntry || opts.validateEntry(entry) === true
    const isDuplicate = retained.some((kept) => matchFn(kept, entry))
    if (isValid && !isDuplicate) {
      retained.unshift(entry)
    }
  }

  // Re-serialize the surviving entries, one "<hash>\t<json>" line each.
  const newIndex = '\n' + retained.map((entry) => {
    const stringified = JSON.stringify(entry)
    return `${hashEntry(stringified)}\t${stringified}`
  }).join('\n')

  const setup = async () => {
    const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
    await mkdir(path.dirname(target), { recursive: true })
    return {
      target,
      moved: false,
    }
  }

  const teardown = async (tmp) => {
    if (!tmp.moved) {
      return rm(tmp.target, { recursive: true, force: true })
    }
  }

  const write = async (tmp) => {
    await writeFile(tmp.target, newIndex, { flag: 'wx' })
    await mkdir(path.dirname(bucket), { recursive: true })
    // @npmcli/fs moveFile is used directly because the existing bucket
    // file must be overwritten.
    await moveFile(tmp.target, bucket)
    tmp.moved = true
  }

  // Write the new bucket atomically via a tmp file.
  const tmp = await setup()
  try {
    await write(tmp)
  } finally {
    await teardown(tmp)
  }

  // Newest entries first for callers. keepAll=true preserves null-integrity
  // entries that a validateEntry callback explicitly chose to keep.
  return retained.reverse().map((entry) => formatEntry(cache, entry, true))
}
110
+
111
+ module.exports.insert = insert
112
+
113
// Append an entry for `key` to its index bucket and return the formatted
// entry — or undefined if the bucket path vanished mid-write (ENOENT).
async function insert (cache, key, integrity, opts = {}) {
  const { metadata, size, time } = opts
  const bucket = bucketPath(cache, key)
  const entry = {
    key,
    integrity: integrity && ssri.stringify(integrity),
    time: time || Date.now(),
    size,
    metadata,
  }
  try {
    await mkdir(path.dirname(bucket), { recursive: true })
    const stringified = JSON.stringify(entry)
    // Each line is "<hash>\t<json>". The per-line hash lets readers detect
    // lines corrupted by interleaved appends and skip them on read — it is
    // tremendously unlikely one entry corrupts another while preserving the
    // JSON's checksum (credit to @isaacs for the scheme).
    await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`)
  } catch (err) {
    if (err.code === 'ENOENT') {
      return undefined
    }

    throw err
  }
  return formatEntry(cache, entry)
}
144
+
145
+ module.exports.find = find
146
+
147
// Find the most recent index entry for `key`, formatted, or null when the
// bucket is missing or holds no matching entry.
async function find (cache, key) {
  const bucket = bucketPath(cache, key)
  try {
    const entries = await bucketEntries(bucket)
    // Later lines shadow earlier ones, so scan forward keeping the last hit.
    let latest = null
    for (const entry of entries) {
      if (entry && entry.key === key) {
        latest = formatEntry(cache, entry)
      }
    }
    return latest
  } catch (err) {
    if (err.code === 'ENOENT') {
      return null
    }
    throw err
  }
}
166
+
167
+ module.exports.delete = del
168
+
169
// Delete `key` from the index. By default this appends a tombstone (an
// entry with null integrity); opts.removeFully physically removes the
// whole bucket file instead.
function del (cache, key, opts = {}) {
  if (opts.removeFully) {
    const bucket = bucketPath(cache, key)
    return rm(bucket, { recursive: true, force: true })
  }
  return insert(cache, key, null, opts)
}
177
+
178
+ module.exports.lsStream = lsStream
179
+
180
// Stream every (per-key deduplicated) index entry in the cache as formatted
// objects. The stream is returned immediately; the directory walk feeds it
// asynchronously and surfaces failures as 'error' events.
function lsStream (cache) {
  const indexDir = bucketDir(cache)
  const stream = new Minipass({ objectMode: true })

  // Set all this up to run on the stream and then just return the stream.
  Promise.resolve().then(async () => {
    const { default: pMap } = await import('p-map')
    const buckets = await readdirOrEmpty(indexDir)
    await pMap(buckets, async (bucket) => {
      const bucketDirPath = path.join(indexDir, bucket)
      const subbuckets = await readdirOrEmpty(bucketDirPath)
      await pMap(subbuckets, async (subbucket) => {
        const subbucketPath = path.join(bucketDirPath, subbucket)

        // "/cachename/<bucket 0xFF>/<bucket 0xFF>./*"
        const subbucketEntries = await readdirOrEmpty(subbucketPath)
        await pMap(subbucketEntries, async (entryFile) => {
          const entryPath = path.join(subbucketPath, entryFile)
          try {
            const entries = await bucketEntries(entryPath)
            // Deduplicate by key: the last line for a key is authoritative.
            const byKey = new Map()
            for (const entry of entries) {
              byKey.set(entry.key, entry)
            }
            for (const entry of byKey.values()) {
              const formatted = formatEntry(cache, entry)
              if (formatted) {
                stream.write(formatted)
              }
            }
          } catch (err) {
            // A bucket file removed mid-walk is not an error.
            if (err.code === 'ENOENT') {
              return undefined
            }
            throw err
          }
        },
        { concurrency: lsStreamConcurrency })
      },
      { concurrency: lsStreamConcurrency })
    },
    { concurrency: lsStreamConcurrency })
    stream.end()
    return stream
  }).catch(err => stream.emit('error', err))

  return stream
}
231
+
232
+ module.exports.ls = ls
233
+
234
// Collect the full index listing into a { key: entry } object.
async function ls (cache) {
  const entries = await lsStream(cache).collect()
  const byKey = {}
  for (const entry of entries) {
    byKey[entry.key] = entry
  }
  return byKey
}
241
+
242
+ module.exports.bucketEntries = bucketEntries
243
+
244
// Read and parse an index bucket file into entry objects.
// NOTE(review): `filter` is forwarded but currently ignored by
// _bucketEntries — kept to preserve the exported interface.
async function bucketEntries (bucket, filter) {
  const data = await readFile(bucket, 'utf8')
  return _bucketEntries(data, filter)
}
248
+
249
// Parse raw bucket text ("<hash>\t<json>" lines) into entry objects,
// silently dropping lines whose inline hash does not match (torn or
// malicious writes) or whose JSON fails to parse.
function _bucketEntries (data) {
  const entries = []
  for (const line of data.split('\n')) {
    if (!line) {
      continue
    }

    const pieces = line.split('\t')
    if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) {
      // Hash is no good! Corruption or malice? Doesn't matter! Skip it.
      continue
    }
    let parsed
    try {
      parsed = JSON.parse(pieces[1])
    } catch (_) {
      // Unparseable JSON is treated like a bad checksum: ignore the line.
    }
    // No need to handle an entry that parses to something falsey.
    if (parsed) {
      entries.push(parsed)
    }
  }
  return entries
}
276
+
277
+ module.exports.bucketDir = bucketDir
278
+
279
// Root directory of all index buckets, versioned by the package's
// cache-version so incompatible layouts never collide.
function bucketDir (cache) {
  return path.join(cache, `index-v${indexV}`)
}
282
+
283
+ module.exports.bucketPath = bucketPath
284
+
285
// Bucket file path for `key`: the index root plus hash-derived fan-out
// segments.
function bucketPath (cache, key) {
  const hashed = hashKey(key)
  return path.join(bucketDir(cache), ...hashToSegments(hashed))
}
292
+
293
+ module.exports.hashKey = hashKey
294
+
295
// Hash an index key (sha256) for use in bucket path construction.
function hashKey (key) {
  return hash(key, 'sha256')
}
298
+
299
+ module.exports.hashEntry = hashEntry
300
+
301
// Checksum (sha1) of a serialized entry line, used to detect torn or
// corrupted appends when the bucket is read back.
function hashEntry (str) {
  return hash(str, 'sha1')
}
304
+
305
// Hex digest of `str` using the named hash algorithm.
function hash (str, digest) {
  const hasher = crypto.createHash(digest)
  hasher.update(str)
  return hasher.digest('hex')
}
311
+
312
// Shape a raw index entry for public consumption. Entries without
// integrity are deletion tombstones and normally format to null; pass
// keepAll to preserve them (used by compact with validateEntry).
function formatEntry (cache, entry, keepAll) {
  if (!entry.integrity && !keepAll) {
    return null
  }

  return {
    key: entry.key,
    integrity: entry.integrity,
    path: entry.integrity ? contentPath(cache, entry.integrity) : undefined,
    size: entry.size,
    time: entry.time,
    metadata: entry.metadata,
  }
}
327
+
328
// readdir that treats a missing path or a non-directory as an empty
// listing instead of failing; any other error propagates.
async function readdirOrEmpty (dir) {
  try {
    return await readdir(dir)
  } catch (err) {
    if (err.code === 'ENOENT' || err.code === 'ENOTDIR') {
      return []
    }

    throw err
  }
}
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cacache/lib/get.js ADDED
@@ -0,0 +1,170 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 'use strict'
2
+
3
+ const Collect = require('minipass-collect')
4
+ const { Minipass } = require('minipass')
5
+ const Pipeline = require('minipass-pipeline')
6
+
7
+ const index = require('./entry-index')
8
+ const memo = require('./memoization')
9
+ const read = require('./content/read')
10
+
11
// Look up `key` and read its content, returning { data, metadata, size,
// integrity }. Serves from the memoization cache when permitted; throws
// index.NotFoundError when the key has no index entry.
async function getData (cache, key, opts = {}) {
  const { integrity, memoize, size } = opts
  const cached = memo.get(cache, key, opts)
  if (cached && memoize !== false) {
    return {
      metadata: cached.entry.metadata,
      data: cached.data,
      integrity: cached.entry.integrity,
      size: cached.entry.size,
    }
  }

  const entry = await index.find(cache, key, opts)
  if (!entry) {
    throw new index.NotFoundError(cache, key)
  }
  const data = await read(cache, entry.integrity, { integrity, size })
  if (memoize) {
    memo.put(cache, entry, data, opts)
  }

  return {
    data,
    metadata: entry.metadata,
    size: entry.size,
    integrity: entry.integrity,
  }
}
module.exports = getData
40
+
41
// Read content directly by its integrity digest (`key` here is an sri
// string), bypassing the index. Memoizes on request.
async function getDataByDigest (cache, key, opts = {}) {
  const { integrity, memoize, size } = opts
  const cached = memo.get.byDigest(cache, key, opts)
  if (cached && memoize !== false) {
    return cached
  }

  const res = await read(cache, key, { integrity, size })
  if (memoize) {
    memo.put.byDigest(cache, key, res, opts)
  }
  return res
}
module.exports.byDigest = getDataByDigest
55
+
56
// Wrap memoized bytes in a stream that replays metadata/integrity/size to
// any listener as it attaches, mirroring the real read stream's events.
const getMemoizedStream = (memoized) => {
  const stream = new Minipass()
  stream.on('newListener', (ev, cb) => {
    if (ev === 'metadata') {
      cb(memoized.entry.metadata)
    } else if (ev === 'integrity') {
      cb(memoized.entry.integrity)
    } else if (ev === 'size') {
      cb(memoized.entry.size)
    }
  })
  stream.end(memoized.data)
  return stream
}
66
+
67
// Streaming get by key: returns a pipeline immediately and wires the index
// lookup plus content read onto it asynchronously. Emits 'metadata',
// 'integrity', and 'size' (replayed for late listeners); errors surface as
// 'error' events, including NotFoundError for a missing key.
function getStream (cache, key, opts = {}) {
  const { memoize, size } = opts
  const cached = memo.get(cache, key, opts)
  if (cached && memoize !== false) {
    return getMemoizedStream(cached)
  }

  const stream = new Pipeline()
  // Set all this up to run on the stream and then just return the stream.
  Promise.resolve().then(async () => {
    const entry = await index.find(cache, key)
    if (!entry) {
      throw new index.NotFoundError(cache, key)
    }

    stream.emit('metadata', entry.metadata)
    stream.emit('integrity', entry.integrity)
    stream.emit('size', entry.size)
    stream.on('newListener', (ev, cb) => {
      if (ev === 'metadata') {
        cb(entry.metadata)
      } else if (ev === 'integrity') {
        cb(entry.integrity)
      } else if (ev === 'size') {
        cb(entry.size)
      }
    })

    const src = read.readStream(
      cache,
      entry.integrity,
      { ...opts, size: typeof size !== 'number' ? entry.size : size }
    )

    if (memoize) {
      const memoStream = new Collect.PassThrough()
      memoStream.on('collect', data => memo.put(cache, entry, data, opts))
      stream.unshift(memoStream)
    }
    stream.unshift(src)
    return stream
  }).catch((err) => stream.emit('error', err))

  return stream
}

module.exports.stream = getStream
110
+
111
// Streaming get by integrity digest, bypassing the index. Serves memoized
// bytes when permitted; otherwise reads from disk, optionally collecting
// the bytes into the memoization cache on the way through.
function getStreamDigest (cache, integrity, opts = {}) {
  const { memoize } = opts
  const cached = memo.get.byDigest(cache, integrity, opts)
  if (cached && memoize !== false) {
    const stream = new Minipass()
    stream.end(cached)
    return stream
  }

  const stream = read.readStream(cache, integrity, opts)
  if (!memoize) {
    return stream
  }

  const memoStream = new Collect.PassThrough()
  memoStream.on('collect', data => memo.put.byDigest(
    cache,
    integrity,
    data,
    opts
  ))
  return new Pipeline(stream, memoStream)
}

module.exports.stream.byDigest = getStreamDigest
136
+
137
// Index metadata for `key` without reading content; resolves to null when
// the key is absent. Serves the memoized entry when permitted.
function info (cache, key, opts = {}) {
  const { memoize } = opts
  const cached = memo.get(cache, key, opts)
  if (cached && memoize !== false) {
    return Promise.resolve(cached.entry)
  }
  return index.find(cache, key)
}
module.exports.info = info
147
+
148
// Copy the content for `key` to `dest` on disk; returns the entry's
// metadata/size/integrity. Throws NotFoundError when the key is absent.
async function copy (cache, key, dest, opts = {}) {
  const entry = await index.find(cache, key, opts)
  if (!entry) {
    throw new index.NotFoundError(cache, key)
  }
  await read.copy(cache, entry.integrity, dest, opts)
  return {
    metadata: entry.metadata,
    size: entry.size,
    integrity: entry.integrity,
  }
}

module.exports.copy = copy
162
+
163
// Copy content addressed by integrity digest (`key` is an sri string)
// straight to `dest`; resolves to the digest on success.
async function copyByDigest (cache, key, dest, opts = {}) {
  await read.copy(cache, key, dest, opts)
  return key
}

module.exports.copy.byDigest = copyByDigest
169
+
170
+ module.exports.hasContent = read.hasContent
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cacache/lib/index.js ADDED
@@ -0,0 +1,42 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
'use strict'

// Public cacache facade: stitches the submodules into the documented API
// surface (get/put/rm/ls/verify/tmp plus low-level index helpers).
const get = require('./get.js')
const put = require('./put.js')
const rm = require('./rm.js')
const verify = require('./verify.js')
const { clearMemoized } = require('./memoization.js')
const tmp = require('./util/tmp.js')
const index = require('./entry-index.js')

module.exports.index = {
  compact: index.compact,
  insert: index.insert,
}

module.exports.ls = index.ls
module.exports.ls.stream = index.lsStream

module.exports.get = get
module.exports.get.byDigest = get.byDigest
module.exports.get.stream = get.stream
module.exports.get.stream.byDigest = get.stream.byDigest
module.exports.get.copy = get.copy
module.exports.get.copy.byDigest = get.copy.byDigest
module.exports.get.info = get.info
module.exports.get.hasContent = get.hasContent

module.exports.put = put
module.exports.put.stream = put.stream

// rm() doubles as rm.entry(); the other removal modes hang off of it.
module.exports.rm = rm.entry
module.exports.rm.all = rm.all
module.exports.rm.entry = module.exports.rm
module.exports.rm.content = rm.content

module.exports.clearMemoized = clearMemoized

module.exports.tmp = {
  mkdir: tmp.mkdir,
  withTmp: tmp.withTmp,
}

module.exports.verify = verify
module.exports.verify.lastRun = verify.lastRun
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cacache/lib/memoization.js ADDED
@@ -0,0 +1,72 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
'use strict'

const { LRUCache } = require('lru-cache')

// Shared in-process memoization store, bounded by entry count, total
// bytes, and age so a long-lived process cannot grow without limit.
// Full entries are stored under "key:" prefixes (sized by their data),
// raw content under "digest:" prefixes (sized directly).
const MEMOIZED = new LRUCache({
  max: 500,
  maxSize: 50 * 1024 * 1024, // 50MB
  ttl: 3 * 60 * 1000, // 3 minutes
  sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length,
})

module.exports.clearMemoized = clearMemoized

// Empty the shared store, returning a snapshot of what was evicted.
function clearMemoized () {
  const snapshot = {}
  MEMOIZED.forEach((value, key) => {
    snapshot[key] = value
  })
  MEMOIZED.clear()
  return snapshot
}
22
+
23
module.exports.put = put

// Memoize a full entry (keyed by cache+key) and, via putDigest, its raw
// content (keyed by cache+integrity).
function put (cache, entry, data, opts) {
  pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data })
  putDigest(cache, entry.integrity, data, opts)
}

module.exports.put.byDigest = putDigest

// Memoize raw content keyed by cache+integrity only.
function putDigest (cache, integrity, data, opts) {
  pickMem(opts).set(`digest:${cache}:${integrity}`, data)
}
35
+
36
module.exports.get = get

// Fetch a memoized full entry for cache+key, if present.
function get (cache, key, opts) {
  return pickMem(opts).get(`key:${cache}:${key}`)
}

module.exports.get.byDigest = getDigest

// Fetch memoized raw content for cache+integrity, if present.
function getDigest (cache, integrity, opts) {
  return pickMem(opts).get(`digest:${cache}:${integrity}`)
}
47
+
48
// Adapts a plain object to the minimal get/set surface pickMem expects,
// so callers may supply a bare object as their memoization store.
class ObjProxy {
  constructor (obj) {
    this.obj = obj
  }

  get (key) {
    return this.obj[key]
  }

  set (key, val) {
    this.obj[key] = val
  }
}
61
+
62
// Choose the memoization backend: the shared LRU by default, the caller's
// own cache-like object when opts.memoize exposes get/set, or a plain
// object wrapped in ObjProxy. Any other truthy value falls back to the
// shared LRU.
function pickMem (opts) {
  const memoize = opts && opts.memoize
  if (!memoize) {
    return MEMOIZED
  }
  if (memoize.get && memoize.set) {
    return memoize
  }
  if (typeof memoize === 'object') {
    return new ObjProxy(memoize)
  }
  return MEMOIZED
}
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cacache/lib/put.js ADDED
@@ -0,0 +1,80 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 'use strict'
2
+
3
+ const index = require('./entry-index')
4
+ const memo = require('./memoization')
5
+ const write = require('./content/write')
6
+ const Flush = require('minipass-flush')
7
+ const { PassThrough } = require('minipass-collect')
8
+ const Pipeline = require('minipass-pipeline')
9
+
10
// Default put options: hash with sha512 unless the caller overrides.
const putOpts = (opts) => ({
  algorithms: ['sha512'],
  ...opts,
})
14
+
15
module.exports = putData

// Write `data` into the cache under `key`, record the index entry, and
// optionally memoize. Resolves to the content's integrity.
async function putData (cache, key, data, opts = {}) {
  const { memoize } = opts
  opts = putOpts(opts)
  const res = await write(cache, data, opts)
  const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size })
  if (memoize) {
    memo.put(cache, entry, data, opts)
  }

  return res.integrity
}
28
+
29
module.exports.stream = putStream

// Streaming put: returns a writable pipeline. Once flushed, the content is
// written, the index entry inserted, memoization applied if requested, and
// 'integrity'/'size' emitted on the pipeline.
function putStream (cache, key, opts = {}) {
  const { memoize } = opts
  opts = putOpts(opts)
  let integrity
  let size
  let error

  let memoData
  const pipeline = new Pipeline()
  // The memoizer must be the first stage so it ends first and hands over
  // the collected data before the final flush runs.
  if (memoize) {
    const memoizer = new PassThrough().on('collect', data => {
      memoData = data
    })
    pipeline.push(memoizer)
  }

  // The content stream is write-only — it emits results, not data.
  const contentStream = write.stream(cache, opts)
    .on('integrity', (int) => {
      integrity = int
    })
    .on('size', (s) => {
      size = s
    })
    .on('error', (err) => {
      error = err
    })

  pipeline.push(contentStream)

  // Final stage: on flush, persist the index entry, memoize if requested,
  // and surface integrity/size to pipeline listeners — unless the content
  // stream already failed.
  pipeline.push(new Flush({
    async flush () {
      if (error) {
        return
      }
      const entry = await index.insert(cache, key, integrity, { ...opts, size })
      if (memoize && memoData) {
        memo.put(cache, entry, memoData, opts)
      }
      pipeline.emit('integrity', integrity)
      pipeline.emit('size', size)
    },
  }))

  return pipeline
}
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cacache/lib/rm.js ADDED
@@ -0,0 +1,31 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 'use strict'
2
+
3
+ const { rm } = require('fs/promises')
4
+ const glob = require('./util/glob.js')
5
+ const index = require('./entry-index')
6
+ const memo = require('./memoization')
7
+ const path = require('path')
8
+ const rmContent = require('./content/rm')
9
+
10
module.exports = entry
module.exports.entry = entry

// Remove the index entry for `key` (tombstone by default; see
// entry-index.delete for opts.removeFully). Clears the memoization cache
// so stale data cannot be served afterwards.
function entry (cache, key, opts) {
  memo.clearMemoized()
  return index.delete(cache, key, opts)
}
17
+
18
module.exports.content = content

// Remove content by integrity digest, clearing memoized data first.
function content (cache, integrity) {
  memo.clearMemoized()
  return rmContent(cache, integrity)
}
24
+
25
module.exports.all = all

// Wipe every content-* and index-* tree under the cache root.
async function all (cache) {
  memo.clearMemoized()
  const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true })
  return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true })))
}
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cacache/lib/util/glob.js ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ 'use strict'
2
+
3
+ const { glob } = require('glob')
4
+ const path = require('path')
5
+
6
// Normalize Windows path separators to the forward slashes glob expects.
const globify = (pattern) => pattern.replaceAll(path.win32.sep, path.posix.sep)
7
// Run glob with separator-normalized patterns (param renamed so it no
// longer shadows the `path` module).
module.exports = (pattern, options) => glob(globify(pattern), options)
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cacache/lib/util/hash-to-segments.js ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ 'use strict'
2
+
3
+ module.exports = hashToSegments
4
+
5
// Split a hex hash into [2, 2, rest] path segments so content files fan
// out across subdirectories instead of piling into one.
function hashToSegments (hash) {
  const first = hash.slice(0, 2)
  const second = hash.slice(2, 4)
  return [first, second, hash.slice(4)]
}
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cacache/lib/util/tmp.js ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 'use strict'
2
+
3
+ const { withTempDir } = require('@npmcli/fs')
4
+ const fs = require('fs/promises')
5
+ const path = require('path')
6
+
7
module.exports.mkdir = mktmpdir

// Create a unique temp directory under <cache>/tmp, optionally prefixed;
// resolves to its path.
async function mktmpdir (cache, opts = {}) {
  const { tmpPrefix } = opts
  const tmpDir = path.join(cache, 'tmp')
  await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' })
  // do not use path.join(), it drops the trailing / if tmpPrefix is unset
  const target = `${tmpDir}${path.sep}${tmpPrefix || ''}`
  return fs.mkdtemp(target, { owner: 'inherit' })
}
17
+
18
module.exports.withTmp = withTmp

// Run `cb` with a fresh temp dir that is cleaned up afterwards.
// `opts` may be omitted: withTmp(cache, cb).
function withTmp (cache, opts, cb) {
  if (!cb) {
    cb = opts
    opts = {}
  }
  return withTempDir(path.join(cache, 'tmp'), cb, opts)
}
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cacache/lib/verify.js ADDED
@@ -0,0 +1,258 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 'use strict'
2
+
3
+ const {
4
+ mkdir,
5
+ readFile,
6
+ rm,
7
+ stat,
8
+ truncate,
9
+ writeFile,
10
+ } = require('fs/promises')
11
+ const contentPath = require('./content/path')
12
+ const fsm = require('fs-minipass')
13
+ const glob = require('./util/glob.js')
14
+ const index = require('./entry-index')
15
+ const path = require('path')
16
+ const ssri = require('ssri')
17
+
18
+ const hasOwnProperty = (obj, key) =>
19
+ Object.prototype.hasOwnProperty.call(obj, key)
20
+
21
+ const verifyOpts = (opts) => ({
22
+ concurrency: 20,
23
+ log: { silly () {} },
24
+ ...opts,
25
+ })
26
+
27
+ module.exports = verify
28
+
29
+ async function verify (cache, opts) {
30
+ opts = verifyOpts(opts)
31
+ opts.log.silly('verify', 'verifying cache at', cache)
32
+
33
+ const steps = [
34
+ markStartTime,
35
+ fixPerms,
36
+ garbageCollect,
37
+ rebuildIndex,
38
+ cleanTmp,
39
+ writeVerifile,
40
+ markEndTime,
41
+ ]
42
+
43
+ const stats = {}
44
+ for (const step of steps) {
45
+ const label = step.name
46
+ const start = new Date()
47
+ const s = await step(cache, opts)
48
+ if (s) {
49
+ Object.keys(s).forEach((k) => {
50
+ stats[k] = s[k]
51
+ })
52
+ }
53
+ const end = new Date()
54
+ if (!stats.runTime) {
55
+ stats.runTime = {}
56
+ }
57
+ stats.runTime[label] = end - start
58
+ }
59
+ stats.runTime.total = stats.endTime - stats.startTime
60
+ opts.log.silly(
61
+ 'verify',
62
+ 'verification finished for',
63
+ cache,
64
+ 'in',
65
+ `${stats.runTime.total}ms`
66
+ )
67
+ return stats
68
+ }
69
+
70
+ async function markStartTime () {
71
+ return { startTime: new Date() }
72
+ }
73
+
74
+ async function markEndTime () {
75
+ return { endTime: new Date() }
76
+ }
77
+
78
+ async function fixPerms (cache, opts) {
79
+ opts.log.silly('verify', 'fixing cache permissions')
80
+ await mkdir(cache, { recursive: true })
81
+ return null
82
+ }
83
+
84
+ // Implements a naive mark-and-sweep tracing garbage collector.
85
+ //
86
+ // The algorithm is basically as follows:
87
+ // 1. Read (and filter) all index entries ("pointers")
88
+ // 2. Mark each integrity value as "live"
89
+ // 3. Read entire filesystem tree in `content-vX/` dir
90
+ // 4. If content is live, verify its checksum and delete it if it fails
91
+ // 5. If content is not marked as live, rm it.
92
+ //
93
+ async function garbageCollect (cache, opts) {
94
+ opts.log.silly('verify', 'garbage collecting content')
95
+ const { default: pMap } = await import('p-map')
96
+ const indexStream = index.lsStream(cache)
97
+ const liveContent = new Set()
98
+ indexStream.on('data', (entry) => {
99
+ if (opts.filter && !opts.filter(entry)) {
100
+ return
101
+ }
102
+
103
+ // integrity is stringified, re-parse it so we can get each hash
104
+ const integrity = ssri.parse(entry.integrity)
105
+ for (const algo in integrity) {
106
+ liveContent.add(integrity[algo].toString())
107
+ }
108
+ })
109
+ await new Promise((resolve, reject) => {
110
+ indexStream.on('end', resolve).on('error', reject)
111
+ })
112
+ const contentDir = contentPath.contentDir(cache)
113
+ const files = await glob(path.join(contentDir, '**'), {
114
+ follow: false,
115
+ nodir: true,
116
+ nosort: true,
117
+ })
118
+ const stats = {
119
+ verifiedContent: 0,
120
+ reclaimedCount: 0,
121
+ reclaimedSize: 0,
122
+ badContentCount: 0,
123
+ keptSize: 0,
124
+ }
125
+ await pMap(
126
+ files,
127
+ async (f) => {
128
+ const split = f.split(/[/\\]/)
129
+ const digest = split.slice(split.length - 3).join('')
130
+ const algo = split[split.length - 4]
131
+ const integrity = ssri.fromHex(digest, algo)
132
+ if (liveContent.has(integrity.toString())) {
133
+ const info = await verifyContent(f, integrity)
134
+ if (!info.valid) {
135
+ stats.reclaimedCount++
136
+ stats.badContentCount++
137
+ stats.reclaimedSize += info.size
138
+ } else {
139
+ stats.verifiedContent++
140
+ stats.keptSize += info.size
141
+ }
142
+ } else {
143
+ // No entries refer to this content. We can delete.
144
+ stats.reclaimedCount++
145
+ const s = await stat(f)
146
+ await rm(f, { recursive: true, force: true })
147
+ stats.reclaimedSize += s.size
148
+ }
149
+ return stats
150
+ },
151
+ { concurrency: opts.concurrency }
152
+ )
153
+ return stats
154
+ }
155
+
156
+ async function verifyContent (filepath, sri) {
157
+ const contentInfo = {}
158
+ try {
159
+ const { size } = await stat(filepath)
160
+ contentInfo.size = size
161
+ contentInfo.valid = true
162
+ await ssri.checkStream(new fsm.ReadStream(filepath), sri)
163
+ } catch (err) {
164
+ if (err.code === 'ENOENT') {
165
+ return { size: 0, valid: false }
166
+ }
167
+ if (err.code !== 'EINTEGRITY') {
168
+ throw err
169
+ }
170
+
171
+ await rm(filepath, { recursive: true, force: true })
172
+ contentInfo.valid = false
173
+ }
174
+ return contentInfo
175
+ }
176
+
177
+ async function rebuildIndex (cache, opts) {
178
+ opts.log.silly('verify', 'rebuilding index')
179
+ const { default: pMap } = await import('p-map')
180
+ const entries = await index.ls(cache)
181
+ const stats = {
182
+ missingContent: 0,
183
+ rejectedEntries: 0,
184
+ totalEntries: 0,
185
+ }
186
+ const buckets = {}
187
+ for (const k in entries) {
188
+ /* istanbul ignore else */
189
+ if (hasOwnProperty(entries, k)) {
190
+ const hashed = index.hashKey(k)
191
+ const entry = entries[k]
192
+ const excluded = opts.filter && !opts.filter(entry)
193
+ excluded && stats.rejectedEntries++
194
+ if (buckets[hashed] && !excluded) {
195
+ buckets[hashed].push(entry)
196
+ } else if (buckets[hashed] && excluded) {
197
+ // skip
198
+ } else if (excluded) {
199
+ buckets[hashed] = []
200
+ buckets[hashed]._path = index.bucketPath(cache, k)
201
+ } else {
202
+ buckets[hashed] = [entry]
203
+ buckets[hashed]._path = index.bucketPath(cache, k)
204
+ }
205
+ }
206
+ }
207
+ await pMap(
208
+ Object.keys(buckets),
209
+ (key) => {
210
+ return rebuildBucket(cache, buckets[key], stats, opts)
211
+ },
212
+ { concurrency: opts.concurrency }
213
+ )
214
+ return stats
215
+ }
216
+
217
+ async function rebuildBucket (cache, bucket, stats) {
218
+ await truncate(bucket._path)
219
+ // This needs to be serialized because cacache explicitly
220
+ // lets very racy bucket conflicts clobber each other.
221
+ for (const entry of bucket) {
222
+ const content = contentPath(cache, entry.integrity)
223
+ try {
224
+ await stat(content)
225
+ await index.insert(cache, entry.key, entry.integrity, {
226
+ metadata: entry.metadata,
227
+ size: entry.size,
228
+ time: entry.time,
229
+ })
230
+ stats.totalEntries++
231
+ } catch (err) {
232
+ if (err.code === 'ENOENT') {
233
+ stats.rejectedEntries++
234
+ stats.missingContent++
235
+ } else {
236
+ throw err
237
+ }
238
+ }
239
+ }
240
+ }
241
+
242
+ function cleanTmp (cache, opts) {
243
+ opts.log.silly('verify', 'cleaning tmp directory')
244
+ return rm(path.join(cache, 'tmp'), { recursive: true, force: true })
245
+ }
246
+
247
+ async function writeVerifile (cache, opts) {
248
+ const verifile = path.join(cache, '_lastverified')
249
+ opts.log.silly('verify', 'writing verifile to ' + verifile)
250
+ return writeFile(verifile, `${Date.now()}`)
251
+ }
252
+
253
+ module.exports.lastRun = lastRun
254
+
255
+ async function lastRun (cache) {
256
+ const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' })
257
+ return new Date(+data)
258
+ }
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cacache/package.json ADDED
@@ -0,0 +1,82 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "name": "cacache",
3
+ "version": "20.0.1",
4
+ "cache-version": {
5
+ "content": "2",
6
+ "index": "5"
7
+ },
8
+ "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.",
9
+ "main": "lib/index.js",
10
+ "files": [
11
+ "bin/",
12
+ "lib/"
13
+ ],
14
+ "scripts": {
15
+ "test": "tap",
16
+ "snap": "tap",
17
+ "coverage": "tap",
18
+ "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test",
19
+ "lint": "npm run eslint",
20
+ "npmclilint": "npmcli-lint",
21
+ "lintfix": "npm run eslint -- --fix",
22
+ "postsnap": "npm run lintfix --",
23
+ "postlint": "template-oss-check",
24
+ "posttest": "npm run lint",
25
+ "template-oss-apply": "template-oss-apply --force",
26
+ "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
27
+ },
28
+ "repository": {
29
+ "type": "git",
30
+ "url": "git+https://github.com/npm/cacache.git"
31
+ },
32
+ "keywords": [
33
+ "cache",
34
+ "caching",
35
+ "content-addressable",
36
+ "sri",
37
+ "sri hash",
38
+ "subresource integrity",
39
+ "cache",
40
+ "storage",
41
+ "store",
42
+ "file store",
43
+ "filesystem",
44
+ "disk cache",
45
+ "disk storage"
46
+ ],
47
+ "license": "ISC",
48
+ "dependencies": {
49
+ "@npmcli/fs": "^4.0.0",
50
+ "fs-minipass": "^3.0.0",
51
+ "glob": "^11.0.3",
52
+ "lru-cache": "^11.1.0",
53
+ "minipass": "^7.0.3",
54
+ "minipass-collect": "^2.0.1",
55
+ "minipass-flush": "^1.0.5",
56
+ "minipass-pipeline": "^1.2.4",
57
+ "p-map": "^7.0.2",
58
+ "ssri": "^12.0.0",
59
+ "unique-filename": "^4.0.0"
60
+ },
61
+ "devDependencies": {
62
+ "@npmcli/eslint-config": "^5.0.0",
63
+ "@npmcli/template-oss": "4.25.0",
64
+ "tap": "^16.0.0"
65
+ },
66
+ "engines": {
67
+ "node": "^20.17.0 || >=22.9.0"
68
+ },
69
+ "templateOSS": {
70
+ "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
71
+ "windowsCI": false,
72
+ "version": "4.25.0",
73
+ "publish": "true"
74
+ },
75
+ "author": "GitHub Inc.",
76
+ "tap": {
77
+ "nyc-arg": [
78
+ "--exclude",
79
+ "tap-snapshots/**"
80
+ ]
81
+ }
82
+ }
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/chalk/license ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ MIT License
2
+
3
+ Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
6
+
7
+ The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
8
+
9
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/chalk/package.json ADDED
@@ -0,0 +1,83 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "name": "chalk",
3
+ "version": "5.6.2",
4
+ "description": "Terminal string styling done right",
5
+ "license": "MIT",
6
+ "repository": "chalk/chalk",
7
+ "funding": "https://github.com/chalk/chalk?sponsor=1",
8
+ "type": "module",
9
+ "main": "./source/index.js",
10
+ "exports": "./source/index.js",
11
+ "imports": {
12
+ "#ansi-styles": "./source/vendor/ansi-styles/index.js",
13
+ "#supports-color": {
14
+ "node": "./source/vendor/supports-color/index.js",
15
+ "default": "./source/vendor/supports-color/browser.js"
16
+ }
17
+ },
18
+ "types": "./source/index.d.ts",
19
+ "sideEffects": false,
20
+ "engines": {
21
+ "node": "^12.17.0 || ^14.13 || >=16.0.0"
22
+ },
23
+ "scripts": {
24
+ "test": "xo && c8 ava && tsd",
25
+ "bench": "matcha benchmark.js"
26
+ },
27
+ "files": [
28
+ "source",
29
+ "!source/index.test-d.ts"
30
+ ],
31
+ "keywords": [
32
+ "color",
33
+ "colour",
34
+ "colors",
35
+ "terminal",
36
+ "console",
37
+ "cli",
38
+ "string",
39
+ "ansi",
40
+ "style",
41
+ "styles",
42
+ "tty",
43
+ "formatting",
44
+ "rgb",
45
+ "256",
46
+ "shell",
47
+ "xterm",
48
+ "log",
49
+ "logging",
50
+ "command-line",
51
+ "text"
52
+ ],
53
+ "devDependencies": {
54
+ "@types/node": "^16.11.10",
55
+ "ava": "^3.15.0",
56
+ "c8": "^7.10.0",
57
+ "color-convert": "^2.0.1",
58
+ "execa": "^6.0.0",
59
+ "log-update": "^5.0.0",
60
+ "matcha": "^0.7.0",
61
+ "tsd": "^0.19.0",
62
+ "xo": "^0.57.0",
63
+ "yoctodelay": "^2.0.0"
64
+ },
65
+ "xo": {
66
+ "rules": {
67
+ "unicorn/prefer-string-slice": "off",
68
+ "@typescript-eslint/consistent-type-imports": "off",
69
+ "@typescript-eslint/consistent-type-exports": "off",
70
+ "@typescript-eslint/consistent-type-definitions": "off",
71
+ "unicorn/expiring-todo-comments": "off"
72
+ }
73
+ },
74
+ "c8": {
75
+ "reporter": [
76
+ "text",
77
+ "lcov"
78
+ ],
79
+ "exclude": [
80
+ "source/vendor"
81
+ ]
82
+ }
83
+ }
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/chownr/LICENSE.md ADDED
@@ -0,0 +1,63 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ All packages under `src/` are licensed according to the terms in
2
+ their respective `LICENSE` or `LICENSE.md` files.
3
+
4
+ The remainder of this project is licensed under the Blue Oak
5
+ Model License, as follows:
6
+
7
+ -----
8
+
9
+ # Blue Oak Model License
10
+
11
+ Version 1.0.0
12
+
13
+ ## Purpose
14
+
15
+ This license gives everyone as much permission to work with
16
+ this software as possible, while protecting contributors
17
+ from liability.
18
+
19
+ ## Acceptance
20
+
21
+ In order to receive this license, you must agree to its
22
+ rules. The rules of this license are both obligations
23
+ under that agreement and conditions to your license.
24
+ You must not do anything with this software that triggers
25
+ a rule that you cannot or will not follow.
26
+
27
+ ## Copyright
28
+
29
+ Each contributor licenses you to do everything with this
30
+ software that would otherwise infringe that contributor's
31
+ copyright in it.
32
+
33
+ ## Notices
34
+
35
+ You must ensure that everyone who gets a copy of
36
+ any part of this software from you, with or without
37
+ changes, also gets the text of this license or a link to
38
+ <https://blueoakcouncil.org/license/1.0.0>.
39
+
40
+ ## Excuse
41
+
42
+ If anyone notifies you in writing that you have not
43
+ complied with [Notices](#notices), you can keep your
44
+ license by taking all practical steps to comply within 30
45
+ days after the notice. If you do not do so, your license
46
+ ends immediately.
47
+
48
+ ## Patent
49
+
50
+ Each contributor licenses you to do everything with this
51
+ software that would otherwise infringe any patent claims
52
+ they can license or become able to license.
53
+
54
+ ## Reliability
55
+
56
+ No contributor can revoke this license.
57
+
58
+ ## No Liability
59
+
60
+ ***As far as the law allows, this software comes as is,
61
+ without any warranty or condition, and no contributor
62
+ will be liable to anyone for any damages related to this
63
+ software or this license, under any kind of legal claim.***
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/chownr/package.json ADDED
@@ -0,0 +1,69 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
3
+ "name": "chownr",
4
+ "description": "like `chown -R`",
5
+ "version": "3.0.0",
6
+ "repository": {
7
+ "type": "git",
8
+ "url": "git://github.com/isaacs/chownr.git"
9
+ },
10
+ "files": [
11
+ "dist"
12
+ ],
13
+ "devDependencies": {
14
+ "@types/node": "^20.12.5",
15
+ "mkdirp": "^3.0.1",
16
+ "prettier": "^3.2.5",
17
+ "rimraf": "^5.0.5",
18
+ "tap": "^18.7.2",
19
+ "tshy": "^1.13.1",
20
+ "typedoc": "^0.25.12"
21
+ },
22
+ "scripts": {
23
+ "prepare": "tshy",
24
+ "pretest": "npm run prepare",
25
+ "test": "tap",
26
+ "preversion": "npm test",
27
+ "postversion": "npm publish",
28
+ "prepublishOnly": "git push origin --follow-tags",
29
+ "format": "prettier --write . --loglevel warn",
30
+ "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
31
+ },
32
+ "license": "BlueOak-1.0.0",
33
+ "engines": {
34
+ "node": ">=18"
35
+ },
36
+ "tshy": {
37
+ "exports": {
38
+ "./package.json": "./package.json",
39
+ ".": "./src/index.ts"
40
+ }
41
+ },
42
+ "exports": {
43
+ "./package.json": "./package.json",
44
+ ".": {
45
+ "import": {
46
+ "types": "./dist/esm/index.d.ts",
47
+ "default": "./dist/esm/index.js"
48
+ },
49
+ "require": {
50
+ "types": "./dist/commonjs/index.d.ts",
51
+ "default": "./dist/commonjs/index.js"
52
+ }
53
+ }
54
+ },
55
+ "main": "./dist/commonjs/index.js",
56
+ "types": "./dist/commonjs/index.d.ts",
57
+ "type": "module",
58
+ "prettier": {
59
+ "semi": false,
60
+ "printWidth": 75,
61
+ "tabWidth": 2,
62
+ "useTabs": false,
63
+ "singleQuote": true,
64
+ "jsxSingleQuote": false,
65
+ "bracketSameLine": true,
66
+ "arrowParens": "avoid",
67
+ "endOfLine": "lf"
68
+ }
69
+ }
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/ci-info/LICENSE ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ The MIT License (MIT)
2
+
3
+ Copyright (c) 2016 Thomas Watson Steen
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/ci-info/index.js ADDED
@@ -0,0 +1,104 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 'use strict'
2
+
3
+ const vendors = require('./vendors.json')
4
+
5
+ const env = process.env
6
+
7
+ // Used for testing only
8
+ Object.defineProperty(exports, '_vendors', {
9
+ value: vendors.map(function (v) {
10
+ return v.constant
11
+ })
12
+ })
13
+
14
+ exports.name = null
15
+ exports.isPR = null
16
+ exports.id = null
17
+
18
+ if (env.CI !== 'false') {
19
+ vendors.forEach(function (vendor) {
20
+ const envs = Array.isArray(vendor.env) ? vendor.env : [vendor.env]
21
+ const isCI = envs.every(function (obj) {
22
+ return checkEnv(obj)
23
+ })
24
+
25
+ exports[vendor.constant] = isCI
26
+
27
+ if (!isCI) {
28
+ return
29
+ }
30
+
31
+ exports.name = vendor.name
32
+ exports.isPR = checkPR(vendor)
33
+ exports.id = vendor.constant
34
+ })
35
+ }
36
+
37
+ exports.isCI = !!(
38
+ env.CI !== 'false' && // Bypass all checks if CI env is explicitly set to 'false'
39
+ (env.BUILD_ID || // Jenkins, Cloudbees
40
+ env.BUILD_NUMBER || // Jenkins, TeamCity
41
+ env.CI || // Travis CI, CircleCI, Cirrus CI, Gitlab CI, Appveyor, CodeShip, dsari, Cloudflare Pages/Workers
42
+ env.CI_APP_ID || // Appflow
43
+ env.CI_BUILD_ID || // Appflow
44
+ env.CI_BUILD_NUMBER || // Appflow
45
+ env.CI_NAME || // Codeship and others
46
+ env.CONTINUOUS_INTEGRATION || // Travis CI, Cirrus CI
47
+ env.RUN_ID || // TaskCluster, dsari
48
+ exports.name ||
49
+ false)
50
+ )
51
+
52
+ function checkEnv (obj) {
53
+ // "env": "CIRRUS"
54
+ if (typeof obj === 'string') return !!env[obj]
55
+
56
+ // "env": { "env": "NODE", "includes": "/app/.heroku/node/bin/node" }
57
+ if ('env' in obj) {
58
+ // Currently there are no other types, uncomment when there are
59
+ // if ('includes' in obj) {
60
+ return env[obj.env] && env[obj.env].includes(obj.includes)
61
+ // }
62
+ }
63
+
64
+ if ('any' in obj) {
65
+ return obj.any.some(function (k) {
66
+ return !!env[k]
67
+ })
68
+ }
69
+
70
+ return Object.keys(obj).every(function (k) {
71
+ return env[k] === obj[k]
72
+ })
73
+ }
74
+
75
+ function checkPR (vendor) {
76
+ switch (typeof vendor.pr) {
77
+ case 'string':
78
+ // "pr": "CIRRUS_PR"
79
+ return !!env[vendor.pr]
80
+ case 'object':
81
+ if ('env' in vendor.pr) {
82
+ if ('any' in vendor.pr) {
83
+ // "pr": { "env": "CODEBUILD_WEBHOOK_EVENT", "any": ["PULL_REQUEST_CREATED", "PULL_REQUEST_UPDATED"] }
84
+ return vendor.pr.any.some(function (key) {
85
+ return env[vendor.pr.env] === key
86
+ })
87
+ } else {
88
+ // "pr": { "env": "BUILDKITE_PULL_REQUEST", "ne": "false" }
89
+ return vendor.pr.env in env && env[vendor.pr.env] !== vendor.pr.ne
90
+ }
91
+ } else if ('any' in vendor.pr) {
92
+ // "pr": { "any": ["ghprbPullId", "CHANGE_ID"] }
93
+ return vendor.pr.any.some(function (key) {
94
+ return !!env[key]
95
+ })
96
+ } else {
97
+ // "pr": { "DRONE_BUILD_EVENT": "pull_request" }
98
+ return checkEnv(vendor.pr)
99
+ }
100
+ default:
101
+ // PR detection not supported for this vendor
102
+ return null
103
+ }
104
+ }
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/ci-info/package.json ADDED
@@ -0,0 +1,54 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "name": "ci-info",
3
+ "version": "4.3.1",
4
+ "description": "Get details about the current Continuous Integration environment",
5
+ "main": "index.js",
6
+ "typings": "index.d.ts",
7
+ "type": "commonjs",
8
+ "author": "Thomas Watson Steen <w@tson.dk> (https://twitter.com/wa7son)",
9
+ "license": "MIT",
10
+ "repository": "github:watson/ci-info",
11
+ "bugs": "https://github.com/watson/ci-info/issues",
12
+ "homepage": "https://github.com/watson/ci-info",
13
+ "contributors": [
14
+ {
15
+ "name": "Sibiraj",
16
+ "url": "https://github.com/sibiraj-s"
17
+ }
18
+ ],
19
+ "funding": [
20
+ {
21
+ "type": "github",
22
+ "url": "https://github.com/sponsors/sibiraj-s"
23
+ }
24
+ ],
25
+ "keywords": [
26
+ "ci",
27
+ "continuous",
28
+ "integration",
29
+ "test",
30
+ "detect"
31
+ ],
32
+ "files": [
33
+ "vendors.json",
34
+ "index.js",
35
+ "index.d.ts",
36
+ "CHANGELOG.md"
37
+ ],
38
+ "scripts": {
39
+ "build": "node sort-vendors.js && node create-typings.js",
40
+ "lint:fix": "standard --fix",
41
+ "test": "standard && node test.js",
42
+ "prepare": "husky install || true"
43
+ },
44
+ "devDependencies": {
45
+ "clear-module": "^4.1.2",
46
+ "husky": "^9.1.7",
47
+ "publint": "^0.3.12",
48
+ "standard": "^17.1.2",
49
+ "tape": "^5.9.0"
50
+ },
51
+ "engines": {
52
+ "node": ">=8"
53
+ }
54
+ }
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/ci-info/vendors.json ADDED
@@ -0,0 +1,358 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [
2
+ {
3
+ "name": "Agola CI",
4
+ "constant": "AGOLA",
5
+ "env": "AGOLA_GIT_REF",
6
+ "pr": "AGOLA_PULL_REQUEST_ID"
7
+ },
8
+ {
9
+ "name": "Appcircle",
10
+ "constant": "APPCIRCLE",
11
+ "env": "AC_APPCIRCLE",
12
+ "pr": {
13
+ "env": "AC_GIT_PR",
14
+ "ne": "false"
15
+ }
16
+ },
17
+ {
18
+ "name": "AppVeyor",
19
+ "constant": "APPVEYOR",
20
+ "env": "APPVEYOR",
21
+ "pr": "APPVEYOR_PULL_REQUEST_NUMBER"
22
+ },
23
+ {
24
+ "name": "AWS CodeBuild",
25
+ "constant": "CODEBUILD",
26
+ "env": "CODEBUILD_BUILD_ARN",
27
+ "pr": {
28
+ "env": "CODEBUILD_WEBHOOK_EVENT",
29
+ "any": [
30
+ "PULL_REQUEST_CREATED",
31
+ "PULL_REQUEST_UPDATED",
32
+ "PULL_REQUEST_REOPENED"
33
+ ]
34
+ }
35
+ },
36
+ {
37
+ "name": "Azure Pipelines",
38
+ "constant": "AZURE_PIPELINES",
39
+ "env": "TF_BUILD",
40
+ "pr": {
41
+ "BUILD_REASON": "PullRequest"
42
+ }
43
+ },
44
+ {
45
+ "name": "Bamboo",
46
+ "constant": "BAMBOO",
47
+ "env": "bamboo_planKey"
48
+ },
49
+ {
50
+ "name": "Bitbucket Pipelines",
51
+ "constant": "BITBUCKET",
52
+ "env": "BITBUCKET_COMMIT",
53
+ "pr": "BITBUCKET_PR_ID"
54
+ },
55
+ {
56
+ "name": "Bitrise",
57
+ "constant": "BITRISE",
58
+ "env": "BITRISE_IO",
59
+ "pr": "BITRISE_PULL_REQUEST"
60
+ },
61
+ {
62
+ "name": "Buddy",
63
+ "constant": "BUDDY",
64
+ "env": "BUDDY_WORKSPACE_ID",
65
+ "pr": "BUDDY_EXECUTION_PULL_REQUEST_ID"
66
+ },
67
+ {
68
+ "name": "Buildkite",
69
+ "constant": "BUILDKITE",
70
+ "env": "BUILDKITE",
71
+ "pr": {
72
+ "env": "BUILDKITE_PULL_REQUEST",
73
+ "ne": "false"
74
+ }
75
+ },
76
+ {
77
+ "name": "CircleCI",
78
+ "constant": "CIRCLE",
79
+ "env": "CIRCLECI",
80
+ "pr": "CIRCLE_PULL_REQUEST"
81
+ },
82
+ {
83
+ "name": "Cirrus CI",
84
+ "constant": "CIRRUS",
85
+ "env": "CIRRUS_CI",
86
+ "pr": "CIRRUS_PR"
87
+ },
88
+ {
89
+ "name": "Cloudflare Pages",
90
+ "constant": "CLOUDFLARE_PAGES",
91
+ "env": "CF_PAGES"
92
+ },
93
+ {
94
+ "name": "Cloudflare Workers",
95
+ "constant": "CLOUDFLARE_WORKERS",
96
+ "env": "WORKERS_CI"
97
+ },
98
+ {
99
+ "name": "Codefresh",
100
+ "constant": "CODEFRESH",
101
+ "env": "CF_BUILD_ID",
102
+ "pr": {
103
+ "any": [
104
+ "CF_PULL_REQUEST_NUMBER",
105
+ "CF_PULL_REQUEST_ID"
106
+ ]
107
+ }
108
+ },
109
+ {
110
+ "name": "Codemagic",
111
+ "constant": "CODEMAGIC",
112
+ "env": "CM_BUILD_ID",
113
+ "pr": "CM_PULL_REQUEST"
114
+ },
115
+ {
116
+ "name": "Codeship",
117
+ "constant": "CODESHIP",
118
+ "env": {
119
+ "CI_NAME": "codeship"
120
+ }
121
+ },
122
+ {
123
+ "name": "Drone",
124
+ "constant": "DRONE",
125
+ "env": "DRONE",
126
+ "pr": {
127
+ "DRONE_BUILD_EVENT": "pull_request"
128
+ }
129
+ },
130
+ {
131
+ "name": "dsari",
132
+ "constant": "DSARI",
133
+ "env": "DSARI"
134
+ },
135
+ {
136
+ "name": "Earthly",
137
+ "constant": "EARTHLY",
138
+ "env": "EARTHLY_CI"
139
+ },
140
+ {
141
+ "name": "Expo Application Services",
142
+ "constant": "EAS",
143
+ "env": "EAS_BUILD"
144
+ },
145
+ {
146
+ "name": "Gerrit",
147
+ "constant": "GERRIT",
148
+ "env": "GERRIT_PROJECT"
149
+ },
150
+ {
151
+ "name": "Gitea Actions",
152
+ "constant": "GITEA_ACTIONS",
153
+ "env": "GITEA_ACTIONS"
154
+ },
155
+ {
156
+ "name": "GitHub Actions",
157
+ "constant": "GITHUB_ACTIONS",
158
+ "env": "GITHUB_ACTIONS",
159
+ "pr": {
160
+ "GITHUB_EVENT_NAME": "pull_request"
161
+ }
162
+ },
163
+ {
164
+ "name": "GitLab CI",
165
+ "constant": "GITLAB",
166
+ "env": "GITLAB_CI",
167
+ "pr": "CI_MERGE_REQUEST_ID"
168
+ },
169
+ {
170
+ "name": "GoCD",
171
+ "constant": "GOCD",
172
+ "env": "GO_PIPELINE_LABEL"
173
+ },
174
+ {
175
+ "name": "Google Cloud Build",
176
+ "constant": "GOOGLE_CLOUD_BUILD",
177
+ "env": "BUILDER_OUTPUT"
178
+ },
179
+ {
180
+ "name": "Harness CI",
181
+ "constant": "HARNESS",
182
+ "env": "HARNESS_BUILD_ID"
183
+ },
184
+ {
185
+ "name": "Heroku",
186
+ "constant": "HEROKU",
187
+ "env": {
188
+ "env": "NODE",
189
+ "includes": "/app/.heroku/node/bin/node"
190
+ }
191
+ },
192
+ {
193
+ "name": "Hudson",
194
+ "constant": "HUDSON",
195
+ "env": "HUDSON_URL"
196
+ },
197
+ {
198
+ "name": "Jenkins",
199
+ "constant": "JENKINS",
200
+ "env": [
201
+ "JENKINS_URL",
202
+ "BUILD_ID"
203
+ ],
204
+ "pr": {
205
+ "any": [
206
+ "ghprbPullId",
207
+ "CHANGE_ID"
208
+ ]
209
+ }
210
+ },
211
+ {
212
+ "name": "LayerCI",
213
+ "constant": "LAYERCI",
214
+ "env": "LAYERCI",
215
+ "pr": "LAYERCI_PULL_REQUEST"
216
+ },
217
+ {
218
+ "name": "Magnum CI",
219
+ "constant": "MAGNUM",
220
+ "env": "MAGNUM"
221
+ },
222
+ {
223
+ "name": "Netlify CI",
224
+ "constant": "NETLIFY",
225
+ "env": "NETLIFY",
226
+ "pr": {
227
+ "env": "PULL_REQUEST",
228
+ "ne": "false"
229
+ }
230
+ },
231
+ {
232
+ "name": "Nevercode",
233
+ "constant": "NEVERCODE",
234
+ "env": "NEVERCODE",
235
+ "pr": {
236
+ "env": "NEVERCODE_PULL_REQUEST",
237
+ "ne": "false"
238
+ }
239
+ },
240
+ {
241
+ "name": "Prow",
242
+ "constant": "PROW",
243
+ "env": "PROW_JOB_ID"
244
+ },
245
+ {
246
+ "name": "ReleaseHub",
247
+ "constant": "RELEASEHUB",
248
+ "env": "RELEASE_BUILD_ID"
249
+ },
250
+ {
251
+ "name": "Render",
252
+ "constant": "RENDER",
253
+ "env": "RENDER",
254
+ "pr": {
255
+ "IS_PULL_REQUEST": "true"
256
+ }
257
+ },
258
+ {
259
+ "name": "Sail CI",
260
+ "constant": "SAIL",
261
+ "env": "SAILCI",
262
+ "pr": "SAIL_PULL_REQUEST_NUMBER"
263
+ },
264
+ {
265
+ "name": "Screwdriver",
266
+ "constant": "SCREWDRIVER",
267
+ "env": "SCREWDRIVER",
268
+ "pr": {
269
+ "env": "SD_PULL_REQUEST",
270
+ "ne": "false"
271
+ }
272
+ },
273
+ {
274
+ "name": "Semaphore",
275
+ "constant": "SEMAPHORE",
276
+ "env": "SEMAPHORE",
277
+ "pr": "PULL_REQUEST_NUMBER"
278
+ },
279
+ {
280
+ "name": "Sourcehut",
281
+ "constant": "SOURCEHUT",
282
+ "env": {
283
+ "CI_NAME": "sourcehut"
284
+ }
285
+ },
286
+ {
287
+ "name": "Strider CD",
288
+ "constant": "STRIDER",
289
+ "env": "STRIDER"
290
+ },
291
+ {
292
+ "name": "TaskCluster",
293
+ "constant": "TASKCLUSTER",
294
+ "env": [
295
+ "TASK_ID",
296
+ "RUN_ID"
297
+ ]
298
+ },
299
+ {
300
+ "name": "TeamCity",
301
+ "constant": "TEAMCITY",
302
+ "env": "TEAMCITY_VERSION"
303
+ },
304
+ {
305
+ "name": "Travis CI",
306
+ "constant": "TRAVIS",
307
+ "env": "TRAVIS",
308
+ "pr": {
309
+ "env": "TRAVIS_PULL_REQUEST",
310
+ "ne": "false"
311
+ }
312
+ },
313
+ {
314
+ "name": "Vela",
315
+ "constant": "VELA",
316
+ "env": "VELA",
317
+ "pr": {
318
+ "VELA_PULL_REQUEST": "1"
319
+ }
320
+ },
321
+ {
322
+ "name": "Vercel",
323
+ "constant": "VERCEL",
324
+ "env": {
325
+ "any": [
326
+ "NOW_BUILDER",
327
+ "VERCEL"
328
+ ]
329
+ },
330
+ "pr": "VERCEL_GIT_PULL_REQUEST_ID"
331
+ },
332
+ {
333
+ "name": "Visual Studio App Center",
334
+ "constant": "APPCENTER",
335
+ "env": "APPCENTER_BUILD_ID"
336
+ },
337
+ {
338
+ "name": "Woodpecker",
339
+ "constant": "WOODPECKER",
340
+ "env": {
341
+ "CI": "woodpecker"
342
+ },
343
+ "pr": {
344
+ "CI_BUILD_EVENT": "pull_request"
345
+ }
346
+ },
347
+ {
348
+ "name": "Xcode Cloud",
349
+ "constant": "XCODE_CLOUD",
350
+ "env": "CI_XCODE_PROJECT",
351
+ "pr": "CI_PULL_REQUEST_NUMBER"
352
+ },
353
+ {
354
+ "name": "Xcode Server",
355
+ "constant": "XCODE_SERVER",
356
+ "env": "XCS"
357
+ }
358
+ ]
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cidr-regex/LICENSE ADDED
@@ -0,0 +1,22 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Copyright (c) silverwind
2
+ All rights reserved.
3
+
4
+ Redistribution and use in source and binary forms, with or without
5
+ modification, are permitted provided that the following conditions are met:
6
+
7
+ 1. Redistributions of source code must retain the above copyright notice, this
8
+ list of conditions and the following disclaimer.
9
+ 2. Redistributions in binary form must reproduce the above copyright notice,
10
+ this list of conditions and the following disclaimer in the documentation
11
+ and/or other materials provided with the distribution.
12
+
13
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
14
+ ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
15
+ WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
16
+ DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
17
+ ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
18
+ (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
19
+ LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
20
+ ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
21
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
22
+ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
data_prepare/node-v24.12.0-linux-x64/lib/node_modules/npm/node_modules/cidr-regex/package.json ADDED
@@ -0,0 +1,38 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "name": "cidr-regex",
3
+ "version": "5.0.1",
4
+ "description": "Regular expression for matching IP addresses in CIDR notation",
5
+ "author": "silverwind <me@silverwind.io>",
6
+ "contributors": [
7
+ "Felipe Apostol <flipjs.io@gmail.com> (http://flipjs.io/)"
8
+ ],
9
+ "repository": "silverwind/cidr-regex",
10
+ "license": "BSD-2-Clause",
11
+ "type": "module",
12
+ "sideEffects": false,
13
+ "main": "./dist/index.js",
14
+ "exports": "./dist/index.js",
15
+ "types": "./dist/index.d.ts",
16
+ "files": [
17
+ "dist"
18
+ ],
19
+ "engines": {
20
+ "node": ">=20"
21
+ },
22
+ "dependencies": {
23
+ "ip-regex": "5.0.0"
24
+ },
25
+ "devDependencies": {
26
+ "@types/node": "24.5.2",
27
+ "eslint": "9.36.0",
28
+ "eslint-config-silverwind": "105.1.0",
29
+ "typescript": "5.9.2",
30
+ "typescript-config-silverwind": "10.0.1",
31
+ "updates": "16.7.2",
32
+ "versions": "13.1.2",
33
+ "vite": "7.1.7",
34
+ "vite-config-silverwind": "6.0.2",
35
+ "vitest": "3.2.4",
36
+ "vitest-config-silverwind": "10.2.0"
37
+ }
38
+ }