diff --git a/DEPENDENCIES.md b/DEPENDENCIES.md index 4e7164db0a2c2..64d255ca192d5 100644 --- a/DEPENDENCIES.md +++ b/DEPENDENCIES.md @@ -116,6 +116,7 @@ graph LR; npm-->npmcli-fs["@npmcli/fs"]; npm-->npmcli-git["@npmcli/git"]; npm-->npmcli-map-workspaces["@npmcli/map-workspaces"]; + npm-->npmcli-metavuln-calculator["@npmcli/metavuln-calculator"]; npm-->npmcli-mock-globals["@npmcli/mock-globals"]; npm-->npmcli-mock-registry["@npmcli/mock-registry"]; npm-->npmcli-package-json["@npmcli/package-json"]; @@ -268,7 +269,6 @@ graph LR; cacache-->npmcli-fs["@npmcli/fs"]; cacache-->p-map; cacache-->ssri; - cacache-->tar; cacache-->unique-filename; cidr-regex-->ip-regex; cli-columns-->string-width; @@ -490,6 +490,7 @@ graph LR; npm-->npmcli-fs["@npmcli/fs"]; npm-->npmcli-git["@npmcli/git"]; npm-->npmcli-map-workspaces["@npmcli/map-workspaces"]; + npm-->npmcli-metavuln-calculator["@npmcli/metavuln-calculator"]; npm-->npmcli-mock-globals["@npmcli/mock-globals"]; npm-->npmcli-mock-registry["@npmcli/mock-registry"]; npm-->npmcli-package-json["@npmcli/package-json"]; diff --git a/node_modules/.gitignore b/node_modules/.gitignore index f153b0b421820..81f08e3240249 100644 --- a/node_modules/.gitignore +++ b/node_modules/.gitignore @@ -31,15 +31,15 @@ !/@npmcli/query !/@npmcli/redact !/@npmcli/run-script -!/@pkgjs/ -/@pkgjs/* -!/@pkgjs/parseargs !/@sigstore/ /@sigstore/* !/@sigstore/bundle !/@sigstore/core !/@sigstore/protobuf-specs !/@sigstore/sign +!/@sigstore/sign/node_modules/ +/@sigstore/sign/node_modules/* +!/@sigstore/sign/node_modules/proc-log !/@sigstore/tuf !/@sigstore/verify !/@tufjs/ @@ -60,6 +60,9 @@ !/binary-extensions !/brace-expansion !/cacache +!/cacache/node_modules/ +/cacache/node_modules/* +!/cacache/node_modules/ssri !/chalk !/chownr !/ci-info @@ -97,6 +100,9 @@ !/imurmurhash !/ini !/init-package-json +!/init-package-json/node_modules/ +/init-package-json/node_modules/* +!/init-package-json/node_modules/validate-npm-package-name !/ip-address !/ip-regex !/is-cidr @@ -131,18 +137,6 @@ !/mute-stream !/negotiator !/node-gyp -!/node-gyp/node_modules/ -/node-gyp/node_modules/* -!/node-gyp/node_modules/@npmcli/ -/node-gyp/node_modules/@npmcli/* -!/node-gyp/node_modules/@npmcli/agent -!/node-gyp/node_modules/cacache -!/node-gyp/node_modules/glob -!/node-gyp/node_modules/jackspeak -!/node-gyp/node_modules/lru-cache -!/node-gyp/node_modules/make-fetch-happen -!/node-gyp/node_modules/minimatch -!/node-gyp/node_modules/path-scurry !/nopt !/npm-audit-report !/npm-bundled diff --git a/node_modules/@npmcli/git/package.json b/node_modules/@npmcli/git/package.json index f4e844bccab0d..78d077513dd81 100644 --- a/node_modules/@npmcli/git/package.json +++ b/node_modules/@npmcli/git/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/git", - "version": "7.0.0", + "version": "7.0.1", "main": "lib/index.js", "files": [ "bin/", @@ -38,14 +38,14 @@ "tap": "^16.0.1" }, "dependencies": { - "@npmcli/promise-spawn": "^8.0.0", - "ini": "^5.0.0", + "@npmcli/promise-spawn": "^9.0.0", + "ini": "^6.0.0", "lru-cache": "^11.2.1", "npm-pick-manifest": "^11.0.1", - "proc-log": "^5.0.0", + "proc-log": "^6.0.0", "promise-retry": "^2.0.1", "semver": "^7.3.5", - "which": "^5.0.0" + "which": "^6.0.0" }, "engines": { "node": "^20.17.0 || >=22.9.0" diff --git a/node_modules/@npmcli/installed-package-contents/package.json b/node_modules/@npmcli/installed-package-contents/package.json index d5b68a737daf4..599b285fb467d 100644 --- a/node_modules/@npmcli/installed-package-contents/package.json +++ 
b/node_modules/@npmcli/installed-package-contents/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/installed-package-contents", - "version": "3.0.0", + "version": "4.0.0", "description": "Get the list of files installed in a package in node_modules, including bundled dependencies", "author": "GitHub Inc.", "main": "lib/index.js", @@ -20,12 +20,12 @@ }, "devDependencies": { "@npmcli/eslint-config": "^5.0.0", - "@npmcli/template-oss": "4.23.3", + "@npmcli/template-oss": "4.27.1", "tap": "^16.3.0" }, "dependencies": { - "npm-bundled": "^4.0.0", - "npm-normalize-package-bin": "^4.0.0" + "npm-bundled": "^5.0.0", + "npm-normalize-package-bin": "^5.0.0" }, "repository": { "type": "git", @@ -36,11 +36,11 @@ "lib/" ], "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.23.3", + "version": "4.27.1", "publish": true }, "tap": { diff --git a/node_modules/@npmcli/map-workspaces/package.json b/node_modules/@npmcli/map-workspaces/package.json index fb77ea8615c1c..5f6c9c24f5ed7 100644 --- a/node_modules/@npmcli/map-workspaces/package.json +++ b/node_modules/@npmcli/map-workspaces/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/map-workspaces", - "version": "5.0.0", + "version": "5.0.1", "main": "lib/index.js", "files": [ "bin/", @@ -44,18 +44,18 @@ }, "devDependencies": { "@npmcli/eslint-config": "^5.0.0", - "@npmcli/template-oss": "4.25.0", + "@npmcli/template-oss": "4.27.1", "tap": "^16.0.1" }, "dependencies": { - "@npmcli/name-from-folder": "^3.0.0", + "@npmcli/name-from-folder": "^4.0.0", "@npmcli/package-json": "^7.0.0", "glob": "^11.0.3", "minimatch": "^10.0.3" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.25.0", + "version": "4.27.1", "publish": "true" } } diff --git a/node_modules/@npmcli/metavuln-calculator/package.json b/node_modules/@npmcli/metavuln-calculator/package.json index 9d17000653c0e..02b13bc8e8219 100644 --- a/node_modules/@npmcli/metavuln-calculator/package.json +++ b/node_modules/@npmcli/metavuln-calculator/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/metavuln-calculator", - "version": "9.0.2", + "version": "9.0.3", "main": "lib/index.js", "files": [ "bin/", @@ -34,15 +34,15 @@ }, "devDependencies": { "@npmcli/eslint-config": "^5.0.0", - "@npmcli/template-oss": "4.25.0", + "@npmcli/template-oss": "4.27.1", "require-inject": "^1.4.4", "tap": "^16.0.1" }, "dependencies": { "cacache": "^20.0.0", - "json-parse-even-better-errors": "^4.0.0", + "json-parse-even-better-errors": "^5.0.0", "pacote": "^21.0.0", - "proc-log": "^5.0.0", + "proc-log": "^6.0.0", "semver": "^7.3.5" }, "engines": { @@ -50,7 +50,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.25.0", + "version": "4.27.1", "publish": "true", "ciVersions": [ "16.14.0", diff --git a/node_modules/@npmcli/name-from-folder/package.json b/node_modules/@npmcli/name-from-folder/package.json index 323edd81d22fb..503667521565d 100644 --- a/node_modules/@npmcli/name-from-folder/package.json +++ b/node_modules/@npmcli/name-from-folder/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/name-from-folder", - "version": "3.0.0", + "version": "4.0.0", "files": [ "bin/", "lib/" @@ -25,15 +25,15 @@ }, "devDependencies": { "@npmcli/eslint-config": "^5.0.0", - "@npmcli/template-oss": "4.23.3", + "@npmcli/template-oss": "4.27.1", "tap": "^16.3.2" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.23.3", + "version": "4.27.1", "publish": true }, "tap": { diff --git a/node_modules/@npmcli/node-gyp/package.json b/node_modules/@npmcli/node-gyp/package.json index 3be9663a39de0..a34dc6be61751 100644 --- a/node_modules/@npmcli/node-gyp/package.json +++ b/node_modules/@npmcli/node-gyp/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/node-gyp", - "version": "4.0.0", + "version": "5.0.0", "description": "Tools for dealing with node-gyp packages", "scripts": { "test": "tap", @@ -30,15 +30,15 @@ "license": "ISC", "devDependencies": { "@npmcli/eslint-config": "^5.0.0", - "@npmcli/template-oss": "4.23.3", + "@npmcli/template-oss": "4.27.1", "tap": "^16.0.1" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.23.3", + "version": "4.27.1", "publish": true }, "tap": { diff --git a/node_modules/@npmcli/package-json/lib/index.js b/node_modules/@npmcli/package-json/lib/index.js index fabe5fbcda7bc..adcbac67eabba 100644 --- a/node_modules/@npmcli/package-json/lib/index.js +++ b/node_modules/@npmcli/package-json/lib/index.js @@ -225,7 +225,7 @@ class PackageJson { this.#manifest = step({ content, originalContent: this.content }) } - // unknown properties will just be overwitten + // unknown properties will just be overwritten for (const [key, value] of Object.entries(content)) { if (!knownKeys.has(key)) { this.content[key] = value diff --git a/node_modules/@npmcli/package-json/package.json b/node_modules/@npmcli/package-json/package.json index 46c39c22a1900..3dc9f45c847cf 100644 --- a/node_modules/@npmcli/package-json/package.json +++ b/node_modules/@npmcli/package-json/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/package-json", - "version": "7.0.1", + "version": "7.0.2", "description": "Programmatic API to update package.json", "keywords": [ "npm", @@ -32,14 +32,14 @@ "@npmcli/git": "^7.0.0", "glob": "^11.0.3", "hosted-git-info": "^9.0.0", - "json-parse-even-better-errors": "^4.0.0", - "proc-log": "^5.0.0", + "json-parse-even-better-errors": "^5.0.0", + "proc-log": "^6.0.0", "semver": "^7.5.3", "validate-npm-package-license": "^3.0.4" }, "devDependencies": { - "@npmcli/eslint-config": "^5.1.0", - "@npmcli/template-oss": "4.25.0", + "@npmcli/eslint-config": "^6.0.0", + "@npmcli/template-oss": "4.28.0", "tap": "^16.0.1" }, "engines": { @@ -47,7 +47,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.25.0", + "version": "4.28.0", "publish": "true" }, "tap": { diff --git a/node_modules/@npmcli/promise-spawn/lib/escape.js b/node_modules/@npmcli/promise-spawn/lib/escape.js index 9aca8bde70a6e..5fab00210f26c 100644 --- a/node_modules/@npmcli/promise-spawn/lib/escape.js +++ b/node_modules/@npmcli/promise-spawn/lib/escape.js @@ -1,6 +1,5 @@ 'use strict' -// eslint-disable-next-line max-len // this code adapted from: https://blogs.msdn.microsoft.com/twistylittlepassagesallalike/2011/04/23/everyone-quotes-command-line-arguments-the-wrong-way/ const cmd = (input, doubleEscape) => { if (!input.length) { diff --git a/node_modules/@npmcli/promise-spawn/package.json b/node_modules/@npmcli/promise-spawn/package.json index 1436659a44612..f00ee324355c8 100644 --- a/node_modules/@npmcli/promise-spawn/package.json +++ b/node_modules/@npmcli/promise-spawn/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/promise-spawn", - "version": "8.0.3", + "version": "9.0.1", "files": [ "bin/", "lib/" @@ -32,20 +32,20 @@ ] }, "devDependencies": { - "@npmcli/eslint-config": "^5.0.0", - "@npmcli/template-oss": "4.25.0", + "@npmcli/eslint-config": "^6.0.0", + "@npmcli/template-oss": "4.28.0", "spawk": "^1.7.1", "tap": "^16.0.1" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.25.0", + "version": "4.28.0", "publish": true }, "dependencies": { - "which": "^5.0.0" + "which": "^6.0.0" } } diff --git a/node_modules/@npmcli/redact/package.json b/node_modules/@npmcli/redact/package.json index b5070113b1330..53d0edf50b73d 100644 --- a/node_modules/@npmcli/redact/package.json +++ b/node_modules/@npmcli/redact/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/redact", - "version": "3.2.2", + "version": "4.0.0", "description": "Redact sensitive npm information from output", "main": "lib/index.js", "exports": { @@ -31,7 +31,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.24.3", + "version": "4.27.1", "publish": true }, "tap": { @@ -43,10 +43,10 @@ }, "devDependencies": { "@npmcli/eslint-config": "^5.0.0", - "@npmcli/template-oss": "4.24.3", + "@npmcli/template-oss": "4.27.1", "tap": "^16.3.10" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } } diff --git a/node_modules/@npmcli/run-script/package.json b/node_modules/@npmcli/run-script/package.json index 2873f7cbf91c5..9ddb499084173 100644 --- a/node_modules/@npmcli/run-script/package.json +++ b/node_modules/@npmcli/run-script/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/run-script", - "version": "10.0.0", + "version": "10.0.3", "description": "Run a lifecycle script for a package (descendant of npm-lifecycle)", "author": "GitHub Inc.", "license": "ISC", @@ -15,18 +15,18 @@ "template-oss-apply": "template-oss-apply --force" }, "devDependencies": { - "@npmcli/eslint-config": "^5.0.0", - "@npmcli/template-oss": "4.25.0", + "@npmcli/eslint-config": "^6.0.0", + "@npmcli/template-oss": "4.28.0", "spawk": "^1.8.1", "tap": "^16.0.1" }, "dependencies": { - "@npmcli/node-gyp": "^4.0.0", + "@npmcli/node-gyp": "^5.0.0", "@npmcli/package-json": "^7.0.0", - "@npmcli/promise-spawn": "^8.0.0", - "node-gyp": "^11.0.0", - "proc-log": "^5.0.0", - "which": "^5.0.0" + "@npmcli/promise-spawn": "^9.0.0", + "node-gyp": "^12.1.0", + "proc-log": "^6.0.0", + "which": "^6.0.0" }, "files": [ "bin/", @@ -42,7 +42,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.25.0", + "version": "4.28.0", "publish": "true" }, "tap": { diff --git a/node_modules/@pkgjs/parseargs/LICENSE b/node_modules/@pkgjs/parseargs/LICENSE deleted file mode 100644 index 261eeb9e9f8b2..0000000000000 --- a/node_modules/@pkgjs/parseargs/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. 
- - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/node_modules/@pkgjs/parseargs/examples/is-default-value.js b/node_modules/@pkgjs/parseargs/examples/is-default-value.js deleted file mode 100644 index 0a67972b71d13..0000000000000 --- a/node_modules/@pkgjs/parseargs/examples/is-default-value.js +++ /dev/null @@ -1,25 +0,0 @@ -'use strict'; - -// This example shows how to understand if a default value is used or not. - -// 1. const { parseArgs } = require('node:util'); // from node -// 2. const { parseArgs } = require('@pkgjs/parseargs'); // from package -const { parseArgs } = require('..'); // in repo - -const options = { - file: { short: 'f', type: 'string', default: 'FOO' }, -}; - -const { values, tokens } = parseArgs({ options, tokens: true }); - -const isFileDefault = !tokens.some((token) => token.kind === 'option' && - token.name === 'file' -); - -console.log(values); -console.log(`Is the file option [${values.file}] the default value? 
${isFileDefault}`); - -// Try the following: -// node is-default-value.js -// node is-default-value.js -f FILE -// node is-default-value.js --file FILE diff --git a/node_modules/@pkgjs/parseargs/examples/limit-long-syntax.js b/node_modules/@pkgjs/parseargs/examples/limit-long-syntax.js deleted file mode 100644 index 943e643ee9553..0000000000000 --- a/node_modules/@pkgjs/parseargs/examples/limit-long-syntax.js +++ /dev/null @@ -1,35 +0,0 @@ -'use strict'; - -// This is an example of using tokens to add a custom behaviour. -// -// Require the use of `=` for long options and values by blocking -// the use of space separated values. -// So allow `--foo=bar`, and not allow `--foo bar`. -// -// Note: this is not a common behaviour, most CLIs allow both forms. - -// 1. const { parseArgs } = require('node:util'); // from node -// 2. const { parseArgs } = require('@pkgjs/parseargs'); // from package -const { parseArgs } = require('..'); // in repo - -const options = { - file: { short: 'f', type: 'string' }, - log: { type: 'string' }, -}; - -const { values, tokens } = parseArgs({ options, tokens: true }); - -const badToken = tokens.find((token) => token.kind === 'option' && - token.value != null && - token.rawName.startsWith('--') && - !token.inlineValue -); -if (badToken) { - throw new Error(`Option value for '${badToken.rawName}' must be inline, like '${badToken.rawName}=VALUE'`); -} - -console.log(values); - -// Try the following: -// node limit-long-syntax.js -f FILE --log=LOG -// node limit-long-syntax.js --file FILE diff --git a/node_modules/@pkgjs/parseargs/examples/negate.js b/node_modules/@pkgjs/parseargs/examples/negate.js deleted file mode 100644 index b6634690a4a0c..0000000000000 --- a/node_modules/@pkgjs/parseargs/examples/negate.js +++ /dev/null @@ -1,43 +0,0 @@ -'use strict'; - -// This example is used in the documentation. - -// How might I add my own support for --no-foo? - -// 1. const { parseArgs } = require('node:util'); // from node -// 2. const { parseArgs } = require('@pkgjs/parseargs'); // from package -const { parseArgs } = require('..'); // in repo - -const options = { - 'color': { type: 'boolean' }, - 'no-color': { type: 'boolean' }, - 'logfile': { type: 'string' }, - 'no-logfile': { type: 'boolean' }, -}; -const { values, tokens } = parseArgs({ options, tokens: true }); - -// Reprocess the option tokens and overwrite the returned values. -tokens - .filter((token) => token.kind === 'option') - .forEach((token) => { - if (token.name.startsWith('no-')) { - // Store foo:false for --no-foo - const positiveName = token.name.slice(3); - values[positiveName] = false; - delete values[token.name]; - } else { - // Resave value so last one wins if both --foo and --no-foo. - values[token.name] = token.value ?? true; - } - }); - -const color = values.color; -const logfile = values.logfile ?? 'default.log'; - -console.log({ logfile, color }); - -// Try the following: -// node negate.js -// node negate.js --no-logfile --no-color -// negate.js --logfile=test.log --color -// node negate.js --no-logfile --logfile=test.log --color --no-color diff --git a/node_modules/@pkgjs/parseargs/examples/no-repeated-options.js b/node_modules/@pkgjs/parseargs/examples/no-repeated-options.js deleted file mode 100644 index 0c324688af030..0000000000000 --- a/node_modules/@pkgjs/parseargs/examples/no-repeated-options.js +++ /dev/null @@ -1,31 +0,0 @@ -'use strict'; - -// This is an example of using tokens to add a custom behaviour. -// -// Throw an error if an option is used more than once. - -// 1. 
const { parseArgs } = require('node:util'); // from node -// 2. const { parseArgs } = require('@pkgjs/parseargs'); // from package -const { parseArgs } = require('..'); // in repo - -const options = { - ding: { type: 'boolean', short: 'd' }, - beep: { type: 'boolean', short: 'b' } -}; -const { values, tokens } = parseArgs({ options, tokens: true }); - -const seenBefore = new Set(); -tokens.forEach((token) => { - if (token.kind !== 'option') return; - if (seenBefore.has(token.name)) { - throw new Error(`option '${token.name}' used multiple times`); - } - seenBefore.add(token.name); -}); - -console.log(values); - -// Try the following: -// node no-repeated-options --ding --beep -// node no-repeated-options --beep -b -// node no-repeated-options -ddd diff --git a/node_modules/@pkgjs/parseargs/examples/ordered-options.mjs b/node_modules/@pkgjs/parseargs/examples/ordered-options.mjs deleted file mode 100644 index 8ab7367b8bbb1..0000000000000 --- a/node_modules/@pkgjs/parseargs/examples/ordered-options.mjs +++ /dev/null @@ -1,41 +0,0 @@ -// This is an example of using tokens to add a custom behaviour. -// -// This adds a option order check so that --some-unstable-option -// may only be used after --enable-experimental-options -// -// Note: this is not a common behaviour, the order of different options -// does not usually matter. - -import { parseArgs } from '../index.js'; - -function findTokenIndex(tokens, target) { - return tokens.findIndex((token) => token.kind === 'option' && - token.name === target - ); -} - -const experimentalName = 'enable-experimental-options'; -const unstableName = 'some-unstable-option'; - -const options = { - [experimentalName]: { type: 'boolean' }, - [unstableName]: { type: 'boolean' }, -}; - -const { values, tokens } = parseArgs({ options, tokens: true }); - -const experimentalIndex = findTokenIndex(tokens, experimentalName); -const unstableIndex = findTokenIndex(tokens, unstableName); -if (unstableIndex !== -1 && - ((experimentalIndex === -1) || (unstableIndex < experimentalIndex))) { - throw new Error(`'--${experimentalName}' must be specified before '--${unstableName}'`); -} - -console.log(values); - -/* eslint-disable max-len */ -// Try the following: -// node ordered-options.mjs -// node ordered-options.mjs --some-unstable-option -// node ordered-options.mjs --some-unstable-option --enable-experimental-options -// node ordered-options.mjs --enable-experimental-options --some-unstable-option diff --git a/node_modules/@pkgjs/parseargs/examples/simple-hard-coded.js b/node_modules/@pkgjs/parseargs/examples/simple-hard-coded.js deleted file mode 100644 index eff04c2a60fa2..0000000000000 --- a/node_modules/@pkgjs/parseargs/examples/simple-hard-coded.js +++ /dev/null @@ -1,26 +0,0 @@ -'use strict'; - -// This example is used in the documentation. - -// 1. const { parseArgs } = require('node:util'); // from node -// 2. 
const { parseArgs } = require('@pkgjs/parseargs'); // from package -const { parseArgs } = require('..'); // in repo - -const args = ['-f', '--bar', 'b']; -const options = { - foo: { - type: 'boolean', - short: 'f' - }, - bar: { - type: 'string' - } -}; -const { - values, - positionals -} = parseArgs({ args, options }); -console.log(values, positionals); - -// Try the following: -// node simple-hard-coded.js diff --git a/node_modules/@pkgjs/parseargs/index.js b/node_modules/@pkgjs/parseargs/index.js deleted file mode 100644 index b1004c7b72f27..0000000000000 --- a/node_modules/@pkgjs/parseargs/index.js +++ /dev/null @@ -1,396 +0,0 @@ -'use strict'; - -const { - ArrayPrototypeForEach, - ArrayPrototypeIncludes, - ArrayPrototypeMap, - ArrayPrototypePush, - ArrayPrototypePushApply, - ArrayPrototypeShift, - ArrayPrototypeSlice, - ArrayPrototypeUnshiftApply, - ObjectEntries, - ObjectPrototypeHasOwnProperty: ObjectHasOwn, - StringPrototypeCharAt, - StringPrototypeIndexOf, - StringPrototypeSlice, - StringPrototypeStartsWith, -} = require('./internal/primordials'); - -const { - validateArray, - validateBoolean, - validateBooleanArray, - validateObject, - validateString, - validateStringArray, - validateUnion, -} = require('./internal/validators'); - -const { - kEmptyObject, -} = require('./internal/util'); - -const { - findLongOptionForShort, - isLoneLongOption, - isLoneShortOption, - isLongOptionAndValue, - isOptionValue, - isOptionLikeValue, - isShortOptionAndValue, - isShortOptionGroup, - useDefaultValueOption, - objectGetOwn, - optionsGetOwn, -} = require('./utils'); - -const { - codes: { - ERR_INVALID_ARG_VALUE, - ERR_PARSE_ARGS_INVALID_OPTION_VALUE, - ERR_PARSE_ARGS_UNKNOWN_OPTION, - ERR_PARSE_ARGS_UNEXPECTED_POSITIONAL, - }, -} = require('./internal/errors'); - -function getMainArgs() { - // Work out where to slice process.argv for user supplied arguments. - - // Check node options for scenarios where user CLI args follow executable. - const execArgv = process.execArgv; - if (ArrayPrototypeIncludes(execArgv, '-e') || - ArrayPrototypeIncludes(execArgv, '--eval') || - ArrayPrototypeIncludes(execArgv, '-p') || - ArrayPrototypeIncludes(execArgv, '--print')) { - return ArrayPrototypeSlice(process.argv, 1); - } - - // Normally first two arguments are executable and script, then CLI arguments - return ArrayPrototypeSlice(process.argv, 2); -} - -/** - * In strict mode, throw for possible usage errors like --foo --bar - * - * @param {object} token - from tokens as available from parseArgs - */ -function checkOptionLikeValue(token) { - if (!token.inlineValue && isOptionLikeValue(token.value)) { - // Only show short example if user used short option. - const example = StringPrototypeStartsWith(token.rawName, '--') ? - `'${token.rawName}=-XYZ'` : - `'--${token.name}=-XYZ' or '${token.rawName}-XYZ'`; - const errorMessage = `Option '${token.rawName}' argument is ambiguous. -Did you forget to specify the option argument for '${token.rawName}'? -To specify an option argument starting with a dash use ${example}.`; - throw new ERR_PARSE_ARGS_INVALID_OPTION_VALUE(errorMessage); - } -} - -/** - * In strict mode, throw for usage errors. 
- * - * @param {object} config - from config passed to parseArgs - * @param {object} token - from tokens as available from parseArgs - */ -function checkOptionUsage(config, token) { - if (!ObjectHasOwn(config.options, token.name)) { - throw new ERR_PARSE_ARGS_UNKNOWN_OPTION( - token.rawName, config.allowPositionals); - } - - const short = optionsGetOwn(config.options, token.name, 'short'); - const shortAndLong = `${short ? `-${short}, ` : ''}--${token.name}`; - const type = optionsGetOwn(config.options, token.name, 'type'); - if (type === 'string' && typeof token.value !== 'string') { - throw new ERR_PARSE_ARGS_INVALID_OPTION_VALUE(`Option '${shortAndLong} ' argument missing`); - } - // (Idiomatic test for undefined||null, expecting undefined.) - if (type === 'boolean' && token.value != null) { - throw new ERR_PARSE_ARGS_INVALID_OPTION_VALUE(`Option '${shortAndLong}' does not take an argument`); - } -} - - -/** - * Store the option value in `values`. - * - * @param {string} longOption - long option name e.g. 'foo' - * @param {string|undefined} optionValue - value from user args - * @param {object} options - option configs, from parseArgs({ options }) - * @param {object} values - option values returned in `values` by parseArgs - */ -function storeOption(longOption, optionValue, options, values) { - if (longOption === '__proto__') { - return; // No. Just no. - } - - // We store based on the option value rather than option type, - // preserving the users intent for author to deal with. - const newValue = optionValue ?? true; - if (optionsGetOwn(options, longOption, 'multiple')) { - // Always store value in array, including for boolean. - // values[longOption] starts out not present, - // first value is added as new array [newValue], - // subsequent values are pushed to existing array. - // (note: values has null prototype, so simpler usage) - if (values[longOption]) { - ArrayPrototypePush(values[longOption], newValue); - } else { - values[longOption] = [newValue]; - } - } else { - values[longOption] = newValue; - } -} - -/** - * Store the default option value in `values`. - * - * @param {string} longOption - long option name e.g. 'foo' - * @param {string - * | boolean - * | string[] - * | boolean[]} optionValue - default value from option config - * @param {object} values - option values returned in `values` by parseArgs - */ -function storeDefaultOption(longOption, optionValue, values) { - if (longOption === '__proto__') { - return; // No. Just no. - } - - values[longOption] = optionValue; -} - -/** - * Process args and turn into identified tokens: - * - option (along with value, if any) - * - positional - * - option-terminator - * - * @param {string[]} args - from parseArgs({ args }) or mainArgs - * @param {object} options - option configs, from parseArgs({ options }) - */ -function argsToTokens(args, options) { - const tokens = []; - let index = -1; - let groupCount = 0; - - const remainingArgs = ArrayPrototypeSlice(args); - while (remainingArgs.length > 0) { - const arg = ArrayPrototypeShift(remainingArgs); - const nextArg = remainingArgs[0]; - if (groupCount > 0) - groupCount--; - else - index++; - - // Check if `arg` is an options terminator. - // Guideline 10 in https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap12.html - if (arg === '--') { - // Everything after a bare '--' is considered a positional argument. 
- ArrayPrototypePush(tokens, { kind: 'option-terminator', index }); - ArrayPrototypePushApply( - tokens, ArrayPrototypeMap(remainingArgs, (arg) => { - return { kind: 'positional', index: ++index, value: arg }; - }) - ); - break; // Finished processing args, leave while loop. - } - - if (isLoneShortOption(arg)) { - // e.g. '-f' - const shortOption = StringPrototypeCharAt(arg, 1); - const longOption = findLongOptionForShort(shortOption, options); - let value; - let inlineValue; - if (optionsGetOwn(options, longOption, 'type') === 'string' && - isOptionValue(nextArg)) { - // e.g. '-f', 'bar' - value = ArrayPrototypeShift(remainingArgs); - inlineValue = false; - } - ArrayPrototypePush( - tokens, - { kind: 'option', name: longOption, rawName: arg, - index, value, inlineValue }); - if (value != null) ++index; - continue; - } - - if (isShortOptionGroup(arg, options)) { - // Expand -fXzy to -f -X -z -y - const expanded = []; - for (let index = 1; index < arg.length; index++) { - const shortOption = StringPrototypeCharAt(arg, index); - const longOption = findLongOptionForShort(shortOption, options); - if (optionsGetOwn(options, longOption, 'type') !== 'string' || - index === arg.length - 1) { - // Boolean option, or last short in group. Well formed. - ArrayPrototypePush(expanded, `-${shortOption}`); - } else { - // String option in middle. Yuck. - // Expand -abfFILE to -a -b -fFILE - ArrayPrototypePush(expanded, `-${StringPrototypeSlice(arg, index)}`); - break; // finished short group - } - } - ArrayPrototypeUnshiftApply(remainingArgs, expanded); - groupCount = expanded.length; - continue; - } - - if (isShortOptionAndValue(arg, options)) { - // e.g. -fFILE - const shortOption = StringPrototypeCharAt(arg, 1); - const longOption = findLongOptionForShort(shortOption, options); - const value = StringPrototypeSlice(arg, 2); - ArrayPrototypePush( - tokens, - { kind: 'option', name: longOption, rawName: `-${shortOption}`, - index, value, inlineValue: true }); - continue; - } - - if (isLoneLongOption(arg)) { - // e.g. '--foo' - const longOption = StringPrototypeSlice(arg, 2); - let value; - let inlineValue; - if (optionsGetOwn(options, longOption, 'type') === 'string' && - isOptionValue(nextArg)) { - // e.g. '--foo', 'bar' - value = ArrayPrototypeShift(remainingArgs); - inlineValue = false; - } - ArrayPrototypePush( - tokens, - { kind: 'option', name: longOption, rawName: arg, - index, value, inlineValue }); - if (value != null) ++index; - continue; - } - - if (isLongOptionAndValue(arg)) { - // e.g. --foo=bar - const equalIndex = StringPrototypeIndexOf(arg, '='); - const longOption = StringPrototypeSlice(arg, 2, equalIndex); - const value = StringPrototypeSlice(arg, equalIndex + 1); - ArrayPrototypePush( - tokens, - { kind: 'option', name: longOption, rawName: `--${longOption}`, - index, value, inlineValue: true }); - continue; - } - - ArrayPrototypePush(tokens, { kind: 'positional', index, value: arg }); - } - - return tokens; -} - -const parseArgs = (config = kEmptyObject) => { - const args = objectGetOwn(config, 'args') ?? getMainArgs(); - const strict = objectGetOwn(config, 'strict') ?? true; - const allowPositionals = objectGetOwn(config, 'allowPositionals') ?? !strict; - const returnTokens = objectGetOwn(config, 'tokens') ?? false; - const options = objectGetOwn(config, 'options') ?? { __proto__: null }; - // Bundle these up for passing to strict-mode checks. - const parseConfig = { args, strict, options, allowPositionals }; - - // Validate input configuration. 
- validateArray(args, 'args'); - validateBoolean(strict, 'strict'); - validateBoolean(allowPositionals, 'allowPositionals'); - validateBoolean(returnTokens, 'tokens'); - validateObject(options, 'options'); - ArrayPrototypeForEach( - ObjectEntries(options), - ({ 0: longOption, 1: optionConfig }) => { - validateObject(optionConfig, `options.${longOption}`); - - // type is required - const optionType = objectGetOwn(optionConfig, 'type'); - validateUnion(optionType, `options.${longOption}.type`, ['string', 'boolean']); - - if (ObjectHasOwn(optionConfig, 'short')) { - const shortOption = optionConfig.short; - validateString(shortOption, `options.${longOption}.short`); - if (shortOption.length !== 1) { - throw new ERR_INVALID_ARG_VALUE( - `options.${longOption}.short`, - shortOption, - 'must be a single character' - ); - } - } - - const multipleOption = objectGetOwn(optionConfig, 'multiple'); - if (ObjectHasOwn(optionConfig, 'multiple')) { - validateBoolean(multipleOption, `options.${longOption}.multiple`); - } - - const defaultValue = objectGetOwn(optionConfig, 'default'); - if (defaultValue !== undefined) { - let validator; - switch (optionType) { - case 'string': - validator = multipleOption ? validateStringArray : validateString; - break; - - case 'boolean': - validator = multipleOption ? validateBooleanArray : validateBoolean; - break; - } - validator(defaultValue, `options.${longOption}.default`); - } - } - ); - - // Phase 1: identify tokens - const tokens = argsToTokens(args, options); - - // Phase 2: process tokens into parsed option values and positionals - const result = { - values: { __proto__: null }, - positionals: [], - }; - if (returnTokens) { - result.tokens = tokens; - } - ArrayPrototypeForEach(tokens, (token) => { - if (token.kind === 'option') { - if (strict) { - checkOptionUsage(parseConfig, token); - checkOptionLikeValue(token); - } - storeOption(token.name, token.value, options, result.values); - } else if (token.kind === 'positional') { - if (!allowPositionals) { - throw new ERR_PARSE_ARGS_UNEXPECTED_POSITIONAL(token.value); - } - ArrayPrototypePush(result.positionals, token.value); - } - }); - - // Phase 3: fill in default values for missing args - ArrayPrototypeForEach(ObjectEntries(options), ({ 0: longOption, - 1: optionConfig }) => { - const mustSetDefault = useDefaultValueOption(longOption, - optionConfig, - result.values); - if (mustSetDefault) { - storeDefaultOption(longOption, - objectGetOwn(optionConfig, 'default'), - result.values); - } - }); - - - return result; -}; - -module.exports = { - parseArgs, -}; diff --git a/node_modules/@pkgjs/parseargs/internal/errors.js b/node_modules/@pkgjs/parseargs/internal/errors.js deleted file mode 100644 index e1b237b5b1639..0000000000000 --- a/node_modules/@pkgjs/parseargs/internal/errors.js +++ /dev/null @@ -1,47 +0,0 @@ -'use strict'; - -class ERR_INVALID_ARG_TYPE extends TypeError { - constructor(name, expected, actual) { - super(`${name} must be ${expected} got ${actual}`); - this.code = 'ERR_INVALID_ARG_TYPE'; - } -} - -class ERR_INVALID_ARG_VALUE extends TypeError { - constructor(arg1, arg2, expected) { - super(`The property ${arg1} ${expected}. Received '${arg2}'`); - this.code = 'ERR_INVALID_ARG_VALUE'; - } -} - -class ERR_PARSE_ARGS_INVALID_OPTION_VALUE extends Error { - constructor(message) { - super(message); - this.code = 'ERR_PARSE_ARGS_INVALID_OPTION_VALUE'; - } -} - -class ERR_PARSE_ARGS_UNKNOWN_OPTION extends Error { - constructor(option, allowPositionals) { - const suggestDashDash = allowPositionals ? `. 
To specify a positional argument starting with a '-', place it at the end of the command after '--', as in '-- ${JSON.stringify(option)}` : ''; - super(`Unknown option '${option}'${suggestDashDash}`); - this.code = 'ERR_PARSE_ARGS_UNKNOWN_OPTION'; - } -} - -class ERR_PARSE_ARGS_UNEXPECTED_POSITIONAL extends Error { - constructor(positional) { - super(`Unexpected argument '${positional}'. This command does not take positional arguments`); - this.code = 'ERR_PARSE_ARGS_UNEXPECTED_POSITIONAL'; - } -} - -module.exports = { - codes: { - ERR_INVALID_ARG_TYPE, - ERR_INVALID_ARG_VALUE, - ERR_PARSE_ARGS_INVALID_OPTION_VALUE, - ERR_PARSE_ARGS_UNKNOWN_OPTION, - ERR_PARSE_ARGS_UNEXPECTED_POSITIONAL, - } -}; diff --git a/node_modules/@pkgjs/parseargs/internal/primordials.js b/node_modules/@pkgjs/parseargs/internal/primordials.js deleted file mode 100644 index 63e23ab117a9c..0000000000000 --- a/node_modules/@pkgjs/parseargs/internal/primordials.js +++ /dev/null @@ -1,393 +0,0 @@ -/* -This file is copied from https://github.com/nodejs/node/blob/v14.19.3/lib/internal/per_context/primordials.js -under the following license: - -Copyright Node.js contributors. All rights reserved. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. -*/ - -'use strict'; - -/* eslint-disable node-core/prefer-primordials */ - -// This file subclasses and stores the JS builtins that come from the VM -// so that Node.js's builtin modules do not need to later look these up from -// the global proxy, which can be mutated by users. - -// Use of primordials have sometimes a dramatic impact on performance, please -// benchmark all changes made in performance-sensitive areas of the codebase. -// See: https://github.com/nodejs/node/pull/38248 - -const primordials = {}; - -const { - defineProperty: ReflectDefineProperty, - getOwnPropertyDescriptor: ReflectGetOwnPropertyDescriptor, - ownKeys: ReflectOwnKeys, -} = Reflect; - -// `uncurryThis` is equivalent to `func => Function.prototype.call.bind(func)`. -// It is using `bind.bind(call)` to avoid using `Function.prototype.bind` -// and `Function.prototype.call` after it may have been mutated by users. -const { apply, bind, call } = Function.prototype; -const uncurryThis = bind.bind(call); -primordials.uncurryThis = uncurryThis; - -// `applyBind` is equivalent to `func => Function.prototype.apply.bind(func)`. -// It is using `bind.bind(apply)` to avoid using `Function.prototype.bind` -// and `Function.prototype.apply` after it may have been mutated by users. 
-const applyBind = bind.bind(apply); -primordials.applyBind = applyBind; - -// Methods that accept a variable number of arguments, and thus it's useful to -// also create `${prefix}${key}Apply`, which uses `Function.prototype.apply`, -// instead of `Function.prototype.call`, and thus doesn't require iterator -// destructuring. -const varargsMethods = [ - // 'ArrayPrototypeConcat' is omitted, because it performs the spread - // on its own for arrays and array-likes with a truthy - // @@isConcatSpreadable symbol property. - 'ArrayOf', - 'ArrayPrototypePush', - 'ArrayPrototypeUnshift', - // 'FunctionPrototypeCall' is omitted, since there's 'ReflectApply' - // and 'FunctionPrototypeApply'. - 'MathHypot', - 'MathMax', - 'MathMin', - 'StringPrototypeConcat', - 'TypedArrayOf', -]; - -function getNewKey(key) { - return typeof key === 'symbol' ? - `Symbol${key.description[7].toUpperCase()}${key.description.slice(8)}` : - `${key[0].toUpperCase()}${key.slice(1)}`; -} - -function copyAccessor(dest, prefix, key, { enumerable, get, set }) { - ReflectDefineProperty(dest, `${prefix}Get${key}`, { - value: uncurryThis(get), - enumerable - }); - if (set !== undefined) { - ReflectDefineProperty(dest, `${prefix}Set${key}`, { - value: uncurryThis(set), - enumerable - }); - } -} - -function copyPropsRenamed(src, dest, prefix) { - for (const key of ReflectOwnKeys(src)) { - const newKey = getNewKey(key); - const desc = ReflectGetOwnPropertyDescriptor(src, key); - if ('get' in desc) { - copyAccessor(dest, prefix, newKey, desc); - } else { - const name = `${prefix}${newKey}`; - ReflectDefineProperty(dest, name, desc); - if (varargsMethods.includes(name)) { - ReflectDefineProperty(dest, `${name}Apply`, { - // `src` is bound as the `this` so that the static `this` points - // to the object it was defined on, - // e.g.: `ArrayOfApply` gets a `this` of `Array`: - value: applyBind(desc.value, src), - }); - } - } - } -} - -function copyPropsRenamedBound(src, dest, prefix) { - for (const key of ReflectOwnKeys(src)) { - const newKey = getNewKey(key); - const desc = ReflectGetOwnPropertyDescriptor(src, key); - if ('get' in desc) { - copyAccessor(dest, prefix, newKey, desc); - } else { - const { value } = desc; - if (typeof value === 'function') { - desc.value = value.bind(src); - } - - const name = `${prefix}${newKey}`; - ReflectDefineProperty(dest, name, desc); - if (varargsMethods.includes(name)) { - ReflectDefineProperty(dest, `${name}Apply`, { - value: applyBind(value, src), - }); - } - } - } -} - -function copyPrototype(src, dest, prefix) { - for (const key of ReflectOwnKeys(src)) { - const newKey = getNewKey(key); - const desc = ReflectGetOwnPropertyDescriptor(src, key); - if ('get' in desc) { - copyAccessor(dest, prefix, newKey, desc); - } else { - const { value } = desc; - if (typeof value === 'function') { - desc.value = uncurryThis(value); - } - - const name = `${prefix}${newKey}`; - ReflectDefineProperty(dest, name, desc); - if (varargsMethods.includes(name)) { - ReflectDefineProperty(dest, `${name}Apply`, { - value: applyBind(value), - }); - } - } - } -} - -// Create copies of configurable value properties of the global object -[ - 'Proxy', - 'globalThis', -].forEach((name) => { - // eslint-disable-next-line no-restricted-globals - primordials[name] = globalThis[name]; -}); - -// Create copies of URI handling functions -[ - decodeURI, - decodeURIComponent, - encodeURI, - encodeURIComponent, -].forEach((fn) => { - primordials[fn.name] = fn; -}); - -// Create copies of the namespace objects -[ - 'JSON', - 'Math', - 
'Proxy', - 'Reflect', -].forEach((name) => { - // eslint-disable-next-line no-restricted-globals - copyPropsRenamed(global[name], primordials, name); -}); - -// Create copies of intrinsic objects -[ - 'Array', - 'ArrayBuffer', - 'BigInt', - 'BigInt64Array', - 'BigUint64Array', - 'Boolean', - 'DataView', - 'Date', - 'Error', - 'EvalError', - 'Float32Array', - 'Float64Array', - 'Function', - 'Int16Array', - 'Int32Array', - 'Int8Array', - 'Map', - 'Number', - 'Object', - 'RangeError', - 'ReferenceError', - 'RegExp', - 'Set', - 'String', - 'Symbol', - 'SyntaxError', - 'TypeError', - 'URIError', - 'Uint16Array', - 'Uint32Array', - 'Uint8Array', - 'Uint8ClampedArray', - 'WeakMap', - 'WeakSet', -].forEach((name) => { - // eslint-disable-next-line no-restricted-globals - const original = global[name]; - primordials[name] = original; - copyPropsRenamed(original, primordials, name); - copyPrototype(original.prototype, primordials, `${name}Prototype`); -}); - -// Create copies of intrinsic objects that require a valid `this` to call -// static methods. -// Refs: https://www.ecma-international.org/ecma-262/#sec-promise.all -[ - 'Promise', -].forEach((name) => { - // eslint-disable-next-line no-restricted-globals - const original = global[name]; - primordials[name] = original; - copyPropsRenamedBound(original, primordials, name); - copyPrototype(original.prototype, primordials, `${name}Prototype`); -}); - -// Create copies of abstract intrinsic objects that are not directly exposed -// on the global object. -// Refs: https://tc39.es/ecma262/#sec-%typedarray%-intrinsic-object -[ - { name: 'TypedArray', original: Reflect.getPrototypeOf(Uint8Array) }, - { name: 'ArrayIterator', original: { - prototype: Reflect.getPrototypeOf(Array.prototype[Symbol.iterator]()), - } }, - { name: 'StringIterator', original: { - prototype: Reflect.getPrototypeOf(String.prototype[Symbol.iterator]()), - } }, -].forEach(({ name, original }) => { - primordials[name] = original; - // The static %TypedArray% methods require a valid `this`, but can't be bound, - // as they need a subclass constructor as the receiver: - copyPrototype(original, primordials, name); - copyPrototype(original.prototype, primordials, `${name}Prototype`); -}); - -/* eslint-enable node-core/prefer-primordials */ - -const { - ArrayPrototypeForEach, - FunctionPrototypeCall, - Map, - ObjectFreeze, - ObjectSetPrototypeOf, - Set, - SymbolIterator, - WeakMap, - WeakSet, -} = primordials; - -// Because these functions are used by `makeSafe`, which is exposed -// on the `primordials` object, it's important to use const references -// to the primordials that they use: -const createSafeIterator = (factory, next) => { - class SafeIterator { - constructor(iterable) { - this._iterator = factory(iterable); - } - next() { - return next(this._iterator); - } - [SymbolIterator]() { - return this; - } - } - ObjectSetPrototypeOf(SafeIterator.prototype, null); - ObjectFreeze(SafeIterator.prototype); - ObjectFreeze(SafeIterator); - return SafeIterator; -}; - -primordials.SafeArrayIterator = createSafeIterator( - primordials.ArrayPrototypeSymbolIterator, - primordials.ArrayIteratorPrototypeNext -); -primordials.SafeStringIterator = createSafeIterator( - primordials.StringPrototypeSymbolIterator, - primordials.StringIteratorPrototypeNext -); - -const copyProps = (src, dest) => { - ArrayPrototypeForEach(ReflectOwnKeys(src), (key) => { - if (!ReflectGetOwnPropertyDescriptor(dest, key)) { - ReflectDefineProperty( - dest, - key, - ReflectGetOwnPropertyDescriptor(src, key)); - } - }); 
-}; - -const makeSafe = (unsafe, safe) => { - if (SymbolIterator in unsafe.prototype) { - const dummy = new unsafe(); - let next; // We can reuse the same `next` method. - - ArrayPrototypeForEach(ReflectOwnKeys(unsafe.prototype), (key) => { - if (!ReflectGetOwnPropertyDescriptor(safe.prototype, key)) { - const desc = ReflectGetOwnPropertyDescriptor(unsafe.prototype, key); - if ( - typeof desc.value === 'function' && - desc.value.length === 0 && - SymbolIterator in (FunctionPrototypeCall(desc.value, dummy) ?? {}) - ) { - const createIterator = uncurryThis(desc.value); - next = next ?? uncurryThis(createIterator(dummy).next); - const SafeIterator = createSafeIterator(createIterator, next); - desc.value = function() { - return new SafeIterator(this); - }; - } - ReflectDefineProperty(safe.prototype, key, desc); - } - }); - } else { - copyProps(unsafe.prototype, safe.prototype); - } - copyProps(unsafe, safe); - - ObjectSetPrototypeOf(safe.prototype, null); - ObjectFreeze(safe.prototype); - ObjectFreeze(safe); - return safe; -}; -primordials.makeSafe = makeSafe; - -// Subclass the constructors because we need to use their prototype -// methods later. -// Defining the `constructor` is necessary here to avoid the default -// constructor which uses the user-mutable `%ArrayIteratorPrototype%.next`. -primordials.SafeMap = makeSafe( - Map, - class SafeMap extends Map { - constructor(i) { super(i); } // eslint-disable-line no-useless-constructor - } -); -primordials.SafeWeakMap = makeSafe( - WeakMap, - class SafeWeakMap extends WeakMap { - constructor(i) { super(i); } // eslint-disable-line no-useless-constructor - } -); -primordials.SafeSet = makeSafe( - Set, - class SafeSet extends Set { - constructor(i) { super(i); } // eslint-disable-line no-useless-constructor - } -); -primordials.SafeWeakSet = makeSafe( - WeakSet, - class SafeWeakSet extends WeakSet { - constructor(i) { super(i); } // eslint-disable-line no-useless-constructor - } -); - -ObjectSetPrototypeOf(primordials, null); -ObjectFreeze(primordials); - -module.exports = primordials; diff --git a/node_modules/@pkgjs/parseargs/internal/util.js b/node_modules/@pkgjs/parseargs/internal/util.js deleted file mode 100644 index b9b8fe5b8d7c0..0000000000000 --- a/node_modules/@pkgjs/parseargs/internal/util.js +++ /dev/null @@ -1,14 +0,0 @@ -'use strict'; - -// This is a placeholder for util.js in node.js land. - -const { - ObjectCreate, - ObjectFreeze, -} = require('./primordials'); - -const kEmptyObject = ObjectFreeze(ObjectCreate(null)); - -module.exports = { - kEmptyObject, -}; diff --git a/node_modules/@pkgjs/parseargs/internal/validators.js b/node_modules/@pkgjs/parseargs/internal/validators.js deleted file mode 100644 index b5ac4fb501eff..0000000000000 --- a/node_modules/@pkgjs/parseargs/internal/validators.js +++ /dev/null @@ -1,89 +0,0 @@ -'use strict'; - -// This file is a proxy of the original file located at: -// https://github.com/nodejs/node/blob/main/lib/internal/validators.js -// Every addition or modification to this file must be evaluated -// during the PR review. 
- -const { - ArrayIsArray, - ArrayPrototypeIncludes, - ArrayPrototypeJoin, -} = require('./primordials'); - -const { - codes: { - ERR_INVALID_ARG_TYPE - } -} = require('./errors'); - -function validateString(value, name) { - if (typeof value !== 'string') { - throw new ERR_INVALID_ARG_TYPE(name, 'String', value); - } -} - -function validateUnion(value, name, union) { - if (!ArrayPrototypeIncludes(union, value)) { - throw new ERR_INVALID_ARG_TYPE(name, `('${ArrayPrototypeJoin(union, '|')}')`, value); - } -} - -function validateBoolean(value, name) { - if (typeof value !== 'boolean') { - throw new ERR_INVALID_ARG_TYPE(name, 'Boolean', value); - } -} - -function validateArray(value, name) { - if (!ArrayIsArray(value)) { - throw new ERR_INVALID_ARG_TYPE(name, 'Array', value); - } -} - -function validateStringArray(value, name) { - validateArray(value, name); - for (let i = 0; i < value.length; i++) { - validateString(value[i], `${name}[${i}]`); - } -} - -function validateBooleanArray(value, name) { - validateArray(value, name); - for (let i = 0; i < value.length; i++) { - validateBoolean(value[i], `${name}[${i}]`); - } -} - -/** - * @param {unknown} value - * @param {string} name - * @param {{ - * allowArray?: boolean, - * allowFunction?: boolean, - * nullable?: boolean - * }} [options] - */ -function validateObject(value, name, options) { - const useDefaultOptions = options == null; - const allowArray = useDefaultOptions ? false : options.allowArray; - const allowFunction = useDefaultOptions ? false : options.allowFunction; - const nullable = useDefaultOptions ? false : options.nullable; - if ((!nullable && value === null) || - (!allowArray && ArrayIsArray(value)) || - (typeof value !== 'object' && ( - !allowFunction || typeof value !== 'function' - ))) { - throw new ERR_INVALID_ARG_TYPE(name, 'Object', value); - } -} - -module.exports = { - validateArray, - validateObject, - validateString, - validateStringArray, - validateUnion, - validateBoolean, - validateBooleanArray, -}; diff --git a/node_modules/@pkgjs/parseargs/package.json b/node_modules/@pkgjs/parseargs/package.json deleted file mode 100644 index 0bcc05c0d4a3e..0000000000000 --- a/node_modules/@pkgjs/parseargs/package.json +++ /dev/null @@ -1,36 +0,0 @@ -{ - "name": "@pkgjs/parseargs", - "version": "0.11.0", - "description": "Polyfill of future proposal for `util.parseArgs()`", - "engines": { - "node": ">=14" - }, - "main": "index.js", - "exports": { - ".": "./index.js", - "./package.json": "./package.json" - }, - "scripts": { - "coverage": "c8 --check-coverage tape 'test/*.js'", - "test": "c8 tape 'test/*.js'", - "posttest": "eslint .", - "fix": "npm run posttest -- --fix" - }, - "repository": { - "type": "git", - "url": "git@github.com:pkgjs/parseargs.git" - }, - "keywords": [], - "author": "", - "license": "MIT", - "bugs": { - "url": "https://github.com/pkgjs/parseargs/issues" - }, - "homepage": "https://github.com/pkgjs/parseargs#readme", - "devDependencies": { - "c8": "^7.10.0", - "eslint": "^8.2.0", - "eslint-plugin-node-core": "iansu/eslint-plugin-node-core", - "tape": "^5.2.2" - } -} diff --git a/node_modules/@pkgjs/parseargs/utils.js b/node_modules/@pkgjs/parseargs/utils.js deleted file mode 100644 index d7f420a233924..0000000000000 --- a/node_modules/@pkgjs/parseargs/utils.js +++ /dev/null @@ -1,198 +0,0 @@ -'use strict'; - -const { - ArrayPrototypeFind, - ObjectEntries, - ObjectPrototypeHasOwnProperty: ObjectHasOwn, - StringPrototypeCharAt, - StringPrototypeIncludes, - StringPrototypeStartsWith, -} = 
require('./internal/primordials'); - -const { - validateObject, -} = require('./internal/validators'); - -// These are internal utilities to make the parsing logic easier to read, and -// add lots of detail for the curious. They are in a separate file to allow -// unit testing, although that is not essential (this could be rolled into -// main file and just tested implicitly via API). -// -// These routines are for internal use, not for export to client. - -/** - * Return the named property, but only if it is an own property. - */ -function objectGetOwn(obj, prop) { - if (ObjectHasOwn(obj, prop)) - return obj[prop]; -} - -/** - * Return the named options property, but only if it is an own property. - */ -function optionsGetOwn(options, longOption, prop) { - if (ObjectHasOwn(options, longOption)) - return objectGetOwn(options[longOption], prop); -} - -/** - * Determines if the argument may be used as an option value. - * @example - * isOptionValue('V') // returns true - * isOptionValue('-v') // returns true (greedy) - * isOptionValue('--foo') // returns true (greedy) - * isOptionValue(undefined) // returns false - */ -function isOptionValue(value) { - if (value == null) return false; - - // Open Group Utility Conventions are that an option-argument - // is the argument after the option, and may start with a dash. - return true; // greedy! -} - -/** - * Detect whether there is possible confusion and user may have omitted - * the option argument, like `--port --verbose` when `port` of type:string. - * In strict mode we throw errors if value is option-like. - */ -function isOptionLikeValue(value) { - if (value == null) return false; - - return value.length > 1 && StringPrototypeCharAt(value, 0) === '-'; -} - -/** - * Determines if `arg` is just a short option. - * @example '-f' - */ -function isLoneShortOption(arg) { - return arg.length === 2 && - StringPrototypeCharAt(arg, 0) === '-' && - StringPrototypeCharAt(arg, 1) !== '-'; -} - -/** - * Determines if `arg` is a lone long option. - * @example - * isLoneLongOption('a') // returns false - * isLoneLongOption('-a') // returns false - * isLoneLongOption('--foo') // returns true - * isLoneLongOption('--foo=bar') // returns false - */ -function isLoneLongOption(arg) { - return arg.length > 2 && - StringPrototypeStartsWith(arg, '--') && - !StringPrototypeIncludes(arg, '=', 3); -} - -/** - * Determines if `arg` is a long option and value in the same argument. - * @example - * isLongOptionAndValue('--foo') // returns false - * isLongOptionAndValue('--foo=bar') // returns true - */ -function isLongOptionAndValue(arg) { - return arg.length > 2 && - StringPrototypeStartsWith(arg, '--') && - StringPrototypeIncludes(arg, '=', 3); -} - -/** - * Determines if `arg` is a short option group. - * - * See Guideline 5 of the [Open Group Utility Conventions](https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap12.html). - * One or more options without option-arguments, followed by at most one - * option that takes an option-argument, should be accepted when grouped - * behind one '-' delimiter. 
- * @example - * isShortOptionGroup('-a', {}) // returns false - * isShortOptionGroup('-ab', {}) // returns true - * // -fb is an option and a value, not a short option group - * isShortOptionGroup('-fb', { - * options: { f: { type: 'string' } } - * }) // returns false - * isShortOptionGroup('-bf', { - * options: { f: { type: 'string' } } - * }) // returns true - * // -bfb is an edge case, return true and caller sorts it out - * isShortOptionGroup('-bfb', { - * options: { f: { type: 'string' } } - * }) // returns true - */ -function isShortOptionGroup(arg, options) { - if (arg.length <= 2) return false; - if (StringPrototypeCharAt(arg, 0) !== '-') return false; - if (StringPrototypeCharAt(arg, 1) === '-') return false; - - const firstShort = StringPrototypeCharAt(arg, 1); - const longOption = findLongOptionForShort(firstShort, options); - return optionsGetOwn(options, longOption, 'type') !== 'string'; -} - -/** - * Determine if arg is a short string option followed by its value. - * @example - * isShortOptionAndValue('-a', {}); // returns false - * isShortOptionAndValue('-ab', {}); // returns false - * isShortOptionAndValue('-fFILE', { - * options: { foo: { short: 'f', type: 'string' }} - * }) // returns true - */ -function isShortOptionAndValue(arg, options) { - validateObject(options, 'options'); - - if (arg.length <= 2) return false; - if (StringPrototypeCharAt(arg, 0) !== '-') return false; - if (StringPrototypeCharAt(arg, 1) === '-') return false; - - const shortOption = StringPrototypeCharAt(arg, 1); - const longOption = findLongOptionForShort(shortOption, options); - return optionsGetOwn(options, longOption, 'type') === 'string'; -} - -/** - * Find the long option associated with a short option. Looks for a configured - * `short` and returns the short option itself if a long option is not found. - * @example - * findLongOptionForShort('a', {}) // returns 'a' - * findLongOptionForShort('b', { - * options: { bar: { short: 'b' } } - * }) // returns 'bar' - */ -function findLongOptionForShort(shortOption, options) { - validateObject(options, 'options'); - const longOptionEntry = ArrayPrototypeFind( - ObjectEntries(options), - ({ 1: optionConfig }) => objectGetOwn(optionConfig, 'short') === shortOption - ); - return longOptionEntry?.[0] ?? shortOption; -} - -/** - * Check if the given option includes a default value - * and that option has not been set by the input args. - * - * @param {string} longOption - long option name e.g. 'foo' - * @param {object} optionConfig - the option configuration properties - * @param {object} values - option values returned in `values` by parseArgs - */ -function useDefaultValueOption(longOption, optionConfig, values) { - return objectGetOwn(optionConfig, 'default') !== undefined && - values[longOption] === undefined; -} - -module.exports = { - findLongOptionForShort, - isLoneLongOption, - isLoneShortOption, - isLongOptionAndValue, - isOptionValue, - isOptionLikeValue, - isShortOptionAndValue, - isShortOptionGroup, - useDefaultValueOption, - objectGetOwn, - optionsGetOwn, -}; diff --git a/node_modules/tar/LICENSE b/node_modules/@sigstore/sign/node_modules/proc-log/LICENSE similarity index 93% rename from node_modules/tar/LICENSE rename to node_modules/@sigstore/sign/node_modules/proc-log/LICENSE index 19129e315fe59..83837797202b7 100644 --- a/node_modules/tar/LICENSE +++ b/node_modules/@sigstore/sign/node_modules/proc-log/LICENSE @@ -1,6 +1,6 @@ The ISC License -Copyright (c) Isaac Z. Schlueter and Contributors +Copyright (c) GitHub, Inc. 
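// Not part of the patch: the hunks above finish removing the @pkgjs/parseargs
// polyfill for util.parseArgs. On the Node versions npm now supports
// (^20.17.0 || >=22.9.0) the built-in node:util implementation covers the same
// API, so a minimal sketch of the replacement looks like this:
const { parseArgs } = require('node:util')

const { values, positionals } = parseArgs({
  args: ['-v', '--port', '8080', 'build'],
  options: {
    verbose: { type: 'boolean', short: 'v' },
    port: { type: 'string' },
  },
  allowPositionals: true,
})
// values -> { verbose: true, port: '8080' }, positionals -> ['build']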
Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above diff --git a/node_modules/@sigstore/sign/node_modules/proc-log/lib/index.js b/node_modules/@sigstore/sign/node_modules/proc-log/lib/index.js new file mode 100644 index 0000000000000..86d90861078da --- /dev/null +++ b/node_modules/@sigstore/sign/node_modules/proc-log/lib/index.js @@ -0,0 +1,153 @@ +const META = Symbol('proc-log.meta') +module.exports = { + META: META, + output: { + LEVELS: [ + 'standard', + 'error', + 'buffer', + 'flush', + ], + KEYS: { + standard: 'standard', + error: 'error', + buffer: 'buffer', + flush: 'flush', + }, + standard: function (...args) { + return process.emit('output', 'standard', ...args) + }, + error: function (...args) { + return process.emit('output', 'error', ...args) + }, + buffer: function (...args) { + return process.emit('output', 'buffer', ...args) + }, + flush: function (...args) { + return process.emit('output', 'flush', ...args) + }, + }, + log: { + LEVELS: [ + 'notice', + 'error', + 'warn', + 'info', + 'verbose', + 'http', + 'silly', + 'timing', + 'pause', + 'resume', + ], + KEYS: { + notice: 'notice', + error: 'error', + warn: 'warn', + info: 'info', + verbose: 'verbose', + http: 'http', + silly: 'silly', + timing: 'timing', + pause: 'pause', + resume: 'resume', + }, + error: function (...args) { + return process.emit('log', 'error', ...args) + }, + notice: function (...args) { + return process.emit('log', 'notice', ...args) + }, + warn: function (...args) { + return process.emit('log', 'warn', ...args) + }, + info: function (...args) { + return process.emit('log', 'info', ...args) + }, + verbose: function (...args) { + return process.emit('log', 'verbose', ...args) + }, + http: function (...args) { + return process.emit('log', 'http', ...args) + }, + silly: function (...args) { + return process.emit('log', 'silly', ...args) + }, + timing: function (...args) { + return process.emit('log', 'timing', ...args) + }, + pause: function () { + return process.emit('log', 'pause') + }, + resume: function () { + return process.emit('log', 'resume') + }, + }, + time: { + LEVELS: [ + 'start', + 'end', + ], + KEYS: { + start: 'start', + end: 'end', + }, + start: function (name, fn) { + process.emit('time', 'start', name) + function end () { + return process.emit('time', 'end', name) + } + if (typeof fn === 'function') { + const res = fn() + if (res && res.finally) { + return res.finally(end) + } + end() + return res + } + return end + }, + end: function (name) { + return process.emit('time', 'end', name) + }, + }, + input: { + LEVELS: [ + 'start', + 'end', + 'read', + ], + KEYS: { + start: 'start', + end: 'end', + read: 'read', + }, + start: function (fn) { + process.emit('input', 'start') + function end () { + return process.emit('input', 'end') + } + if (typeof fn === 'function') { + const res = fn() + if (res && res.finally) { + return res.finally(end) + } + end() + return res + } + return end + }, + end: function () { + return process.emit('input', 'end') + }, + read: function (...args) { + let resolve, reject + const promise = new Promise((_resolve, _reject) => { + resolve = _resolve + reject = _reject + }) + process.emit('input', 'read', resolve, reject, ...args) + return promise + }, + }, +} diff --git a/node_modules/@sigstore/sign/node_modules/proc-log/package.json b/node_modules/@sigstore/sign/node_modules/proc-log/package.json new file mode 100644 index 0000000000000..957209d3954e5 --- /dev/null +++ 
b/node_modules/@sigstore/sign/node_modules/proc-log/package.json @@ -0,0 +1,46 @@ +{ + "name": "proc-log", + "version": "5.0.0", + "files": [ + "bin/", + "lib/" + ], + "main": "lib/index.js", + "description": "just emit 'log' events on the process object", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/proc-log.git" + }, + "author": "GitHub Inc.", + "license": "ISC", + "scripts": { + "test": "tap", + "snap": "tap", + "posttest": "npm run lint", + "postsnap": "eslint index.js test/*.js --fix", + "lint": "npm run eslint", + "postlint": "template-oss-check", + "lintfix": "npm run eslint -- --fix", + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" + }, + "devDependencies": { + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", + "tap": "^16.0.1" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.23.3", + "publish": true + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/node_modules/abbrev/package.json b/node_modules/abbrev/package.json index 077d4bccd0e69..f17aaccfa56ad 100644 --- a/node_modules/abbrev/package.json +++ b/node_modules/abbrev/package.json @@ -1,18 +1,19 @@ { "name": "abbrev", - "version": "3.0.1", + "version": "4.0.0", "description": "Like ruby's abbrev module, but in js", "author": "GitHub Inc.", "main": "lib/index.js", "scripts": { - "test": "tap", + "test": "node --test", "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", "lintfix": "npm run eslint -- --fix", - "snap": "tap", + "snap": "node --test --test-update-snapshots", "posttest": "npm run lint", - "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "test:cover": "node --test --experimental-test-coverage --test-timeout=3000 --test-coverage-lines=100 --test-coverage-functions=100 --test-coverage-branches=100" }, "repository": { "type": "git", @@ -21,25 +22,20 @@ "license": "ISC", "devDependencies": { "@npmcli/eslint-config": "^5.0.0", - "@npmcli/template-oss": "4.24.3", - "tap": "^16.3.0" - }, - "tap": { - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] + "@npmcli/template-oss": "4.26.1" }, "files": [ "bin/", "lib/" ], "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
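// Not part of the patch: a minimal sketch of how the proc-log module vendored
// above (under @sigstore/sign/node_modules) is consumed. It only emits events
// on `process`; whichever logger is listening decides how to render them.
const { log, time } = require('proc-log')

// a hypothetical subscriber, e.g. a CLI display layer
process.on('log', (level, ...args) => {
  console.error(`[${level}]`, ...args)
})

log.warn('config', 'this arrives via the process "log" event')

// time.start() returns an end function when no callback is passed
const done = time.start('refresh-cache')
// ... timed work ...
done()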
Edits may be overwritten.", - "version": "4.24.3", - "publish": true + "version": "4.26.1", + "publish": true, + "testRunner": "node:test", + "latestCiVersion": 24 } } diff --git a/node_modules/bin-links/package.json b/node_modules/bin-links/package.json index 22858d660ae0b..23f52cfc96ec4 100644 --- a/node_modules/bin-links/package.json +++ b/node_modules/bin-links/package.json @@ -1,6 +1,6 @@ { "name": "bin-links", - "version": "5.0.0", + "version": "6.0.0", "description": "JavaScript package binary linker", "main": "./lib/index.js", "scripts": { @@ -24,15 +24,15 @@ ], "license": "ISC", "dependencies": { - "cmd-shim": "^7.0.0", - "npm-normalize-package-bin": "^4.0.0", - "proc-log": "^5.0.0", - "read-cmd-shim": "^5.0.0", - "write-file-atomic": "^6.0.0" + "cmd-shim": "^8.0.0", + "npm-normalize-package-bin": "^5.0.0", + "proc-log": "^6.0.0", + "read-cmd-shim": "^6.0.0", + "write-file-atomic": "^7.0.0" }, "devDependencies": { "@npmcli/eslint-config": "^5.0.0", - "@npmcli/template-oss": "4.23.3", + "@npmcli/template-oss": "4.27.1", "require-inject": "^1.4.4", "tap": "^16.0.1" }, @@ -49,13 +49,13 @@ "lib/" ], "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" }, "author": "GitHub Inc.", "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", "windowsCI": false, - "version": "4.23.3", + "version": "4.27.1", "publish": true } } diff --git a/node_modules/node-gyp/node_modules/cacache/LICENSE.md b/node_modules/cacache/node_modules/ssri/LICENSE.md similarity index 96% rename from node_modules/node-gyp/node_modules/cacache/LICENSE.md rename to node_modules/cacache/node_modules/ssri/LICENSE.md index 8d28acf866d93..e335388869f50 100644 --- a/node_modules/node-gyp/node_modules/cacache/LICENSE.md +++ b/node_modules/cacache/node_modules/ssri/LICENSE.md @@ -1,6 +1,6 @@ ISC License -Copyright (c) npm, Inc. +Copyright 2021 (c) npm, Inc. Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the diff --git a/node_modules/cacache/node_modules/ssri/lib/index.js b/node_modules/cacache/node_modules/ssri/lib/index.js new file mode 100644 index 0000000000000..7d749ed480fb9 --- /dev/null +++ b/node_modules/cacache/node_modules/ssri/lib/index.js @@ -0,0 +1,580 @@ +'use strict' + +const crypto = require('crypto') +const { Minipass } = require('minipass') + +const SPEC_ALGORITHMS = ['sha512', 'sha384', 'sha256'] +const DEFAULT_ALGORITHMS = ['sha512'] + +// TODO: this should really be a hardcoded list of algorithms we support, +// rather than [a-z0-9]. +const BASE64_REGEX = /^[a-z0-9+/]+(?:=?=?)$/i +const SRI_REGEX = /^([a-z0-9]+)-([^?]+)([?\S*]*)$/ +const STRICT_SRI_REGEX = /^([a-z0-9]+)-([A-Za-z0-9+/=]{44,88})(\?[\x21-\x7E]*)?$/ +const VCHAR_REGEX = /^[\x21-\x7E]+$/ + +const getOptString = options => options?.length ? `?${options.join('?')}` : '' + +class IntegrityStream extends Minipass { + #emittedIntegrity + #emittedSize + #emittedVerified + + constructor (opts) { + super() + this.size = 0 + this.opts = opts + + // may be overridden later, but set now for class consistency + this.#getOptions() + + // options used for calculating stream. can't be changed. 
+ if (opts?.algorithms) { + this.algorithms = [...opts.algorithms] + } else { + this.algorithms = [...DEFAULT_ALGORITHMS] + } + if (this.algorithm !== null && !this.algorithms.includes(this.algorithm)) { + this.algorithms.push(this.algorithm) + } + + this.hashes = this.algorithms.map(crypto.createHash) + } + + #getOptions () { + // For verification + this.sri = this.opts?.integrity ? parse(this.opts?.integrity, this.opts) : null + this.expectedSize = this.opts?.size + + if (!this.sri) { + this.algorithm = null + } else if (this.sri.isHash) { + this.goodSri = true + this.algorithm = this.sri.algorithm + } else { + this.goodSri = !this.sri.isEmpty() + this.algorithm = this.sri.pickAlgorithm(this.opts) + } + + this.digests = this.goodSri ? this.sri[this.algorithm] : null + this.optString = getOptString(this.opts?.options) + } + + on (ev, handler) { + if (ev === 'size' && this.#emittedSize) { + return handler(this.#emittedSize) + } + + if (ev === 'integrity' && this.#emittedIntegrity) { + return handler(this.#emittedIntegrity) + } + + if (ev === 'verified' && this.#emittedVerified) { + return handler(this.#emittedVerified) + } + + return super.on(ev, handler) + } + + emit (ev, data) { + if (ev === 'end') { + this.#onEnd() + } + return super.emit(ev, data) + } + + write (data) { + this.size += data.length + this.hashes.forEach(h => h.update(data)) + return super.write(data) + } + + #onEnd () { + if (!this.goodSri) { + this.#getOptions() + } + const newSri = parse(this.hashes.map((h, i) => { + return `${this.algorithms[i]}-${h.digest('base64')}${this.optString}` + }).join(' '), this.opts) + // Integrity verification mode + const match = this.goodSri && newSri.match(this.sri, this.opts) + if (typeof this.expectedSize === 'number' && this.size !== this.expectedSize) { + /* eslint-disable-next-line max-len */ + const err = new Error(`stream size mismatch when checking ${this.sri}.\n Wanted: ${this.expectedSize}\n Found: ${this.size}`) + err.code = 'EBADSIZE' + err.found = this.size + err.expected = this.expectedSize + err.sri = this.sri + this.emit('error', err) + } else if (this.sri && !match) { + /* eslint-disable-next-line max-len */ + const err = new Error(`${this.sri} integrity checksum failed when using ${this.algorithm}: wanted ${this.digests} but got ${newSri}. (${this.size} bytes)`) + err.code = 'EINTEGRITY' + err.found = newSri + err.expected = this.digests + err.algorithm = this.algorithm + err.sri = this.sri + this.emit('error', err) + } else { + this.#emittedSize = this.size + this.emit('size', this.size) + this.#emittedIntegrity = newSri + this.emit('integrity', newSri) + if (match) { + this.#emittedVerified = match + this.emit('verified', match) + } + } + } +} + +class Hash { + get isHash () { + return true + } + + constructor (hash, opts) { + const strict = opts?.strict + this.source = hash.trim() + + // set default values so that we make V8 happy to + // always see a familiar object template. + this.digest = '' + this.algorithm = '' + this.options = [] + + // 3.1. Integrity metadata (called "Hash" by ssri) + // https://w3c.github.io/webappsec-subresource-integrity/#integrity-metadata-description + const match = this.source.match( + strict + ? 
STRICT_SRI_REGEX + : SRI_REGEX + ) + if (!match) { + return + } + if (strict && !SPEC_ALGORITHMS.includes(match[1])) { + return + } + this.algorithm = match[1] + this.digest = match[2] + + const rawOpts = match[3] + if (rawOpts) { + this.options = rawOpts.slice(1).split('?') + } + } + + hexDigest () { + return this.digest && Buffer.from(this.digest, 'base64').toString('hex') + } + + toJSON () { + return this.toString() + } + + match (integrity, opts) { + const other = parse(integrity, opts) + if (!other) { + return false + } + if (other.isIntegrity) { + const algo = other.pickAlgorithm(opts, [this.algorithm]) + + if (!algo) { + return false + } + + const foundHash = other[algo].find(hash => hash.digest === this.digest) + + if (foundHash) { + return foundHash + } + + return false + } + return other.digest === this.digest ? other : false + } + + toString (opts) { + if (opts?.strict) { + // Strict mode enforces the standard as close to the foot of the + // letter as it can. + if (!( + // The spec has very restricted productions for algorithms. + // https://www.w3.org/TR/CSP2/#source-list-syntax + SPEC_ALGORITHMS.includes(this.algorithm) && + // Usually, if someone insists on using a "different" base64, we + // leave it as-is, since there's multiple standards, and the + // specified is not a URL-safe variant. + // https://www.w3.org/TR/CSP2/#base64_value + this.digest.match(BASE64_REGEX) && + // Option syntax is strictly visual chars. + // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-option-expression + // https://tools.ietf.org/html/rfc5234#appendix-B.1 + this.options.every(opt => opt.match(VCHAR_REGEX)) + )) { + return '' + } + } + return `${this.algorithm}-${this.digest}${getOptString(this.options)}` + } +} + +function integrityHashToString (toString, sep, opts, hashes) { + const toStringIsNotEmpty = toString !== '' + + let shouldAddFirstSep = false + let complement = '' + + const lastIndex = hashes.length - 1 + + for (let i = 0; i < lastIndex; i++) { + const hashString = Hash.prototype.toString.call(hashes[i], opts) + + if (hashString) { + shouldAddFirstSep = true + + complement += hashString + complement += sep + } + } + + const finalHashString = Hash.prototype.toString.call(hashes[lastIndex], opts) + + if (finalHashString) { + shouldAddFirstSep = true + complement += finalHashString + } + + if (toStringIsNotEmpty && shouldAddFirstSep) { + return toString + sep + complement + } + + return toString + complement +} + +class Integrity { + get isIntegrity () { + return true + } + + toJSON () { + return this.toString() + } + + isEmpty () { + return Object.keys(this).length === 0 + } + + toString (opts) { + let sep = opts?.sep || ' ' + let toString = '' + + if (opts?.strict) { + // Entries must be separated by whitespace, according to spec. + sep = sep.replace(/\S+/g, ' ') + + for (const hash of SPEC_ALGORITHMS) { + if (this[hash]) { + toString = integrityHashToString(toString, sep, opts, this[hash]) + } + } + } else { + for (const hash of Object.keys(this)) { + toString = integrityHashToString(toString, sep, opts, this[hash]) + } + } + + return toString + } + + concat (integrity, opts) { + const other = typeof integrity === 'string' + ? integrity + : stringify(integrity, opts) + return parse(`${this.toString(opts)} ${other}`, opts) + } + + hexDigest () { + return parse(this, { single: true }).hexDigest() + } + + // add additional hashes to an integrity value, but prevent + // *changing* an existing integrity hash. 
+ merge (integrity, opts) { + const other = parse(integrity, opts) + for (const algo in other) { + if (this[algo]) { + if (!this[algo].find(hash => + other[algo].find(otherhash => + hash.digest === otherhash.digest))) { + throw new Error('hashes do not match, cannot update integrity') + } + } else { + this[algo] = other[algo] + } + } + } + + match (integrity, opts) { + const other = parse(integrity, opts) + if (!other) { + return false + } + const algo = other.pickAlgorithm(opts, Object.keys(this)) + return ( + !!algo && + this[algo] && + other[algo] && + this[algo].find(hash => + other[algo].find(otherhash => + hash.digest === otherhash.digest + ) + ) + ) || false + } + + // Pick the highest priority algorithm present, optionally also limited to a + // set of hashes found in another integrity. When limiting it may return + // nothing. + pickAlgorithm (opts, hashes) { + const pickAlgorithm = opts?.pickAlgorithm || getPrioritizedHash + const keys = Object.keys(this).filter(k => { + if (hashes?.length) { + return hashes.includes(k) + } + return true + }) + if (keys.length) { + return keys.reduce((acc, algo) => pickAlgorithm(acc, algo) || acc) + } + // no intersection between this and hashes, + return null + } +} + +module.exports.parse = parse +function parse (sri, opts) { + if (!sri) { + return null + } + if (typeof sri === 'string') { + return _parse(sri, opts) + } else if (sri.algorithm && sri.digest) { + const fullSri = new Integrity() + fullSri[sri.algorithm] = [sri] + return _parse(stringify(fullSri, opts), opts) + } else { + return _parse(stringify(sri, opts), opts) + } +} + +function _parse (integrity, opts) { + // 3.4.3. Parse metadata + // https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata + if (opts?.single) { + return new Hash(integrity, opts) + } + const hashes = integrity.trim().split(/\s+/).reduce((acc, string) => { + const hash = new Hash(string, opts) + if (hash.algorithm && hash.digest) { + const algo = hash.algorithm + if (!acc[algo]) { + acc[algo] = [] + } + acc[algo].push(hash) + } + return acc + }, new Integrity()) + return hashes.isEmpty() ? null : hashes +} + +module.exports.stringify = stringify +function stringify (obj, opts) { + if (obj.algorithm && obj.digest) { + return Hash.prototype.toString.call(obj, opts) + } else if (typeof obj === 'string') { + return stringify(parse(obj, opts), opts) + } else { + return Integrity.prototype.toString.call(obj, opts) + } +} + +module.exports.fromHex = fromHex +function fromHex (hexDigest, algorithm, opts) { + const optString = getOptString(opts?.options) + return parse( + `${algorithm}-${ + Buffer.from(hexDigest, 'hex').toString('base64') + }${optString}`, opts + ) +} + +module.exports.fromData = fromData +function fromData (data, opts) { + const algorithms = opts?.algorithms || [...DEFAULT_ALGORITHMS] + const optString = getOptString(opts?.options) + return algorithms.reduce((acc, algo) => { + const digest = crypto.createHash(algo).update(data).digest('base64') + const hash = new Hash( + `${algo}-${digest}${optString}`, + opts + ) + /* istanbul ignore else - it would be VERY strange if the string we + * just calculated with an algo did not have an algo or digest. 
+ */ + if (hash.algorithm && hash.digest) { + const hashAlgo = hash.algorithm + if (!acc[hashAlgo]) { + acc[hashAlgo] = [] + } + acc[hashAlgo].push(hash) + } + return acc + }, new Integrity()) +} + +module.exports.fromStream = fromStream +function fromStream (stream, opts) { + const istream = integrityStream(opts) + return new Promise((resolve, reject) => { + stream.pipe(istream) + stream.on('error', reject) + istream.on('error', reject) + let sri + istream.on('integrity', s => { + sri = s + }) + istream.on('end', () => resolve(sri)) + istream.resume() + }) +} + +module.exports.checkData = checkData +function checkData (data, sri, opts) { + sri = parse(sri, opts) + if (!sri || !Object.keys(sri).length) { + if (opts?.error) { + throw Object.assign( + new Error('No valid integrity hashes to check against'), { + code: 'EINTEGRITY', + } + ) + } else { + return false + } + } + const algorithm = sri.pickAlgorithm(opts) + const digest = crypto.createHash(algorithm).update(data).digest('base64') + const newSri = parse({ algorithm, digest }) + const match = newSri.match(sri, opts) + opts = opts || {} + if (match || !(opts.error)) { + return match + } else if (typeof opts.size === 'number' && (data.length !== opts.size)) { + /* eslint-disable-next-line max-len */ + const err = new Error(`data size mismatch when checking ${sri}.\n Wanted: ${opts.size}\n Found: ${data.length}`) + err.code = 'EBADSIZE' + err.found = data.length + err.expected = opts.size + err.sri = sri + throw err + } else { + /* eslint-disable-next-line max-len */ + const err = new Error(`Integrity checksum failed when using ${algorithm}: Wanted ${sri}, but got ${newSri}. (${data.length} bytes)`) + err.code = 'EINTEGRITY' + err.found = newSri + err.expected = sri + err.algorithm = algorithm + err.sri = sri + throw err + } +} + +module.exports.checkStream = checkStream +function checkStream (stream, sri, opts) { + opts = opts || Object.create(null) + opts.integrity = sri + sri = parse(sri, opts) + if (!sri || !Object.keys(sri).length) { + return Promise.reject(Object.assign( + new Error('No valid integrity hashes to check against'), { + code: 'EINTEGRITY', + } + )) + } + const checker = integrityStream(opts) + return new Promise((resolve, reject) => { + stream.pipe(checker) + stream.on('error', reject) + checker.on('error', reject) + let verified + checker.on('verified', s => { + verified = s + }) + checker.on('end', () => resolve(verified)) + checker.resume() + }) +} + +module.exports.integrityStream = integrityStream +function integrityStream (opts = Object.create(null)) { + return new IntegrityStream(opts) +} + +module.exports.create = createIntegrity +function createIntegrity (opts) { + const algorithms = opts?.algorithms || [...DEFAULT_ALGORITHMS] + const optString = getOptString(opts?.options) + + const hashes = algorithms.map(crypto.createHash) + + return { + update: function (chunk, enc) { + hashes.forEach(h => h.update(chunk, enc)) + return this + }, + digest: function () { + const integrity = algorithms.reduce((acc, algo) => { + const digest = hashes.shift().digest('base64') + const hash = new Hash( + `${algo}-${digest}${optString}`, + opts + ) + /* istanbul ignore else - it would be VERY strange if the hash we + * just calculated with an algo did not have an algo or digest. 
+ */ + if (hash.algorithm && hash.digest) { + const hashAlgo = hash.algorithm + if (!acc[hashAlgo]) { + acc[hashAlgo] = [] + } + acc[hashAlgo].push(hash) + } + return acc + }, new Integrity()) + + return integrity + }, + } +} + +const NODE_HASHES = crypto.getHashes() + +// This is a Best Effortâ„¢ at a reasonable priority for hash algos +const DEFAULT_PRIORITY = [ + 'md5', 'whirlpool', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512', + // TODO - it's unclear _which_ of these Node will actually use as its name + // for the algorithm, so we guesswork it based on the OpenSSL names. + 'sha3', + 'sha3-256', 'sha3-384', 'sha3-512', + 'sha3_256', 'sha3_384', 'sha3_512', +].filter(algo => NODE_HASHES.includes(algo)) + +function getPrioritizedHash (algo1, algo2) { + /* eslint-disable-next-line max-len */ + return DEFAULT_PRIORITY.indexOf(algo1.toLowerCase()) >= DEFAULT_PRIORITY.indexOf(algo2.toLowerCase()) + ? algo1 + : algo2 +} diff --git a/node_modules/node-gyp/node_modules/@npmcli/agent/package.json b/node_modules/cacache/node_modules/ssri/package.json similarity index 56% rename from node_modules/node-gyp/node_modules/@npmcli/agent/package.json rename to node_modules/cacache/node_modules/ssri/package.json index 4d648fb5dfe05..83306cd044ec3 100644 --- a/node_modules/node-gyp/node_modules/@npmcli/agent/package.json +++ b/node_modules/cacache/node_modules/ssri/package.json @@ -1,60 +1,66 @@ { - "name": "@npmcli/agent", - "version": "3.0.0", - "description": "the http/https agent used by the npm cli", + "name": "ssri", + "version": "12.0.0", + "description": "Standard Subresource Integrity library -- parses, serializes, generates, and verifies integrity metadata according to the SRI spec.", "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], "scripts": { - "gencerts": "bash scripts/create-cert.sh", + "prerelease": "npm t", + "postrelease": "npm publish", + "posttest": "npm run lint", "test": "tap", + "coverage": "tap", "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", "lintfix": "npm run eslint -- --fix", "snap": "tap", - "posttest": "npm run lint", "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, + "tap": { + "check-coverage": true, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/ssri.git" + }, + "keywords": [ + "w3c", + "web", + "security", + "integrity", + "checksum", + "hashing", + "subresource integrity", + "sri", + "sri hash", + "sri string", + "sri generator", + "html" + ], "author": "GitHub Inc.", "license": "ISC", - "bugs": { - "url": "https://github.com/npm/agent/issues" + "dependencies": { + "minipass": "^7.0.3" + }, + "devDependencies": { + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", + "tap": "^16.0.1" }, - "homepage": "https://github.com/npm/agent#readme", - "files": [ - "bin/", - "lib/" - ], "engines": { "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.23.1", + "version": "4.23.3", "publish": "true" - }, - "dependencies": { - "agent-base": "^7.1.0", - "http-proxy-agent": "^7.0.0", - "https-proxy-agent": "^7.0.1", - "lru-cache": "^10.0.1", - "socks-proxy-agent": "^8.0.3" - }, - "devDependencies": { - "@npmcli/eslint-config": "^5.0.0", - "@npmcli/template-oss": "4.23.1", - "minipass-fetch": "^3.0.3", - "nock": "^13.2.7", - "socksv5": "^0.0.6", - "tap": "^16.3.0" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/npm/agent.git" - }, - "tap": { - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] } } diff --git a/node_modules/cmd-shim/package.json b/node_modules/cmd-shim/package.json index 5f2e85d1c73db..0da1978b985d2 100644 --- a/node_modules/cmd-shim/package.json +++ b/node_modules/cmd-shim/package.json @@ -1,6 +1,6 @@ { "name": "cmd-shim", - "version": "7.0.0", + "version": "8.0.0", "description": "Used in npm for command line application support", "scripts": { "test": "tap", @@ -19,7 +19,7 @@ "license": "ISC", "devDependencies": { "@npmcli/eslint-config": "^5.0.0", - "@npmcli/template-oss": "4.23.1", + "@npmcli/template-oss": "4.27.1", "tap": "^16.0.1" }, "files": [ @@ -37,12 +37,12 @@ ] }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" }, "author": "GitHub Inc.", "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.23.1", + "version": "4.27.1", "publish": true } } diff --git a/node_modules/exponential-backoff/package.json b/node_modules/exponential-backoff/package.json index 53fb159f82782..e3e8dc9a5dccd 100644 --- a/node_modules/exponential-backoff/package.json +++ b/node_modules/exponential-backoff/package.json @@ -1,6 +1,6 @@ { "name": "exponential-backoff", - "version": "3.1.2", + "version": "3.1.3", "description": "A utility that allows retrying a function with an exponential delay between attempts.", "files": [ "dist/", diff --git a/node_modules/ini/package.json b/node_modules/ini/package.json index 6a3995f158cc5..7bbc0576937c5 100644 --- a/node_modules/ini/package.json +++ b/node_modules/ini/package.json @@ -2,7 +2,7 @@ "author": "GitHub Inc.", "name": "ini", "description": "An ini encoder/decoder for node", - "version": "5.0.0", + "version": "6.0.0", "repository": { "type": "git", "url": "git+https://github.com/npm/ini.git" @@ -20,7 +20,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^5.0.0", - "@npmcli/template-oss": "4.23.3", + "@npmcli/template-oss": "4.27.1", "tap": "^16.0.1" }, "license": "ISC", @@ -29,11 +29,11 @@ "lib/" ], "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.23.3", + "version": "4.27.1", "publish": "true" }, "tap": { diff --git a/node_modules/init-package-json/node_modules/validate-npm-package-name/LICENSE b/node_modules/init-package-json/node_modules/validate-npm-package-name/LICENSE new file mode 100644 index 0000000000000..fdcd63b302308 --- /dev/null +++ b/node_modules/init-package-json/node_modules/validate-npm-package-name/LICENSE @@ -0,0 +1,6 @@ +Copyright (c) 2015, npm, Inc + + +Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. 
+ +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/init-package-json/node_modules/validate-npm-package-name/lib/index.js b/node_modules/init-package-json/node_modules/validate-npm-package-name/lib/index.js new file mode 100644 index 0000000000000..db6e86b0dfc60 --- /dev/null +++ b/node_modules/init-package-json/node_modules/validate-npm-package-name/lib/index.js @@ -0,0 +1,110 @@ +'use strict' +const { builtinModules: builtins } = require('module') + +var scopedPackagePattern = new RegExp('^(?:@([^/]+?)[/])?([^/]+?)$') +var exclusionList = [ + 'node_modules', + 'favicon.ico', +] + +function validate (name) { + var warnings = [] + var errors = [] + + if (name === null) { + errors.push('name cannot be null') + return done(warnings, errors) + } + + if (name === undefined) { + errors.push('name cannot be undefined') + return done(warnings, errors) + } + + if (typeof name !== 'string') { + errors.push('name must be a string') + return done(warnings, errors) + } + + if (!name.length) { + errors.push('name length must be greater than zero') + } + + if (name.startsWith('.')) { + errors.push('name cannot start with a period') + } + + if (name.match(/^_/)) { + errors.push('name cannot start with an underscore') + } + + if (name.trim() !== name) { + errors.push('name cannot contain leading or trailing spaces') + } + + // No funny business + exclusionList.forEach(function (excludedName) { + if (name.toLowerCase() === excludedName) { + errors.push(excludedName + ' is not a valid package name') + } + }) + + // Generate warnings for stuff that used to be allowed + + // core module names like http, events, util, etc + if (builtins.includes(name.toLowerCase())) { + warnings.push(name + ' is a core module name') + } + + if (name.length > 214) { + warnings.push('name can no longer contain more than 214 characters') + } + + // mIxeD CaSe nAMEs + if (name.toLowerCase() !== name) { + warnings.push('name can no longer contain capital letters') + } + + if (/[~'!()*]/.test(name.split('/').slice(-1)[0])) { + warnings.push('name can no longer contain special characters ("~\'!()*")') + } + + if (encodeURIComponent(name) !== name) { + // Maybe it's a scoped package name, like @user/package + var nameMatch = name.match(scopedPackagePattern) + if (nameMatch) { + var user = nameMatch[1] + var pkg = nameMatch[2] + + if (pkg.startsWith('.')) { + errors.push('name cannot start with a period') + } + + if (encodeURIComponent(user) === user && encodeURIComponent(pkg) === pkg) { + return done(warnings, errors) + } + } + + errors.push('name can only contain URL-friendly characters') + } + + return done(warnings, errors) +} + +var done = function (warnings, errors) { + var result = { + validForNewPackages: errors.length === 0 && warnings.length === 0, + validForOldPackages: errors.length === 0, + warnings: warnings, + errors: errors, + } + if (!result.warnings.length) { + delete result.warnings + } + if (!result.errors.length) { + delete result.errors + } + return result +} + +module.exports = validate diff --git 
a/node_modules/init-package-json/node_modules/validate-npm-package-name/package.json b/node_modules/init-package-json/node_modules/validate-npm-package-name/package.json new file mode 100644 index 0000000000000..e5162f847fe53 --- /dev/null +++ b/node_modules/init-package-json/node_modules/validate-npm-package-name/package.json @@ -0,0 +1,61 @@ +{ + "name": "validate-npm-package-name", + "version": "6.0.2", + "description": "Give me a string and I'll tell you if it's a valid npm package name", + "main": "lib/", + "directories": { + "test": "test" + }, + "devDependencies": { + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.25.0", + "tap": "^16.0.1" + }, + "scripts": { + "cov:test": "TAP_FLAGS='--cov' npm run test:code", + "test:code": "tap ${TAP_FLAGS:-'--'} test/*.js", + "test:style": "standard", + "test": "tap", + "lint": "npm run eslint", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run eslint -- --fix", + "snap": "tap", + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/validate-npm-package-name.git" + }, + "keywords": [ + "npm", + "package", + "names", + "validation" + ], + "author": "GitHub Inc.", + "license": "ISC", + "bugs": { + "url": "https://github.com/npm/validate-npm-package-name/issues" + }, + "homepage": "https://github.com/npm/validate-npm-package-name", + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^18.17.0 || >=20.5.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.25.0", + "publish": true + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/node_modules/json-parse-even-better-errors/package.json b/node_modules/json-parse-even-better-errors/package.json index 193f0d9d459a5..6e696c98548db 100644 --- a/node_modules/json-parse-even-better-errors/package.json +++ b/node_modules/json-parse-even-better-errors/package.json @@ -1,6 +1,6 @@ { "name": "json-parse-even-better-errors", - "version": "4.0.0", + "version": "5.0.0", "description": "JSON.parse with context information on error", "main": "lib/index.js", "files": [ @@ -29,7 +29,7 @@ "license": "MIT", "devDependencies": { "@npmcli/eslint-config": "^5.0.0", - "@npmcli/template-oss": "4.23.3", + "@npmcli/template-oss": "4.27.1", "tap": "^16.3.0" }, "tap": { @@ -40,11 +40,11 @@ ] }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
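// Not part of the patch: behaviour of the validate-npm-package-name copy added
// above (under init-package-json/node_modules), per its lib/index.js.
const validate = require('validate-npm-package-name')

validate('some-package')   // { validForNewPackages: true, validForOldPackages: true }
validate('@scope/pkg')     // scoped names pass the same checks
validate('ExcitingModule') // old-style only: warns that capitals are no longer allowed
validate('.hidden')        // error: name cannot start with a period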
Edits may be overwritten.", - "version": "4.23.3", + "version": "4.27.1", "publish": true } } diff --git a/node_modules/make-fetch-happen/package.json b/node_modules/make-fetch-happen/package.json index 41815ec3c8f11..203b32304c461 100644 --- a/node_modules/make-fetch-happen/package.json +++ b/node_modules/make-fetch-happen/package.json @@ -1,6 +1,6 @@ { "name": "make-fetch-happen", - "version": "15.0.2", + "version": "15.0.3", "description": "Opinionated, caching, retrying fetch client", "main": "lib/index.js", "files": [ @@ -37,13 +37,13 @@ "cacache": "^20.0.1", "http-cache-semantics": "^4.1.1", "minipass": "^7.0.2", - "minipass-fetch": "^4.0.0", + "minipass-fetch": "^5.0.0", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", "negotiator": "^1.0.0", - "proc-log": "^5.0.0", + "proc-log": "^6.0.0", "promise-retry": "^2.0.1", - "ssri": "^12.0.0" + "ssri": "^13.0.0" }, "devDependencies": { "@npmcli/eslint-config": "^5.0.0", diff --git a/node_modules/minimatch/LICENSE b/node_modules/minimatch/LICENSE deleted file mode 100644 index 1493534e60dce..0000000000000 --- a/node_modules/minimatch/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) 2011-2023 Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/node-gyp/node_modules/jackspeak/LICENSE.md b/node_modules/minimatch/LICENSE.md similarity index 100% rename from node_modules/node-gyp/node_modules/jackspeak/LICENSE.md rename to node_modules/minimatch/LICENSE.md diff --git a/node_modules/minimatch/dist/commonjs/ast.js b/node_modules/minimatch/dist/commonjs/ast.js index 7b2109625eaeb..fd7f3d73ed31d 100644 --- a/node_modules/minimatch/dist/commonjs/ast.js +++ b/node_modules/minimatch/dist/commonjs/ast.js @@ -415,7 +415,9 @@ class AST { if (this.#root === this) this.#fillNegs(); if (!this.type) { - const noEmpty = this.isStart() && this.isEnd(); + const noEmpty = this.isStart() && + this.isEnd() && + !this.#parts.some(s => typeof s !== 'string'); const src = this.#parts .map(p => { const [re, _, hasMagic, uflag] = typeof p === 'string' @@ -571,10 +573,7 @@ class AST { } } if (c === '*') { - if (noEmpty && glob === '*') - re += starNoEmpty; - else - re += star; + re += noEmpty && glob === '*' ? starNoEmpty : star; hasMagic = true; continue; } diff --git a/node_modules/minimatch/dist/commonjs/escape.js b/node_modules/minimatch/dist/commonjs/escape.js index 02a4f8a8e0a58..6fb634fb41033 100644 --- a/node_modules/minimatch/dist/commonjs/escape.js +++ b/node_modules/minimatch/dist/commonjs/escape.js @@ -4,16 +4,24 @@ exports.escape = void 0; /** * Escape all magic characters in a glob pattern. 
* - * If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape} + * If the {@link MinimatchOptions.windowsPathsNoEscape} * option is used, then characters are escaped by wrapping in `[]`, because * a magic character wrapped in a character class can only be satisfied by * that exact character. In this mode, `\` is _not_ escaped, because it is * not interpreted as a magic character, but instead as a path separator. + * + * If the {@link MinimatchOptions.magicalBraces} option is used, + * then braces (`{` and `}`) will be escaped. */ -const escape = (s, { windowsPathsNoEscape = false, } = {}) => { +const escape = (s, { windowsPathsNoEscape = false, magicalBraces = false, } = {}) => { // don't need to escape +@! because we escape the parens // that make those magic, and escaping ! as [!] isn't valid, // because [!]] is a valid glob class meaning not ']'. + if (magicalBraces) { + return windowsPathsNoEscape + ? s.replace(/[?*()[\]{}]/g, '[$&]') + : s.replace(/[?*()[\]\\{}]/g, '\\$&'); + } return windowsPathsNoEscape ? s.replace(/[?*()[\]]/g, '[$&]') : s.replace(/[?*()[\]\\]/g, '\\$&'); diff --git a/node_modules/minimatch/dist/commonjs/index.js b/node_modules/minimatch/dist/commonjs/index.js index f58fb8616aa9a..966dc9b8bb216 100644 --- a/node_modules/minimatch/dist/commonjs/index.js +++ b/node_modules/minimatch/dist/commonjs/index.js @@ -640,7 +640,7 @@ class Minimatch { } } // resolve and reduce . and .. portions in the file as well. - // dont' need to do the second phase, because it's only one string[] + // don't need to do the second phase, because it's only one string[] const { optimizationLevel = 1 } = this.options; if (optimizationLevel >= 2) { file = this.levelTwoFileOptimize(file); @@ -893,14 +893,25 @@ class Minimatch { } } else if (next === undefined) { - pp[i - 1] = prev + '(?:\\/|' + twoStar + ')?'; + pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + ')?'; } else if (next !== exports.GLOBSTAR) { pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + '\\/)' + next; pp[i + 1] = exports.GLOBSTAR; } }); - return pp.filter(p => p !== exports.GLOBSTAR).join('/'); + const filtered = pp.filter(p => p !== exports.GLOBSTAR); + // For partial matches, we need to make the pattern match + // any prefix of the full path. We do this by generating + // alternative patterns that match progressively longer prefixes. + if (this.partial && filtered.length >= 1) { + const prefixes = []; + for (let i = 1; i <= filtered.length; i++) { + prefixes.push(filtered.slice(0, i).join('/')); + } + return '(?:' + prefixes.join('|') + ')'; + } + return filtered.join('/'); }) .join('|'); // need to wrap in parens if we had more than one thing with |, @@ -909,6 +920,10 @@ class Minimatch { // must match entire pattern // ending in a * or ** will make it less strict. re = '^' + open + re + close + '$'; + // In partial mode, '/' should always match as it's a valid prefix for any pattern + if (this.partial) { + re = '^(?:\\/|' + open + re.slice(1, -1) + close + ')$'; + } // can match anything, as long as it's not this. if (this.negate) re = '^(?!' + re + ').+$'; diff --git a/node_modules/minimatch/dist/commonjs/unescape.js b/node_modules/minimatch/dist/commonjs/unescape.js index 47c36bcee5a02..171098d8a4ceb 100644 --- a/node_modules/minimatch/dist/commonjs/unescape.js +++ b/node_modules/minimatch/dist/commonjs/unescape.js @@ -4,21 +4,35 @@ exports.unescape = void 0; /** * Un-escape a string that has been escaped with {@link escape}. 
* - * If the {@link windowsPathsNoEscape} option is used, then square-brace - * escapes are removed, but not backslash escapes. For example, it will turn - * the string `'[*]'` into `*`, but it will not turn `'\\*'` into `'*'`, - * becuase `\` is a path separator in `windowsPathsNoEscape` mode. + * If the {@link MinimatchOptions.windowsPathsNoEscape} option is used, then + * square-bracket escapes are removed, but not backslash escapes. * - * When `windowsPathsNoEscape` is not set, then both brace escapes and + * For example, it will turn the string `'[*]'` into `*`, but it will not + * turn `'\\*'` into `'*'`, because `\` is a path separator in + * `windowsPathsNoEscape` mode. + * + * When `windowsPathsNoEscape` is not set, then both square-bracket escapes and * backslash escapes are removed. * * Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped * or unescaped. + * + * When `magicalBraces` is not set, escapes of braces (`{` and `}`) will not be + * unescaped. */ -const unescape = (s, { windowsPathsNoEscape = false, } = {}) => { +const unescape = (s, { windowsPathsNoEscape = false, magicalBraces = true, } = {}) => { + if (magicalBraces) { + return windowsPathsNoEscape + ? s.replace(/\[([^\/\\])\]/g, '$1') + : s + .replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2') + .replace(/\\([^\/])/g, '$1'); + } return windowsPathsNoEscape - ? s.replace(/\[([^\/\\])\]/g, '$1') - : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2').replace(/\\([^\/])/g, '$1'); + ? s.replace(/\[([^\/\\{}])\]/g, '$1') + : s + .replace(/((?!\\).|^)\[([^\/\\{}])\]/g, '$1$2') + .replace(/\\([^\/{}])/g, '$1'); }; exports.unescape = unescape; //# sourceMappingURL=unescape.js.map \ No newline at end of file diff --git a/node_modules/minimatch/dist/esm/ast.js b/node_modules/minimatch/dist/esm/ast.js index 2d2bced6533de..e4547b16131b6 100644 --- a/node_modules/minimatch/dist/esm/ast.js +++ b/node_modules/minimatch/dist/esm/ast.js @@ -412,7 +412,9 @@ export class AST { if (this.#root === this) this.#fillNegs(); if (!this.type) { - const noEmpty = this.isStart() && this.isEnd(); + const noEmpty = this.isStart() && + this.isEnd() && + !this.#parts.some(s => typeof s !== 'string'); const src = this.#parts .map(p => { const [re, _, hasMagic, uflag] = typeof p === 'string' @@ -568,10 +570,7 @@ export class AST { } } if (c === '*') { - if (noEmpty && glob === '*') - re += starNoEmpty; - else - re += star; + re += noEmpty && glob === '*' ? starNoEmpty : star; hasMagic = true; continue; } diff --git a/node_modules/minimatch/dist/esm/escape.js b/node_modules/minimatch/dist/esm/escape.js index 16f7c8c7bdc64..bab968ff3d83c 100644 --- a/node_modules/minimatch/dist/esm/escape.js +++ b/node_modules/minimatch/dist/esm/escape.js @@ -1,16 +1,24 @@ /** * Escape all magic characters in a glob pattern. * - * If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape} + * If the {@link MinimatchOptions.windowsPathsNoEscape} * option is used, then characters are escaped by wrapping in `[]`, because * a magic character wrapped in a character class can only be satisfied by * that exact character. In this mode, `\` is _not_ escaped, because it is * not interpreted as a magic character, but instead as a path separator. + * + * If the {@link MinimatchOptions.magicalBraces} option is used, + * then braces (`{` and `}`) will be escaped. */ -export const escape = (s, { windowsPathsNoEscape = false, } = {}) => { +export const escape = (s, { windowsPathsNoEscape = false, magicalBraces = false, } = {}) => { // don't need to escape +@! 
because we escape the parens // that make those magic, and escaping ! as [!] isn't valid, // because [!]] is a valid glob class meaning not ']'. + if (magicalBraces) { + return windowsPathsNoEscape + ? s.replace(/[?*()[\]{}]/g, '[$&]') + : s.replace(/[?*()[\]\\{}]/g, '\\$&'); + } return windowsPathsNoEscape ? s.replace(/[?*()[\]]/g, '[$&]') : s.replace(/[?*()[\]\\]/g, '\\$&'); diff --git a/node_modules/minimatch/dist/esm/index.js b/node_modules/minimatch/dist/esm/index.js index 790d6c02a2f22..e83823fa6e1b5 100644 --- a/node_modules/minimatch/dist/esm/index.js +++ b/node_modules/minimatch/dist/esm/index.js @@ -631,7 +631,7 @@ export class Minimatch { } } // resolve and reduce . and .. portions in the file as well. - // dont' need to do the second phase, because it's only one string[] + // don't need to do the second phase, because it's only one string[] const { optimizationLevel = 1 } = this.options; if (optimizationLevel >= 2) { file = this.levelTwoFileOptimize(file); @@ -884,14 +884,25 @@ export class Minimatch { } } else if (next === undefined) { - pp[i - 1] = prev + '(?:\\/|' + twoStar + ')?'; + pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + ')?'; } else if (next !== GLOBSTAR) { pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + '\\/)' + next; pp[i + 1] = GLOBSTAR; } }); - return pp.filter(p => p !== GLOBSTAR).join('/'); + const filtered = pp.filter(p => p !== GLOBSTAR); + // For partial matches, we need to make the pattern match + // any prefix of the full path. We do this by generating + // alternative patterns that match progressively longer prefixes. + if (this.partial && filtered.length >= 1) { + const prefixes = []; + for (let i = 1; i <= filtered.length; i++) { + prefixes.push(filtered.slice(0, i).join('/')); + } + return '(?:' + prefixes.join('|') + ')'; + } + return filtered.join('/'); }) .join('|'); // need to wrap in parens if we had more than one thing with |, @@ -900,6 +911,10 @@ export class Minimatch { // must match entire pattern // ending in a * or ** will make it less strict. re = '^' + open + re + close + '$'; + // In partial mode, '/' should always match as it's a valid prefix for any pattern + if (this.partial) { + re = '^(?:\\/|' + open + re.slice(1, -1) + close + ')$'; + } // can match anything, as long as it's not this. if (this.negate) re = '^(?!' + re + ').+$'; diff --git a/node_modules/minimatch/dist/esm/unescape.js b/node_modules/minimatch/dist/esm/unescape.js index 0faf9a2b7306f..dfa408d39853b 100644 --- a/node_modules/minimatch/dist/esm/unescape.js +++ b/node_modules/minimatch/dist/esm/unescape.js @@ -1,20 +1,34 @@ /** * Un-escape a string that has been escaped with {@link escape}. * - * If the {@link windowsPathsNoEscape} option is used, then square-brace - * escapes are removed, but not backslash escapes. For example, it will turn - * the string `'[*]'` into `*`, but it will not turn `'\\*'` into `'*'`, - * becuase `\` is a path separator in `windowsPathsNoEscape` mode. + * If the {@link MinimatchOptions.windowsPathsNoEscape} option is used, then + * square-bracket escapes are removed, but not backslash escapes. * - * When `windowsPathsNoEscape` is not set, then both brace escapes and + * For example, it will turn the string `'[*]'` into `*`, but it will not + * turn `'\\*'` into `'*'`, because `\` is a path separator in + * `windowsPathsNoEscape` mode. + * + * When `windowsPathsNoEscape` is not set, then both square-bracket escapes and * backslash escapes are removed. * * Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped * or unescaped. 
+ * + * When `magicalBraces` is not set, escapes of braces (`{` and `}`) will not be + * unescaped. */ -export const unescape = (s, { windowsPathsNoEscape = false, } = {}) => { +export const unescape = (s, { windowsPathsNoEscape = false, magicalBraces = true, } = {}) => { + if (magicalBraces) { + return windowsPathsNoEscape + ? s.replace(/\[([^\/\\])\]/g, '$1') + : s + .replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2') + .replace(/\\([^\/])/g, '$1'); + } return windowsPathsNoEscape - ? s.replace(/\[([^\/\\])\]/g, '$1') - : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2').replace(/\\([^\/])/g, '$1'); + ? s.replace(/\[([^\/\\{}])\]/g, '$1') + : s + .replace(/((?!\\).|^)\[([^\/\\{}])\]/g, '$1$2') + .replace(/\\([^\/{}])/g, '$1'); }; //# sourceMappingURL=unescape.js.map \ No newline at end of file diff --git a/node_modules/minimatch/package.json b/node_modules/minimatch/package.json index bfa2423f50b5e..620c0309f0d16 100644 --- a/node_modules/minimatch/package.json +++ b/node_modules/minimatch/package.json @@ -2,10 +2,10 @@ "author": "Isaac Z. Schlueter (http://blog.izs.me)", "name": "minimatch", "description": "a glob matcher in javascript", - "version": "10.0.3", + "version": "10.1.1", "repository": { "type": "git", - "url": "git://github.com/isaacs/minimatch.git" + "url": "git@github.com:isaacs/minimatch" }, "main": "./dist/commonjs/index.js", "types": "./dist/commonjs/index.d.ts", @@ -34,9 +34,9 @@ "presnap": "npm run prepare", "test": "tap", "snap": "tap", - "format": "prettier --write . --loglevel warn", + "format": "prettier --write . --log-level warn", "benchmark": "node benchmark/index.js", - "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts" + "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts" }, "prettier": { "semi": false, @@ -53,10 +53,9 @@ "node": "20 || >=22" }, "devDependencies": { - "@types/brace-expansion": "^1.1.2", "@types/node": "^24.0.0", "mkdirp": "^3.0.1", - "prettier": "^3.3.2", + "prettier": "^3.6.2", "tap": "^21.1.0", "tshy": "^3.0.2", "typedoc": "^0.28.5" @@ -64,7 +63,7 @@ "funding": { "url": "https://github.com/sponsors/isaacs" }, - "license": "ISC", + "license": "BlueOak-1.0.0", "tshy": { "exports": { "./package.json": "./package.json", diff --git a/node_modules/minipass-fetch/package.json b/node_modules/minipass-fetch/package.json index eb8a4d4fac40d..7863e7e5e9dee 100644 --- a/node_modules/minipass-fetch/package.json +++ b/node_modules/minipass-fetch/package.json @@ -1,6 +1,6 @@ { "name": "minipass-fetch", - "version": "4.0.1", + "version": "5.0.0", "description": "An implementation of window.fetch in Node.js using Minipass streams", "license": "MIT", "main": "lib/index.js", @@ -25,7 +25,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^5.0.0", - "@npmcli/template-oss": "4.23.3", + "@npmcli/template-oss": "4.27.1", "@ungap/url-search-params": "^0.2.2", "abort-controller": "^3.0.0", "abortcontroller-polyfill": "~1.7.3", @@ -59,12 +59,12 @@ "lib/" ], "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" }, "author": "GitHub Inc.", "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.23.3", + "version": "4.27.1", "publish": "true" } } diff --git a/node_modules/node-gyp/.release-please-manifest.json b/node_modules/node-gyp/.release-please-manifest.json index a94451c9e1342..4899c67643487 100644 --- a/node_modules/node-gyp/.release-please-manifest.json +++ b/node_modules/node-gyp/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "11.4.2" + ".": "12.1.0" } diff --git a/node_modules/node-gyp/gyp/.release-please-manifest.json b/node_modules/node-gyp/gyp/.release-please-manifest.json index bdb726346fc28..ca64307ab8475 100644 --- a/node_modules/node-gyp/gyp/.release-please-manifest.json +++ b/node_modules/node-gyp/gyp/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.20.4" + ".": "0.21.0" } diff --git a/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py b/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py index 09baf44b2b0f8..2d8e4ceab9a94 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py @@ -270,6 +270,18 @@ def _CreateVersion(name, path, sdk_based=False): if path: path = os.path.normpath(path) versions = { + "2026": VisualStudioVersion( + "2026", + "Visual Studio 2026", + solution_version="12.00", + project_version="18.0", + flat_sln=False, + uses_vcxproj=True, + path=path, + sdk_based=sdk_based, + default_toolset="v145", + compatible_sdks=["v8.1", "v10.0"], + ), "2022": VisualStudioVersion( "2022", "Visual Studio 2022", @@ -462,6 +474,7 @@ def _DetectVisualStudioVersions(versions_to_check, force_express): "15.0": "2017", "16.0": "2019", "17.0": "2022", + "18.0": "2026", } versions = [] for version in versions_to_check: @@ -537,7 +550,18 @@ def SelectVisualStudioVersion(version="auto", allow_fallback=True): if version == "auto": version = os.environ.get("GYP_MSVS_VERSION", "auto") version_map = { - "auto": ("17.0", "16.0", "15.0", "14.0", "12.0", "10.0", "9.0", "8.0", "11.0"), + "auto": ( + "18.0", + "17.0", + "16.0", + "15.0", + "14.0", + "12.0", + "10.0", + "9.0", + "8.0", + "11.0", + ), "2005": ("8.0",), "2005e": ("8.0",), "2008": ("9.0",), @@ -552,6 +576,7 @@ def SelectVisualStudioVersion(version="auto", allow_fallback=True): "2017": ("15.0",), "2019": ("16.0",), "2022": ("17.0",), + "2026": ("18.0",), } if override_path := os.environ.get("GYP_MSVS_OVERRIDE_PATH"): msvs_version = os.environ.get("GYP_MSVS_VERSION") diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py index cfc0681f6bb04..5d5cae2afbf66 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py @@ -378,7 +378,7 @@ def WriteRules(self, rules, extra_sources, extra_outputs): inputs = rule.get("inputs") for rule_source in rule.get("rule_sources", []): (rule_source_dirname, rule_source_basename) = os.path.split(rule_source) - (rule_source_root, rule_source_ext) = os.path.splitext( + (rule_source_root, _rule_source_ext) = os.path.splitext( rule_source_basename ) diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/compile_commands_json.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/compile_commands_json.py index bebb1303154e1..1361aeca48d0c 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/generator/compile_commands_json.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/compile_commands_json.py @@ -100,7 +100,7 @@ def resolve(filename): def GenerateOutput(target_list, target_dicts, data, params): per_config_commands = {} for 
qualified_target, target in target_dicts.items(): - build_file, target_name, toolset = gyp.common.ParseQualifiedTarget( + build_file, _target_name, _toolset = gyp.common.ParseQualifiedTarget( qualified_target ) if IsMac(params): diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py index 3c70b81fd2562..89af24a201b10 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py @@ -73,7 +73,7 @@ def GenerateOutput(target_list, target_dicts, data, params): output_files = {} for qualified_target in target_list: - [input_file, target] = gyp.common.ParseQualifiedTarget(qualified_target)[0:2] + [input_file, _target] = gyp.common.ParseQualifiedTarget(qualified_target)[0:2] if input_file[-4:] != ".gyp": continue diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py index 1f0995718b59b..5f30f39fc503e 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py @@ -1169,7 +1169,7 @@ def WriteRules( for rule_source in rule.get("rule_sources", []): dirs = set() (rule_source_dirname, rule_source_basename) = os.path.split(rule_source) - (rule_source_root, rule_source_ext) = os.path.splitext( + (rule_source_root, _rule_source_ext) = os.path.splitext( rule_source_basename ) diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py index 3b258ee8f395e..0f14c055049ad 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py @@ -1666,7 +1666,7 @@ def _HandlePreCompiledHeaders(p, sources, spec): p.AddFileConfig( source, _ConfigFullName(config_name, config), {}, tools=[tool] ) - basename, extension = os.path.splitext(source) + _basename, extension = os.path.splitext(source) if extension == ".c": extensions_excluded_from_precompile = [".cc", ".cpp", ".cxx"] else: @@ -1677,7 +1677,7 @@ def DisableForSourceTree(source_tree): if isinstance(source, MSVSProject.Filter): DisableForSourceTree(source.contents) else: - basename, extension = os.path.splitext(source) + _basename, extension = os.path.splitext(source) if extension in extensions_excluded_from_precompile: for config_name, config in spec["configurations"].items(): tool = MSVSProject.Tool( @@ -3579,7 +3579,7 @@ def _AddSources2( # If the precompiled header is generated by a C source, # we must not try to use it for C++ sources, # and vice versa. 
- basename, extension = os.path.splitext(precompiled_source) + _basename, extension = os.path.splitext(precompiled_source) if extension == ".c": extensions_excluded_from_precompile = [ ".cc", diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py index 8e05657961fe9..db4b45d1a04d2 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py @@ -531,7 +531,7 @@ def AddSourceToTarget(source, type, pbxp, xct): library_extensions = ["a", "dylib", "framework", "o"] basename = posixpath.basename(source) - (root, ext) = posixpath.splitext(basename) + (_root, ext) = posixpath.splitext(basename) if ext: ext = ext[1:].lower() @@ -696,7 +696,7 @@ def GenerateOutput(target_list, target_dicts, data, params): xcode_targets = {} xcode_target_to_target_dict = {} for qualified_target in target_list: - [build_file, target_name, toolset] = gyp.common.ParseQualifiedTarget( + [build_file, target_name, _toolset] = gyp.common.ParseQualifiedTarget( qualified_target ) @@ -1215,7 +1215,7 @@ def GenerateOutput(target_list, target_dicts, data, params): # Add "sources". for source in spec.get("sources", []): - (source_root, source_extension) = posixpath.splitext(source) + (_source_root, source_extension) = posixpath.splitext(source) if source_extension[1:] not in rules_by_ext: # AddSourceToTarget will add the file to a root group if it's not # already there. @@ -1227,7 +1227,7 @@ def GenerateOutput(target_list, target_dicts, data, params): # it's a bundle of any type. if is_bundle: for resource in tgt_mac_bundle_resources: - (resource_root, resource_extension) = posixpath.splitext(resource) + (_resource_root, resource_extension) = posixpath.splitext(resource) if resource_extension[1:] not in rules_by_ext: AddResourceToTarget(resource, pbxp, xct) else: diff --git a/node_modules/node-gyp/gyp/pylib/gyp/input.py b/node_modules/node-gyp/gyp/pylib/gyp/input.py index 4965ff1571c73..f3a5e168f2075 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/input.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/input.py @@ -2757,7 +2757,7 @@ def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules): source_keys.extend(extra_sources_for_rules) for source_key in source_keys: for source in target_dict.get(source_key, []): - (source_root, source_extension) = os.path.splitext(source) + (_source_root, source_extension) = os.path.splitext(source) if source_extension.startswith("."): source_extension = source_extension[1:] if source_extension == rule_extension: diff --git a/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py b/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py index 192a523529fdd..d13eaa9af240b 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py @@ -1534,18 +1534,20 @@ def CLTVersion(): FROM_XCODE_PKG_ID = "com.apple.pkg.DeveloperToolsCLI" MAVERICKS_PKG_ID = "com.apple.pkg.CLTools_Executables" - regex = re.compile("version: (?P<version>.+)") + regex = re.compile(r"version: (?P<version>.+)") for key in [MAVERICKS_PKG_ID, STANDALONE_PKG_ID, FROM_XCODE_PKG_ID]: try: output = GetStdout(["/usr/sbin/pkgutil", "--pkg-info", key]) - return re.search(regex, output).groupdict()["version"] + if m := re.search(regex, output): + return m.groupdict()["version"] except (GypError, OSError): continue regex = re.compile(r"Command Line Tools for Xcode\s+(?P<version>\S+)") try: output = GetStdout(["/usr/sbin/softwareupdate", "--history"]) - return
re.search(regex, output).groupdict()["version"] + if m := re.search(regex, output): + return m.groupdict()["version"] except (GypError, OSError): return None diff --git a/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py b/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py index 1a97a06c51d9f..a133fdbe8b4f5 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py @@ -22,7 +22,7 @@ def _WriteWorkspace(main_gyp, sources_gyp, params): """Create a workspace to wrap main and sources gyp paths.""" - (build_file_root, build_file_ext) = os.path.splitext(main_gyp) + (build_file_root, _build_file_ext) = os.path.splitext(main_gyp) workspace_path = build_file_root + ".xcworkspace" options = params["options"] if options.generator_output: diff --git a/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py b/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py index 11e2be0737223..cb467470d3044 100644 --- a/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py +++ b/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py @@ -487,7 +487,7 @@ def Children(self): children = [] for property, attributes in self._schema.items(): - (is_list, property_type, is_strong) = attributes[0:3] + (is_list, _property_type, is_strong) = attributes[0:3] if is_strong and property in self._properties: if not is_list: children.append(self._properties[property]) @@ -913,7 +913,7 @@ def VerifyHasRequiredProperties(self): # TODO(mark): A stronger verification mechanism is needed. Some # subclasses need to perform validation beyond what the schema can enforce. for property, attributes in self._schema.items(): - (is_list, property_type, is_strong, is_required) = attributes[0:4] + (_is_list, _property_type, _is_strong, is_required) = attributes[0:4] if is_required and property not in self._properties: raise KeyError(self.__class__.__name__ + " requires " + property) @@ -923,7 +923,7 @@ def _SetDefaultsFromSchema(self): defaults = {} for property, attributes in self._schema.items(): - (is_list, property_type, is_strong, is_required) = attributes[0:4] + (_is_list, _property_type, _is_strong, is_required) = attributes[0:4] if ( is_required and len(attributes) >= 5 @@ -1616,7 +1616,7 @@ def __init__(self, properties=None, id=None, parent=None): prop_name = "lastKnownFileType" else: basename = posixpath.basename(self._properties["path"]) - (root, ext) = posixpath.splitext(basename) + (_root, ext) = posixpath.splitext(basename) # Check the map using a lowercase extension. 
# TODO(mark): Maybe it should try with the original case first and fall # back to lowercase, in case there are any instances where case @@ -2010,7 +2010,7 @@ def Name(self): return "Frameworks" def FileGroup(self, path): - (root, ext) = posixpath.splitext(path) + (_root, ext) = posixpath.splitext(path) if ext != "": ext = ext[1:].lower() if ext == "o": diff --git a/node_modules/node-gyp/gyp/pyproject.toml b/node_modules/node-gyp/gyp/pyproject.toml index 3a029c4fc5140..cd4f0383fd37c 100644 --- a/node_modules/node-gyp/gyp/pyproject.toml +++ b/node_modules/node-gyp/gyp/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "gyp-next" -version = "0.20.4" +version = "0.21.0" authors = [ { name="Node.js contributors", email="ryzokuken@disroot.org" }, ] diff --git a/node_modules/node-gyp/lib/find-visualstudio.js b/node_modules/node-gyp/lib/find-visualstudio.js index e9aa7fafdc98a..e0cf383489e28 100644 --- a/node_modules/node-gyp/lib/find-visualstudio.js +++ b/node_modules/node-gyp/lib/find-visualstudio.js @@ -119,7 +119,7 @@ class VisualStudioFinder { } async findVisualStudio2019OrNewerFromSpecifiedLocation () { - return this.findVSFromSpecifiedLocation([2019, 2022]) + return this.findVSFromSpecifiedLocation([2019, 2022, 2026]) } async findVisualStudio2017FromSpecifiedLocation () { @@ -162,7 +162,7 @@ class VisualStudioFinder { } async findVisualStudio2019OrNewerUsingSetupModule () { - return this.findNewVSUsingSetupModule([2019, 2022]) + return this.findNewVSUsingSetupModule([2019, 2022, 2026]) } async findVisualStudio2017UsingSetupModule () { @@ -223,7 +223,7 @@ class VisualStudioFinder { // Invoke the PowerShell script to get information about Visual Studio 2019 // or newer installations async findVisualStudio2019OrNewer () { - return this.findNewVS([2019, 2022]) + return this.findNewVS([2019, 2022, 2026]) } // Invoke the PowerShell script to get information about Visual Studio 2017 @@ -389,6 +389,10 @@ class VisualStudioFinder { ret.versionYear = 2022 return ret } + if (ret.versionMajor === 18) { + ret.versionYear = 2026 + return ret + } this.log.silly('- unsupported version:', ret.versionMajor) return {} } @@ -456,6 +460,8 @@ class VisualStudioFinder { return 'v142' } else if (versionYear === 2022) { return 'v143' + } else if (versionYear === 2026) { + return 'v145' } this.log.silly('- invalid versionYear:', versionYear) return null diff --git a/node_modules/node-gyp/node_modules/@npmcli/agent/lib/agents.js b/node_modules/node-gyp/node_modules/@npmcli/agent/lib/agents.js deleted file mode 100644 index c541b93001517..0000000000000 --- a/node_modules/node-gyp/node_modules/@npmcli/agent/lib/agents.js +++ /dev/null @@ -1,206 +0,0 @@ -'use strict' - -const net = require('net') -const tls = require('tls') -const { once } = require('events') -const timers = require('timers/promises') -const { normalizeOptions, cacheOptions } = require('./options') -const { getProxy, getProxyAgent, proxyCache } = require('./proxy.js') -const Errors = require('./errors.js') -const { Agent: AgentBase } = require('agent-base') - -module.exports = class Agent extends AgentBase { - #options - #timeouts - #proxy - #noProxy - #ProxyAgent - - constructor (options = {}) { - const { timeouts, proxy, noProxy, ...normalizedOptions } = normalizeOptions(options) - - super(normalizedOptions) - - this.#options = normalizedOptions - this.#timeouts = timeouts - - if (proxy) { - this.#proxy = new URL(proxy) - this.#noProxy = noProxy - this.#ProxyAgent = getProxyAgent(proxy) - } - } - - get proxy () { - 
return this.#proxy ? { url: this.#proxy } : {} - } - - #getProxy (options) { - if (!this.#proxy) { - return - } - - const proxy = getProxy(`${options.protocol}//${options.host}:${options.port}`, { - proxy: this.#proxy, - noProxy: this.#noProxy, - }) - - if (!proxy) { - return - } - - const cacheKey = cacheOptions({ - ...options, - ...this.#options, - timeouts: this.#timeouts, - proxy, - }) - - if (proxyCache.has(cacheKey)) { - return proxyCache.get(cacheKey) - } - - let ProxyAgent = this.#ProxyAgent - if (Array.isArray(ProxyAgent)) { - ProxyAgent = this.isSecureEndpoint(options) ? ProxyAgent[1] : ProxyAgent[0] - } - - const proxyAgent = new ProxyAgent(proxy, { - ...this.#options, - socketOptions: { family: this.#options.family }, - }) - proxyCache.set(cacheKey, proxyAgent) - - return proxyAgent - } - - // takes an array of promises and races them against the connection timeout - // which will throw the necessary error if it is hit. This will return the - // result of the promise race. - async #timeoutConnection ({ promises, options, timeout }, ac = new AbortController()) { - if (timeout) { - const connectionTimeout = timers.setTimeout(timeout, null, { signal: ac.signal }) - .then(() => { - throw new Errors.ConnectionTimeoutError(`${options.host}:${options.port}`) - }).catch((err) => { - if (err.name === 'AbortError') { - return - } - throw err - }) - promises.push(connectionTimeout) - } - - let result - try { - result = await Promise.race(promises) - ac.abort() - } catch (err) { - ac.abort() - throw err - } - return result - } - - async connect (request, options) { - // if the connection does not have its own lookup function - // set, then use the one from our options - options.lookup ??= this.#options.lookup - - let socket - let timeout = this.#timeouts.connection - const isSecureEndpoint = this.isSecureEndpoint(options) - - const proxy = this.#getProxy(options) - if (proxy) { - // some of the proxies will wait for the socket to fully connect before - // returning so we have to await this while also racing it against the - // connection timeout. - const start = Date.now() - socket = await this.#timeoutConnection({ - options, - timeout, - promises: [proxy.connect(request, options)], - }) - // see how much time proxy.connect took and subtract it from - // the timeout - if (timeout) { - timeout = timeout - (Date.now() - start) - } - } else { - socket = (isSecureEndpoint ? tls : net).connect(options) - } - - socket.setKeepAlive(this.keepAlive, this.keepAliveMsecs) - socket.setNoDelay(this.keepAlive) - - const abortController = new AbortController() - const { signal } = abortController - - const connectPromise = socket[isSecureEndpoint ? 'secureConnecting' : 'connecting'] - ? once(socket, isSecureEndpoint ? 'secureConnect' : 'connect', { signal }) - : Promise.resolve() - - await this.#timeoutConnection({ - options, - timeout, - promises: [ - connectPromise, - once(socket, 'error', { signal }).then((err) => { - throw err[0] - }), - ], - }, abortController) - - if (this.#timeouts.idle) { - socket.setTimeout(this.#timeouts.idle, () => { - socket.destroy(new Errors.IdleTimeoutError(`${options.host}:${options.port}`)) - }) - } - - return socket - } - - addRequest (request, options) { - const proxy = this.#getProxy(options) - // it would be better to call proxy.addRequest here but this causes the - // http-proxy-agent to call its super.addRequest which causes the request - // to be added to the agent twice. 
since we only support 3 agents - // currently (see the required agents in proxy.js) we have manually - // checked that the only public methods we need to call are called in the - // next block. this could change in the future and presumably we would get - // failing tests until we have properly called the necessary methods on - // each of our proxy agents - if (proxy?.setRequestProps) { - proxy.setRequestProps(request, options) - } - - request.setHeader('connection', this.keepAlive ? 'keep-alive' : 'close') - - if (this.#timeouts.response) { - let responseTimeout - request.once('finish', () => { - setTimeout(() => { - request.destroy(new Errors.ResponseTimeoutError(request, this.#proxy)) - }, this.#timeouts.response) - }) - request.once('response', () => { - clearTimeout(responseTimeout) - }) - } - - if (this.#timeouts.transfer) { - let transferTimeout - request.once('response', (res) => { - setTimeout(() => { - res.destroy(new Errors.TransferTimeoutError(request, this.#proxy)) - }, this.#timeouts.transfer) - res.once('close', () => { - clearTimeout(transferTimeout) - }) - }) - } - - return super.addRequest(request, options) - } -} diff --git a/node_modules/node-gyp/node_modules/@npmcli/agent/lib/dns.js b/node_modules/node-gyp/node_modules/@npmcli/agent/lib/dns.js deleted file mode 100644 index 3c6946c566d73..0000000000000 --- a/node_modules/node-gyp/node_modules/@npmcli/agent/lib/dns.js +++ /dev/null @@ -1,53 +0,0 @@ -'use strict' - -const { LRUCache } = require('lru-cache') -const dns = require('dns') - -// this is a factory so that each request can have its own opts (i.e. ttl) -// while still sharing the cache across all requests -const cache = new LRUCache({ max: 50 }) - -const getOptions = ({ - family = 0, - hints = dns.ADDRCONFIG, - all = false, - verbatim = undefined, - ttl = 5 * 60 * 1000, - lookup = dns.lookup, -}) => ({ - // hints and lookup are returned since both are top level properties to (net|tls).connect - hints, - lookup: (hostname, ...args) => { - const callback = args.pop() // callback is always last arg - const lookupOptions = args[0] ?? {} - - const options = { - family, - hints, - all, - verbatim, - ...(typeof lookupOptions === 'number' ? 
{ family: lookupOptions } : lookupOptions), - } - - const key = JSON.stringify({ hostname, ...options }) - - if (cache.has(key)) { - const cached = cache.get(key) - return process.nextTick(callback, null, ...cached) - } - - lookup(hostname, options, (err, ...result) => { - if (err) { - return callback(err) - } - - cache.set(key, result, { ttl }) - return callback(null, ...result) - }) - }, -}) - -module.exports = { - cache, - getOptions, -} diff --git a/node_modules/node-gyp/node_modules/@npmcli/agent/lib/errors.js b/node_modules/node-gyp/node_modules/@npmcli/agent/lib/errors.js deleted file mode 100644 index 70475aec8eb35..0000000000000 --- a/node_modules/node-gyp/node_modules/@npmcli/agent/lib/errors.js +++ /dev/null @@ -1,61 +0,0 @@ -'use strict' - -class InvalidProxyProtocolError extends Error { - constructor (url) { - super(`Invalid protocol \`${url.protocol}\` connecting to proxy \`${url.host}\``) - this.code = 'EINVALIDPROXY' - this.proxy = url - } -} - -class ConnectionTimeoutError extends Error { - constructor (host) { - super(`Timeout connecting to host \`${host}\``) - this.code = 'ECONNECTIONTIMEOUT' - this.host = host - } -} - -class IdleTimeoutError extends Error { - constructor (host) { - super(`Idle timeout reached for host \`${host}\``) - this.code = 'EIDLETIMEOUT' - this.host = host - } -} - -class ResponseTimeoutError extends Error { - constructor (request, proxy) { - let msg = 'Response timeout ' - if (proxy) { - msg += `from proxy \`${proxy.host}\` ` - } - msg += `connecting to host \`${request.host}\`` - super(msg) - this.code = 'ERESPONSETIMEOUT' - this.proxy = proxy - this.request = request - } -} - -class TransferTimeoutError extends Error { - constructor (request, proxy) { - let msg = 'Transfer timeout ' - if (proxy) { - msg += `from proxy \`${proxy.host}\` ` - } - msg += `for \`${request.host}\`` - super(msg) - this.code = 'ETRANSFERTIMEOUT' - this.proxy = proxy - this.request = request - } -} - -module.exports = { - InvalidProxyProtocolError, - ConnectionTimeoutError, - IdleTimeoutError, - ResponseTimeoutError, - TransferTimeoutError, -} diff --git a/node_modules/node-gyp/node_modules/@npmcli/agent/lib/index.js b/node_modules/node-gyp/node_modules/@npmcli/agent/lib/index.js deleted file mode 100644 index b33d6eaef07a2..0000000000000 --- a/node_modules/node-gyp/node_modules/@npmcli/agent/lib/index.js +++ /dev/null @@ -1,56 +0,0 @@ -'use strict' - -const { LRUCache } = require('lru-cache') -const { normalizeOptions, cacheOptions } = require('./options') -const { getProxy, proxyCache } = require('./proxy.js') -const dns = require('./dns.js') -const Agent = require('./agents.js') - -const agentCache = new LRUCache({ max: 20 }) - -const getAgent = (url, { agent, proxy, noProxy, ...options } = {}) => { - // false has meaning so this can't be a simple truthiness check - if (agent != null) { - return agent - } - - url = new URL(url) - - const proxyForUrl = getProxy(url, { proxy, noProxy }) - const normalizedOptions = { - ...normalizeOptions(options), - proxy: proxyForUrl, - } - - const cacheKey = cacheOptions({ - ...normalizedOptions, - secureEndpoint: url.protocol === 'https:', - }) - - if (agentCache.has(cacheKey)) { - return agentCache.get(cacheKey) - } - - const newAgent = new Agent(normalizedOptions) - agentCache.set(cacheKey, newAgent) - - return newAgent -} - -module.exports = { - getAgent, - Agent, - // these are exported for backwards compatability - HttpAgent: Agent, - HttpsAgent: Agent, - cache: { - proxy: proxyCache, - agent: agentCache, - dns: dns.cache, - 
clear: () => { - proxyCache.clear() - agentCache.clear() - dns.cache.clear() - }, - }, -} diff --git a/node_modules/node-gyp/node_modules/@npmcli/agent/lib/options.js b/node_modules/node-gyp/node_modules/@npmcli/agent/lib/options.js deleted file mode 100644 index 0bf53f725f084..0000000000000 --- a/node_modules/node-gyp/node_modules/@npmcli/agent/lib/options.js +++ /dev/null @@ -1,86 +0,0 @@ -'use strict' - -const dns = require('./dns') - -const normalizeOptions = (opts) => { - const family = parseInt(opts.family ?? '0', 10) - const keepAlive = opts.keepAlive ?? true - - const normalized = { - // nodejs http agent options. these are all the defaults - // but kept here to increase the likelihood of cache hits - // https://nodejs.org/api/http.html#new-agentoptions - keepAliveMsecs: keepAlive ? 1000 : undefined, - maxSockets: opts.maxSockets ?? 15, - maxTotalSockets: Infinity, - maxFreeSockets: keepAlive ? 256 : undefined, - scheduling: 'fifo', - // then spread the rest of the options - ...opts, - // we already set these to their defaults that we want - family, - keepAlive, - // our custom timeout options - timeouts: { - // the standard timeout option is mapped to our idle timeout - // and then deleted below - idle: opts.timeout ?? 0, - connection: 0, - response: 0, - transfer: 0, - ...opts.timeouts, - }, - // get the dns options that go at the top level of socket connection - ...dns.getOptions({ family, ...opts.dns }), - } - - // remove timeout since we already used it to set our own idle timeout - delete normalized.timeout - - return normalized -} - -const createKey = (obj) => { - let key = '' - const sorted = Object.entries(obj).sort((a, b) => a[0] - b[0]) - for (let [k, v] of sorted) { - if (v == null) { - v = 'null' - } else if (v instanceof URL) { - v = v.toString() - } else if (typeof v === 'object') { - v = createKey(v) - } - key += `${k}:${v}:` - } - return key -} - -const cacheOptions = ({ secureEndpoint, ...options }) => createKey({ - secureEndpoint: !!secureEndpoint, - // socket connect options - family: options.family, - hints: options.hints, - localAddress: options.localAddress, - // tls specific connect options - strictSsl: secureEndpoint ? !!options.rejectUnauthorized : false, - ca: secureEndpoint ? options.ca : null, - cert: secureEndpoint ? options.cert : null, - key: secureEndpoint ? 
options.key : null, - // http agent options - keepAlive: options.keepAlive, - keepAliveMsecs: options.keepAliveMsecs, - maxSockets: options.maxSockets, - maxTotalSockets: options.maxTotalSockets, - maxFreeSockets: options.maxFreeSockets, - scheduling: options.scheduling, - // timeout options - timeouts: options.timeouts, - // proxy - proxy: options.proxy, -}) - -module.exports = { - normalizeOptions, - cacheOptions, -} diff --git a/node_modules/node-gyp/node_modules/@npmcli/agent/lib/proxy.js b/node_modules/node-gyp/node_modules/@npmcli/agent/lib/proxy.js deleted file mode 100644 index 6272e929e57bc..0000000000000 --- a/node_modules/node-gyp/node_modules/@npmcli/agent/lib/proxy.js +++ /dev/null @@ -1,88 +0,0 @@ -'use strict' - -const { HttpProxyAgent } = require('http-proxy-agent') -const { HttpsProxyAgent } = require('https-proxy-agent') -const { SocksProxyAgent } = require('socks-proxy-agent') -const { LRUCache } = require('lru-cache') -const { InvalidProxyProtocolError } = require('./errors.js') - -const PROXY_CACHE = new LRUCache({ max: 20 }) - -const SOCKS_PROTOCOLS = new Set(SocksProxyAgent.protocols) - -const PROXY_ENV_KEYS = new Set(['https_proxy', 'http_proxy', 'proxy', 'no_proxy']) - -const PROXY_ENV = Object.entries(process.env).reduce((acc, [key, value]) => { - key = key.toLowerCase() - if (PROXY_ENV_KEYS.has(key)) { - acc[key] = value - } - return acc -}, {}) - -const getProxyAgent = (url) => { - url = new URL(url) - - const protocol = url.protocol.slice(0, -1) - if (SOCKS_PROTOCOLS.has(protocol)) { - return SocksProxyAgent - } - if (protocol === 'https' || protocol === 'http') { - return [HttpProxyAgent, HttpsProxyAgent] - } - - throw new InvalidProxyProtocolError(url) -} - -const isNoProxy = (url, noProxy) => { - if (typeof noProxy === 'string') { - noProxy = noProxy.split(',').map((p) => p.trim()).filter(Boolean) - } - - if (!noProxy || !noProxy.length) { - return false - } - - const hostSegments = url.hostname.split('.').reverse() - - return noProxy.some((no) => { - const noSegments = no.split('.').filter(Boolean).reverse() - if (!noSegments.length) { - return false - } - - for (let i = 0; i < noSegments.length; i++) { - if (hostSegments[i] !== noSegments[i]) { - return false - } - } - - return true - }) -} - -const getProxy = (url, { proxy, noProxy }) => { - url = new URL(url) - - if (!proxy) { - proxy = url.protocol === 'https:' - ? 
PROXY_ENV.https_proxy - : PROXY_ENV.https_proxy || PROXY_ENV.http_proxy || PROXY_ENV.proxy - } - - if (!noProxy) { - noProxy = PROXY_ENV.no_proxy - } - - if (!proxy || isNoProxy(url, noProxy)) { - return null - } - - return new URL(proxy) -} - -module.exports = { - getProxyAgent, - getProxy, - proxyCache: PROXY_CACHE, -} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/content/path.js b/node_modules/node-gyp/node_modules/cacache/lib/content/path.js deleted file mode 100644 index ad5a76a4f73f2..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/lib/content/path.js +++ /dev/null @@ -1,29 +0,0 @@ -'use strict' - -const contentVer = require('../../package.json')['cache-version'].content -const hashToSegments = require('../util/hash-to-segments') -const path = require('path') -const ssri = require('ssri') - -// Current format of content file path: -// -// sha512-BaSE64Hex= -> -// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee -// -module.exports = contentPath - -function contentPath (cache, integrity) { - const sri = ssri.parse(integrity, { single: true }) - // contentPath is the *strongest* algo given - return path.join( - contentDir(cache), - sri.algorithm, - ...hashToSegments(sri.hexDigest()) - ) -} - -module.exports.contentDir = contentDir - -function contentDir (cache) { - return path.join(cache, `content-v${contentVer}`) -} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/content/read.js b/node_modules/node-gyp/node_modules/cacache/lib/content/read.js deleted file mode 100644 index 5f6192c3cec56..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/lib/content/read.js +++ /dev/null @@ -1,165 +0,0 @@ -'use strict' - -const fs = require('fs/promises') -const fsm = require('fs-minipass') -const ssri = require('ssri') -const contentPath = require('./path') -const Pipeline = require('minipass-pipeline') - -module.exports = read - -const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024 -async function read (cache, integrity, opts = {}) { - const { size } = opts - const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { - // get size - const stat = size ? { size } : await fs.stat(cpath) - return { stat, cpath, sri } - }) - - if (stat.size > MAX_SINGLE_READ_SIZE) { - return readPipeline(cpath, stat.size, sri, new Pipeline()).concat() - } - - const data = await fs.readFile(cpath, { encoding: null }) - - if (stat.size !== data.length) { - throw sizeError(stat.size, data.length) - } - - if (!ssri.checkData(data, sri)) { - throw integrityError(sri, cpath) - } - - return data -} - -const readPipeline = (cpath, size, sri, stream) => { - stream.push( - new fsm.ReadStream(cpath, { - size, - readSize: MAX_SINGLE_READ_SIZE, - }), - ssri.integrityStream({ - integrity: sri, - size, - }) - ) - return stream -} - -module.exports.stream = readStream -module.exports.readStream = readStream - -function readStream (cache, integrity, opts = {}) { - const { size } = opts - const stream = new Pipeline() - // Set all this up to run on the stream and then just return the stream - Promise.resolve().then(async () => { - const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { - // get size - const stat = size ? 
{ size } : await fs.stat(cpath) - return { stat, cpath, sri } - }) - - return readPipeline(cpath, stat.size, sri, stream) - }).catch(err => stream.emit('error', err)) - - return stream -} - -module.exports.copy = copy - -function copy (cache, integrity, dest) { - return withContentSri(cache, integrity, (cpath) => { - return fs.copyFile(cpath, dest) - }) -} - -module.exports.hasContent = hasContent - -async function hasContent (cache, integrity) { - if (!integrity) { - return false - } - - try { - return await withContentSri(cache, integrity, async (cpath, sri) => { - const stat = await fs.stat(cpath) - return { size: stat.size, sri, stat } - }) - } catch (err) { - if (err.code === 'ENOENT') { - return false - } - - if (err.code === 'EPERM') { - /* istanbul ignore else */ - if (process.platform !== 'win32') { - throw err - } else { - return false - } - } - } -} - -async function withContentSri (cache, integrity, fn) { - const sri = ssri.parse(integrity) - // If `integrity` has multiple entries, pick the first digest - // with available local data. - const algo = sri.pickAlgorithm() - const digests = sri[algo] - - if (digests.length <= 1) { - const cpath = contentPath(cache, digests[0]) - return fn(cpath, digests[0]) - } else { - // Can't use race here because a generic error can happen before - // a ENOENT error, and can happen before a valid result - const results = await Promise.all(digests.map(async (meta) => { - try { - return await withContentSri(cache, meta, fn) - } catch (err) { - if (err.code === 'ENOENT') { - return Object.assign( - new Error('No matching content found for ' + sri.toString()), - { code: 'ENOENT' } - ) - } - return err - } - })) - // Return the first non error if it is found - const result = results.find((r) => !(r instanceof Error)) - if (result) { - return result - } - - // Throw the No matching content found error - const enoentError = results.find((r) => r.code === 'ENOENT') - if (enoentError) { - throw enoentError - } - - // Throw generic error - throw results.find((r) => r instanceof Error) - } -} - -function sizeError (expected, found) { - /* eslint-disable-next-line max-len */ - const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) - err.expected = expected - err.found = found - err.code = 'EBADSIZE' - return err -} - -function integrityError (sri, path) { - const err = new Error(`Integrity verification failed for ${sri} (${path})`) - err.code = 'EINTEGRITY' - err.sri = sri - err.path = path - return err -} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/content/rm.js b/node_modules/node-gyp/node_modules/cacache/lib/content/rm.js deleted file mode 100644 index ce58d679e4cb2..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/lib/content/rm.js +++ /dev/null @@ -1,18 +0,0 @@ -'use strict' - -const fs = require('fs/promises') -const contentPath = require('./path') -const { hasContent } = require('./read') - -module.exports = rm - -async function rm (cache, integrity) { - const content = await hasContent(cache, integrity) - // ~pretty~ sure we can't end up with a content lacking sri, but be safe - if (content && content.sri) { - await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true }) - return true - } else { - return false - } -} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/content/write.js b/node_modules/node-gyp/node_modules/cacache/lib/content/write.js deleted file mode 100644 index e7187abca8788..0000000000000 --- 
a/node_modules/node-gyp/node_modules/cacache/lib/content/write.js +++ /dev/null @@ -1,206 +0,0 @@ -'use strict' - -const events = require('events') - -const contentPath = require('./path') -const fs = require('fs/promises') -const { moveFile } = require('@npmcli/fs') -const { Minipass } = require('minipass') -const Pipeline = require('minipass-pipeline') -const Flush = require('minipass-flush') -const path = require('path') -const ssri = require('ssri') -const uniqueFilename = require('unique-filename') -const fsm = require('fs-minipass') - -module.exports = write - -// Cache of move operations in process so we don't duplicate -const moveOperations = new Map() - -async function write (cache, data, opts = {}) { - const { algorithms, size, integrity } = opts - - if (typeof size === 'number' && data.length !== size) { - throw sizeError(size, data.length) - } - - const sri = ssri.fromData(data, algorithms ? { algorithms } : {}) - if (integrity && !ssri.checkData(data, integrity, opts)) { - throw checksumError(integrity, sri) - } - - for (const algo in sri) { - const tmp = await makeTmp(cache, opts) - const hash = sri[algo].toString() - try { - await fs.writeFile(tmp.target, data, { flag: 'wx' }) - await moveToDestination(tmp, cache, hash, opts) - } finally { - if (!tmp.moved) { - await fs.rm(tmp.target, { recursive: true, force: true }) - } - } - } - return { integrity: sri, size: data.length } -} - -module.exports.stream = writeStream - -// writes proxied to the 'inputStream' that is passed to the Promise -// 'end' is deferred until content is handled. -class CacacheWriteStream extends Flush { - constructor (cache, opts) { - super() - this.opts = opts - this.cache = cache - this.inputStream = new Minipass() - this.inputStream.on('error', er => this.emit('error', er)) - this.inputStream.on('drain', () => this.emit('drain')) - this.handleContentP = null - } - - write (chunk, encoding, cb) { - if (!this.handleContentP) { - this.handleContentP = handleContent( - this.inputStream, - this.cache, - this.opts - ) - this.handleContentP.catch(error => this.emit('error', error)) - } - return this.inputStream.write(chunk, encoding, cb) - } - - flush (cb) { - this.inputStream.end(() => { - if (!this.handleContentP) { - const e = new Error('Cache input stream was empty') - e.code = 'ENODATA' - // empty streams are probably emitting end right away. - // defer this one tick by rejecting a promise on it. 
- return Promise.reject(e).catch(cb) - } - // eslint-disable-next-line promise/catch-or-return - this.handleContentP.then( - (res) => { - res.integrity && this.emit('integrity', res.integrity) - // eslint-disable-next-line promise/always-return - res.size !== null && this.emit('size', res.size) - cb() - }, - (er) => cb(er) - ) - }) - } -} - -function writeStream (cache, opts = {}) { - return new CacacheWriteStream(cache, opts) -} - -async function handleContent (inputStream, cache, opts) { - const tmp = await makeTmp(cache, opts) - try { - const res = await pipeToTmp(inputStream, cache, tmp.target, opts) - await moveToDestination( - tmp, - cache, - res.integrity, - opts - ) - return res - } finally { - if (!tmp.moved) { - await fs.rm(tmp.target, { recursive: true, force: true }) - } - } -} - -async function pipeToTmp (inputStream, cache, tmpTarget, opts) { - const outStream = new fsm.WriteStream(tmpTarget, { - flags: 'wx', - }) - - if (opts.integrityEmitter) { - // we need to create these all simultaneously since they can fire in any order - const [integrity, size] = await Promise.all([ - events.once(opts.integrityEmitter, 'integrity').then(res => res[0]), - events.once(opts.integrityEmitter, 'size').then(res => res[0]), - new Pipeline(inputStream, outStream).promise(), - ]) - return { integrity, size } - } - - let integrity - let size - const hashStream = ssri.integrityStream({ - integrity: opts.integrity, - algorithms: opts.algorithms, - size: opts.size, - }) - hashStream.on('integrity', i => { - integrity = i - }) - hashStream.on('size', s => { - size = s - }) - - const pipeline = new Pipeline(inputStream, hashStream, outStream) - await pipeline.promise() - return { integrity, size } -} - -async function makeTmp (cache, opts) { - const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) - await fs.mkdir(path.dirname(tmpTarget), { recursive: true }) - return { - target: tmpTarget, - moved: false, - } -} - -async function moveToDestination (tmp, cache, sri) { - const destination = contentPath(cache, sri) - const destDir = path.dirname(destination) - if (moveOperations.has(destination)) { - return moveOperations.get(destination) - } - moveOperations.set( - destination, - fs.mkdir(destDir, { recursive: true }) - .then(async () => { - await moveFile(tmp.target, destination, { overwrite: false }) - tmp.moved = true - return tmp.moved - }) - .catch(err => { - if (!err.message.startsWith('The destination file exists')) { - throw Object.assign(err, { code: 'EEXIST' }) - } - }).finally(() => { - moveOperations.delete(destination) - }) - - ) - return moveOperations.get(destination) -} - -function sizeError (expected, found) { - /* eslint-disable-next-line max-len */ - const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) - err.expected = expected - err.found = found - err.code = 'EBADSIZE' - return err -} - -function checksumError (expected, found) { - const err = new Error(`Integrity check failed: - Wanted: ${expected} - Found: ${found}`) - err.code = 'EINTEGRITY' - err.expected = expected - err.found = found - return err -} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/entry-index.js b/node_modules/node-gyp/node_modules/cacache/lib/entry-index.js deleted file mode 100644 index 0e09b10818d09..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/lib/entry-index.js +++ /dev/null @@ -1,336 +0,0 @@ -'use strict' - -const crypto = require('crypto') -const { - appendFile, - mkdir, - readFile, - readdir, - 
rm, - writeFile, -} = require('fs/promises') -const { Minipass } = require('minipass') -const path = require('path') -const ssri = require('ssri') -const uniqueFilename = require('unique-filename') - -const contentPath = require('./content/path') -const hashToSegments = require('./util/hash-to-segments') -const indexV = require('../package.json')['cache-version'].index -const { moveFile } = require('@npmcli/fs') - -const lsStreamConcurrency = 5 - -module.exports.NotFoundError = class NotFoundError extends Error { - constructor (cache, key) { - super(`No cache entry for ${key} found in ${cache}`) - this.code = 'ENOENT' - this.cache = cache - this.key = key - } -} - -module.exports.compact = compact - -async function compact (cache, key, matchFn, opts = {}) { - const bucket = bucketPath(cache, key) - const entries = await bucketEntries(bucket) - const newEntries = [] - // we loop backwards because the bottom-most result is the newest - // since we add new entries with appendFile - for (let i = entries.length - 1; i >= 0; --i) { - const entry = entries[i] - // a null integrity could mean either a delete was appended - // or the user has simply stored an index that does not map - // to any content. we determine if the user wants to keep the - // null integrity based on the validateEntry function passed in options. - // if the integrity is null and no validateEntry is provided, we break - // as we consider the null integrity to be a deletion of everything - // that came before it. - if (entry.integrity === null && !opts.validateEntry) { - break - } - - // if this entry is valid, and it is either the first entry or - // the newEntries array doesn't already include an entry that - // matches this one based on the provided matchFn, then we add - // it to the beginning of our list - if ((!opts.validateEntry || opts.validateEntry(entry) === true) && - (newEntries.length === 0 || - !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) { - newEntries.unshift(entry) - } - } - - const newIndex = '\n' + newEntries.map((entry) => { - const stringified = JSON.stringify(entry) - const hash = hashEntry(stringified) - return `${hash}\t${stringified}` - }).join('\n') - - const setup = async () => { - const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) - await mkdir(path.dirname(target), { recursive: true }) - return { - target, - moved: false, - } - } - - const teardown = async (tmp) => { - if (!tmp.moved) { - return rm(tmp.target, { recursive: true, force: true }) - } - } - - const write = async (tmp) => { - await writeFile(tmp.target, newIndex, { flag: 'wx' }) - await mkdir(path.dirname(bucket), { recursive: true }) - // we use @npmcli/move-file directly here because we - // want to overwrite the existing file - await moveFile(tmp.target, bucket) - tmp.moved = true - } - - // write the file atomically - const tmp = await setup() - try { - await write(tmp) - } finally { - await teardown(tmp) - } - - // we reverse the list we generated such that the newest - // entries come first in order to make looping through them easier - // the true passed to formatEntry tells it to keep null - // integrity values, if they made it this far it's because - // validateEntry returned true, and as such we should return it - return newEntries.reverse().map((entry) => formatEntry(cache, entry, true)) -} - -module.exports.insert = insert - -async function insert (cache, key, integrity, opts = {}) { - const { metadata, size, time } = opts - const bucket = bucketPath(cache, key) - const entry = { - key, - 
integrity: integrity && ssri.stringify(integrity), - time: time || Date.now(), - size, - metadata, - } - try { - await mkdir(path.dirname(bucket), { recursive: true }) - const stringified = JSON.stringify(entry) - // NOTE - Cleverness ahoy! - // - // This works because it's tremendously unlikely for an entry to corrupt - // another while still preserving the string length of the JSON in - // question. So, we just slap the length in there and verify it on read. - // - // Thanks to @isaacs for the whiteboarding session that ended up with - // this. - await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) - } catch (err) { - if (err.code === 'ENOENT') { - return undefined - } - - throw err - } - return formatEntry(cache, entry) -} - -module.exports.find = find - -async function find (cache, key) { - const bucket = bucketPath(cache, key) - try { - const entries = await bucketEntries(bucket) - return entries.reduce((latest, next) => { - if (next && next.key === key) { - return formatEntry(cache, next) - } else { - return latest - } - }, null) - } catch (err) { - if (err.code === 'ENOENT') { - return null - } else { - throw err - } - } -} - -module.exports.delete = del - -function del (cache, key, opts = {}) { - if (!opts.removeFully) { - return insert(cache, key, null, opts) - } - - const bucket = bucketPath(cache, key) - return rm(bucket, { recursive: true, force: true }) -} - -module.exports.lsStream = lsStream - -function lsStream (cache) { - const indexDir = bucketDir(cache) - const stream = new Minipass({ objectMode: true }) - - // Set all this up to run on the stream and then just return the stream - Promise.resolve().then(async () => { - const { default: pMap } = await import('p-map') - const buckets = await readdirOrEmpty(indexDir) - await pMap(buckets, async (bucket) => { - const bucketPath = path.join(indexDir, bucket) - const subbuckets = await readdirOrEmpty(bucketPath) - await pMap(subbuckets, async (subbucket) => { - const subbucketPath = path.join(bucketPath, subbucket) - - // "/cachename//./*" - const subbucketEntries = await readdirOrEmpty(subbucketPath) - await pMap(subbucketEntries, async (entry) => { - const entryPath = path.join(subbucketPath, entry) - try { - const entries = await bucketEntries(entryPath) - // using a Map here prevents duplicate keys from showing up - // twice, I guess? 
- const reduced = entries.reduce((acc, entry) => { - acc.set(entry.key, entry) - return acc - }, new Map()) - // reduced is a map of key => entry - for (const entry of reduced.values()) { - const formatted = formatEntry(cache, entry) - if (formatted) { - stream.write(formatted) - } - } - } catch (err) { - if (err.code === 'ENOENT') { - return undefined - } - throw err - } - }, - { concurrency: lsStreamConcurrency }) - }, - { concurrency: lsStreamConcurrency }) - }, - { concurrency: lsStreamConcurrency }) - stream.end() - return stream - }).catch(err => stream.emit('error', err)) - - return stream -} - -module.exports.ls = ls - -async function ls (cache) { - const entries = await lsStream(cache).collect() - return entries.reduce((acc, xs) => { - acc[xs.key] = xs - return acc - }, {}) -} - -module.exports.bucketEntries = bucketEntries - -async function bucketEntries (bucket, filter) { - const data = await readFile(bucket, 'utf8') - return _bucketEntries(data, filter) -} - -function _bucketEntries (data) { - const entries = [] - data.split('\n').forEach((entry) => { - if (!entry) { - return - } - - const pieces = entry.split('\t') - if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) { - // Hash is no good! Corruption or malice? Doesn't matter! - // EJECT EJECT - return - } - let obj - try { - obj = JSON.parse(pieces[1]) - } catch (_) { - // eslint-ignore-next-line no-empty-block - } - // coverage disabled here, no need to test with an entry that parses to something falsey - // istanbul ignore else - if (obj) { - entries.push(obj) - } - }) - return entries -} - -module.exports.bucketDir = bucketDir - -function bucketDir (cache) { - return path.join(cache, `index-v${indexV}`) -} - -module.exports.bucketPath = bucketPath - -function bucketPath (cache, key) { - const hashed = hashKey(key) - return path.join.apply( - path, - [bucketDir(cache)].concat(hashToSegments(hashed)) - ) -} - -module.exports.hashKey = hashKey - -function hashKey (key) { - return hash(key, 'sha256') -} - -module.exports.hashEntry = hashEntry - -function hashEntry (str) { - return hash(str, 'sha1') -} - -function hash (str, digest) { - return crypto - .createHash(digest) - .update(str) - .digest('hex') -} - -function formatEntry (cache, entry, keepAll) { - // Treat null digests as deletions. They'll shadow any previous entries. - if (!entry.integrity && !keepAll) { - return null - } - - return { - key: entry.key, - integrity: entry.integrity, - path: entry.integrity ? 
contentPath(cache, entry.integrity) : undefined, - size: entry.size, - time: entry.time, - metadata: entry.metadata, - } -} - -function readdirOrEmpty (dir) { - return readdir(dir).catch((err) => { - if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { - return [] - } - - throw err - }) -} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/get.js b/node_modules/node-gyp/node_modules/cacache/lib/get.js deleted file mode 100644 index 80ec206c7ecaa..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/lib/get.js +++ /dev/null @@ -1,170 +0,0 @@ -'use strict' - -const Collect = require('minipass-collect') -const { Minipass } = require('minipass') -const Pipeline = require('minipass-pipeline') - -const index = require('./entry-index') -const memo = require('./memoization') -const read = require('./content/read') - -async function getData (cache, key, opts = {}) { - const { integrity, memoize, size } = opts - const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) { - return { - metadata: memoized.entry.metadata, - data: memoized.data, - integrity: memoized.entry.integrity, - size: memoized.entry.size, - } - } - - const entry = await index.find(cache, key, opts) - if (!entry) { - throw new index.NotFoundError(cache, key) - } - const data = await read(cache, entry.integrity, { integrity, size }) - if (memoize) { - memo.put(cache, entry, data, opts) - } - - return { - data, - metadata: entry.metadata, - size: entry.size, - integrity: entry.integrity, - } -} -module.exports = getData - -async function getDataByDigest (cache, key, opts = {}) { - const { integrity, memoize, size } = opts - const memoized = memo.get.byDigest(cache, key, opts) - if (memoized && memoize !== false) { - return memoized - } - - const res = await read(cache, key, { integrity, size }) - if (memoize) { - memo.put.byDigest(cache, key, res, opts) - } - return res -} -module.exports.byDigest = getDataByDigest - -const getMemoizedStream = (memoized) => { - const stream = new Minipass() - stream.on('newListener', function (ev, cb) { - ev === 'metadata' && cb(memoized.entry.metadata) - ev === 'integrity' && cb(memoized.entry.integrity) - ev === 'size' && cb(memoized.entry.size) - }) - stream.end(memoized.data) - return stream -} - -function getStream (cache, key, opts = {}) { - const { memoize, size } = opts - const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) { - return getMemoizedStream(memoized) - } - - const stream = new Pipeline() - // Set all this up to run on the stream and then just return the stream - Promise.resolve().then(async () => { - const entry = await index.find(cache, key) - if (!entry) { - throw new index.NotFoundError(cache, key) - } - - stream.emit('metadata', entry.metadata) - stream.emit('integrity', entry.integrity) - stream.emit('size', entry.size) - stream.on('newListener', function (ev, cb) { - ev === 'metadata' && cb(entry.metadata) - ev === 'integrity' && cb(entry.integrity) - ev === 'size' && cb(entry.size) - }) - - const src = read.readStream( - cache, - entry.integrity, - { ...opts, size: typeof size !== 'number' ? 
entry.size : size } - ) - - if (memoize) { - const memoStream = new Collect.PassThrough() - memoStream.on('collect', data => memo.put(cache, entry, data, opts)) - stream.unshift(memoStream) - } - stream.unshift(src) - return stream - }).catch((err) => stream.emit('error', err)) - - return stream -} - -module.exports.stream = getStream - -function getStreamDigest (cache, integrity, opts = {}) { - const { memoize } = opts - const memoized = memo.get.byDigest(cache, integrity, opts) - if (memoized && memoize !== false) { - const stream = new Minipass() - stream.end(memoized) - return stream - } else { - const stream = read.readStream(cache, integrity, opts) - if (!memoize) { - return stream - } - - const memoStream = new Collect.PassThrough() - memoStream.on('collect', data => memo.put.byDigest( - cache, - integrity, - data, - opts - )) - return new Pipeline(stream, memoStream) - } -} - -module.exports.stream.byDigest = getStreamDigest - -function info (cache, key, opts = {}) { - const { memoize } = opts - const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) { - return Promise.resolve(memoized.entry) - } else { - return index.find(cache, key) - } -} -module.exports.info = info - -async function copy (cache, key, dest, opts = {}) { - const entry = await index.find(cache, key, opts) - if (!entry) { - throw new index.NotFoundError(cache, key) - } - await read.copy(cache, entry.integrity, dest, opts) - return { - metadata: entry.metadata, - size: entry.size, - integrity: entry.integrity, - } -} - -module.exports.copy = copy - -async function copyByDigest (cache, key, dest, opts = {}) { - await read.copy(cache, key, dest, opts) - return key -} - -module.exports.copy.byDigest = copyByDigest - -module.exports.hasContent = read.hasContent diff --git a/node_modules/node-gyp/node_modules/cacache/lib/index.js b/node_modules/node-gyp/node_modules/cacache/lib/index.js deleted file mode 100644 index c9b0da5f3a271..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/lib/index.js +++ /dev/null @@ -1,42 +0,0 @@ -'use strict' - -const get = require('./get.js') -const put = require('./put.js') -const rm = require('./rm.js') -const verify = require('./verify.js') -const { clearMemoized } = require('./memoization.js') -const tmp = require('./util/tmp.js') -const index = require('./entry-index.js') - -module.exports.index = {} -module.exports.index.compact = index.compact -module.exports.index.insert = index.insert - -module.exports.ls = index.ls -module.exports.ls.stream = index.lsStream - -module.exports.get = get -module.exports.get.byDigest = get.byDigest -module.exports.get.stream = get.stream -module.exports.get.stream.byDigest = get.stream.byDigest -module.exports.get.copy = get.copy -module.exports.get.copy.byDigest = get.copy.byDigest -module.exports.get.info = get.info -module.exports.get.hasContent = get.hasContent - -module.exports.put = put -module.exports.put.stream = put.stream - -module.exports.rm = rm.entry -module.exports.rm.all = rm.all -module.exports.rm.entry = module.exports.rm -module.exports.rm.content = rm.content - -module.exports.clearMemoized = clearMemoized - -module.exports.tmp = {} -module.exports.tmp.mkdir = tmp.mkdir -module.exports.tmp.withTmp = tmp.withTmp - -module.exports.verify = verify -module.exports.verify.lastRun = verify.lastRun diff --git a/node_modules/node-gyp/node_modules/cacache/lib/memoization.js b/node_modules/node-gyp/node_modules/cacache/lib/memoization.js deleted file mode 100644 index 2ecc60912e456..0000000000000 --- 
a/node_modules/node-gyp/node_modules/cacache/lib/memoization.js +++ /dev/null @@ -1,72 +0,0 @@ -'use strict' - -const { LRUCache } = require('lru-cache') - -const MEMOIZED = new LRUCache({ - max: 500, - maxSize: 50 * 1024 * 1024, // 50MB - ttl: 3 * 60 * 1000, // 3 minutes - sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length, -}) - -module.exports.clearMemoized = clearMemoized - -function clearMemoized () { - const old = {} - MEMOIZED.forEach((v, k) => { - old[k] = v - }) - MEMOIZED.clear() - return old -} - -module.exports.put = put - -function put (cache, entry, data, opts) { - pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data }) - putDigest(cache, entry.integrity, data, opts) -} - -module.exports.put.byDigest = putDigest - -function putDigest (cache, integrity, data, opts) { - pickMem(opts).set(`digest:${cache}:${integrity}`, data) -} - -module.exports.get = get - -function get (cache, key, opts) { - return pickMem(opts).get(`key:${cache}:${key}`) -} - -module.exports.get.byDigest = getDigest - -function getDigest (cache, integrity, opts) { - return pickMem(opts).get(`digest:${cache}:${integrity}`) -} - -class ObjProxy { - constructor (obj) { - this.obj = obj - } - - get (key) { - return this.obj[key] - } - - set (key, val) { - this.obj[key] = val - } -} - -function pickMem (opts) { - if (!opts || !opts.memoize) { - return MEMOIZED - } else if (opts.memoize.get && opts.memoize.set) { - return opts.memoize - } else if (typeof opts.memoize === 'object') { - return new ObjProxy(opts.memoize) - } else { - return MEMOIZED - } -} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/put.js b/node_modules/node-gyp/node_modules/cacache/lib/put.js deleted file mode 100644 index 9fc932d5f6dec..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/lib/put.js +++ /dev/null @@ -1,80 +0,0 @@ -'use strict' - -const index = require('./entry-index') -const memo = require('./memoization') -const write = require('./content/write') -const Flush = require('minipass-flush') -const { PassThrough } = require('minipass-collect') -const Pipeline = require('minipass-pipeline') - -const putOpts = (opts) => ({ - algorithms: ['sha512'], - ...opts, -}) - -module.exports = putData - -async function putData (cache, key, data, opts = {}) { - const { memoize } = opts - opts = putOpts(opts) - const res = await write(cache, data, opts) - const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size }) - if (memoize) { - memo.put(cache, entry, data, opts) - } - - return res.integrity -} - -module.exports.stream = putStream - -function putStream (cache, key, opts = {}) { - const { memoize } = opts - opts = putOpts(opts) - let integrity - let size - let error - - let memoData - const pipeline = new Pipeline() - // first item in the pipeline is the memoizer, because we need - // that to end first and get the collected data. - if (memoize) { - const memoizer = new PassThrough().on('collect', data => { - memoData = data - }) - pipeline.push(memoizer) - } - - // contentStream is a write-only, not a passthrough - // no data comes out of it. 
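The read/write surface that these deleted cacache files implement is easiest to follow from the caller's side. A minimal usage sketch against the public API re-exported by the lib/index.js removed above; the cache path, key, and demo wrapper are illustrative assumptions, not part of the package.

'use strict'
const cacache = require('cacache')

async function demo () {
  const cachePath = '/tmp/demo-cache' // hypothetical cache location
  const key = 'registry:some-tarball' // hypothetical index key
  // put() stores the data content-addressed, writes an index entry for the key,
  // and resolves to the integrity string of what was written
  const integrity = await cacache.put(cachePath, key, Buffer.from('hello'), {
    metadata: { fetched: Date.now() },
  })
  // get() looks the key up in the index and returns the data plus entry info
  const { data, metadata, size } = await cacache.get(cachePath, key)
  // get.byDigest() skips the index and reads straight from the content store
  const sameData = await cacache.get.byDigest(cachePath, integrity)
  return { data, sameData, metadata, size }
}

demo().catch(console.error)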
- const contentStream = write.stream(cache, opts) - .on('integrity', (int) => { - integrity = int - }) - .on('size', (s) => { - size = s - }) - .on('error', (err) => { - error = err - }) - - pipeline.push(contentStream) - - // last but not least, we write the index and emit hash and size, - // and memoize if we're doing that - pipeline.push(new Flush({ - async flush () { - if (!error) { - const entry = await index.insert(cache, key, integrity, { ...opts, size }) - if (memoize && memoData) { - memo.put(cache, entry, memoData, opts) - } - pipeline.emit('integrity', integrity) - pipeline.emit('size', size) - } - }, - })) - - return pipeline -} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/rm.js b/node_modules/node-gyp/node_modules/cacache/lib/rm.js deleted file mode 100644 index a94760c7cf243..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/lib/rm.js +++ /dev/null @@ -1,31 +0,0 @@ -'use strict' - -const { rm } = require('fs/promises') -const glob = require('./util/glob.js') -const index = require('./entry-index') -const memo = require('./memoization') -const path = require('path') -const rmContent = require('./content/rm') - -module.exports = entry -module.exports.entry = entry - -function entry (cache, key, opts) { - memo.clearMemoized() - return index.delete(cache, key, opts) -} - -module.exports.content = content - -function content (cache, integrity) { - memo.clearMemoized() - return rmContent(cache, integrity) -} - -module.exports.all = all - -async function all (cache) { - memo.clearMemoized() - const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true }) - return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true }))) -} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/util/glob.js b/node_modules/node-gyp/node_modules/cacache/lib/util/glob.js deleted file mode 100644 index 8500c1c16a429..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/lib/util/glob.js +++ /dev/null @@ -1,7 +0,0 @@ -'use strict' - -const { glob } = require('glob') -const path = require('path') - -const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep) -module.exports = (path, options) => glob(globify(path), options) diff --git a/node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js deleted file mode 100644 index 445599b503808..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js +++ /dev/null @@ -1,7 +0,0 @@ -'use strict' - -module.exports = hashToSegments - -function hashToSegments (hash) { - return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)] -} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js b/node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js deleted file mode 100644 index 0bf5302136ebe..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js +++ /dev/null @@ -1,26 +0,0 @@ -'use strict' - -const { withTempDir } = require('@npmcli/fs') -const fs = require('fs/promises') -const path = require('path') - -module.exports.mkdir = mktmpdir - -async function mktmpdir (cache, opts = {}) { - const { tmpPrefix } = opts - const tmpDir = path.join(cache, 'tmp') - await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' }) - // do not use path.join(), it drops the trailing / if tmpPrefix is unset - const target = `${tmpDir}${path.sep}${tmpPrefix || ''}` - return fs.mkdtemp(target, { owner: 'inherit' }) -} - 
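From the caller's side, the tmp helpers above look like this. A minimal sketch, assuming a hypothetical cache path and prefix; buildInTmp is an illustrative wrapper.

const cacache = require('cacache')

async function buildInTmp () {
  const cachePath = '/tmp/demo-cache' // hypothetical cache location
  // tmp.mkdir() returns a unique directory under <cache>/tmp; the caller cleans
  // it up (a later cacache.verify() run also sweeps the whole tmp directory)
  const scratch = await cacache.tmp.mkdir(cachePath, { tmpPrefix: 'demo-' })
  // tmp.withTmp() scopes the directory to the callback and removes it afterwards
  await cacache.tmp.withTmp(cachePath, async (dir) => {
    // write intermediate files into `dir` here
  })
  return scratch
}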
-module.exports.withTmp = withTmp - -function withTmp (cache, opts, cb) { - if (!cb) { - cb = opts - opts = {} - } - return withTempDir(path.join(cache, 'tmp'), cb, opts) -} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/verify.js b/node_modules/node-gyp/node_modules/cacache/lib/verify.js deleted file mode 100644 index dcff3aa73f317..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/lib/verify.js +++ /dev/null @@ -1,258 +0,0 @@ -'use strict' - -const { - mkdir, - readFile, - rm, - stat, - truncate, - writeFile, -} = require('fs/promises') -const contentPath = require('./content/path') -const fsm = require('fs-minipass') -const glob = require('./util/glob.js') -const index = require('./entry-index') -const path = require('path') -const ssri = require('ssri') - -const hasOwnProperty = (obj, key) => - Object.prototype.hasOwnProperty.call(obj, key) - -const verifyOpts = (opts) => ({ - concurrency: 20, - log: { silly () {} }, - ...opts, -}) - -module.exports = verify - -async function verify (cache, opts) { - opts = verifyOpts(opts) - opts.log.silly('verify', 'verifying cache at', cache) - - const steps = [ - markStartTime, - fixPerms, - garbageCollect, - rebuildIndex, - cleanTmp, - writeVerifile, - markEndTime, - ] - - const stats = {} - for (const step of steps) { - const label = step.name - const start = new Date() - const s = await step(cache, opts) - if (s) { - Object.keys(s).forEach((k) => { - stats[k] = s[k] - }) - } - const end = new Date() - if (!stats.runTime) { - stats.runTime = {} - } - stats.runTime[label] = end - start - } - stats.runTime.total = stats.endTime - stats.startTime - opts.log.silly( - 'verify', - 'verification finished for', - cache, - 'in', - `${stats.runTime.total}ms` - ) - return stats -} - -async function markStartTime () { - return { startTime: new Date() } -} - -async function markEndTime () { - return { endTime: new Date() } -} - -async function fixPerms (cache, opts) { - opts.log.silly('verify', 'fixing cache permissions') - await mkdir(cache, { recursive: true }) - return null -} - -// Implements a naive mark-and-sweep tracing garbage collector. -// -// The algorithm is basically as follows: -// 1. Read (and filter) all index entries ("pointers") -// 2. Mark each integrity value as "live" -// 3. Read entire filesystem tree in `content-vX/` dir -// 4. If content is live, verify its checksum and delete it if it fails -// 5. If content is not marked as live, rm it. 
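Through the public API, the mark-and-sweep pass described above is driven by verify(). A minimal sketch; the cache path and the 'stale:' key prefix used in the filter are purely illustrative.

const cacache = require('cacache')

async function gc () {
  const cachePath = '/tmp/demo-cache' // hypothetical cache location
  // verify() runs the steps above (permission fixup, mark-and-sweep garbage
  // collection, index rebuild, tmp cleanup) and resolves with collected stats
  const stats = await cacache.verify(cachePath, {
    filter: (entry) => !entry.key.startsWith('stale:'), // optional entry filter
  })
  console.log(stats.verifiedContent, stats.reclaimedCount, stats.reclaimedSize)
  // verify.lastRun() reads the _lastverified stamp written at the end of a run
  console.log(await cacache.verify.lastRun(cachePath))
}

gc().catch(console.error)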
-// -async function garbageCollect (cache, opts) { - opts.log.silly('verify', 'garbage collecting content') - const { default: pMap } = await import('p-map') - const indexStream = index.lsStream(cache) - const liveContent = new Set() - indexStream.on('data', (entry) => { - if (opts.filter && !opts.filter(entry)) { - return - } - - // integrity is stringified, re-parse it so we can get each hash - const integrity = ssri.parse(entry.integrity) - for (const algo in integrity) { - liveContent.add(integrity[algo].toString()) - } - }) - await new Promise((resolve, reject) => { - indexStream.on('end', resolve).on('error', reject) - }) - const contentDir = contentPath.contentDir(cache) - const files = await glob(path.join(contentDir, '**'), { - follow: false, - nodir: true, - nosort: true, - }) - const stats = { - verifiedContent: 0, - reclaimedCount: 0, - reclaimedSize: 0, - badContentCount: 0, - keptSize: 0, - } - await pMap( - files, - async (f) => { - const split = f.split(/[/\\]/) - const digest = split.slice(split.length - 3).join('') - const algo = split[split.length - 4] - const integrity = ssri.fromHex(digest, algo) - if (liveContent.has(integrity.toString())) { - const info = await verifyContent(f, integrity) - if (!info.valid) { - stats.reclaimedCount++ - stats.badContentCount++ - stats.reclaimedSize += info.size - } else { - stats.verifiedContent++ - stats.keptSize += info.size - } - } else { - // No entries refer to this content. We can delete. - stats.reclaimedCount++ - const s = await stat(f) - await rm(f, { recursive: true, force: true }) - stats.reclaimedSize += s.size - } - return stats - }, - { concurrency: opts.concurrency } - ) - return stats -} - -async function verifyContent (filepath, sri) { - const contentInfo = {} - try { - const { size } = await stat(filepath) - contentInfo.size = size - contentInfo.valid = true - await ssri.checkStream(new fsm.ReadStream(filepath), sri) - } catch (err) { - if (err.code === 'ENOENT') { - return { size: 0, valid: false } - } - if (err.code !== 'EINTEGRITY') { - throw err - } - - await rm(filepath, { recursive: true, force: true }) - contentInfo.valid = false - } - return contentInfo -} - -async function rebuildIndex (cache, opts) { - opts.log.silly('verify', 'rebuilding index') - const { default: pMap } = await import('p-map') - const entries = await index.ls(cache) - const stats = { - missingContent: 0, - rejectedEntries: 0, - totalEntries: 0, - } - const buckets = {} - for (const k in entries) { - /* istanbul ignore else */ - if (hasOwnProperty(entries, k)) { - const hashed = index.hashKey(k) - const entry = entries[k] - const excluded = opts.filter && !opts.filter(entry) - excluded && stats.rejectedEntries++ - if (buckets[hashed] && !excluded) { - buckets[hashed].push(entry) - } else if (buckets[hashed] && excluded) { - // skip - } else if (excluded) { - buckets[hashed] = [] - buckets[hashed]._path = index.bucketPath(cache, k) - } else { - buckets[hashed] = [entry] - buckets[hashed]._path = index.bucketPath(cache, k) - } - } - } - await pMap( - Object.keys(buckets), - (key) => { - return rebuildBucket(cache, buckets[key], stats, opts) - }, - { concurrency: opts.concurrency } - ) - return stats -} - -async function rebuildBucket (cache, bucket, stats) { - await truncate(bucket._path) - // This needs to be serialized because cacache explicitly - // lets very racy bucket conflicts clobber each other. 
- for (const entry of bucket) { - const content = contentPath(cache, entry.integrity) - try { - await stat(content) - await index.insert(cache, entry.key, entry.integrity, { - metadata: entry.metadata, - size: entry.size, - time: entry.time, - }) - stats.totalEntries++ - } catch (err) { - if (err.code === 'ENOENT') { - stats.rejectedEntries++ - stats.missingContent++ - } else { - throw err - } - } - } -} - -function cleanTmp (cache, opts) { - opts.log.silly('verify', 'cleaning tmp directory') - return rm(path.join(cache, 'tmp'), { recursive: true, force: true }) -} - -async function writeVerifile (cache, opts) { - const verifile = path.join(cache, '_lastverified') - opts.log.silly('verify', 'writing verifile to ' + verifile) - return writeFile(verifile, `${Date.now()}`) -} - -module.exports.lastRun = lastRun - -async function lastRun (cache) { - const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' }) - return new Date(+data) -} diff --git a/node_modules/node-gyp/node_modules/cacache/package.json b/node_modules/node-gyp/node_modules/cacache/package.json deleted file mode 100644 index ebb0f3f8ed410..0000000000000 --- a/node_modules/node-gyp/node_modules/cacache/package.json +++ /dev/null @@ -1,83 +0,0 @@ -{ - "name": "cacache", - "version": "19.0.1", - "cache-version": { - "content": "2", - "index": "5" - }, - "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.", - "main": "lib/index.js", - "files": [ - "bin/", - "lib/" - ], - "scripts": { - "test": "tap", - "snap": "tap", - "coverage": "tap", - "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test", - "lint": "npm run eslint", - "npmclilint": "npmcli-lint", - "lintfix": "npm run eslint -- --fix", - "postsnap": "npm run lintfix --", - "postlint": "template-oss-check", - "posttest": "npm run lint", - "template-oss-apply": "template-oss-apply --force", - "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/npm/cacache.git" - }, - "keywords": [ - "cache", - "caching", - "content-addressable", - "sri", - "sri hash", - "subresource integrity", - "cache", - "storage", - "store", - "file store", - "filesystem", - "disk cache", - "disk storage" - ], - "license": "ISC", - "dependencies": { - "@npmcli/fs": "^4.0.0", - "fs-minipass": "^3.0.0", - "glob": "^10.2.2", - "lru-cache": "^10.0.1", - "minipass": "^7.0.3", - "minipass-collect": "^2.0.1", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "p-map": "^7.0.2", - "ssri": "^12.0.0", - "tar": "^7.4.3", - "unique-filename": "^4.0.0" - }, - "devDependencies": { - "@npmcli/eslint-config": "^5.0.0", - "@npmcli/template-oss": "4.23.3", - "tap": "^16.0.0" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "windowsCI": false, - "version": "4.23.3", - "publish": "true" - }, - "author": "GitHub Inc.", - "tap": { - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - } -} diff --git a/node_modules/node-gyp/node_modules/glob/LICENSE b/node_modules/node-gyp/node_modules/glob/LICENSE deleted file mode 100644 index ec7df93329abf..0000000000000 --- a/node_modules/node-gyp/node_modules/glob/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) 2009-2023 Isaac Z. 
Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/node-gyp/node_modules/glob/dist/commonjs/glob.js b/node_modules/node-gyp/node_modules/glob/dist/commonjs/glob.js deleted file mode 100644 index e1339bbbcf57f..0000000000000 --- a/node_modules/node-gyp/node_modules/glob/dist/commonjs/glob.js +++ /dev/null @@ -1,247 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Glob = void 0; -const minimatch_1 = require("minimatch"); -const node_url_1 = require("node:url"); -const path_scurry_1 = require("path-scurry"); -const pattern_js_1 = require("./pattern.js"); -const walker_js_1 = require("./walker.js"); -// if no process global, just call it linux. -// so we default to case-sensitive, / separators -const defaultPlatform = (typeof process === 'object' && - process && - typeof process.platform === 'string') ? - process.platform - : 'linux'; -/** - * An object that can perform glob pattern traversals. - */ -class Glob { - absolute; - cwd; - root; - dot; - dotRelative; - follow; - ignore; - magicalBraces; - mark; - matchBase; - maxDepth; - nobrace; - nocase; - nodir; - noext; - noglobstar; - pattern; - platform; - realpath; - scurry; - stat; - signal; - windowsPathsNoEscape; - withFileTypes; - includeChildMatches; - /** - * The options provided to the constructor. - */ - opts; - /** - * An array of parsed immutable {@link Pattern} objects. - */ - patterns; - /** - * All options are stored as properties on the `Glob` object. - * - * See {@link GlobOptions} for full options descriptions. - * - * Note that a previous `Glob` object can be passed as the - * `GlobOptions` to another `Glob` instantiation to re-use settings - * and caches with a new pattern. - * - * Traversal functions can be called multiple times to run the walk - * again. - */ - constructor(pattern, opts) { - /* c8 ignore start */ - if (!opts) - throw new TypeError('glob options required'); - /* c8 ignore stop */ - this.withFileTypes = !!opts.withFileTypes; - this.signal = opts.signal; - this.follow = !!opts.follow; - this.dot = !!opts.dot; - this.dotRelative = !!opts.dotRelative; - this.nodir = !!opts.nodir; - this.mark = !!opts.mark; - if (!opts.cwd) { - this.cwd = ''; - } - else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) { - opts.cwd = (0, node_url_1.fileURLToPath)(opts.cwd); - } - this.cwd = opts.cwd || ''; - this.root = opts.root; - this.magicalBraces = !!opts.magicalBraces; - this.nobrace = !!opts.nobrace; - this.noext = !!opts.noext; - this.realpath = !!opts.realpath; - this.absolute = opts.absolute; - this.includeChildMatches = opts.includeChildMatches !== false; - this.noglobstar = !!opts.noglobstar; - this.matchBase = !!opts.matchBase; - this.maxDepth = - typeof opts.maxDepth === 'number' ? 
opts.maxDepth : Infinity; - this.stat = !!opts.stat; - this.ignore = opts.ignore; - if (this.withFileTypes && this.absolute !== undefined) { - throw new Error('cannot set absolute and withFileTypes:true'); - } - if (typeof pattern === 'string') { - pattern = [pattern]; - } - this.windowsPathsNoEscape = - !!opts.windowsPathsNoEscape || - opts.allowWindowsEscape === - false; - if (this.windowsPathsNoEscape) { - pattern = pattern.map(p => p.replace(/\\/g, '/')); - } - if (this.matchBase) { - if (opts.noglobstar) { - throw new TypeError('base matching requires globstar'); - } - pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`)); - } - this.pattern = pattern; - this.platform = opts.platform || defaultPlatform; - this.opts = { ...opts, platform: this.platform }; - if (opts.scurry) { - this.scurry = opts.scurry; - if (opts.nocase !== undefined && - opts.nocase !== opts.scurry.nocase) { - throw new Error('nocase option contradicts provided scurry option'); - } - } - else { - const Scurry = opts.platform === 'win32' ? path_scurry_1.PathScurryWin32 - : opts.platform === 'darwin' ? path_scurry_1.PathScurryDarwin - : opts.platform ? path_scurry_1.PathScurryPosix - : path_scurry_1.PathScurry; - this.scurry = new Scurry(this.cwd, { - nocase: opts.nocase, - fs: opts.fs, - }); - } - this.nocase = this.scurry.nocase; - // If you do nocase:true on a case-sensitive file system, then - // we need to use regexps instead of strings for non-magic - // path portions, because statting `aBc` won't return results - // for the file `AbC` for example. - const nocaseMagicOnly = this.platform === 'darwin' || this.platform === 'win32'; - const mmo = { - // default nocase based on platform - ...opts, - dot: this.dot, - matchBase: this.matchBase, - nobrace: this.nobrace, - nocase: this.nocase, - nocaseMagicOnly, - nocomment: true, - noext: this.noext, - nonegate: true, - optimizationLevel: 2, - platform: this.platform, - windowsPathsNoEscape: this.windowsPathsNoEscape, - debug: !!this.opts.debug, - }; - const mms = this.pattern.map(p => new minimatch_1.Minimatch(p, mmo)); - const [matchSet, globParts] = mms.reduce((set, m) => { - set[0].push(...m.set); - set[1].push(...m.globParts); - return set; - }, [[], []]); - this.patterns = matchSet.map((set, i) => { - const g = globParts[i]; - /* c8 ignore start */ - if (!g) - throw new Error('invalid pattern object'); - /* c8 ignore stop */ - return new pattern_js_1.Pattern(set, g, 0, this.platform); - }); - } - async walk() { - // Walkers always return array of Path objects, so we just have to - // coerce them into the right shape. It will have already called - // realpath() if the option was set to do so, so we know that's cached. - // start out knowing the cwd, at least - return [ - ...(await new walker_js_1.GlobWalker(this.patterns, this.scurry.cwd, { - ...this.opts, - maxDepth: this.maxDepth !== Infinity ? - this.maxDepth + this.scurry.cwd.depth() - : Infinity, - platform: this.platform, - nocase: this.nocase, - includeChildMatches: this.includeChildMatches, - }).walk()), - ]; - } - walkSync() { - return [ - ...new walker_js_1.GlobWalker(this.patterns, this.scurry.cwd, { - ...this.opts, - maxDepth: this.maxDepth !== Infinity ? - this.maxDepth + this.scurry.cwd.depth() - : Infinity, - platform: this.platform, - nocase: this.nocase, - includeChildMatches: this.includeChildMatches, - }).walkSync(), - ]; - } - stream() { - return new walker_js_1.GlobStream(this.patterns, this.scurry.cwd, { - ...this.opts, - maxDepth: this.maxDepth !== Infinity ? 
- this.maxDepth + this.scurry.cwd.depth() - : Infinity, - platform: this.platform, - nocase: this.nocase, - includeChildMatches: this.includeChildMatches, - }).stream(); - } - streamSync() { - return new walker_js_1.GlobStream(this.patterns, this.scurry.cwd, { - ...this.opts, - maxDepth: this.maxDepth !== Infinity ? - this.maxDepth + this.scurry.cwd.depth() - : Infinity, - platform: this.platform, - nocase: this.nocase, - includeChildMatches: this.includeChildMatches, - }).streamSync(); - } - /** - * Default sync iteration function. Returns a Generator that - * iterates over the results. - */ - iterateSync() { - return this.streamSync()[Symbol.iterator](); - } - [Symbol.iterator]() { - return this.iterateSync(); - } - /** - * Default async iteration function. Returns an AsyncGenerator that - * iterates over the results. - */ - iterate() { - return this.stream()[Symbol.asyncIterator](); - } - [Symbol.asyncIterator]() { - return this.iterate(); - } -} -exports.Glob = Glob; -//# sourceMappingURL=glob.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/glob/dist/commonjs/has-magic.js b/node_modules/node-gyp/node_modules/glob/dist/commonjs/has-magic.js deleted file mode 100644 index 0918bd57e0f1c..0000000000000 --- a/node_modules/node-gyp/node_modules/glob/dist/commonjs/has-magic.js +++ /dev/null @@ -1,27 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.hasMagic = void 0; -const minimatch_1 = require("minimatch"); -/** - * Return true if the patterns provided contain any magic glob characters, - * given the options provided. - * - * Brace expansion is not considered "magic" unless the `magicalBraces` option - * is set, as brace expansion just turns one string into an array of strings. - * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and - * `'xby'` both do not contain any magic glob characters, and it's treated the - * same as if you had called it on `['xay', 'xby']`. When `magicalBraces:true` - * is in the options, brace expansion _is_ treated as a pattern having magic. - */ -const hasMagic = (pattern, options = {}) => { - if (!Array.isArray(pattern)) { - pattern = [pattern]; - } - for (const p of pattern) { - if (new minimatch_1.Minimatch(p, options).hasMagic()) - return true; - } - return false; -}; -exports.hasMagic = hasMagic; -//# sourceMappingURL=has-magic.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/glob/dist/commonjs/ignore.js b/node_modules/node-gyp/node_modules/glob/dist/commonjs/ignore.js deleted file mode 100644 index 5f1fde0680dea..0000000000000 --- a/node_modules/node-gyp/node_modules/glob/dist/commonjs/ignore.js +++ /dev/null @@ -1,119 +0,0 @@ -"use strict"; -// give it a pattern, and it'll be able to tell you if -// a given path should be ignored. -// Ignoring a path ignores its children if the pattern ends in /** -// Ignores are always parsed in dot:true mode -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Ignore = void 0; -const minimatch_1 = require("minimatch"); -const pattern_js_1 = require("./pattern.js"); -const defaultPlatform = (typeof process === 'object' && - process && - typeof process.platform === 'string') ? 
- process.platform - : 'linux'; -/** - * Class used to process ignored patterns - */ -class Ignore { - relative; - relativeChildren; - absolute; - absoluteChildren; - platform; - mmopts; - constructor(ignored, { nobrace, nocase, noext, noglobstar, platform = defaultPlatform, }) { - this.relative = []; - this.absolute = []; - this.relativeChildren = []; - this.absoluteChildren = []; - this.platform = platform; - this.mmopts = { - dot: true, - nobrace, - nocase, - noext, - noglobstar, - optimizationLevel: 2, - platform, - nocomment: true, - nonegate: true, - }; - for (const ign of ignored) - this.add(ign); - } - add(ign) { - // this is a little weird, but it gives us a clean set of optimized - // minimatch matchers, without getting tripped up if one of them - // ends in /** inside a brace section, and it's only inefficient at - // the start of the walk, not along it. - // It'd be nice if the Pattern class just had a .test() method, but - // handling globstars is a bit of a pita, and that code already lives - // in minimatch anyway. - // Another way would be if maybe Minimatch could take its set/globParts - // as an option, and then we could at least just use Pattern to test - // for absolute-ness. - // Yet another way, Minimatch could take an array of glob strings, and - // a cwd option, and do the right thing. - const mm = new minimatch_1.Minimatch(ign, this.mmopts); - for (let i = 0; i < mm.set.length; i++) { - const parsed = mm.set[i]; - const globParts = mm.globParts[i]; - /* c8 ignore start */ - if (!parsed || !globParts) { - throw new Error('invalid pattern object'); - } - // strip off leading ./ portions - // https://github.com/isaacs/node-glob/issues/570 - while (parsed[0] === '.' && globParts[0] === '.') { - parsed.shift(); - globParts.shift(); - } - /* c8 ignore stop */ - const p = new pattern_js_1.Pattern(parsed, globParts, 0, this.platform); - const m = new minimatch_1.Minimatch(p.globString(), this.mmopts); - const children = globParts[globParts.length - 1] === '**'; - const absolute = p.isAbsolute(); - if (absolute) - this.absolute.push(m); - else - this.relative.push(m); - if (children) { - if (absolute) - this.absoluteChildren.push(m); - else - this.relativeChildren.push(m); - } - } - } - ignored(p) { - const fullpath = p.fullpath(); - const fullpaths = `${fullpath}/`; - const relative = p.relative() || '.'; - const relatives = `${relative}/`; - for (const m of this.relative) { - if (m.match(relative) || m.match(relatives)) - return true; - } - for (const m of this.absolute) { - if (m.match(fullpath) || m.match(fullpaths)) - return true; - } - return false; - } - childrenIgnored(p) { - const fullpath = p.fullpath() + '/'; - const relative = (p.relative() || '.') + '/'; - for (const m of this.relativeChildren) { - if (m.match(relative)) - return true; - } - for (const m of this.absoluteChildren) { - if (m.match(fullpath)) - return true; - } - return false; - } -} -exports.Ignore = Ignore; -//# sourceMappingURL=ignore.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/glob/dist/commonjs/index.js b/node_modules/node-gyp/node_modules/glob/dist/commonjs/index.js deleted file mode 100644 index 151495d170efa..0000000000000 --- a/node_modules/node-gyp/node_modules/glob/dist/commonjs/index.js +++ /dev/null @@ -1,68 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.glob = exports.sync = exports.iterate = exports.iterateSync = exports.stream = exports.streamSync = exports.Ignore = exports.hasMagic = 
exports.Glob = exports.unescape = exports.escape = void 0; -exports.globStreamSync = globStreamSync; -exports.globStream = globStream; -exports.globSync = globSync; -exports.globIterateSync = globIterateSync; -exports.globIterate = globIterate; -const minimatch_1 = require("minimatch"); -const glob_js_1 = require("./glob.js"); -const has_magic_js_1 = require("./has-magic.js"); -var minimatch_2 = require("minimatch"); -Object.defineProperty(exports, "escape", { enumerable: true, get: function () { return minimatch_2.escape; } }); -Object.defineProperty(exports, "unescape", { enumerable: true, get: function () { return minimatch_2.unescape; } }); -var glob_js_2 = require("./glob.js"); -Object.defineProperty(exports, "Glob", { enumerable: true, get: function () { return glob_js_2.Glob; } }); -var has_magic_js_2 = require("./has-magic.js"); -Object.defineProperty(exports, "hasMagic", { enumerable: true, get: function () { return has_magic_js_2.hasMagic; } }); -var ignore_js_1 = require("./ignore.js"); -Object.defineProperty(exports, "Ignore", { enumerable: true, get: function () { return ignore_js_1.Ignore; } }); -function globStreamSync(pattern, options = {}) { - return new glob_js_1.Glob(pattern, options).streamSync(); -} -function globStream(pattern, options = {}) { - return new glob_js_1.Glob(pattern, options).stream(); -} -function globSync(pattern, options = {}) { - return new glob_js_1.Glob(pattern, options).walkSync(); -} -async function glob_(pattern, options = {}) { - return new glob_js_1.Glob(pattern, options).walk(); -} -function globIterateSync(pattern, options = {}) { - return new glob_js_1.Glob(pattern, options).iterateSync(); -} -function globIterate(pattern, options = {}) { - return new glob_js_1.Glob(pattern, options).iterate(); -} -// aliases: glob.sync.stream() glob.stream.sync() glob.sync() etc -exports.streamSync = globStreamSync; -exports.stream = Object.assign(globStream, { sync: globStreamSync }); -exports.iterateSync = globIterateSync; -exports.iterate = Object.assign(globIterate, { - sync: globIterateSync, -}); -exports.sync = Object.assign(globSync, { - stream: globStreamSync, - iterate: globIterateSync, -}); -exports.glob = Object.assign(glob_, { - glob: glob_, - globSync, - sync: exports.sync, - globStream, - stream: exports.stream, - globStreamSync, - streamSync: exports.streamSync, - globIterate, - iterate: exports.iterate, - globIterateSync, - iterateSync: exports.iterateSync, - Glob: glob_js_1.Glob, - hasMagic: has_magic_js_1.hasMagic, - escape: minimatch_1.escape, - unescape: minimatch_1.unescape, -}); -exports.glob.glob = exports.glob; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/glob/dist/commonjs/package.json b/node_modules/node-gyp/node_modules/glob/dist/commonjs/package.json deleted file mode 100644 index 5bbefffbabee3..0000000000000 --- a/node_modules/node-gyp/node_modules/glob/dist/commonjs/package.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "type": "commonjs" -} diff --git a/node_modules/node-gyp/node_modules/glob/dist/commonjs/pattern.js b/node_modules/node-gyp/node_modules/glob/dist/commonjs/pattern.js deleted file mode 100644 index f0de35fb5bed9..0000000000000 --- a/node_modules/node-gyp/node_modules/glob/dist/commonjs/pattern.js +++ /dev/null @@ -1,219 +0,0 @@ -"use strict"; -// this is just a very light wrapper around 2 arrays with an offset index -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Pattern = void 0; -const minimatch_1 = require("minimatch"); -const 
isPatternList = (pl) => pl.length >= 1; -const isGlobList = (gl) => gl.length >= 1; -/** - * An immutable-ish view on an array of glob parts and their parsed - * results - */ -class Pattern { - #patternList; - #globList; - #index; - length; - #platform; - #rest; - #globString; - #isDrive; - #isUNC; - #isAbsolute; - #followGlobstar = true; - constructor(patternList, globList, index, platform) { - if (!isPatternList(patternList)) { - throw new TypeError('empty pattern list'); - } - if (!isGlobList(globList)) { - throw new TypeError('empty glob list'); - } - if (globList.length !== patternList.length) { - throw new TypeError('mismatched pattern list and glob list lengths'); - } - this.length = patternList.length; - if (index < 0 || index >= this.length) { - throw new TypeError('index out of range'); - } - this.#patternList = patternList; - this.#globList = globList; - this.#index = index; - this.#platform = platform; - // normalize root entries of absolute patterns on initial creation. - if (this.#index === 0) { - // c: => ['c:/'] - // C:/ => ['C:/'] - // C:/x => ['C:/', 'x'] - // //host/share => ['//host/share/'] - // //host/share/ => ['//host/share/'] - // //host/share/x => ['//host/share/', 'x'] - // /etc => ['/', 'etc'] - // / => ['/'] - if (this.isUNC()) { - // '' / '' / 'host' / 'share' - const [p0, p1, p2, p3, ...prest] = this.#patternList; - const [g0, g1, g2, g3, ...grest] = this.#globList; - if (prest[0] === '') { - // ends in / - prest.shift(); - grest.shift(); - } - const p = [p0, p1, p2, p3, ''].join('/'); - const g = [g0, g1, g2, g3, ''].join('/'); - this.#patternList = [p, ...prest]; - this.#globList = [g, ...grest]; - this.length = this.#patternList.length; - } - else if (this.isDrive() || this.isAbsolute()) { - const [p1, ...prest] = this.#patternList; - const [g1, ...grest] = this.#globList; - if (prest[0] === '') { - // ends in / - prest.shift(); - grest.shift(); - } - const p = p1 + '/'; - const g = g1 + '/'; - this.#patternList = [p, ...prest]; - this.#globList = [g, ...grest]; - this.length = this.#patternList.length; - } - } - } - /** - * The first entry in the parsed list of patterns - */ - pattern() { - return this.#patternList[this.#index]; - } - /** - * true of if pattern() returns a string - */ - isString() { - return typeof this.#patternList[this.#index] === 'string'; - } - /** - * true of if pattern() returns GLOBSTAR - */ - isGlobstar() { - return this.#patternList[this.#index] === minimatch_1.GLOBSTAR; - } - /** - * true if pattern() returns a regexp - */ - isRegExp() { - return this.#patternList[this.#index] instanceof RegExp; - } - /** - * The /-joined set of glob parts that make up this pattern - */ - globString() { - return (this.#globString = - this.#globString || - (this.#index === 0 ? - this.isAbsolute() ? 
- this.#globList[0] + this.#globList.slice(1).join('/') - : this.#globList.join('/') - : this.#globList.slice(this.#index).join('/'))); - } - /** - * true if there are more pattern parts after this one - */ - hasMore() { - return this.length > this.#index + 1; - } - /** - * The rest of the pattern after this part, or null if this is the end - */ - rest() { - if (this.#rest !== undefined) - return this.#rest; - if (!this.hasMore()) - return (this.#rest = null); - this.#rest = new Pattern(this.#patternList, this.#globList, this.#index + 1, this.#platform); - this.#rest.#isAbsolute = this.#isAbsolute; - this.#rest.#isUNC = this.#isUNC; - this.#rest.#isDrive = this.#isDrive; - return this.#rest; - } - /** - * true if the pattern represents a //unc/path/ on windows - */ - isUNC() { - const pl = this.#patternList; - return this.#isUNC !== undefined ? - this.#isUNC - : (this.#isUNC = - this.#platform === 'win32' && - this.#index === 0 && - pl[0] === '' && - pl[1] === '' && - typeof pl[2] === 'string' && - !!pl[2] && - typeof pl[3] === 'string' && - !!pl[3]); - } - // pattern like C:/... - // split = ['C:', ...] - // XXX: would be nice to handle patterns like `c:*` to test the cwd - // in c: for *, but I don't know of a way to even figure out what that - // cwd is without actually chdir'ing into it? - /** - * True if the pattern starts with a drive letter on Windows - */ - isDrive() { - const pl = this.#patternList; - return this.#isDrive !== undefined ? - this.#isDrive - : (this.#isDrive = - this.#platform === 'win32' && - this.#index === 0 && - this.length > 1 && - typeof pl[0] === 'string' && - /^[a-z]:$/i.test(pl[0])); - } - // pattern = '/' or '/...' or '/x/...' - // split = ['', ''] or ['', ...] or ['', 'x', ...] - // Drive and UNC both considered absolute on windows - /** - * True if the pattern is rooted on an absolute path - */ - isAbsolute() { - const pl = this.#patternList; - return this.#isAbsolute !== undefined ? - this.#isAbsolute - : (this.#isAbsolute = - (pl[0] === '' && pl.length > 1) || - this.isDrive() || - this.isUNC()); - } - /** - * consume the root of the pattern, and return it - */ - root() { - const p = this.#patternList[0]; - return (typeof p === 'string' && this.isAbsolute() && this.#index === 0) ? - p - : ''; - } - /** - * Check to see if the current globstar pattern is allowed to follow - * a symbolic link. 
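The symlink bookkeeping around globstars surfaces to callers as the follow option. A minimal sketch, with a hypothetical project directory and pattern.

const { globSync } = require('glob')

// pattern and cwd are hypothetical
const opts = { cwd: '/some/project', nodir: true }
// default walk: '**' is deliberately conservative about descending through
// symlinked directories, which is what the globstar bookkeeping here tracks
const conservative = globSync('**/*.js', opts)
// follow: true lets '**' traverse symlinked directories freely (beware of
// duplicate results when cyclic links are present)
const everything = globSync('**/*.js', { ...opts, follow: true })
console.log(conservative.length, everything.length)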
- */ - checkFollowGlobstar() { - return !(this.#index === 0 || - !this.isGlobstar() || - !this.#followGlobstar); - } - /** - * Mark that the current globstar pattern is following a symbolic link - */ - markFollowGlobstar() { - if (this.#index === 0 || !this.isGlobstar() || !this.#followGlobstar) - return false; - this.#followGlobstar = false; - return true; - } -} -exports.Pattern = Pattern; -//# sourceMappingURL=pattern.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/glob/dist/commonjs/processor.js b/node_modules/node-gyp/node_modules/glob/dist/commonjs/processor.js deleted file mode 100644 index ee3bb4397e0b2..0000000000000 --- a/node_modules/node-gyp/node_modules/glob/dist/commonjs/processor.js +++ /dev/null @@ -1,301 +0,0 @@ -"use strict"; -// synchronous utility for filtering entries and calculating subwalks -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Processor = exports.SubWalks = exports.MatchRecord = exports.HasWalkedCache = void 0; -const minimatch_1 = require("minimatch"); -/** - * A cache of which patterns have been processed for a given Path - */ -class HasWalkedCache { - store; - constructor(store = new Map()) { - this.store = store; - } - copy() { - return new HasWalkedCache(new Map(this.store)); - } - hasWalked(target, pattern) { - return this.store.get(target.fullpath())?.has(pattern.globString()); - } - storeWalked(target, pattern) { - const fullpath = target.fullpath(); - const cached = this.store.get(fullpath); - if (cached) - cached.add(pattern.globString()); - else - this.store.set(fullpath, new Set([pattern.globString()])); - } -} -exports.HasWalkedCache = HasWalkedCache; -/** - * A record of which paths have been matched in a given walk step, - * and whether they only are considered a match if they are a directory, - * and whether their absolute or relative path should be returned. - */ -class MatchRecord { - store = new Map(); - add(target, absolute, ifDir) { - const n = (absolute ? 2 : 0) | (ifDir ? 1 : 0); - const current = this.store.get(target); - this.store.set(target, current === undefined ? n : n & current); - } - // match, absolute, ifdir - entries() { - return [...this.store.entries()].map(([path, n]) => [ - path, - !!(n & 2), - !!(n & 1), - ]); - } -} -exports.MatchRecord = MatchRecord; -/** - * A collection of patterns that must be processed in a subsequent step - * for a given path. - */ -class SubWalks { - store = new Map(); - add(target, pattern) { - if (!target.canReaddir()) { - return; - } - const subs = this.store.get(target); - if (subs) { - if (!subs.find(p => p.globString() === pattern.globString())) { - subs.push(pattern); - } - } - else - this.store.set(target, [pattern]); - } - get(target) { - const subs = this.store.get(target); - /* c8 ignore start */ - if (!subs) { - throw new Error('attempting to walk unknown path'); - } - /* c8 ignore stop */ - return subs; - } - entries() { - return this.keys().map(k => [k, this.store.get(k)]); - } - keys() { - return [...this.store.keys()].filter(t => t.canReaddir()); - } -} -exports.SubWalks = SubWalks; -/** - * The class that processes patterns for a given path. - * - * Handles child entry filtering, and determining whether a path's - * directory contents must be read. 
- */ -class Processor { - hasWalkedCache; - matches = new MatchRecord(); - subwalks = new SubWalks(); - patterns; - follow; - dot; - opts; - constructor(opts, hasWalkedCache) { - this.opts = opts; - this.follow = !!opts.follow; - this.dot = !!opts.dot; - this.hasWalkedCache = - hasWalkedCache ? hasWalkedCache.copy() : new HasWalkedCache(); - } - processPatterns(target, patterns) { - this.patterns = patterns; - const processingSet = patterns.map(p => [target, p]); - // map of paths to the magic-starting subwalks they need to walk - // first item in patterns is the filter - for (let [t, pattern] of processingSet) { - this.hasWalkedCache.storeWalked(t, pattern); - const root = pattern.root(); - const absolute = pattern.isAbsolute() && this.opts.absolute !== false; - // start absolute patterns at root - if (root) { - t = t.resolve(root === '/' && this.opts.root !== undefined ? - this.opts.root - : root); - const rest = pattern.rest(); - if (!rest) { - this.matches.add(t, true, false); - continue; - } - else { - pattern = rest; - } - } - if (t.isENOENT()) - continue; - let p; - let rest; - let changed = false; - while (typeof (p = pattern.pattern()) === 'string' && - (rest = pattern.rest())) { - const c = t.resolve(p); - t = c; - pattern = rest; - changed = true; - } - p = pattern.pattern(); - rest = pattern.rest(); - if (changed) { - if (this.hasWalkedCache.hasWalked(t, pattern)) - continue; - this.hasWalkedCache.storeWalked(t, pattern); - } - // now we have either a final string for a known entry, - // more strings for an unknown entry, - // or a pattern starting with magic, mounted on t. - if (typeof p === 'string') { - // must not be final entry, otherwise we would have - // concatenated it earlier. - const ifDir = p === '..' || p === '' || p === '.'; - this.matches.add(t.resolve(p), absolute, ifDir); - continue; - } - else if (p === minimatch_1.GLOBSTAR) { - // if no rest, match and subwalk pattern - // if rest, process rest and subwalk pattern - // if it's a symlink, but we didn't get here by way of a - // globstar match (meaning it's the first time THIS globstar - // has traversed a symlink), then we follow it. Otherwise, stop. - if (!t.isSymbolicLink() || - this.follow || - pattern.checkFollowGlobstar()) { - this.subwalks.add(t, pattern); - } - const rp = rest?.pattern(); - const rrest = rest?.rest(); - if (!rest || ((rp === '' || rp === '.') && !rrest)) { - // only HAS to be a dir if it ends in **/ or **/. - // but ending in ** will match files as well. - this.matches.add(t, absolute, rp === '' || rp === '.'); - } - else { - if (rp === '..') { - // this would mean you're matching **/.. at the fs root, - // and no thanks, I'm not gonna test that specific case. 
- /* c8 ignore start */ - const tp = t.parent || t; - /* c8 ignore stop */ - if (!rrest) - this.matches.add(tp, absolute, true); - else if (!this.hasWalkedCache.hasWalked(tp, rrest)) { - this.subwalks.add(tp, rrest); - } - } - } - } - else if (p instanceof RegExp) { - this.subwalks.add(t, pattern); - } - } - return this; - } - subwalkTargets() { - return this.subwalks.keys(); - } - child() { - return new Processor(this.opts, this.hasWalkedCache); - } - // return a new Processor containing the subwalks for each - // child entry, and a set of matches, and - // a hasWalkedCache that's a copy of this one - // then we're going to call - filterEntries(parent, entries) { - const patterns = this.subwalks.get(parent); - // put matches and entry walks into the results processor - const results = this.child(); - for (const e of entries) { - for (const pattern of patterns) { - const absolute = pattern.isAbsolute(); - const p = pattern.pattern(); - const rest = pattern.rest(); - if (p === minimatch_1.GLOBSTAR) { - results.testGlobstar(e, pattern, rest, absolute); - } - else if (p instanceof RegExp) { - results.testRegExp(e, p, rest, absolute); - } - else { - results.testString(e, p, rest, absolute); - } - } - } - return results; - } - testGlobstar(e, pattern, rest, absolute) { - if (this.dot || !e.name.startsWith('.')) { - if (!pattern.hasMore()) { - this.matches.add(e, absolute, false); - } - if (e.canReaddir()) { - // if we're in follow mode or it's not a symlink, just keep - // testing the same pattern. If there's more after the globstar, - // then this symlink consumes the globstar. If not, then we can - // follow at most ONE symlink along the way, so we mark it, which - // also checks to ensure that it wasn't already marked. - if (this.follow || !e.isSymbolicLink()) { - this.subwalks.add(e, pattern); - } - else if (e.isSymbolicLink()) { - if (rest && pattern.checkFollowGlobstar()) { - this.subwalks.add(e, rest); - } - else if (pattern.markFollowGlobstar()) { - this.subwalks.add(e, pattern); - } - } - } - } - // if the NEXT thing matches this entry, then also add - // the rest. - if (rest) { - const rp = rest.pattern(); - if (typeof rp === 'string' && - // dots and empty were handled already - rp !== '..' && - rp !== '' && - rp !== '.') { - this.testString(e, rp, rest.rest(), absolute); - } - else if (rp === '..') { - /* c8 ignore start */ - const ep = e.parent || e; - /* c8 ignore stop */ - this.subwalks.add(ep, rest); - } - else if (rp instanceof RegExp) { - this.testRegExp(e, rp, rest.rest(), absolute); - } - } - } - testRegExp(e, p, rest, absolute) { - if (!p.test(e.name)) - return; - if (!rest) { - this.matches.add(e, absolute, false); - } - else { - this.subwalks.add(e, rest); - } - } - testString(e, p, rest, absolute) { - // should never happen? 
- if (!e.isNamed(p)) - return; - if (!rest) { - this.matches.add(e, absolute, false); - } - else { - this.subwalks.add(e, rest); - } - } -} -exports.Processor = Processor; -//# sourceMappingURL=processor.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/glob/dist/commonjs/walker.js b/node_modules/node-gyp/node_modules/glob/dist/commonjs/walker.js deleted file mode 100644 index cb15946d9a852..0000000000000 --- a/node_modules/node-gyp/node_modules/glob/dist/commonjs/walker.js +++ /dev/null @@ -1,387 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.GlobStream = exports.GlobWalker = exports.GlobUtil = void 0; -/** - * Single-use utility classes to provide functionality to the {@link Glob} - * methods. - * - * @module - */ -const minipass_1 = require("minipass"); -const ignore_js_1 = require("./ignore.js"); -const processor_js_1 = require("./processor.js"); -const makeIgnore = (ignore, opts) => typeof ignore === 'string' ? new ignore_js_1.Ignore([ignore], opts) - : Array.isArray(ignore) ? new ignore_js_1.Ignore(ignore, opts) - : ignore; -/** - * basic walking utilities that all the glob walker types use - */ -class GlobUtil { - path; - patterns; - opts; - seen = new Set(); - paused = false; - aborted = false; - #onResume = []; - #ignore; - #sep; - signal; - maxDepth; - includeChildMatches; - constructor(patterns, path, opts) { - this.patterns = patterns; - this.path = path; - this.opts = opts; - this.#sep = !opts.posix && opts.platform === 'win32' ? '\\' : '/'; - this.includeChildMatches = opts.includeChildMatches !== false; - if (opts.ignore || !this.includeChildMatches) { - this.#ignore = makeIgnore(opts.ignore ?? [], opts); - if (!this.includeChildMatches && - typeof this.#ignore.add !== 'function') { - const m = 'cannot ignore child matches, ignore lacks add() method.'; - throw new Error(m); - } - } - // ignore, always set with maxDepth, but it's optional on the - // GlobOptions type - /* c8 ignore start */ - this.maxDepth = opts.maxDepth || Infinity; - /* c8 ignore stop */ - if (opts.signal) { - this.signal = opts.signal; - this.signal.addEventListener('abort', () => { - this.#onResume.length = 0; - }); - } - } - #ignored(path) { - return this.seen.has(path) || !!this.#ignore?.ignored?.(path); - } - #childrenIgnored(path) { - return !!this.#ignore?.childrenIgnored?.(path); - } - // backpressure mechanism - pause() { - this.paused = true; - } - resume() { - /* c8 ignore start */ - if (this.signal?.aborted) - return; - /* c8 ignore stop */ - this.paused = false; - let fn = undefined; - while (!this.paused && (fn = this.#onResume.shift())) { - fn(); - } - } - onResume(fn) { - if (this.signal?.aborted) - return; - /* c8 ignore start */ - if (!this.paused) { - fn(); - } - else { - /* c8 ignore stop */ - this.#onResume.push(fn); - } - } - // do the requisite realpath/stat checking, and return the path - // to add or undefined to filter it out. - async matchCheck(e, ifDir) { - if (ifDir && this.opts.nodir) - return undefined; - let rpc; - if (this.opts.realpath) { - rpc = e.realpathCached() || (await e.realpath()); - if (!rpc) - return undefined; - e = rpc; - } - const needStat = e.isUnknown() || this.opts.stat; - const s = needStat ? 
await e.lstat() : e; - if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) { - const target = await s.realpath(); - /* c8 ignore start */ - if (target && (target.isUnknown() || this.opts.stat)) { - await target.lstat(); - } - /* c8 ignore stop */ - } - return this.matchCheckTest(s, ifDir); - } - matchCheckTest(e, ifDir) { - return (e && - (this.maxDepth === Infinity || e.depth() <= this.maxDepth) && - (!ifDir || e.canReaddir()) && - (!this.opts.nodir || !e.isDirectory()) && - (!this.opts.nodir || - !this.opts.follow || - !e.isSymbolicLink() || - !e.realpathCached()?.isDirectory()) && - !this.#ignored(e)) ? - e - : undefined; - } - matchCheckSync(e, ifDir) { - if (ifDir && this.opts.nodir) - return undefined; - let rpc; - if (this.opts.realpath) { - rpc = e.realpathCached() || e.realpathSync(); - if (!rpc) - return undefined; - e = rpc; - } - const needStat = e.isUnknown() || this.opts.stat; - const s = needStat ? e.lstatSync() : e; - if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) { - const target = s.realpathSync(); - if (target && (target?.isUnknown() || this.opts.stat)) { - target.lstatSync(); - } - } - return this.matchCheckTest(s, ifDir); - } - matchFinish(e, absolute) { - if (this.#ignored(e)) - return; - // we know we have an ignore if this is false, but TS doesn't - if (!this.includeChildMatches && this.#ignore?.add) { - const ign = `${e.relativePosix()}/**`; - this.#ignore.add(ign); - } - const abs = this.opts.absolute === undefined ? absolute : this.opts.absolute; - this.seen.add(e); - const mark = this.opts.mark && e.isDirectory() ? this.#sep : ''; - // ok, we have what we need! - if (this.opts.withFileTypes) { - this.matchEmit(e); - } - else if (abs) { - const abs = this.opts.posix ? e.fullpathPosix() : e.fullpath(); - this.matchEmit(abs + mark); - } - else { - const rel = this.opts.posix ? e.relativePosix() : e.relative(); - const pre = this.opts.dotRelative && !rel.startsWith('..' + this.#sep) ? - '.' + this.#sep - : ''; - this.matchEmit(!rel ? '.' + mark : pre + rel + mark); - } - } - async match(e, absolute, ifDir) { - const p = await this.matchCheck(e, ifDir); - if (p) - this.matchFinish(p, absolute); - } - matchSync(e, absolute, ifDir) { - const p = this.matchCheckSync(e, ifDir); - if (p) - this.matchFinish(p, absolute); - } - walkCB(target, patterns, cb) { - /* c8 ignore start */ - if (this.signal?.aborted) - cb(); - /* c8 ignore stop */ - this.walkCB2(target, patterns, new processor_js_1.Processor(this.opts), cb); - } - walkCB2(target, patterns, processor, cb) { - if (this.#childrenIgnored(target)) - return cb(); - if (this.signal?.aborted) - cb(); - if (this.paused) { - this.onResume(() => this.walkCB2(target, patterns, processor, cb)); - return; - } - processor.processPatterns(target, patterns); - // done processing. all of the above is sync, can be abstracted out. - // subwalks is a map of paths to the entry filters they need - // matches is a map of paths to [absolute, ifDir] tuples. 
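The matches gathered by this walker ultimately reach callers through the package's stream and async-iterator entry points. A minimal consumer-side sketch with a hypothetical cwd and pattern.

const { globStream, globIterate } = require('glob')

async function listJS () {
  const opts = { cwd: '/some/project' } // hypothetical project directory
  // stream form: matches are written out as the walker above finds them
  globStream('**/*.js', opts).on('data', (p) => console.log('found', p))
  // async-iterator form, built on the same walker
  for await (const p of globIterate('**/*.js', opts)) {
    console.log('iterated', p)
  }
}

listJS().catch(console.error)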
- let tasks = 1; - const next = () => { - if (--tasks === 0) - cb(); - }; - for (const [m, absolute, ifDir] of processor.matches.entries()) { - if (this.#ignored(m)) - continue; - tasks++; - this.match(m, absolute, ifDir).then(() => next()); - } - for (const t of processor.subwalkTargets()) { - if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) { - continue; - } - tasks++; - const childrenCached = t.readdirCached(); - if (t.calledReaddir()) - this.walkCB3(t, childrenCached, processor, next); - else { - t.readdirCB((_, entries) => this.walkCB3(t, entries, processor, next), true); - } - } - next(); - } - walkCB3(target, entries, processor, cb) { - processor = processor.filterEntries(target, entries); - let tasks = 1; - const next = () => { - if (--tasks === 0) - cb(); - }; - for (const [m, absolute, ifDir] of processor.matches.entries()) { - if (this.#ignored(m)) - continue; - tasks++; - this.match(m, absolute, ifDir).then(() => next()); - } - for (const [target, patterns] of processor.subwalks.entries()) { - tasks++; - this.walkCB2(target, patterns, processor.child(), next); - } - next(); - } - walkCBSync(target, patterns, cb) { - /* c8 ignore start */ - if (this.signal?.aborted) - cb(); - /* c8 ignore stop */ - this.walkCB2Sync(target, patterns, new processor_js_1.Processor(this.opts), cb); - } - walkCB2Sync(target, patterns, processor, cb) { - if (this.#childrenIgnored(target)) - return cb(); - if (this.signal?.aborted) - cb(); - if (this.paused) { - this.onResume(() => this.walkCB2Sync(target, patterns, processor, cb)); - return; - } - processor.processPatterns(target, patterns); - // done processing. all of the above is sync, can be abstracted out. - // subwalks is a map of paths to the entry filters they need - // matches is a map of paths to [absolute, ifDir] tuples. 
- let tasks = 1; - const next = () => { - if (--tasks === 0) - cb(); - }; - for (const [m, absolute, ifDir] of processor.matches.entries()) { - if (this.#ignored(m)) - continue; - this.matchSync(m, absolute, ifDir); - } - for (const t of processor.subwalkTargets()) { - if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) { - continue; - } - tasks++; - const children = t.readdirSync(); - this.walkCB3Sync(t, children, processor, next); - } - next(); - } - walkCB3Sync(target, entries, processor, cb) { - processor = processor.filterEntries(target, entries); - let tasks = 1; - const next = () => { - if (--tasks === 0) - cb(); - }; - for (const [m, absolute, ifDir] of processor.matches.entries()) { - if (this.#ignored(m)) - continue; - this.matchSync(m, absolute, ifDir); - } - for (const [target, patterns] of processor.subwalks.entries()) { - tasks++; - this.walkCB2Sync(target, patterns, processor.child(), next); - } - next(); - } -} -exports.GlobUtil = GlobUtil; -class GlobWalker extends GlobUtil { - matches = new Set(); - constructor(patterns, path, opts) { - super(patterns, path, opts); - } - matchEmit(e) { - this.matches.add(e); - } - async walk() { - if (this.signal?.aborted) - throw this.signal.reason; - if (this.path.isUnknown()) { - await this.path.lstat(); - } - await new Promise((res, rej) => { - this.walkCB(this.path, this.patterns, () => { - if (this.signal?.aborted) { - rej(this.signal.reason); - } - else { - res(this.matches); - } - }); - }); - return this.matches; - } - walkSync() { - if (this.signal?.aborted) - throw this.signal.reason; - if (this.path.isUnknown()) { - this.path.lstatSync(); - } - // nothing for the callback to do, because this never pauses - this.walkCBSync(this.path, this.patterns, () => { - if (this.signal?.aborted) - throw this.signal.reason; - }); - return this.matches; - } -} -exports.GlobWalker = GlobWalker; -class GlobStream extends GlobUtil { - results; - constructor(patterns, path, opts) { - super(patterns, path, opts); - this.results = new minipass_1.Minipass({ - signal: this.signal, - objectMode: true, - }); - this.results.on('drain', () => this.resume()); - this.results.on('resume', () => this.resume()); - } - matchEmit(e) { - this.results.write(e); - if (!this.results.flowing) - this.pause(); - } - stream() { - const target = this.path; - if (target.isUnknown()) { - target.lstat().then(() => { - this.walkCB(target, this.patterns, () => this.results.end()); - }); - } - else { - this.walkCB(target, this.patterns, () => this.results.end()); - } - return this.results; - } - streamSync() { - if (this.path.isUnknown()) { - this.path.lstatSync(); - } - this.walkCBSync(this.path, this.patterns, () => this.results.end()); - return this.results; - } -} -exports.GlobStream = GlobStream; -//# sourceMappingURL=walker.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/glob/dist/esm/bin.d.mts b/node_modules/node-gyp/node_modules/glob/dist/esm/bin.d.mts deleted file mode 100644 index 77298e4770817..0000000000000 --- a/node_modules/node-gyp/node_modules/glob/dist/esm/bin.d.mts +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env node -export {}; -//# sourceMappingURL=bin.d.mts.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/glob/dist/esm/bin.mjs b/node_modules/node-gyp/node_modules/glob/dist/esm/bin.mjs deleted file mode 100755 index 5c7bf1e925610..0000000000000 --- a/node_modules/node-gyp/node_modules/glob/dist/esm/bin.mjs +++ /dev/null @@ -1,270 +0,0 @@ -#!/usr/bin/env node -import { foregroundChild 
} from 'foreground-child'; -import { existsSync } from 'fs'; -import { jack } from 'jackspeak'; -import { loadPackageJson } from 'package-json-from-dist'; -import { join } from 'path'; -import { globStream } from './index.js'; -const { version } = loadPackageJson(import.meta.url, '../package.json'); -const j = jack({ - usage: 'glob [options] [ [ ...]]', -}) - .description(` - Glob v${version} - - Expand the positional glob expression arguments into any matching file - system paths found. - `) - .opt({ - cmd: { - short: 'c', - hint: 'command', - description: `Run the command provided, passing the glob expression - matches as arguments.`, - }, -}) - .opt({ - default: { - short: 'p', - hint: 'pattern', - description: `If no positional arguments are provided, glob will use - this pattern`, - }, -}) - .flag({ - all: { - short: 'A', - description: `By default, the glob cli command will not expand any - arguments that are an exact match to a file on disk. - - This prevents double-expanding, in case the shell expands - an argument whose filename is a glob expression. - - For example, if 'app/*.ts' would match 'app/[id].ts', then - on Windows powershell or cmd.exe, 'glob app/*.ts' will - expand to 'app/[id].ts', as expected. However, in posix - shells such as bash or zsh, the shell will first expand - 'app/*.ts' to a list of filenames. Then glob will look - for a file matching 'app/[id].ts' (ie, 'app/i.ts' or - 'app/d.ts'), which is unexpected. - - Setting '--all' prevents this behavior, causing glob - to treat ALL patterns as glob expressions to be expanded, - even if they are an exact match to a file on disk. - - When setting this option, be sure to enquote arguments - so that the shell will not expand them prior to passing - them to the glob command process. - `, - }, - absolute: { - short: 'a', - description: 'Expand to absolute paths', - }, - 'dot-relative': { - short: 'd', - description: `Prepend './' on relative matches`, - }, - mark: { - short: 'm', - description: `Append a / on any directories matched`, - }, - posix: { - short: 'x', - description: `Always resolve to posix style paths, using '/' as the - directory separator, even on Windows. Drive letter - absolute matches on Windows will be expanded to their - full resolved UNC maths, eg instead of 'C:\\foo\\bar', - it will expand to '//?/C:/foo/bar'. - `, - }, - follow: { - short: 'f', - description: `Follow symlinked directories when expanding '**'`, - }, - realpath: { - short: 'R', - description: `Call 'fs.realpath' on all of the results. In the case - of an entry that cannot be resolved, the entry is - omitted. This incurs a slight performance penalty, of - course, because of the added system calls.`, - }, - stat: { - short: 's', - description: `Call 'fs.lstat' on all entries, whether required or not - to determine if it's a valid match.`, - }, - 'match-base': { - short: 'b', - description: `Perform a basename-only match if the pattern does not - contain any slash characters. That is, '*.js' would be - treated as equivalent to '**/*.js', matching js files - in all directories. - `, - }, - dot: { - description: `Allow patterns to match files/directories that start - with '.', even if the pattern does not start with '.' - `, - }, - nobrace: { - description: 'Do not expand {...} patterns', - }, - nocase: { - description: `Perform a case-insensitive match. This defaults to - 'true' on macOS and Windows platforms, and false on - all others. 
- - Note: 'nocase' should only be explicitly set when it is - known that the filesystem's case sensitivity differs - from the platform default. If set 'true' on - case-insensitive file systems, then the walk may return - more or less results than expected. - `, - }, - nodir: { - description: `Do not match directories, only files. - - Note: to *only* match directories, append a '/' at the - end of the pattern. - `, - }, - noext: { - description: `Do not expand extglob patterns, such as '+(a|b)'`, - }, - noglobstar: { - description: `Do not expand '**' against multiple path portions. - Ie, treat it as a normal '*' instead.`, - }, - 'windows-path-no-escape': { - description: `Use '\\' as a path separator *only*, and *never* as an - escape character. If set, all '\\' characters are - replaced with '/' in the pattern.`, - }, -}) - .num({ - 'max-depth': { - short: 'D', - description: `Maximum depth to traverse from the current - working directory`, - }, -}) - .opt({ - cwd: { - short: 'C', - description: 'Current working directory to execute/match in', - default: process.cwd(), - }, - root: { - short: 'r', - description: `A string path resolved against the 'cwd', which is - used as the starting point for absolute patterns that - start with '/' (but not drive letters or UNC paths - on Windows). - - Note that this *doesn't* necessarily limit the walk to - the 'root' directory, and doesn't affect the cwd - starting point for non-absolute patterns. A pattern - containing '..' will still be able to traverse out of - the root directory, if it is not an actual root directory - on the filesystem, and any non-absolute patterns will - still be matched in the 'cwd'. - - To start absolute and non-absolute patterns in the same - path, you can use '--root=' to set it to the empty - string. However, be aware that on Windows systems, a - pattern like 'x:/*' or '//host/share/*' will *always* - start in the 'x:/' or '//host/share/' directory, - regardless of the --root setting. - `, - }, - platform: { - description: `Defaults to the value of 'process.platform' if - available, or 'linux' if not. Setting --platform=win32 - on non-Windows systems may cause strange behavior!`, - validOptions: [ - 'aix', - 'android', - 'darwin', - 'freebsd', - 'haiku', - 'linux', - 'openbsd', - 'sunos', - 'win32', - 'cygwin', - 'netbsd', - ], - }, -}) - .optList({ - ignore: { - short: 'i', - description: `Glob patterns to ignore`, - }, -}) - .flag({ - debug: { - short: 'v', - description: `Output a huge amount of noisy debug information about - patterns as they are parsed and used to match files.`, - }, -}) - .flag({ - help: { - short: 'h', - description: 'Show this usage information', - }, -}); -try { - const { positionals, values } = j.parse(); - if (values.help) { - console.log(j.usage()); - process.exit(0); - } - if (positionals.length === 0 && !values.default) - throw 'No patterns provided'; - if (positionals.length === 0 && values.default) - positionals.push(values.default); - const patterns = values.all ? positionals : positionals.filter(p => !existsSync(p)); - const matches = values.all ? 
- [] - : positionals.filter(p => existsSync(p)).map(p => join(p)); - const stream = globStream(patterns, { - absolute: values.absolute, - cwd: values.cwd, - dot: values.dot, - dotRelative: values['dot-relative'], - follow: values.follow, - ignore: values.ignore, - mark: values.mark, - matchBase: values['match-base'], - maxDepth: values['max-depth'], - nobrace: values.nobrace, - nocase: values.nocase, - nodir: values.nodir, - noext: values.noext, - noglobstar: values.noglobstar, - platform: values.platform, - realpath: values.realpath, - root: values.root, - stat: values.stat, - debug: values.debug, - posix: values.posix, - }); - const cmd = values.cmd; - if (!cmd) { - matches.forEach(m => console.log(m)); - stream.on('data', f => console.log(f)); - } - else { - stream.on('data', f => matches.push(f)); - stream.on('end', () => foregroundChild(cmd, matches, { shell: true })); - } -} -catch (e) { - console.error(j.usage()); - console.error(e instanceof Error ? e.message : String(e)); - process.exit(1); -} -//# sourceMappingURL=bin.mjs.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/glob/dist/esm/glob.js b/node_modules/node-gyp/node_modules/glob/dist/esm/glob.js deleted file mode 100644 index c9ff3b0036d94..0000000000000 --- a/node_modules/node-gyp/node_modules/glob/dist/esm/glob.js +++ /dev/null @@ -1,243 +0,0 @@ -import { Minimatch } from 'minimatch'; -import { fileURLToPath } from 'node:url'; -import { PathScurry, PathScurryDarwin, PathScurryPosix, PathScurryWin32, } from 'path-scurry'; -import { Pattern } from './pattern.js'; -import { GlobStream, GlobWalker } from './walker.js'; -// if no process global, just call it linux. -// so we default to case-sensitive, / separators -const defaultPlatform = (typeof process === 'object' && - process && - typeof process.platform === 'string') ? - process.platform - : 'linux'; -/** - * An object that can perform glob pattern traversals. - */ -export class Glob { - absolute; - cwd; - root; - dot; - dotRelative; - follow; - ignore; - magicalBraces; - mark; - matchBase; - maxDepth; - nobrace; - nocase; - nodir; - noext; - noglobstar; - pattern; - platform; - realpath; - scurry; - stat; - signal; - windowsPathsNoEscape; - withFileTypes; - includeChildMatches; - /** - * The options provided to the constructor. - */ - opts; - /** - * An array of parsed immutable {@link Pattern} objects. - */ - patterns; - /** - * All options are stored as properties on the `Glob` object. - * - * See {@link GlobOptions} for full options descriptions. - * - * Note that a previous `Glob` object can be passed as the - * `GlobOptions` to another `Glob` instantiation to re-use settings - * and caches with a new pattern. - * - * Traversal functions can be called multiple times to run the walk - * again. 
- */ - constructor(pattern, opts) { - /* c8 ignore start */ - if (!opts) - throw new TypeError('glob options required'); - /* c8 ignore stop */ - this.withFileTypes = !!opts.withFileTypes; - this.signal = opts.signal; - this.follow = !!opts.follow; - this.dot = !!opts.dot; - this.dotRelative = !!opts.dotRelative; - this.nodir = !!opts.nodir; - this.mark = !!opts.mark; - if (!opts.cwd) { - this.cwd = ''; - } - else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) { - opts.cwd = fileURLToPath(opts.cwd); - } - this.cwd = opts.cwd || ''; - this.root = opts.root; - this.magicalBraces = !!opts.magicalBraces; - this.nobrace = !!opts.nobrace; - this.noext = !!opts.noext; - this.realpath = !!opts.realpath; - this.absolute = opts.absolute; - this.includeChildMatches = opts.includeChildMatches !== false; - this.noglobstar = !!opts.noglobstar; - this.matchBase = !!opts.matchBase; - this.maxDepth = - typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity; - this.stat = !!opts.stat; - this.ignore = opts.ignore; - if (this.withFileTypes && this.absolute !== undefined) { - throw new Error('cannot set absolute and withFileTypes:true'); - } - if (typeof pattern === 'string') { - pattern = [pattern]; - } - this.windowsPathsNoEscape = - !!opts.windowsPathsNoEscape || - opts.allowWindowsEscape === - false; - if (this.windowsPathsNoEscape) { - pattern = pattern.map(p => p.replace(/\\/g, '/')); - } - if (this.matchBase) { - if (opts.noglobstar) { - throw new TypeError('base matching requires globstar'); - } - pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`)); - } - this.pattern = pattern; - this.platform = opts.platform || defaultPlatform; - this.opts = { ...opts, platform: this.platform }; - if (opts.scurry) { - this.scurry = opts.scurry; - if (opts.nocase !== undefined && - opts.nocase !== opts.scurry.nocase) { - throw new Error('nocase option contradicts provided scurry option'); - } - } - else { - const Scurry = opts.platform === 'win32' ? PathScurryWin32 - : opts.platform === 'darwin' ? PathScurryDarwin - : opts.platform ? PathScurryPosix - : PathScurry; - this.scurry = new Scurry(this.cwd, { - nocase: opts.nocase, - fs: opts.fs, - }); - } - this.nocase = this.scurry.nocase; - // If you do nocase:true on a case-sensitive file system, then - // we need to use regexps instead of strings for non-magic - // path portions, because statting `aBc` won't return results - // for the file `AbC` for example. - const nocaseMagicOnly = this.platform === 'darwin' || this.platform === 'win32'; - const mmo = { - // default nocase based on platform - ...opts, - dot: this.dot, - matchBase: this.matchBase, - nobrace: this.nobrace, - nocase: this.nocase, - nocaseMagicOnly, - nocomment: true, - noext: this.noext, - nonegate: true, - optimizationLevel: 2, - platform: this.platform, - windowsPathsNoEscape: this.windowsPathsNoEscape, - debug: !!this.opts.debug, - }; - const mms = this.pattern.map(p => new Minimatch(p, mmo)); - const [matchSet, globParts] = mms.reduce((set, m) => { - set[0].push(...m.set); - set[1].push(...m.globParts); - return set; - }, [[], []]); - this.patterns = matchSet.map((set, i) => { - const g = globParts[i]; - /* c8 ignore start */ - if (!g) - throw new Error('invalid pattern object'); - /* c8 ignore stop */ - return new Pattern(set, g, 0, this.platform); - }); - } - async walk() { - // Walkers always return array of Path objects, so we just have to - // coerce them into the right shape. 
It will have already called - // realpath() if the option was set to do so, so we know that's cached. - // start out knowing the cwd, at least - return [ - ...(await new GlobWalker(this.patterns, this.scurry.cwd, { - ...this.opts, - maxDepth: this.maxDepth !== Infinity ? - this.maxDepth + this.scurry.cwd.depth() - : Infinity, - platform: this.platform, - nocase: this.nocase, - includeChildMatches: this.includeChildMatches, - }).walk()), - ]; - } - walkSync() { - return [ - ...new GlobWalker(this.patterns, this.scurry.cwd, { - ...this.opts, - maxDepth: this.maxDepth !== Infinity ? - this.maxDepth + this.scurry.cwd.depth() - : Infinity, - platform: this.platform, - nocase: this.nocase, - includeChildMatches: this.includeChildMatches, - }).walkSync(), - ]; - } - stream() { - return new GlobStream(this.patterns, this.scurry.cwd, { - ...this.opts, - maxDepth: this.maxDepth !== Infinity ? - this.maxDepth + this.scurry.cwd.depth() - : Infinity, - platform: this.platform, - nocase: this.nocase, - includeChildMatches: this.includeChildMatches, - }).stream(); - } - streamSync() { - return new GlobStream(this.patterns, this.scurry.cwd, { - ...this.opts, - maxDepth: this.maxDepth !== Infinity ? - this.maxDepth + this.scurry.cwd.depth() - : Infinity, - platform: this.platform, - nocase: this.nocase, - includeChildMatches: this.includeChildMatches, - }).streamSync(); - } - /** - * Default sync iteration function. Returns a Generator that - * iterates over the results. - */ - iterateSync() { - return this.streamSync()[Symbol.iterator](); - } - [Symbol.iterator]() { - return this.iterateSync(); - } - /** - * Default async iteration function. Returns an AsyncGenerator that - * iterates over the results. - */ - iterate() { - return this.stream()[Symbol.asyncIterator](); - } - [Symbol.asyncIterator]() { - return this.iterate(); - } -} -//# sourceMappingURL=glob.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/glob/dist/esm/has-magic.js b/node_modules/node-gyp/node_modules/glob/dist/esm/has-magic.js deleted file mode 100644 index ba2321ab868d0..0000000000000 --- a/node_modules/node-gyp/node_modules/glob/dist/esm/has-magic.js +++ /dev/null @@ -1,23 +0,0 @@ -import { Minimatch } from 'minimatch'; -/** - * Return true if the patterns provided contain any magic glob characters, - * given the options provided. - * - * Brace expansion is not considered "magic" unless the `magicalBraces` option - * is set, as brace expansion just turns one string into an array of strings. - * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and - * `'xby'` both do not contain any magic glob characters, and it's treated the - * same as if you had called it on `['xay', 'xby']`. When `magicalBraces:true` - * is in the options, brace expansion _is_ treated as a pattern having magic. - */ -export const hasMagic = (pattern, options = {}) => { - if (!Array.isArray(pattern)) { - pattern = [pattern]; - } - for (const p of pattern) { - if (new Minimatch(p, options).hasMagic()) - return true; - } - return false; -}; -//# sourceMappingURL=has-magic.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/glob/dist/esm/ignore.js b/node_modules/node-gyp/node_modules/glob/dist/esm/ignore.js deleted file mode 100644 index 539c4a4fdebc4..0000000000000 --- a/node_modules/node-gyp/node_modules/glob/dist/esm/ignore.js +++ /dev/null @@ -1,115 +0,0 @@ -// give it a pattern, and it'll be able to tell you if -// a given path should be ignored. 
-// Ignoring a path ignores its children if the pattern ends in /** -// Ignores are always parsed in dot:true mode -import { Minimatch } from 'minimatch'; -import { Pattern } from './pattern.js'; -const defaultPlatform = (typeof process === 'object' && - process && - typeof process.platform === 'string') ? - process.platform - : 'linux'; -/** - * Class used to process ignored patterns - */ -export class Ignore { - relative; - relativeChildren; - absolute; - absoluteChildren; - platform; - mmopts; - constructor(ignored, { nobrace, nocase, noext, noglobstar, platform = defaultPlatform, }) { - this.relative = []; - this.absolute = []; - this.relativeChildren = []; - this.absoluteChildren = []; - this.platform = platform; - this.mmopts = { - dot: true, - nobrace, - nocase, - noext, - noglobstar, - optimizationLevel: 2, - platform, - nocomment: true, - nonegate: true, - }; - for (const ign of ignored) - this.add(ign); - } - add(ign) { - // this is a little weird, but it gives us a clean set of optimized - // minimatch matchers, without getting tripped up if one of them - // ends in /** inside a brace section, and it's only inefficient at - // the start of the walk, not along it. - // It'd be nice if the Pattern class just had a .test() method, but - // handling globstars is a bit of a pita, and that code already lives - // in minimatch anyway. - // Another way would be if maybe Minimatch could take its set/globParts - // as an option, and then we could at least just use Pattern to test - // for absolute-ness. - // Yet another way, Minimatch could take an array of glob strings, and - // a cwd option, and do the right thing. - const mm = new Minimatch(ign, this.mmopts); - for (let i = 0; i < mm.set.length; i++) { - const parsed = mm.set[i]; - const globParts = mm.globParts[i]; - /* c8 ignore start */ - if (!parsed || !globParts) { - throw new Error('invalid pattern object'); - } - // strip off leading ./ portions - // https://github.com/isaacs/node-glob/issues/570 - while (parsed[0] === '.' 
&& globParts[0] === '.') { - parsed.shift(); - globParts.shift(); - } - /* c8 ignore stop */ - const p = new Pattern(parsed, globParts, 0, this.platform); - const m = new Minimatch(p.globString(), this.mmopts); - const children = globParts[globParts.length - 1] === '**'; - const absolute = p.isAbsolute(); - if (absolute) - this.absolute.push(m); - else - this.relative.push(m); - if (children) { - if (absolute) - this.absoluteChildren.push(m); - else - this.relativeChildren.push(m); - } - } - } - ignored(p) { - const fullpath = p.fullpath(); - const fullpaths = `${fullpath}/`; - const relative = p.relative() || '.'; - const relatives = `${relative}/`; - for (const m of this.relative) { - if (m.match(relative) || m.match(relatives)) - return true; - } - for (const m of this.absolute) { - if (m.match(fullpath) || m.match(fullpaths)) - return true; - } - return false; - } - childrenIgnored(p) { - const fullpath = p.fullpath() + '/'; - const relative = (p.relative() || '.') + '/'; - for (const m of this.relativeChildren) { - if (m.match(relative)) - return true; - } - for (const m of this.absoluteChildren) { - if (m.match(fullpath)) - return true; - } - return false; - } -} -//# sourceMappingURL=ignore.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/glob/dist/esm/index.js b/node_modules/node-gyp/node_modules/glob/dist/esm/index.js deleted file mode 100644 index e15c1f9c4cb03..0000000000000 --- a/node_modules/node-gyp/node_modules/glob/dist/esm/index.js +++ /dev/null @@ -1,55 +0,0 @@ -import { escape, unescape } from 'minimatch'; -import { Glob } from './glob.js'; -import { hasMagic } from './has-magic.js'; -export { escape, unescape } from 'minimatch'; -export { Glob } from './glob.js'; -export { hasMagic } from './has-magic.js'; -export { Ignore } from './ignore.js'; -export function globStreamSync(pattern, options = {}) { - return new Glob(pattern, options).streamSync(); -} -export function globStream(pattern, options = {}) { - return new Glob(pattern, options).stream(); -} -export function globSync(pattern, options = {}) { - return new Glob(pattern, options).walkSync(); -} -async function glob_(pattern, options = {}) { - return new Glob(pattern, options).walk(); -} -export function globIterateSync(pattern, options = {}) { - return new Glob(pattern, options).iterateSync(); -} -export function globIterate(pattern, options = {}) { - return new Glob(pattern, options).iterate(); -} -// aliases: glob.sync.stream() glob.stream.sync() glob.sync() etc -export const streamSync = globStreamSync; -export const stream = Object.assign(globStream, { sync: globStreamSync }); -export const iterateSync = globIterateSync; -export const iterate = Object.assign(globIterate, { - sync: globIterateSync, -}); -export const sync = Object.assign(globSync, { - stream: globStreamSync, - iterate: globIterateSync, -}); -export const glob = Object.assign(glob_, { - glob: glob_, - globSync, - sync, - globStream, - stream, - globStreamSync, - streamSync, - globIterate, - iterate, - globIterateSync, - iterateSync, - Glob, - hasMagic, - escape, - unescape, -}); -glob.glob = glob; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/glob/dist/esm/package.json b/node_modules/node-gyp/node_modules/glob/dist/esm/package.json deleted file mode 100644 index 3dbc1ca591c05..0000000000000 --- a/node_modules/node-gyp/node_modules/glob/dist/esm/package.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "type": "module" -} diff --git 
a/node_modules/node-gyp/node_modules/glob/dist/esm/pattern.js b/node_modules/node-gyp/node_modules/glob/dist/esm/pattern.js deleted file mode 100644 index b41defa10c6a3..0000000000000 --- a/node_modules/node-gyp/node_modules/glob/dist/esm/pattern.js +++ /dev/null @@ -1,215 +0,0 @@ -// this is just a very light wrapper around 2 arrays with an offset index -import { GLOBSTAR } from 'minimatch'; -const isPatternList = (pl) => pl.length >= 1; -const isGlobList = (gl) => gl.length >= 1; -/** - * An immutable-ish view on an array of glob parts and their parsed - * results - */ -export class Pattern { - #patternList; - #globList; - #index; - length; - #platform; - #rest; - #globString; - #isDrive; - #isUNC; - #isAbsolute; - #followGlobstar = true; - constructor(patternList, globList, index, platform) { - if (!isPatternList(patternList)) { - throw new TypeError('empty pattern list'); - } - if (!isGlobList(globList)) { - throw new TypeError('empty glob list'); - } - if (globList.length !== patternList.length) { - throw new TypeError('mismatched pattern list and glob list lengths'); - } - this.length = patternList.length; - if (index < 0 || index >= this.length) { - throw new TypeError('index out of range'); - } - this.#patternList = patternList; - this.#globList = globList; - this.#index = index; - this.#platform = platform; - // normalize root entries of absolute patterns on initial creation. - if (this.#index === 0) { - // c: => ['c:/'] - // C:/ => ['C:/'] - // C:/x => ['C:/', 'x'] - // //host/share => ['//host/share/'] - // //host/share/ => ['//host/share/'] - // //host/share/x => ['//host/share/', 'x'] - // /etc => ['/', 'etc'] - // / => ['/'] - if (this.isUNC()) { - // '' / '' / 'host' / 'share' - const [p0, p1, p2, p3, ...prest] = this.#patternList; - const [g0, g1, g2, g3, ...grest] = this.#globList; - if (prest[0] === '') { - // ends in / - prest.shift(); - grest.shift(); - } - const p = [p0, p1, p2, p3, ''].join('/'); - const g = [g0, g1, g2, g3, ''].join('/'); - this.#patternList = [p, ...prest]; - this.#globList = [g, ...grest]; - this.length = this.#patternList.length; - } - else if (this.isDrive() || this.isAbsolute()) { - const [p1, ...prest] = this.#patternList; - const [g1, ...grest] = this.#globList; - if (prest[0] === '') { - // ends in / - prest.shift(); - grest.shift(); - } - const p = p1 + '/'; - const g = g1 + '/'; - this.#patternList = [p, ...prest]; - this.#globList = [g, ...grest]; - this.length = this.#patternList.length; - } - } - } - /** - * The first entry in the parsed list of patterns - */ - pattern() { - return this.#patternList[this.#index]; - } - /** - * true of if pattern() returns a string - */ - isString() { - return typeof this.#patternList[this.#index] === 'string'; - } - /** - * true of if pattern() returns GLOBSTAR - */ - isGlobstar() { - return this.#patternList[this.#index] === GLOBSTAR; - } - /** - * true if pattern() returns a regexp - */ - isRegExp() { - return this.#patternList[this.#index] instanceof RegExp; - } - /** - * The /-joined set of glob parts that make up this pattern - */ - globString() { - return (this.#globString = - this.#globString || - (this.#index === 0 ? - this.isAbsolute() ? 
- this.#globList[0] + this.#globList.slice(1).join('/') - : this.#globList.join('/') - : this.#globList.slice(this.#index).join('/'))); - } - /** - * true if there are more pattern parts after this one - */ - hasMore() { - return this.length > this.#index + 1; - } - /** - * The rest of the pattern after this part, or null if this is the end - */ - rest() { - if (this.#rest !== undefined) - return this.#rest; - if (!this.hasMore()) - return (this.#rest = null); - this.#rest = new Pattern(this.#patternList, this.#globList, this.#index + 1, this.#platform); - this.#rest.#isAbsolute = this.#isAbsolute; - this.#rest.#isUNC = this.#isUNC; - this.#rest.#isDrive = this.#isDrive; - return this.#rest; - } - /** - * true if the pattern represents a //unc/path/ on windows - */ - isUNC() { - const pl = this.#patternList; - return this.#isUNC !== undefined ? - this.#isUNC - : (this.#isUNC = - this.#platform === 'win32' && - this.#index === 0 && - pl[0] === '' && - pl[1] === '' && - typeof pl[2] === 'string' && - !!pl[2] && - typeof pl[3] === 'string' && - !!pl[3]); - } - // pattern like C:/... - // split = ['C:', ...] - // XXX: would be nice to handle patterns like `c:*` to test the cwd - // in c: for *, but I don't know of a way to even figure out what that - // cwd is without actually chdir'ing into it? - /** - * True if the pattern starts with a drive letter on Windows - */ - isDrive() { - const pl = this.#patternList; - return this.#isDrive !== undefined ? - this.#isDrive - : (this.#isDrive = - this.#platform === 'win32' && - this.#index === 0 && - this.length > 1 && - typeof pl[0] === 'string' && - /^[a-z]:$/i.test(pl[0])); - } - // pattern = '/' or '/...' or '/x/...' - // split = ['', ''] or ['', ...] or ['', 'x', ...] - // Drive and UNC both considered absolute on windows - /** - * True if the pattern is rooted on an absolute path - */ - isAbsolute() { - const pl = this.#patternList; - return this.#isAbsolute !== undefined ? - this.#isAbsolute - : (this.#isAbsolute = - (pl[0] === '' && pl.length > 1) || - this.isDrive() || - this.isUNC()); - } - /** - * consume the root of the pattern, and return it - */ - root() { - const p = this.#patternList[0]; - return (typeof p === 'string' && this.isAbsolute() && this.#index === 0) ? - p - : ''; - } - /** - * Check to see if the current globstar pattern is allowed to follow - * a symbolic link. 
- */ - checkFollowGlobstar() { - return !(this.#index === 0 || - !this.isGlobstar() || - !this.#followGlobstar); - } - /** - * Mark that the current globstar pattern is following a symbolic link - */ - markFollowGlobstar() { - if (this.#index === 0 || !this.isGlobstar() || !this.#followGlobstar) - return false; - this.#followGlobstar = false; - return true; - } -} -//# sourceMappingURL=pattern.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/glob/dist/esm/processor.js b/node_modules/node-gyp/node_modules/glob/dist/esm/processor.js deleted file mode 100644 index f874892ffed0c..0000000000000 --- a/node_modules/node-gyp/node_modules/glob/dist/esm/processor.js +++ /dev/null @@ -1,294 +0,0 @@ -// synchronous utility for filtering entries and calculating subwalks -import { GLOBSTAR } from 'minimatch'; -/** - * A cache of which patterns have been processed for a given Path - */ -export class HasWalkedCache { - store; - constructor(store = new Map()) { - this.store = store; - } - copy() { - return new HasWalkedCache(new Map(this.store)); - } - hasWalked(target, pattern) { - return this.store.get(target.fullpath())?.has(pattern.globString()); - } - storeWalked(target, pattern) { - const fullpath = target.fullpath(); - const cached = this.store.get(fullpath); - if (cached) - cached.add(pattern.globString()); - else - this.store.set(fullpath, new Set([pattern.globString()])); - } -} -/** - * A record of which paths have been matched in a given walk step, - * and whether they only are considered a match if they are a directory, - * and whether their absolute or relative path should be returned. - */ -export class MatchRecord { - store = new Map(); - add(target, absolute, ifDir) { - const n = (absolute ? 2 : 0) | (ifDir ? 1 : 0); - const current = this.store.get(target); - this.store.set(target, current === undefined ? n : n & current); - } - // match, absolute, ifdir - entries() { - return [...this.store.entries()].map(([path, n]) => [ - path, - !!(n & 2), - !!(n & 1), - ]); - } -} -/** - * A collection of patterns that must be processed in a subsequent step - * for a given path. - */ -export class SubWalks { - store = new Map(); - add(target, pattern) { - if (!target.canReaddir()) { - return; - } - const subs = this.store.get(target); - if (subs) { - if (!subs.find(p => p.globString() === pattern.globString())) { - subs.push(pattern); - } - } - else - this.store.set(target, [pattern]); - } - get(target) { - const subs = this.store.get(target); - /* c8 ignore start */ - if (!subs) { - throw new Error('attempting to walk unknown path'); - } - /* c8 ignore stop */ - return subs; - } - entries() { - return this.keys().map(k => [k, this.store.get(k)]); - } - keys() { - return [...this.store.keys()].filter(t => t.canReaddir()); - } -} -/** - * The class that processes patterns for a given path. - * - * Handles child entry filtering, and determining whether a path's - * directory contents must be read. - */ -export class Processor { - hasWalkedCache; - matches = new MatchRecord(); - subwalks = new SubWalks(); - patterns; - follow; - dot; - opts; - constructor(opts, hasWalkedCache) { - this.opts = opts; - this.follow = !!opts.follow; - this.dot = !!opts.dot; - this.hasWalkedCache = - hasWalkedCache ? 
hasWalkedCache.copy() : new HasWalkedCache(); - } - processPatterns(target, patterns) { - this.patterns = patterns; - const processingSet = patterns.map(p => [target, p]); - // map of paths to the magic-starting subwalks they need to walk - // first item in patterns is the filter - for (let [t, pattern] of processingSet) { - this.hasWalkedCache.storeWalked(t, pattern); - const root = pattern.root(); - const absolute = pattern.isAbsolute() && this.opts.absolute !== false; - // start absolute patterns at root - if (root) { - t = t.resolve(root === '/' && this.opts.root !== undefined ? - this.opts.root - : root); - const rest = pattern.rest(); - if (!rest) { - this.matches.add(t, true, false); - continue; - } - else { - pattern = rest; - } - } - if (t.isENOENT()) - continue; - let p; - let rest; - let changed = false; - while (typeof (p = pattern.pattern()) === 'string' && - (rest = pattern.rest())) { - const c = t.resolve(p); - t = c; - pattern = rest; - changed = true; - } - p = pattern.pattern(); - rest = pattern.rest(); - if (changed) { - if (this.hasWalkedCache.hasWalked(t, pattern)) - continue; - this.hasWalkedCache.storeWalked(t, pattern); - } - // now we have either a final string for a known entry, - // more strings for an unknown entry, - // or a pattern starting with magic, mounted on t. - if (typeof p === 'string') { - // must not be final entry, otherwise we would have - // concatenated it earlier. - const ifDir = p === '..' || p === '' || p === '.'; - this.matches.add(t.resolve(p), absolute, ifDir); - continue; - } - else if (p === GLOBSTAR) { - // if no rest, match and subwalk pattern - // if rest, process rest and subwalk pattern - // if it's a symlink, but we didn't get here by way of a - // globstar match (meaning it's the first time THIS globstar - // has traversed a symlink), then we follow it. Otherwise, stop. - if (!t.isSymbolicLink() || - this.follow || - pattern.checkFollowGlobstar()) { - this.subwalks.add(t, pattern); - } - const rp = rest?.pattern(); - const rrest = rest?.rest(); - if (!rest || ((rp === '' || rp === '.') && !rrest)) { - // only HAS to be a dir if it ends in **/ or **/. - // but ending in ** will match files as well. - this.matches.add(t, absolute, rp === '' || rp === '.'); - } - else { - if (rp === '..') { - // this would mean you're matching **/.. at the fs root, - // and no thanks, I'm not gonna test that specific case. 
- /* c8 ignore start */ - const tp = t.parent || t; - /* c8 ignore stop */ - if (!rrest) - this.matches.add(tp, absolute, true); - else if (!this.hasWalkedCache.hasWalked(tp, rrest)) { - this.subwalks.add(tp, rrest); - } - } - } - } - else if (p instanceof RegExp) { - this.subwalks.add(t, pattern); - } - } - return this; - } - subwalkTargets() { - return this.subwalks.keys(); - } - child() { - return new Processor(this.opts, this.hasWalkedCache); - } - // return a new Processor containing the subwalks for each - // child entry, and a set of matches, and - // a hasWalkedCache that's a copy of this one - // then we're going to call - filterEntries(parent, entries) { - const patterns = this.subwalks.get(parent); - // put matches and entry walks into the results processor - const results = this.child(); - for (const e of entries) { - for (const pattern of patterns) { - const absolute = pattern.isAbsolute(); - const p = pattern.pattern(); - const rest = pattern.rest(); - if (p === GLOBSTAR) { - results.testGlobstar(e, pattern, rest, absolute); - } - else if (p instanceof RegExp) { - results.testRegExp(e, p, rest, absolute); - } - else { - results.testString(e, p, rest, absolute); - } - } - } - return results; - } - testGlobstar(e, pattern, rest, absolute) { - if (this.dot || !e.name.startsWith('.')) { - if (!pattern.hasMore()) { - this.matches.add(e, absolute, false); - } - if (e.canReaddir()) { - // if we're in follow mode or it's not a symlink, just keep - // testing the same pattern. If there's more after the globstar, - // then this symlink consumes the globstar. If not, then we can - // follow at most ONE symlink along the way, so we mark it, which - // also checks to ensure that it wasn't already marked. - if (this.follow || !e.isSymbolicLink()) { - this.subwalks.add(e, pattern); - } - else if (e.isSymbolicLink()) { - if (rest && pattern.checkFollowGlobstar()) { - this.subwalks.add(e, rest); - } - else if (pattern.markFollowGlobstar()) { - this.subwalks.add(e, pattern); - } - } - } - } - // if the NEXT thing matches this entry, then also add - // the rest. - if (rest) { - const rp = rest.pattern(); - if (typeof rp === 'string' && - // dots and empty were handled already - rp !== '..' && - rp !== '' && - rp !== '.') { - this.testString(e, rp, rest.rest(), absolute); - } - else if (rp === '..') { - /* c8 ignore start */ - const ep = e.parent || e; - /* c8 ignore stop */ - this.subwalks.add(ep, rest); - } - else if (rp instanceof RegExp) { - this.testRegExp(e, rp, rest.rest(), absolute); - } - } - } - testRegExp(e, p, rest, absolute) { - if (!p.test(e.name)) - return; - if (!rest) { - this.matches.add(e, absolute, false); - } - else { - this.subwalks.add(e, rest); - } - } - testString(e, p, rest, absolute) { - // should never happen? - if (!e.isNamed(p)) - return; - if (!rest) { - this.matches.add(e, absolute, false); - } - else { - this.subwalks.add(e, rest); - } - } -} -//# sourceMappingURL=processor.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/glob/dist/esm/walker.js b/node_modules/node-gyp/node_modules/glob/dist/esm/walker.js deleted file mode 100644 index 3d68196c4f175..0000000000000 --- a/node_modules/node-gyp/node_modules/glob/dist/esm/walker.js +++ /dev/null @@ -1,381 +0,0 @@ -/** - * Single-use utility classes to provide functionality to the {@link Glob} - * methods. 
- * - * @module - */ -import { Minipass } from 'minipass'; -import { Ignore } from './ignore.js'; -import { Processor } from './processor.js'; -const makeIgnore = (ignore, opts) => typeof ignore === 'string' ? new Ignore([ignore], opts) - : Array.isArray(ignore) ? new Ignore(ignore, opts) - : ignore; -/** - * basic walking utilities that all the glob walker types use - */ -export class GlobUtil { - path; - patterns; - opts; - seen = new Set(); - paused = false; - aborted = false; - #onResume = []; - #ignore; - #sep; - signal; - maxDepth; - includeChildMatches; - constructor(patterns, path, opts) { - this.patterns = patterns; - this.path = path; - this.opts = opts; - this.#sep = !opts.posix && opts.platform === 'win32' ? '\\' : '/'; - this.includeChildMatches = opts.includeChildMatches !== false; - if (opts.ignore || !this.includeChildMatches) { - this.#ignore = makeIgnore(opts.ignore ?? [], opts); - if (!this.includeChildMatches && - typeof this.#ignore.add !== 'function') { - const m = 'cannot ignore child matches, ignore lacks add() method.'; - throw new Error(m); - } - } - // ignore, always set with maxDepth, but it's optional on the - // GlobOptions type - /* c8 ignore start */ - this.maxDepth = opts.maxDepth || Infinity; - /* c8 ignore stop */ - if (opts.signal) { - this.signal = opts.signal; - this.signal.addEventListener('abort', () => { - this.#onResume.length = 0; - }); - } - } - #ignored(path) { - return this.seen.has(path) || !!this.#ignore?.ignored?.(path); - } - #childrenIgnored(path) { - return !!this.#ignore?.childrenIgnored?.(path); - } - // backpressure mechanism - pause() { - this.paused = true; - } - resume() { - /* c8 ignore start */ - if (this.signal?.aborted) - return; - /* c8 ignore stop */ - this.paused = false; - let fn = undefined; - while (!this.paused && (fn = this.#onResume.shift())) { - fn(); - } - } - onResume(fn) { - if (this.signal?.aborted) - return; - /* c8 ignore start */ - if (!this.paused) { - fn(); - } - else { - /* c8 ignore stop */ - this.#onResume.push(fn); - } - } - // do the requisite realpath/stat checking, and return the path - // to add or undefined to filter it out. - async matchCheck(e, ifDir) { - if (ifDir && this.opts.nodir) - return undefined; - let rpc; - if (this.opts.realpath) { - rpc = e.realpathCached() || (await e.realpath()); - if (!rpc) - return undefined; - e = rpc; - } - const needStat = e.isUnknown() || this.opts.stat; - const s = needStat ? await e.lstat() : e; - if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) { - const target = await s.realpath(); - /* c8 ignore start */ - if (target && (target.isUnknown() || this.opts.stat)) { - await target.lstat(); - } - /* c8 ignore stop */ - } - return this.matchCheckTest(s, ifDir); - } - matchCheckTest(e, ifDir) { - return (e && - (this.maxDepth === Infinity || e.depth() <= this.maxDepth) && - (!ifDir || e.canReaddir()) && - (!this.opts.nodir || !e.isDirectory()) && - (!this.opts.nodir || - !this.opts.follow || - !e.isSymbolicLink() || - !e.realpathCached()?.isDirectory()) && - !this.#ignored(e)) ? - e - : undefined; - } - matchCheckSync(e, ifDir) { - if (ifDir && this.opts.nodir) - return undefined; - let rpc; - if (this.opts.realpath) { - rpc = e.realpathCached() || e.realpathSync(); - if (!rpc) - return undefined; - e = rpc; - } - const needStat = e.isUnknown() || this.opts.stat; - const s = needStat ? 
e.lstatSync() : e; - if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) { - const target = s.realpathSync(); - if (target && (target?.isUnknown() || this.opts.stat)) { - target.lstatSync(); - } - } - return this.matchCheckTest(s, ifDir); - } - matchFinish(e, absolute) { - if (this.#ignored(e)) - return; - // we know we have an ignore if this is false, but TS doesn't - if (!this.includeChildMatches && this.#ignore?.add) { - const ign = `${e.relativePosix()}/**`; - this.#ignore.add(ign); - } - const abs = this.opts.absolute === undefined ? absolute : this.opts.absolute; - this.seen.add(e); - const mark = this.opts.mark && e.isDirectory() ? this.#sep : ''; - // ok, we have what we need! - if (this.opts.withFileTypes) { - this.matchEmit(e); - } - else if (abs) { - const abs = this.opts.posix ? e.fullpathPosix() : e.fullpath(); - this.matchEmit(abs + mark); - } - else { - const rel = this.opts.posix ? e.relativePosix() : e.relative(); - const pre = this.opts.dotRelative && !rel.startsWith('..' + this.#sep) ? - '.' + this.#sep - : ''; - this.matchEmit(!rel ? '.' + mark : pre + rel + mark); - } - } - async match(e, absolute, ifDir) { - const p = await this.matchCheck(e, ifDir); - if (p) - this.matchFinish(p, absolute); - } - matchSync(e, absolute, ifDir) { - const p = this.matchCheckSync(e, ifDir); - if (p) - this.matchFinish(p, absolute); - } - walkCB(target, patterns, cb) { - /* c8 ignore start */ - if (this.signal?.aborted) - cb(); - /* c8 ignore stop */ - this.walkCB2(target, patterns, new Processor(this.opts), cb); - } - walkCB2(target, patterns, processor, cb) { - if (this.#childrenIgnored(target)) - return cb(); - if (this.signal?.aborted) - cb(); - if (this.paused) { - this.onResume(() => this.walkCB2(target, patterns, processor, cb)); - return; - } - processor.processPatterns(target, patterns); - // done processing. all of the above is sync, can be abstracted out. - // subwalks is a map of paths to the entry filters they need - // matches is a map of paths to [absolute, ifDir] tuples. 
- let tasks = 1; - const next = () => { - if (--tasks === 0) - cb(); - }; - for (const [m, absolute, ifDir] of processor.matches.entries()) { - if (this.#ignored(m)) - continue; - tasks++; - this.match(m, absolute, ifDir).then(() => next()); - } - for (const t of processor.subwalkTargets()) { - if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) { - continue; - } - tasks++; - const childrenCached = t.readdirCached(); - if (t.calledReaddir()) - this.walkCB3(t, childrenCached, processor, next); - else { - t.readdirCB((_, entries) => this.walkCB3(t, entries, processor, next), true); - } - } - next(); - } - walkCB3(target, entries, processor, cb) { - processor = processor.filterEntries(target, entries); - let tasks = 1; - const next = () => { - if (--tasks === 0) - cb(); - }; - for (const [m, absolute, ifDir] of processor.matches.entries()) { - if (this.#ignored(m)) - continue; - tasks++; - this.match(m, absolute, ifDir).then(() => next()); - } - for (const [target, patterns] of processor.subwalks.entries()) { - tasks++; - this.walkCB2(target, patterns, processor.child(), next); - } - next(); - } - walkCBSync(target, patterns, cb) { - /* c8 ignore start */ - if (this.signal?.aborted) - cb(); - /* c8 ignore stop */ - this.walkCB2Sync(target, patterns, new Processor(this.opts), cb); - } - walkCB2Sync(target, patterns, processor, cb) { - if (this.#childrenIgnored(target)) - return cb(); - if (this.signal?.aborted) - cb(); - if (this.paused) { - this.onResume(() => this.walkCB2Sync(target, patterns, processor, cb)); - return; - } - processor.processPatterns(target, patterns); - // done processing. all of the above is sync, can be abstracted out. - // subwalks is a map of paths to the entry filters they need - // matches is a map of paths to [absolute, ifDir] tuples. 
- let tasks = 1; - const next = () => { - if (--tasks === 0) - cb(); - }; - for (const [m, absolute, ifDir] of processor.matches.entries()) { - if (this.#ignored(m)) - continue; - this.matchSync(m, absolute, ifDir); - } - for (const t of processor.subwalkTargets()) { - if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) { - continue; - } - tasks++; - const children = t.readdirSync(); - this.walkCB3Sync(t, children, processor, next); - } - next(); - } - walkCB3Sync(target, entries, processor, cb) { - processor = processor.filterEntries(target, entries); - let tasks = 1; - const next = () => { - if (--tasks === 0) - cb(); - }; - for (const [m, absolute, ifDir] of processor.matches.entries()) { - if (this.#ignored(m)) - continue; - this.matchSync(m, absolute, ifDir); - } - for (const [target, patterns] of processor.subwalks.entries()) { - tasks++; - this.walkCB2Sync(target, patterns, processor.child(), next); - } - next(); - } -} -export class GlobWalker extends GlobUtil { - matches = new Set(); - constructor(patterns, path, opts) { - super(patterns, path, opts); - } - matchEmit(e) { - this.matches.add(e); - } - async walk() { - if (this.signal?.aborted) - throw this.signal.reason; - if (this.path.isUnknown()) { - await this.path.lstat(); - } - await new Promise((res, rej) => { - this.walkCB(this.path, this.patterns, () => { - if (this.signal?.aborted) { - rej(this.signal.reason); - } - else { - res(this.matches); - } - }); - }); - return this.matches; - } - walkSync() { - if (this.signal?.aborted) - throw this.signal.reason; - if (this.path.isUnknown()) { - this.path.lstatSync(); - } - // nothing for the callback to do, because this never pauses - this.walkCBSync(this.path, this.patterns, () => { - if (this.signal?.aborted) - throw this.signal.reason; - }); - return this.matches; - } -} -export class GlobStream extends GlobUtil { - results; - constructor(patterns, path, opts) { - super(patterns, path, opts); - this.results = new Minipass({ - signal: this.signal, - objectMode: true, - }); - this.results.on('drain', () => this.resume()); - this.results.on('resume', () => this.resume()); - } - matchEmit(e) { - this.results.write(e); - if (!this.results.flowing) - this.pause(); - } - stream() { - const target = this.path; - if (target.isUnknown()) { - target.lstat().then(() => { - this.walkCB(target, this.patterns, () => this.results.end()); - }); - } - else { - this.walkCB(target, this.patterns, () => this.results.end()); - } - return this.results; - } - streamSync() { - if (this.path.isUnknown()) { - this.path.lstatSync(); - } - this.walkCBSync(this.path, this.patterns, () => this.results.end()); - return this.results; - } -} -//# sourceMappingURL=walker.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/glob/package.json b/node_modules/node-gyp/node_modules/glob/package.json deleted file mode 100644 index 6d4893b5f327b..0000000000000 --- a/node_modules/node-gyp/node_modules/glob/package.json +++ /dev/null @@ -1,99 +0,0 @@ -{ - "author": "Isaac Z. 
Schlueter (https://blog.izs.me/)", - "publishConfig": { - "tag": "legacy-v10" - }, - "name": "glob", - "description": "the most correct and second fastest glob implementation in JavaScript", - "version": "10.4.5", - "type": "module", - "tshy": { - "main": true, - "exports": { - "./package.json": "./package.json", - ".": "./src/index.ts" - } - }, - "bin": "./dist/esm/bin.mjs", - "main": "./dist/commonjs/index.js", - "types": "./dist/commonjs/index.d.ts", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "types": "./dist/esm/index.d.ts", - "default": "./dist/esm/index.js" - }, - "require": { - "types": "./dist/commonjs/index.d.ts", - "default": "./dist/commonjs/index.js" - } - } - }, - "repository": { - "type": "git", - "url": "git://github.com/isaacs/node-glob.git" - }, - "files": [ - "dist" - ], - "scripts": { - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags", - "prepare": "tshy", - "pretest": "npm run prepare", - "presnap": "npm run prepare", - "test": "tap", - "snap": "tap", - "format": "prettier --write . --log-level warn", - "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts", - "prepublish": "npm run benchclean", - "profclean": "rm -f v8.log profile.txt", - "test-regen": "npm run profclean && TEST_REGEN=1 node --no-warnings --loader ts-node/esm test/00-setup.ts", - "prebench": "npm run prepare", - "bench": "bash benchmark.sh", - "preprof": "npm run prepare", - "prof": "bash prof.sh", - "benchclean": "node benchclean.cjs" - }, - "prettier": { - "experimentalTernaries": true, - "semi": false, - "printWidth": 75, - "tabWidth": 2, - "useTabs": false, - "singleQuote": true, - "jsxSingleQuote": false, - "bracketSameLine": true, - "arrowParens": "avoid", - "endOfLine": "lf" - }, - "dependencies": { - "foreground-child": "^3.1.0", - "jackspeak": "^3.1.2", - "minimatch": "^9.0.4", - "minipass": "^7.1.2", - "package-json-from-dist": "^1.0.0", - "path-scurry": "^1.11.1" - }, - "devDependencies": { - "@types/node": "^20.11.30", - "memfs": "^3.4.13", - "mkdirp": "^3.0.1", - "prettier": "^3.2.5", - "rimraf": "^5.0.7", - "sync-content": "^1.0.2", - "tap": "^19.0.0", - "tshy": "^1.14.0", - "typedoc": "^0.25.12" - }, - "tap": { - "before": "test/00-setup.ts" - }, - "license": "ISC", - "funding": { - "url": "https://github.com/sponsors/isaacs" - }, - "module": "./dist/esm/index.js" -} diff --git a/node_modules/node-gyp/node_modules/jackspeak/dist/commonjs/index.js b/node_modules/node-gyp/node_modules/jackspeak/dist/commonjs/index.js deleted file mode 100644 index f7fc9cb69a2af..0000000000000 --- a/node_modules/node-gyp/node_modules/jackspeak/dist/commonjs/index.js +++ /dev/null @@ -1,1010 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.jack = exports.Jack = exports.isConfigOption = exports.isConfigType = void 0; -const node_util_1 = require("node:util"); -const parse_args_js_1 = require("./parse-args.js"); -// it's a tiny API, just cast it inline, it's fine -//@ts-ignore -const cliui_1 = __importDefault(require("@isaacs/cliui")); -const node_path_1 = require("node:path"); -const width = Math.min((process && process.stdout && process.stdout.columns) || 80, 80); -// indentation spaces from heading level -const indent = (n) => (n - 1) * 2; -const toEnvKey = (pref, key) => { - return [pref, key.replace(/[^a-zA-Z0-9]+/g, ' ')] - .join(' ') - .trim() - .toUpperCase() - .replace(/ /g, '_'); -}; -const toEnvVal = (value, delim = '\n') => { - const str = typeof value === 'string' ? value - : typeof value === 'boolean' ? - value ? '1' - : '0' - : typeof value === 'number' ? String(value) - : Array.isArray(value) ? - value.map((v) => toEnvVal(v)).join(delim) - : /* c8 ignore start */ undefined; - if (typeof str !== 'string') { - throw new Error(`could not serialize value to environment: ${JSON.stringify(value)}`); - } - /* c8 ignore stop */ - return str; -}; -const fromEnvVal = (env, type, multiple, delim = '\n') => (multiple ? - env ? env.split(delim).map(v => fromEnvVal(v, type, false)) - : [] - : type === 'string' ? env - : type === 'boolean' ? env === '1' - : +env.trim()); -const isConfigType = (t) => typeof t === 'string' && - (t === 'string' || t === 'number' || t === 'boolean'); -exports.isConfigType = isConfigType; -const undefOrType = (v, t) => v === undefined || typeof v === t; -const undefOrTypeArray = (v, t) => v === undefined || (Array.isArray(v) && v.every(x => typeof x === t)); -const isValidOption = (v, vo) => Array.isArray(v) ? v.every(x => isValidOption(x, vo)) : vo.includes(v); -// print the value type, for error message reporting -const valueType = (v) => typeof v === 'string' ? 'string' - : typeof v === 'boolean' ? 'boolean' - : typeof v === 'number' ? 'number' - : Array.isArray(v) ? - joinTypes([...new Set(v.map(v => valueType(v)))]) + '[]' - : `${v.type}${v.multiple ? '[]' : ''}`; -const joinTypes = (types) => types.length === 1 && typeof types[0] === 'string' ? - types[0] - : `(${types.join('|')})`; -const isValidValue = (v, type, multi) => { - if (multi) { - if (!Array.isArray(v)) - return false; - return !v.some((v) => !isValidValue(v, type, false)); - } - if (Array.isArray(v)) - return false; - return typeof v === type; -}; -const isConfigOption = (o, type, multi) => !!o && - typeof o === 'object' && - (0, exports.isConfigType)(o.type) && - o.type === type && - undefOrType(o.short, 'string') && - undefOrType(o.description, 'string') && - undefOrType(o.hint, 'string') && - undefOrType(o.validate, 'function') && - (o.type === 'boolean' ? - o.validOptions === undefined - : undefOrTypeArray(o.validOptions, o.type)) && - (o.default === undefined || isValidValue(o.default, type, multi)) && - !!o.multiple === multi; -exports.isConfigOption = isConfigOption; -function num(o = {}) { - const { default: def, validate: val, validOptions, ...rest } = o; - if (def !== undefined && !isValidValue(def, 'number', false)) { - throw new TypeError('invalid default value', { - cause: { - found: def, - wanted: 'number', - }, - }); - } - if (!undefOrTypeArray(validOptions, 'number')) { - throw new TypeError('invalid validOptions', { - cause: { - found: validOptions, - wanted: 'number[]', - }, - }); - } - const validate = val ? 
- val - : undefined; - return { - ...rest, - default: def, - validate, - validOptions, - type: 'number', - multiple: false, - }; -} -function numList(o = {}) { - const { default: def, validate: val, validOptions, ...rest } = o; - if (def !== undefined && !isValidValue(def, 'number', true)) { - throw new TypeError('invalid default value', { - cause: { - found: def, - wanted: 'number[]', - }, - }); - } - if (!undefOrTypeArray(validOptions, 'number')) { - throw new TypeError('invalid validOptions', { - cause: { - found: validOptions, - wanted: 'number[]', - }, - }); - } - const validate = val ? - val - : undefined; - return { - ...rest, - default: def, - validate, - validOptions, - type: 'number', - multiple: true, - }; -} -function opt(o = {}) { - const { default: def, validate: val, validOptions, ...rest } = o; - if (def !== undefined && !isValidValue(def, 'string', false)) { - throw new TypeError('invalid default value', { - cause: { - found: def, - wanted: 'string', - }, - }); - } - if (!undefOrTypeArray(validOptions, 'string')) { - throw new TypeError('invalid validOptions', { - cause: { - found: validOptions, - wanted: 'string[]', - }, - }); - } - const validate = val ? - val - : undefined; - return { - ...rest, - default: def, - validate, - validOptions, - type: 'string', - multiple: false, - }; -} -function optList(o = {}) { - const { default: def, validate: val, validOptions, ...rest } = o; - if (def !== undefined && !isValidValue(def, 'string', true)) { - throw new TypeError('invalid default value', { - cause: { - found: def, - wanted: 'string[]', - }, - }); - } - if (!undefOrTypeArray(validOptions, 'string')) { - throw new TypeError('invalid validOptions', { - cause: { - found: validOptions, - wanted: 'string[]', - }, - }); - } - const validate = val ? - val - : undefined; - return { - ...rest, - default: def, - validate, - validOptions, - type: 'string', - multiple: true, - }; -} -function flag(o = {}) { - const { hint, default: def, validate: val, ...rest } = o; - delete rest.validOptions; - if (def !== undefined && !isValidValue(def, 'boolean', false)) { - throw new TypeError('invalid default value'); - } - const validate = val ? - val - : undefined; - if (hint !== undefined) { - throw new TypeError('cannot provide hint for flag'); - } - return { - ...rest, - default: def, - validate, - type: 'boolean', - multiple: false, - }; -} -function flagList(o = {}) { - const { hint, default: def, validate: val, ...rest } = o; - delete rest.validOptions; - if (def !== undefined && !isValidValue(def, 'boolean', true)) { - throw new TypeError('invalid default value'); - } - const validate = val ? - val - : undefined; - if (hint !== undefined) { - throw new TypeError('cannot provide hint for flag list'); - } - return { - ...rest, - default: def, - validate, - type: 'boolean', - multiple: true, - }; -} -const toParseArgsOptionsConfig = (options) => { - const c = {}; - for (const longOption in options) { - const config = options[longOption]; - /* c8 ignore start */ - if (!config) { - throw new Error('config must be an object: ' + longOption); - } - /* c8 ignore start */ - if ((0, exports.isConfigOption)(config, 'number', true)) { - c[longOption] = { - type: 'string', - multiple: true, - default: config.default?.map(c => String(c)), - }; - } - else if ((0, exports.isConfigOption)(config, 'number', false)) { - c[longOption] = { - type: 'string', - multiple: false, - default: config.default === undefined ? 
- undefined - : String(config.default), - }; - } - else { - const conf = config; - c[longOption] = { - type: conf.type, - multiple: !!conf.multiple, - default: conf.default, - }; - } - const clo = c[longOption]; - if (typeof config.short === 'string') { - clo.short = config.short; - } - if (config.type === 'boolean' && - !longOption.startsWith('no-') && - !options[`no-${longOption}`]) { - c[`no-${longOption}`] = { - type: 'boolean', - multiple: config.multiple, - }; - } - } - return c; -}; -const isHeading = (r) => r.type === 'heading'; -const isDescription = (r) => r.type === 'description'; -/** - * Class returned by the {@link jack} function and all configuration - * definition methods. This is what gets chained together. - */ -class Jack { - #configSet; - #shorts; - #options; - #fields = []; - #env; - #envPrefix; - #allowPositionals; - #usage; - #usageMarkdown; - constructor(options = {}) { - this.#options = options; - this.#allowPositionals = options.allowPositionals !== false; - this.#env = - this.#options.env === undefined ? process.env : this.#options.env; - this.#envPrefix = options.envPrefix; - // We need to fib a little, because it's always the same object, but it - // starts out as having an empty config set. Then each method that adds - // fields returns `this as Jack` - this.#configSet = Object.create(null); - this.#shorts = Object.create(null); - } - /** - * Set the default value (which will still be overridden by env or cli) - * as if from a parsed config file. The optional `source` param, if - * provided, will be included in error messages if a value is invalid or - * unknown. - */ - setConfigValues(values, source = '') { - try { - this.validate(values); - } - catch (er) { - const e = er; - if (source && e && typeof e === 'object') { - if (e.cause && typeof e.cause === 'object') { - Object.assign(e.cause, { path: source }); - } - else { - e.cause = { path: source }; - } - } - throw e; - } - for (const [field, value] of Object.entries(values)) { - const my = this.#configSet[field]; - // already validated, just for TS's benefit - /* c8 ignore start */ - if (!my) { - throw new Error('unexpected field in config set: ' + field, { - cause: { found: field }, - }); - } - /* c8 ignore stop */ - my.default = value; - } - return this; - } - /** - * Parse a string of arguments, and return the resulting - * `{ values, positionals }` object. - * - * If an {@link JackOptions#envPrefix} is set, then it will read default - * values from the environment, and write the resulting values back - * to the environment as well. - * - * Environment values always take precedence over any other value, except - * an explicit CLI setting. - */ - parse(args = process.argv) { - this.loadEnvDefaults(); - const p = this.parseRaw(args); - this.applyDefaults(p); - this.writeEnv(p); - return p; - } - loadEnvDefaults() { - if (this.#envPrefix) { - for (const [field, my] of Object.entries(this.#configSet)) { - const ek = toEnvKey(this.#envPrefix, field); - const env = this.#env[ek]; - if (env !== undefined) { - my.default = fromEnvVal(env, my.type, !!my.multiple, my.delim); - } - } - } - } - applyDefaults(p) { - for (const [field, c] of Object.entries(this.#configSet)) { - if (c.default !== undefined && !(field in p.values)) { - //@ts-ignore - p.values[field] = c.default; - } - } - } - /** - * Only parse the command line arguments passed in. - * Does not strip off the `node script.js` bits, so it must be just the - * arguments you wish to have parsed. 
- * Does not read from or write to the environment, or set defaults. - */ - parseRaw(args) { - if (args === process.argv) { - args = args.slice(process._eval !== undefined ? 1 : 2); - } - const options = toParseArgsOptionsConfig(this.#configSet); - const result = (0, parse_args_js_1.parseArgs)({ - args, - options, - // always strict, but using our own logic - strict: false, - allowPositionals: this.#allowPositionals, - tokens: true, - }); - const p = { - values: {}, - positionals: [], - }; - for (const token of result.tokens) { - if (token.kind === 'positional') { - p.positionals.push(token.value); - if (this.#options.stopAtPositional || - this.#options.stopAtPositionalTest?.(token.value)) { - p.positionals.push(...args.slice(token.index + 1)); - break; - } - } - else if (token.kind === 'option') { - let value = undefined; - if (token.name.startsWith('no-')) { - const my = this.#configSet[token.name]; - const pname = token.name.substring('no-'.length); - const pos = this.#configSet[pname]; - if (pos && - pos.type === 'boolean' && - (!my || - (my.type === 'boolean' && !!my.multiple === !!pos.multiple))) { - value = false; - token.name = pname; - } - } - const my = this.#configSet[token.name]; - if (!my) { - throw new Error(`Unknown option '${token.rawName}'. ` + - `To specify a positional argument starting with a '-', ` + - `place it at the end of the command after '--', as in ` + - `'-- ${token.rawName}'`, { - cause: { - found: token.rawName + (token.value ? `=${token.value}` : ''), - }, - }); - } - if (value === undefined) { - if (token.value === undefined) { - if (my.type !== 'boolean') { - throw new Error(`No value provided for ${token.rawName}, expected ${my.type}`, { - cause: { - name: token.rawName, - wanted: valueType(my), - }, - }); - } - value = true; - } - else { - if (my.type === 'boolean') { - throw new Error(`Flag ${token.rawName} does not take a value, received '${token.value}'`, { cause: { found: token } }); - } - if (my.type === 'string') { - value = token.value; - } - else { - value = +token.value; - if (value !== value) { - throw new Error(`Invalid value '${token.value}' provided for ` + - `'${token.rawName}' option, expected number`, { - cause: { - name: token.rawName, - found: token.value, - wanted: 'number', - }, - }); - } - } - } - } - if (my.multiple) { - const pv = p.values; - const tn = pv[token.name] ?? []; - pv[token.name] = tn; - tn.push(value); - } - else { - const pv = p.values; - pv[token.name] = value; - } - } - } - for (const [field, value] of Object.entries(p.values)) { - const valid = this.#configSet[field]?.validate; - const validOptions = this.#configSet[field]?.validOptions; - let cause; - if (validOptions && !isValidOption(value, validOptions)) { - cause = { name: field, found: value, validOptions: validOptions }; - } - if (valid && !valid(value)) { - cause = cause || { name: field, found: value }; - } - if (cause) { - throw new Error(`Invalid value provided for --${field}: ${JSON.stringify(value)}`, { cause }); - } - } - return p; - } - /** - * do not set fields as 'no-foo' if 'foo' exists and both are bools - * just set foo. - */ - #noNoFields(f, val, s = f) { - if (!f.startsWith('no-') || typeof val !== 'boolean') - return; - const yes = f.substring('no-'.length); - // recurse so we get the core config key we care about. 
- this.#noNoFields(yes, val, s); - if (this.#configSet[yes]?.type === 'boolean') { - throw new Error(`do not set '${s}', instead set '${yes}' as desired.`, { cause: { found: s, wanted: yes } }); - } - } - /** - * Validate that any arbitrary object is a valid configuration `values` - * object. Useful when loading config files or other sources. - */ - validate(o) { - if (!o || typeof o !== 'object') { - throw new Error('Invalid config: not an object', { - cause: { found: o }, - }); - } - const opts = o; - for (const field in o) { - const value = opts[field]; - /* c8 ignore next - for TS */ - if (value === undefined) - continue; - this.#noNoFields(field, value); - const config = this.#configSet[field]; - if (!config) { - throw new Error(`Unknown config option: ${field}`, { - cause: { found: field }, - }); - } - if (!isValidValue(value, config.type, !!config.multiple)) { - throw new Error(`Invalid value ${valueType(value)} for ${field}, expected ${valueType(config)}`, { - cause: { - name: field, - found: value, - wanted: valueType(config), - }, - }); - } - let cause; - if (config.validOptions && - !isValidOption(value, config.validOptions)) { - cause = { - name: field, - found: value, - validOptions: config.validOptions, - }; - } - if (config.validate && !config.validate(value)) { - cause = cause || { name: field, found: value }; - } - if (cause) { - throw new Error(`Invalid config value for ${field}: ${value}`, { - cause, - }); - } - } - } - writeEnv(p) { - if (!this.#env || !this.#envPrefix) - return; - for (const [field, value] of Object.entries(p.values)) { - const my = this.#configSet[field]; - this.#env[toEnvKey(this.#envPrefix, field)] = toEnvVal(value, my?.delim); - } - } - /** - * Add a heading to the usage output banner - */ - heading(text, level, { pre = false } = {}) { - if (level === undefined) { - level = this.#fields.some(r => isHeading(r)) ? 2 : 1; - } - this.#fields.push({ type: 'heading', text, level, pre }); - return this; - } - /** - * Add a long-form description to the usage output at this position. - */ - description(text, { pre } = {}) { - this.#fields.push({ type: 'description', text, pre }); - return this; - } - /** - * Add one or more number fields. - */ - num(fields) { - return this.#addFields(fields, num); - } - /** - * Add one or more multiple number fields. - */ - numList(fields) { - return this.#addFields(fields, numList); - } - /** - * Add one or more string option fields. - */ - opt(fields) { - return this.#addFields(fields, opt); - } - /** - * Add one or more multiple string option fields. - */ - optList(fields) { - return this.#addFields(fields, optList); - } - /** - * Add one or more flag fields. - */ - flag(fields) { - return this.#addFields(fields, flag); - } - /** - * Add one or more multiple flag fields. - */ - flagList(fields) { - return this.#addFields(fields, flagList); - } - /** - * Generic field definition method. Similar to flag/flagList/number/etc, - * but you must specify the `type` (and optionally `multiple` and `delim`) - * fields on each one, or Jack won't know how to define them. 
- */ - addFields(fields) { - const next = this; - for (const [name, field] of Object.entries(fields)) { - this.#validateName(name, field); - next.#fields.push({ - type: 'config', - name, - value: field, - }); - } - Object.assign(next.#configSet, fields); - return next; - } - #addFields(fields, fn) { - const next = this; - Object.assign(next.#configSet, Object.fromEntries(Object.entries(fields).map(([name, field]) => { - this.#validateName(name, field); - const option = fn(field); - next.#fields.push({ - type: 'config', - name, - value: option, - }); - return [name, option]; - }))); - return next; - } - #validateName(name, field) { - if (!/^[a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?$/.test(name)) { - throw new TypeError(`Invalid option name: ${name}, ` + - `must be '-' delimited ASCII alphanumeric`); - } - if (this.#configSet[name]) { - throw new TypeError(`Cannot redefine option ${field}`); - } - if (this.#shorts[name]) { - throw new TypeError(`Cannot redefine option ${name}, already ` + - `in use for ${this.#shorts[name]}`); - } - if (field.short) { - if (!/^[a-zA-Z0-9]$/.test(field.short)) { - throw new TypeError(`Invalid ${name} short option: ${field.short}, ` + - 'must be 1 ASCII alphanumeric character'); - } - if (this.#shorts[field.short]) { - throw new TypeError(`Invalid ${name} short option: ${field.short}, ` + - `already in use for ${this.#shorts[field.short]}`); - } - this.#shorts[field.short] = name; - this.#shorts[name] = name; - } - } - /** - * Return the usage banner for the given configuration - */ - usage() { - if (this.#usage) - return this.#usage; - let headingLevel = 1; - const ui = (0, cliui_1.default)({ width }); - const first = this.#fields[0]; - let start = first?.type === 'heading' ? 1 : 0; - if (first?.type === 'heading') { - ui.div({ - padding: [0, 0, 0, 0], - text: normalize(first.text), - }); - } - ui.div({ padding: [0, 0, 0, 0], text: 'Usage:' }); - if (this.#options.usage) { - ui.div({ - text: this.#options.usage, - padding: [0, 0, 0, 2], - }); - } - else { - const cmd = (0, node_path_1.basename)(String(process.argv[1])); - const shortFlags = []; - const shorts = []; - const flags = []; - const opts = []; - for (const [field, config] of Object.entries(this.#configSet)) { - if (config.short) { - if (config.type === 'boolean') - shortFlags.push(config.short); - else - shorts.push([config.short, config.hint || field]); - } - else { - if (config.type === 'boolean') - flags.push(field); - else - opts.push([field, config.hint || field]); - } - } - const sf = shortFlags.length ? ' -' + shortFlags.join('') : ''; - const so = shorts.map(([k, v]) => ` --${k}=<${v}>`).join(''); - const lf = flags.map(k => ` --${k}`).join(''); - const lo = opts.map(([k, v]) => ` --${k}=<${v}>`).join(''); - const usage = `${cmd}${sf}${so}${lf}${lo}`.trim(); - ui.div({ - text: usage, - padding: [0, 0, 0, 2], - }); - } - ui.div({ padding: [0, 0, 0, 0], text: '' }); - const maybeDesc = this.#fields[start]; - if (maybeDesc && isDescription(maybeDesc)) { - const print = normalize(maybeDesc.text, maybeDesc.pre); - start++; - ui.div({ padding: [0, 0, 0, 0], text: print }); - ui.div({ padding: [0, 0, 0, 0], text: '' }); - } - const { rows, maxWidth } = this.#usageRows(start); - // every heading/description after the first gets indented by 2 - // extra spaces. 
- for (const row of rows) { - if (row.left) { - // If the row is too long, don't wrap it - // Bump the right-hand side down a line to make room - const configIndent = indent(Math.max(headingLevel, 2)); - if (row.left.length > maxWidth - 3) { - ui.div({ text: row.left, padding: [0, 0, 0, configIndent] }); - ui.div({ text: row.text, padding: [0, 0, 0, maxWidth] }); - } - else { - ui.div({ - text: row.left, - padding: [0, 1, 0, configIndent], - width: maxWidth, - }, { padding: [0, 0, 0, 0], text: row.text }); - } - if (row.skipLine) { - ui.div({ padding: [0, 0, 0, 0], text: '' }); - } - } - else { - if (isHeading(row)) { - const { level } = row; - headingLevel = level; - // only h1 and h2 have bottom padding - // h3-h6 do not - const b = level <= 2 ? 1 : 0; - ui.div({ ...row, padding: [0, 0, b, indent(level)] }); - } - else { - ui.div({ ...row, padding: [0, 0, 1, indent(headingLevel + 1)] }); - } - } - } - return (this.#usage = ui.toString()); - } - /** - * Return the usage banner markdown for the given configuration - */ - usageMarkdown() { - if (this.#usageMarkdown) - return this.#usageMarkdown; - const out = []; - let headingLevel = 1; - const first = this.#fields[0]; - let start = first?.type === 'heading' ? 1 : 0; - if (first?.type === 'heading') { - out.push(`# ${normalizeOneLine(first.text)}`); - } - out.push('Usage:'); - if (this.#options.usage) { - out.push(normalizeMarkdown(this.#options.usage, true)); - } - else { - const cmd = (0, node_path_1.basename)(String(process.argv[1])); - const shortFlags = []; - const shorts = []; - const flags = []; - const opts = []; - for (const [field, config] of Object.entries(this.#configSet)) { - if (config.short) { - if (config.type === 'boolean') - shortFlags.push(config.short); - else - shorts.push([config.short, config.hint || field]); - } - else { - if (config.type === 'boolean') - flags.push(field); - else - opts.push([field, config.hint || field]); - } - } - const sf = shortFlags.length ? ' -' + shortFlags.join('') : ''; - const so = shorts.map(([k, v]) => ` --${k}=<${v}>`).join(''); - const lf = flags.map(k => ` --${k}`).join(''); - const lo = opts.map(([k, v]) => ` --${k}=<${v}>`).join(''); - const usage = `${cmd}${sf}${so}${lf}${lo}`.trim(); - out.push(normalizeMarkdown(usage, true)); - } - const maybeDesc = this.#fields[start]; - if (maybeDesc && isDescription(maybeDesc)) { - out.push(normalizeMarkdown(maybeDesc.text, maybeDesc.pre)); - start++; - } - const { rows } = this.#usageRows(start); - // heading level in markdown is number of # ahead of text - for (const row of rows) { - if (row.left) { - out.push('#'.repeat(headingLevel + 1) + - ' ' + - normalizeOneLine(row.left, true)); - if (row.text) - out.push(normalizeMarkdown(row.text)); - } - else if (isHeading(row)) { - const { level } = row; - headingLevel = level; - out.push(`${'#'.repeat(headingLevel)} ${normalizeOneLine(row.text, row.pre)}`); - } - else { - out.push(normalizeMarkdown(row.text, !!row.pre)); - } - } - return (this.#usageMarkdown = out.join('\n\n') + '\n'); - } - #usageRows(start) { - // turn each config type into a row, and figure out the width of the - // left hand indentation for the option descriptions. 
- let maxMax = Math.max(12, Math.min(26, Math.floor(width / 3))); - let maxWidth = 8; - let prev = undefined; - const rows = []; - for (const field of this.#fields.slice(start)) { - if (field.type !== 'config') { - if (prev?.type === 'config') - prev.skipLine = true; - prev = undefined; - field.text = normalize(field.text, !!field.pre); - rows.push(field); - continue; - } - const { value } = field; - const desc = value.description || ''; - const mult = value.multiple ? 'Can be set multiple times' : ''; - const opts = value.validOptions?.length ? - `Valid options:${value.validOptions.map(v => ` ${JSON.stringify(v)}`)}` - : ''; - const dmDelim = desc.includes('\n') ? '\n\n' : '\n'; - const extra = [opts, mult].join(dmDelim).trim(); - const text = (normalize(desc) + dmDelim + extra).trim(); - const hint = value.hint || - (value.type === 'number' ? 'n' - : value.type === 'string' ? field.name - : undefined); - const short = !value.short ? '' - : value.type === 'boolean' ? `-${value.short} ` - : `-${value.short}<${hint}> `; - const left = value.type === 'boolean' ? - `${short}--${field.name}` - : `${short}--${field.name}=<${hint}>`; - const row = { text, left, type: 'config' }; - if (text.length > width - maxMax) { - row.skipLine = true; - } - if (prev && left.length > maxMax) - prev.skipLine = true; - prev = row; - const len = left.length + 4; - if (len > maxWidth && len < maxMax) { - maxWidth = len; - } - rows.push(row); - } - return { rows, maxWidth }; - } - /** - * Return the configuration options as a plain object - */ - toJSON() { - return Object.fromEntries(Object.entries(this.#configSet).map(([field, def]) => [ - field, - { - type: def.type, - ...(def.multiple ? { multiple: true } : {}), - ...(def.delim ? { delim: def.delim } : {}), - ...(def.short ? { short: def.short } : {}), - ...(def.description ? - { description: normalize(def.description) } - : {}), - ...(def.validate ? { validate: def.validate } : {}), - ...(def.validOptions ? { validOptions: def.validOptions } : {}), - ...(def.default !== undefined ? { default: def.default } : {}), - ...(def.hint ? { hint: def.hint } : {}), - }, - ])); - } - /** - * Custom printer for `util.inspect` - */ - [node_util_1.inspect.custom](_, options) { - return `Jack ${(0, node_util_1.inspect)(this.toJSON(), options)}`; - } -} -exports.Jack = Jack; -// Unwrap and un-indent, so we can wrap description -// strings however makes them look nice in the code. -const normalize = (s, pre = false) => { - if (pre) - // prepend a ZWSP to each line so cliui doesn't strip it. - return s - .split('\n') - .map(l => `\u200b${l}`) - .join('\n'); - return s - .split(/^\s*```\s*$/gm) - .map((s, i) => { - if (i % 2 === 1) { - if (!s.trim()) { - return `\`\`\`\n\`\`\`\n`; - } - // outdent the ``` blocks, but preserve whitespace otherwise. - const split = s.split('\n'); - // throw out the \n at the start and end - split.pop(); - split.shift(); - const si = split.reduce((shortest, l) => { - /* c8 ignore next */ - const ind = l.match(/^\s*/)?.[0] ?? ''; - if (ind.length) - return Math.min(ind.length, shortest); - else - return shortest; - }, Infinity); - /* c8 ignore next */ - const i = isFinite(si) ? si : 0; - return ('\n```\n' + - split.map(s => `\u200b${s.substring(i)}`).join('\n') + - '\n```\n'); - } - return (s - // remove single line breaks, except for lists - .replace(/([^\n])\n[ \t]*([^\n])/g, (_, $1, $2) => !/^[-*]/.test($2) ? 
`${$1} ${$2}` : `${$1}\n${$2}`) - // normalize mid-line whitespace - .replace(/([^\n])[ \t]+([^\n])/g, '$1 $2') - // two line breaks are enough - .replace(/\n{3,}/g, '\n\n') - // remove any spaces at the start of a line - .replace(/\n[ \t]+/g, '\n') - .trim()); - }) - .join('\n'); -}; -// normalize for markdown printing, remove leading spaces on lines -const normalizeMarkdown = (s, pre = false) => { - const n = normalize(s, pre).replace(/\\/g, '\\\\'); - return pre ? - `\`\`\`\n${n.replace(/\u200b/g, '')}\n\`\`\`` - : n.replace(/\n +/g, '\n').trim(); -}; -const normalizeOneLine = (s, pre = false) => { - const n = normalize(s, pre) - .replace(/[\s\u200b]+/g, ' ') - .trim(); - return pre ? `\`${n}\`` : n; -}; -/** - * Main entry point. Create and return a {@link Jack} object. - */ -const jack = (options = {}) => new Jack(options); -exports.jack = jack; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/jackspeak/dist/commonjs/package.json b/node_modules/node-gyp/node_modules/jackspeak/dist/commonjs/package.json deleted file mode 100644 index 5bbefffbabee3..0000000000000 --- a/node_modules/node-gyp/node_modules/jackspeak/dist/commonjs/package.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "type": "commonjs" -} diff --git a/node_modules/node-gyp/node_modules/jackspeak/dist/commonjs/parse-args.js b/node_modules/node-gyp/node_modules/jackspeak/dist/commonjs/parse-args.js deleted file mode 100644 index fc918a41fe603..0000000000000 --- a/node_modules/node-gyp/node_modules/jackspeak/dist/commonjs/parse-args.js +++ /dev/null @@ -1,50 +0,0 @@ -"use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.parseArgs = void 0; -const util = __importStar(require("util")); -const pv = (typeof process === 'object' && - !!process && - typeof process.version === 'string') ? 
- process.version - : 'v0.0.0'; -const pvs = pv - .replace(/^v/, '') - .split('.') - .map(s => parseInt(s, 10)); -/* c8 ignore start */ -const [major = 0, minor = 0] = pvs; -/* c8 ignore stop */ -let { parseArgs: pa } = util; -/* c8 ignore start */ -if (!pa || - major < 16 || - (major === 18 && minor < 11) || - (major === 16 && minor < 19)) { - /* c8 ignore stop */ - pa = require('@pkgjs/parseargs').parseArgs; -} -exports.parseArgs = pa; -//# sourceMappingURL=parse-args-cjs.cjs.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/jackspeak/dist/esm/index.js b/node_modules/node-gyp/node_modules/jackspeak/dist/esm/index.js deleted file mode 100644 index 78fdfa8155472..0000000000000 --- a/node_modules/node-gyp/node_modules/jackspeak/dist/esm/index.js +++ /dev/null @@ -1,1000 +0,0 @@ -import { inspect } from 'node:util'; -import { parseArgs } from './parse-args.js'; -// it's a tiny API, just cast it inline, it's fine -//@ts-ignore -import cliui from '@isaacs/cliui'; -import { basename } from 'node:path'; -const width = Math.min((process && process.stdout && process.stdout.columns) || 80, 80); -// indentation spaces from heading level -const indent = (n) => (n - 1) * 2; -const toEnvKey = (pref, key) => { - return [pref, key.replace(/[^a-zA-Z0-9]+/g, ' ')] - .join(' ') - .trim() - .toUpperCase() - .replace(/ /g, '_'); -}; -const toEnvVal = (value, delim = '\n') => { - const str = typeof value === 'string' ? value - : typeof value === 'boolean' ? - value ? '1' - : '0' - : typeof value === 'number' ? String(value) - : Array.isArray(value) ? - value.map((v) => toEnvVal(v)).join(delim) - : /* c8 ignore start */ undefined; - if (typeof str !== 'string') { - throw new Error(`could not serialize value to environment: ${JSON.stringify(value)}`); - } - /* c8 ignore stop */ - return str; -}; -const fromEnvVal = (env, type, multiple, delim = '\n') => (multiple ? - env ? env.split(delim).map(v => fromEnvVal(v, type, false)) - : [] - : type === 'string' ? env - : type === 'boolean' ? env === '1' - : +env.trim()); -export const isConfigType = (t) => typeof t === 'string' && - (t === 'string' || t === 'number' || t === 'boolean'); -const undefOrType = (v, t) => v === undefined || typeof v === t; -const undefOrTypeArray = (v, t) => v === undefined || (Array.isArray(v) && v.every(x => typeof x === t)); -const isValidOption = (v, vo) => Array.isArray(v) ? v.every(x => isValidOption(x, vo)) : vo.includes(v); -// print the value type, for error message reporting -const valueType = (v) => typeof v === 'string' ? 'string' - : typeof v === 'boolean' ? 'boolean' - : typeof v === 'number' ? 'number' - : Array.isArray(v) ? - joinTypes([...new Set(v.map(v => valueType(v)))]) + '[]' - : `${v.type}${v.multiple ? '[]' : ''}`; -const joinTypes = (types) => types.length === 1 && typeof types[0] === 'string' ? - types[0] - : `(${types.join('|')})`; -const isValidValue = (v, type, multi) => { - if (multi) { - if (!Array.isArray(v)) - return false; - return !v.some((v) => !isValidValue(v, type, false)); - } - if (Array.isArray(v)) - return false; - return typeof v === type; -}; -export const isConfigOption = (o, type, multi) => !!o && - typeof o === 'object' && - isConfigType(o.type) && - o.type === type && - undefOrType(o.short, 'string') && - undefOrType(o.description, 'string') && - undefOrType(o.hint, 'string') && - undefOrType(o.validate, 'function') && - (o.type === 'boolean' ? 
- o.validOptions === undefined - : undefOrTypeArray(o.validOptions, o.type)) && - (o.default === undefined || isValidValue(o.default, type, multi)) && - !!o.multiple === multi; -function num(o = {}) { - const { default: def, validate: val, validOptions, ...rest } = o; - if (def !== undefined && !isValidValue(def, 'number', false)) { - throw new TypeError('invalid default value', { - cause: { - found: def, - wanted: 'number', - }, - }); - } - if (!undefOrTypeArray(validOptions, 'number')) { - throw new TypeError('invalid validOptions', { - cause: { - found: validOptions, - wanted: 'number[]', - }, - }); - } - const validate = val ? - val - : undefined; - return { - ...rest, - default: def, - validate, - validOptions, - type: 'number', - multiple: false, - }; -} -function numList(o = {}) { - const { default: def, validate: val, validOptions, ...rest } = o; - if (def !== undefined && !isValidValue(def, 'number', true)) { - throw new TypeError('invalid default value', { - cause: { - found: def, - wanted: 'number[]', - }, - }); - } - if (!undefOrTypeArray(validOptions, 'number')) { - throw new TypeError('invalid validOptions', { - cause: { - found: validOptions, - wanted: 'number[]', - }, - }); - } - const validate = val ? - val - : undefined; - return { - ...rest, - default: def, - validate, - validOptions, - type: 'number', - multiple: true, - }; -} -function opt(o = {}) { - const { default: def, validate: val, validOptions, ...rest } = o; - if (def !== undefined && !isValidValue(def, 'string', false)) { - throw new TypeError('invalid default value', { - cause: { - found: def, - wanted: 'string', - }, - }); - } - if (!undefOrTypeArray(validOptions, 'string')) { - throw new TypeError('invalid validOptions', { - cause: { - found: validOptions, - wanted: 'string[]', - }, - }); - } - const validate = val ? - val - : undefined; - return { - ...rest, - default: def, - validate, - validOptions, - type: 'string', - multiple: false, - }; -} -function optList(o = {}) { - const { default: def, validate: val, validOptions, ...rest } = o; - if (def !== undefined && !isValidValue(def, 'string', true)) { - throw new TypeError('invalid default value', { - cause: { - found: def, - wanted: 'string[]', - }, - }); - } - if (!undefOrTypeArray(validOptions, 'string')) { - throw new TypeError('invalid validOptions', { - cause: { - found: validOptions, - wanted: 'string[]', - }, - }); - } - const validate = val ? - val - : undefined; - return { - ...rest, - default: def, - validate, - validOptions, - type: 'string', - multiple: true, - }; -} -function flag(o = {}) { - const { hint, default: def, validate: val, ...rest } = o; - delete rest.validOptions; - if (def !== undefined && !isValidValue(def, 'boolean', false)) { - throw new TypeError('invalid default value'); - } - const validate = val ? - val - : undefined; - if (hint !== undefined) { - throw new TypeError('cannot provide hint for flag'); - } - return { - ...rest, - default: def, - validate, - type: 'boolean', - multiple: false, - }; -} -function flagList(o = {}) { - const { hint, default: def, validate: val, ...rest } = o; - delete rest.validOptions; - if (def !== undefined && !isValidValue(def, 'boolean', true)) { - throw new TypeError('invalid default value'); - } - const validate = val ? 
- val - : undefined; - if (hint !== undefined) { - throw new TypeError('cannot provide hint for flag list'); - } - return { - ...rest, - default: def, - validate, - type: 'boolean', - multiple: true, - }; -} -const toParseArgsOptionsConfig = (options) => { - const c = {}; - for (const longOption in options) { - const config = options[longOption]; - /* c8 ignore start */ - if (!config) { - throw new Error('config must be an object: ' + longOption); - } - /* c8 ignore start */ - if (isConfigOption(config, 'number', true)) { - c[longOption] = { - type: 'string', - multiple: true, - default: config.default?.map(c => String(c)), - }; - } - else if (isConfigOption(config, 'number', false)) { - c[longOption] = { - type: 'string', - multiple: false, - default: config.default === undefined ? - undefined - : String(config.default), - }; - } - else { - const conf = config; - c[longOption] = { - type: conf.type, - multiple: !!conf.multiple, - default: conf.default, - }; - } - const clo = c[longOption]; - if (typeof config.short === 'string') { - clo.short = config.short; - } - if (config.type === 'boolean' && - !longOption.startsWith('no-') && - !options[`no-${longOption}`]) { - c[`no-${longOption}`] = { - type: 'boolean', - multiple: config.multiple, - }; - } - } - return c; -}; -const isHeading = (r) => r.type === 'heading'; -const isDescription = (r) => r.type === 'description'; -/** - * Class returned by the {@link jack} function and all configuration - * definition methods. This is what gets chained together. - */ -export class Jack { - #configSet; - #shorts; - #options; - #fields = []; - #env; - #envPrefix; - #allowPositionals; - #usage; - #usageMarkdown; - constructor(options = {}) { - this.#options = options; - this.#allowPositionals = options.allowPositionals !== false; - this.#env = - this.#options.env === undefined ? process.env : this.#options.env; - this.#envPrefix = options.envPrefix; - // We need to fib a little, because it's always the same object, but it - // starts out as having an empty config set. Then each method that adds - // fields returns `this as Jack` - this.#configSet = Object.create(null); - this.#shorts = Object.create(null); - } - /** - * Set the default value (which will still be overridden by env or cli) - * as if from a parsed config file. The optional `source` param, if - * provided, will be included in error messages if a value is invalid or - * unknown. - */ - setConfigValues(values, source = '') { - try { - this.validate(values); - } - catch (er) { - const e = er; - if (source && e && typeof e === 'object') { - if (e.cause && typeof e.cause === 'object') { - Object.assign(e.cause, { path: source }); - } - else { - e.cause = { path: source }; - } - } - throw e; - } - for (const [field, value] of Object.entries(values)) { - const my = this.#configSet[field]; - // already validated, just for TS's benefit - /* c8 ignore start */ - if (!my) { - throw new Error('unexpected field in config set: ' + field, { - cause: { found: field }, - }); - } - /* c8 ignore stop */ - my.default = value; - } - return this; - } - /** - * Parse a string of arguments, and return the resulting - * `{ values, positionals }` object. - * - * If an {@link JackOptions#envPrefix} is set, then it will read default - * values from the environment, and write the resulting values back - * to the environment as well. - * - * Environment values always take precedence over any other value, except - * an explicit CLI setting. 
- */ - parse(args = process.argv) { - this.loadEnvDefaults(); - const p = this.parseRaw(args); - this.applyDefaults(p); - this.writeEnv(p); - return p; - } - loadEnvDefaults() { - if (this.#envPrefix) { - for (const [field, my] of Object.entries(this.#configSet)) { - const ek = toEnvKey(this.#envPrefix, field); - const env = this.#env[ek]; - if (env !== undefined) { - my.default = fromEnvVal(env, my.type, !!my.multiple, my.delim); - } - } - } - } - applyDefaults(p) { - for (const [field, c] of Object.entries(this.#configSet)) { - if (c.default !== undefined && !(field in p.values)) { - //@ts-ignore - p.values[field] = c.default; - } - } - } - /** - * Only parse the command line arguments passed in. - * Does not strip off the `node script.js` bits, so it must be just the - * arguments you wish to have parsed. - * Does not read from or write to the environment, or set defaults. - */ - parseRaw(args) { - if (args === process.argv) { - args = args.slice(process._eval !== undefined ? 1 : 2); - } - const options = toParseArgsOptionsConfig(this.#configSet); - const result = parseArgs({ - args, - options, - // always strict, but using our own logic - strict: false, - allowPositionals: this.#allowPositionals, - tokens: true, - }); - const p = { - values: {}, - positionals: [], - }; - for (const token of result.tokens) { - if (token.kind === 'positional') { - p.positionals.push(token.value); - if (this.#options.stopAtPositional || - this.#options.stopAtPositionalTest?.(token.value)) { - p.positionals.push(...args.slice(token.index + 1)); - break; - } - } - else if (token.kind === 'option') { - let value = undefined; - if (token.name.startsWith('no-')) { - const my = this.#configSet[token.name]; - const pname = token.name.substring('no-'.length); - const pos = this.#configSet[pname]; - if (pos && - pos.type === 'boolean' && - (!my || - (my.type === 'boolean' && !!my.multiple === !!pos.multiple))) { - value = false; - token.name = pname; - } - } - const my = this.#configSet[token.name]; - if (!my) { - throw new Error(`Unknown option '${token.rawName}'. ` + - `To specify a positional argument starting with a '-', ` + - `place it at the end of the command after '--', as in ` + - `'-- ${token.rawName}'`, { - cause: { - found: token.rawName + (token.value ? `=${token.value}` : ''), - }, - }); - } - if (value === undefined) { - if (token.value === undefined) { - if (my.type !== 'boolean') { - throw new Error(`No value provided for ${token.rawName}, expected ${my.type}`, { - cause: { - name: token.rawName, - wanted: valueType(my), - }, - }); - } - value = true; - } - else { - if (my.type === 'boolean') { - throw new Error(`Flag ${token.rawName} does not take a value, received '${token.value}'`, { cause: { found: token } }); - } - if (my.type === 'string') { - value = token.value; - } - else { - value = +token.value; - if (value !== value) { - throw new Error(`Invalid value '${token.value}' provided for ` + - `'${token.rawName}' option, expected number`, { - cause: { - name: token.rawName, - found: token.value, - wanted: 'number', - }, - }); - } - } - } - } - if (my.multiple) { - const pv = p.values; - const tn = pv[token.name] ?? 
[]; - pv[token.name] = tn; - tn.push(value); - } - else { - const pv = p.values; - pv[token.name] = value; - } - } - } - for (const [field, value] of Object.entries(p.values)) { - const valid = this.#configSet[field]?.validate; - const validOptions = this.#configSet[field]?.validOptions; - let cause; - if (validOptions && !isValidOption(value, validOptions)) { - cause = { name: field, found: value, validOptions: validOptions }; - } - if (valid && !valid(value)) { - cause = cause || { name: field, found: value }; - } - if (cause) { - throw new Error(`Invalid value provided for --${field}: ${JSON.stringify(value)}`, { cause }); - } - } - return p; - } - /** - * do not set fields as 'no-foo' if 'foo' exists and both are bools - * just set foo. - */ - #noNoFields(f, val, s = f) { - if (!f.startsWith('no-') || typeof val !== 'boolean') - return; - const yes = f.substring('no-'.length); - // recurse so we get the core config key we care about. - this.#noNoFields(yes, val, s); - if (this.#configSet[yes]?.type === 'boolean') { - throw new Error(`do not set '${s}', instead set '${yes}' as desired.`, { cause: { found: s, wanted: yes } }); - } - } - /** - * Validate that any arbitrary object is a valid configuration `values` - * object. Useful when loading config files or other sources. - */ - validate(o) { - if (!o || typeof o !== 'object') { - throw new Error('Invalid config: not an object', { - cause: { found: o }, - }); - } - const opts = o; - for (const field in o) { - const value = opts[field]; - /* c8 ignore next - for TS */ - if (value === undefined) - continue; - this.#noNoFields(field, value); - const config = this.#configSet[field]; - if (!config) { - throw new Error(`Unknown config option: ${field}`, { - cause: { found: field }, - }); - } - if (!isValidValue(value, config.type, !!config.multiple)) { - throw new Error(`Invalid value ${valueType(value)} for ${field}, expected ${valueType(config)}`, { - cause: { - name: field, - found: value, - wanted: valueType(config), - }, - }); - } - let cause; - if (config.validOptions && - !isValidOption(value, config.validOptions)) { - cause = { - name: field, - found: value, - validOptions: config.validOptions, - }; - } - if (config.validate && !config.validate(value)) { - cause = cause || { name: field, found: value }; - } - if (cause) { - throw new Error(`Invalid config value for ${field}: ${value}`, { - cause, - }); - } - } - } - writeEnv(p) { - if (!this.#env || !this.#envPrefix) - return; - for (const [field, value] of Object.entries(p.values)) { - const my = this.#configSet[field]; - this.#env[toEnvKey(this.#envPrefix, field)] = toEnvVal(value, my?.delim); - } - } - /** - * Add a heading to the usage output banner - */ - heading(text, level, { pre = false } = {}) { - if (level === undefined) { - level = this.#fields.some(r => isHeading(r)) ? 2 : 1; - } - this.#fields.push({ type: 'heading', text, level, pre }); - return this; - } - /** - * Add a long-form description to the usage output at this position. - */ - description(text, { pre } = {}) { - this.#fields.push({ type: 'description', text, pre }); - return this; - } - /** - * Add one or more number fields. - */ - num(fields) { - return this.#addFields(fields, num); - } - /** - * Add one or more multiple number fields. - */ - numList(fields) { - return this.#addFields(fields, numList); - } - /** - * Add one or more string option fields. - */ - opt(fields) { - return this.#addFields(fields, opt); - } - /** - * Add one or more multiple string option fields. 
- */ - optList(fields) { - return this.#addFields(fields, optList); - } - /** - * Add one or more flag fields. - */ - flag(fields) { - return this.#addFields(fields, flag); - } - /** - * Add one or more multiple flag fields. - */ - flagList(fields) { - return this.#addFields(fields, flagList); - } - /** - * Generic field definition method. Similar to flag/flagList/number/etc, - * but you must specify the `type` (and optionally `multiple` and `delim`) - * fields on each one, or Jack won't know how to define them. - */ - addFields(fields) { - const next = this; - for (const [name, field] of Object.entries(fields)) { - this.#validateName(name, field); - next.#fields.push({ - type: 'config', - name, - value: field, - }); - } - Object.assign(next.#configSet, fields); - return next; - } - #addFields(fields, fn) { - const next = this; - Object.assign(next.#configSet, Object.fromEntries(Object.entries(fields).map(([name, field]) => { - this.#validateName(name, field); - const option = fn(field); - next.#fields.push({ - type: 'config', - name, - value: option, - }); - return [name, option]; - }))); - return next; - } - #validateName(name, field) { - if (!/^[a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?$/.test(name)) { - throw new TypeError(`Invalid option name: ${name}, ` + - `must be '-' delimited ASCII alphanumeric`); - } - if (this.#configSet[name]) { - throw new TypeError(`Cannot redefine option ${field}`); - } - if (this.#shorts[name]) { - throw new TypeError(`Cannot redefine option ${name}, already ` + - `in use for ${this.#shorts[name]}`); - } - if (field.short) { - if (!/^[a-zA-Z0-9]$/.test(field.short)) { - throw new TypeError(`Invalid ${name} short option: ${field.short}, ` + - 'must be 1 ASCII alphanumeric character'); - } - if (this.#shorts[field.short]) { - throw new TypeError(`Invalid ${name} short option: ${field.short}, ` + - `already in use for ${this.#shorts[field.short]}`); - } - this.#shorts[field.short] = name; - this.#shorts[name] = name; - } - } - /** - * Return the usage banner for the given configuration - */ - usage() { - if (this.#usage) - return this.#usage; - let headingLevel = 1; - const ui = cliui({ width }); - const first = this.#fields[0]; - let start = first?.type === 'heading' ? 1 : 0; - if (first?.type === 'heading') { - ui.div({ - padding: [0, 0, 0, 0], - text: normalize(first.text), - }); - } - ui.div({ padding: [0, 0, 0, 0], text: 'Usage:' }); - if (this.#options.usage) { - ui.div({ - text: this.#options.usage, - padding: [0, 0, 0, 2], - }); - } - else { - const cmd = basename(String(process.argv[1])); - const shortFlags = []; - const shorts = []; - const flags = []; - const opts = []; - for (const [field, config] of Object.entries(this.#configSet)) { - if (config.short) { - if (config.type === 'boolean') - shortFlags.push(config.short); - else - shorts.push([config.short, config.hint || field]); - } - else { - if (config.type === 'boolean') - flags.push(field); - else - opts.push([field, config.hint || field]); - } - } - const sf = shortFlags.length ? 
' -' + shortFlags.join('') : ''; - const so = shorts.map(([k, v]) => ` --${k}=<${v}>`).join(''); - const lf = flags.map(k => ` --${k}`).join(''); - const lo = opts.map(([k, v]) => ` --${k}=<${v}>`).join(''); - const usage = `${cmd}${sf}${so}${lf}${lo}`.trim(); - ui.div({ - text: usage, - padding: [0, 0, 0, 2], - }); - } - ui.div({ padding: [0, 0, 0, 0], text: '' }); - const maybeDesc = this.#fields[start]; - if (maybeDesc && isDescription(maybeDesc)) { - const print = normalize(maybeDesc.text, maybeDesc.pre); - start++; - ui.div({ padding: [0, 0, 0, 0], text: print }); - ui.div({ padding: [0, 0, 0, 0], text: '' }); - } - const { rows, maxWidth } = this.#usageRows(start); - // every heading/description after the first gets indented by 2 - // extra spaces. - for (const row of rows) { - if (row.left) { - // If the row is too long, don't wrap it - // Bump the right-hand side down a line to make room - const configIndent = indent(Math.max(headingLevel, 2)); - if (row.left.length > maxWidth - 3) { - ui.div({ text: row.left, padding: [0, 0, 0, configIndent] }); - ui.div({ text: row.text, padding: [0, 0, 0, maxWidth] }); - } - else { - ui.div({ - text: row.left, - padding: [0, 1, 0, configIndent], - width: maxWidth, - }, { padding: [0, 0, 0, 0], text: row.text }); - } - if (row.skipLine) { - ui.div({ padding: [0, 0, 0, 0], text: '' }); - } - } - else { - if (isHeading(row)) { - const { level } = row; - headingLevel = level; - // only h1 and h2 have bottom padding - // h3-h6 do not - const b = level <= 2 ? 1 : 0; - ui.div({ ...row, padding: [0, 0, b, indent(level)] }); - } - else { - ui.div({ ...row, padding: [0, 0, 1, indent(headingLevel + 1)] }); - } - } - } - return (this.#usage = ui.toString()); - } - /** - * Return the usage banner markdown for the given configuration - */ - usageMarkdown() { - if (this.#usageMarkdown) - return this.#usageMarkdown; - const out = []; - let headingLevel = 1; - const first = this.#fields[0]; - let start = first?.type === 'heading' ? 1 : 0; - if (first?.type === 'heading') { - out.push(`# ${normalizeOneLine(first.text)}`); - } - out.push('Usage:'); - if (this.#options.usage) { - out.push(normalizeMarkdown(this.#options.usage, true)); - } - else { - const cmd = basename(String(process.argv[1])); - const shortFlags = []; - const shorts = []; - const flags = []; - const opts = []; - for (const [field, config] of Object.entries(this.#configSet)) { - if (config.short) { - if (config.type === 'boolean') - shortFlags.push(config.short); - else - shorts.push([config.short, config.hint || field]); - } - else { - if (config.type === 'boolean') - flags.push(field); - else - opts.push([field, config.hint || field]); - } - } - const sf = shortFlags.length ? 
' -' + shortFlags.join('') : ''; - const so = shorts.map(([k, v]) => ` --${k}=<${v}>`).join(''); - const lf = flags.map(k => ` --${k}`).join(''); - const lo = opts.map(([k, v]) => ` --${k}=<${v}>`).join(''); - const usage = `${cmd}${sf}${so}${lf}${lo}`.trim(); - out.push(normalizeMarkdown(usage, true)); - } - const maybeDesc = this.#fields[start]; - if (maybeDesc && isDescription(maybeDesc)) { - out.push(normalizeMarkdown(maybeDesc.text, maybeDesc.pre)); - start++; - } - const { rows } = this.#usageRows(start); - // heading level in markdown is number of # ahead of text - for (const row of rows) { - if (row.left) { - out.push('#'.repeat(headingLevel + 1) + - ' ' + - normalizeOneLine(row.left, true)); - if (row.text) - out.push(normalizeMarkdown(row.text)); - } - else if (isHeading(row)) { - const { level } = row; - headingLevel = level; - out.push(`${'#'.repeat(headingLevel)} ${normalizeOneLine(row.text, row.pre)}`); - } - else { - out.push(normalizeMarkdown(row.text, !!row.pre)); - } - } - return (this.#usageMarkdown = out.join('\n\n') + '\n'); - } - #usageRows(start) { - // turn each config type into a row, and figure out the width of the - // left hand indentation for the option descriptions. - let maxMax = Math.max(12, Math.min(26, Math.floor(width / 3))); - let maxWidth = 8; - let prev = undefined; - const rows = []; - for (const field of this.#fields.slice(start)) { - if (field.type !== 'config') { - if (prev?.type === 'config') - prev.skipLine = true; - prev = undefined; - field.text = normalize(field.text, !!field.pre); - rows.push(field); - continue; - } - const { value } = field; - const desc = value.description || ''; - const mult = value.multiple ? 'Can be set multiple times' : ''; - const opts = value.validOptions?.length ? - `Valid options:${value.validOptions.map(v => ` ${JSON.stringify(v)}`)}` - : ''; - const dmDelim = desc.includes('\n') ? '\n\n' : '\n'; - const extra = [opts, mult].join(dmDelim).trim(); - const text = (normalize(desc) + dmDelim + extra).trim(); - const hint = value.hint || - (value.type === 'number' ? 'n' - : value.type === 'string' ? field.name - : undefined); - const short = !value.short ? '' - : value.type === 'boolean' ? `-${value.short} ` - : `-${value.short}<${hint}> `; - const left = value.type === 'boolean' ? - `${short}--${field.name}` - : `${short}--${field.name}=<${hint}>`; - const row = { text, left, type: 'config' }; - if (text.length > width - maxMax) { - row.skipLine = true; - } - if (prev && left.length > maxMax) - prev.skipLine = true; - prev = row; - const len = left.length + 4; - if (len > maxWidth && len < maxMax) { - maxWidth = len; - } - rows.push(row); - } - return { rows, maxWidth }; - } - /** - * Return the configuration options as a plain object - */ - toJSON() { - return Object.fromEntries(Object.entries(this.#configSet).map(([field, def]) => [ - field, - { - type: def.type, - ...(def.multiple ? { multiple: true } : {}), - ...(def.delim ? { delim: def.delim } : {}), - ...(def.short ? { short: def.short } : {}), - ...(def.description ? - { description: normalize(def.description) } - : {}), - ...(def.validate ? { validate: def.validate } : {}), - ...(def.validOptions ? { validOptions: def.validOptions } : {}), - ...(def.default !== undefined ? { default: def.default } : {}), - ...(def.hint ? 
{ hint: def.hint } : {}), - }, - ])); - } - /** - * Custom printer for `util.inspect` - */ - [inspect.custom](_, options) { - return `Jack ${inspect(this.toJSON(), options)}`; - } -} -// Unwrap and un-indent, so we can wrap description -// strings however makes them look nice in the code. -const normalize = (s, pre = false) => { - if (pre) - // prepend a ZWSP to each line so cliui doesn't strip it. - return s - .split('\n') - .map(l => `\u200b${l}`) - .join('\n'); - return s - .split(/^\s*```\s*$/gm) - .map((s, i) => { - if (i % 2 === 1) { - if (!s.trim()) { - return `\`\`\`\n\`\`\`\n`; - } - // outdent the ``` blocks, but preserve whitespace otherwise. - const split = s.split('\n'); - // throw out the \n at the start and end - split.pop(); - split.shift(); - const si = split.reduce((shortest, l) => { - /* c8 ignore next */ - const ind = l.match(/^\s*/)?.[0] ?? ''; - if (ind.length) - return Math.min(ind.length, shortest); - else - return shortest; - }, Infinity); - /* c8 ignore next */ - const i = isFinite(si) ? si : 0; - return ('\n```\n' + - split.map(s => `\u200b${s.substring(i)}`).join('\n') + - '\n```\n'); - } - return (s - // remove single line breaks, except for lists - .replace(/([^\n])\n[ \t]*([^\n])/g, (_, $1, $2) => !/^[-*]/.test($2) ? `${$1} ${$2}` : `${$1}\n${$2}`) - // normalize mid-line whitespace - .replace(/([^\n])[ \t]+([^\n])/g, '$1 $2') - // two line breaks are enough - .replace(/\n{3,}/g, '\n\n') - // remove any spaces at the start of a line - .replace(/\n[ \t]+/g, '\n') - .trim()); - }) - .join('\n'); -}; -// normalize for markdown printing, remove leading spaces on lines -const normalizeMarkdown = (s, pre = false) => { - const n = normalize(s, pre).replace(/\\/g, '\\\\'); - return pre ? - `\`\`\`\n${n.replace(/\u200b/g, '')}\n\`\`\`` - : n.replace(/\n +/g, '\n').trim(); -}; -const normalizeOneLine = (s, pre = false) => { - const n = normalize(s, pre) - .replace(/[\s\u200b]+/g, ' ') - .trim(); - return pre ? `\`${n}\`` : n; -}; -/** - * Main entry point. Create and return a {@link Jack} object. - */ -export const jack = (options = {}) => new Jack(options); -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/jackspeak/dist/esm/package.json b/node_modules/node-gyp/node_modules/jackspeak/dist/esm/package.json deleted file mode 100644 index 3dbc1ca591c05..0000000000000 --- a/node_modules/node-gyp/node_modules/jackspeak/dist/esm/package.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "type": "module" -} diff --git a/node_modules/node-gyp/node_modules/jackspeak/dist/esm/parse-args.js b/node_modules/node-gyp/node_modules/jackspeak/dist/esm/parse-args.js deleted file mode 100644 index a4be7153de1f1..0000000000000 --- a/node_modules/node-gyp/node_modules/jackspeak/dist/esm/parse-args.js +++ /dev/null @@ -1,26 +0,0 @@ -import * as util from 'util'; -const pv = (typeof process === 'object' && - !!process && - typeof process.version === 'string') ? 
- process.version - : 'v0.0.0'; -const pvs = pv - .replace(/^v/, '') - .split('.') - .map(s => parseInt(s, 10)); -/* c8 ignore start */ -const [major = 0, minor = 0] = pvs; -/* c8 ignore stop */ -let { parseArgs: pa, } = util; -/* c8 ignore start - version specific */ -if (!pa || - major < 16 || - (major === 18 && minor < 11) || - (major === 16 && minor < 19)) { - // Ignore because we will clobber it for commonjs - //@ts-ignore - pa = (await import('@pkgjs/parseargs')).parseArgs; -} -/* c8 ignore stop */ -export const parseArgs = pa; -//# sourceMappingURL=parse-args.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/jackspeak/package.json b/node_modules/node-gyp/node_modules/jackspeak/package.json deleted file mode 100644 index 51eaabdf35469..0000000000000 --- a/node_modules/node-gyp/node_modules/jackspeak/package.json +++ /dev/null @@ -1,95 +0,0 @@ -{ - "name": "jackspeak", - "publishConfig": { - "tag": "v3-legacy" - }, - "version": "3.4.3", - "description": "A very strict and proper argument parser.", - "tshy": { - "main": true, - "exports": { - "./package.json": "./package.json", - ".": "./src/index.js" - } - }, - "main": "./dist/commonjs/index.js", - "types": "./dist/commonjs/index.d.ts", - "type": "module", - "exports": { - "./package.json": "./package.json", - ".": { - "import": { - "types": "./dist/esm/index.d.ts", - "default": "./dist/esm/index.js" - }, - "require": { - "types": "./dist/commonjs/index.d.ts", - "default": "./dist/commonjs/index.js" - } - } - }, - "files": [ - "dist" - ], - "scripts": { - "build-examples": "for i in examples/*.js ; do node $i -h > ${i/.js/.txt}; done", - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags", - "prepare": "tshy", - "pretest": "npm run prepare", - "presnap": "npm run prepare", - "test": "tap", - "snap": "tap", - "format": "prettier --write . --log-level warn", - "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts" - }, - "license": "BlueOak-1.0.0", - "prettier": { - "experimentalTernaries": true, - "semi": false, - "printWidth": 75, - "tabWidth": 2, - "useTabs": false, - "singleQuote": true, - "jsxSingleQuote": false, - "bracketSameLine": true, - "arrowParens": "avoid", - "endOfLine": "lf" - }, - "devDependencies": { - "@types/node": "^20.7.0", - "@types/pkgjs__parseargs": "^0.10.1", - "prettier": "^3.2.5", - "tap": "^18.8.0", - "tshy": "^1.14.0", - "typedoc": "^0.25.1", - "typescript": "^5.2.2" - }, - "dependencies": { - "@isaacs/cliui": "^8.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/isaacs/jackspeak.git" - }, - "keywords": [ - "argument", - "parser", - "args", - "option", - "flag", - "cli", - "command", - "line", - "parse", - "parsing" - ], - "author": "Isaac Z. Schlueter ", - "optionalDependencies": { - "@pkgjs/parseargs": "^0.11.0" - } -} diff --git a/node_modules/node-gyp/node_modules/lru-cache/LICENSE b/node_modules/node-gyp/node_modules/lru-cache/LICENSE deleted file mode 100644 index f785757cd63f8..0000000000000 --- a/node_modules/node-gyp/node_modules/lru-cache/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. 
- -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/node-gyp/node_modules/lru-cache/dist/commonjs/index.js b/node_modules/node-gyp/node_modules/lru-cache/dist/commonjs/index.js deleted file mode 100644 index 0589231885c68..0000000000000 --- a/node_modules/node-gyp/node_modules/lru-cache/dist/commonjs/index.js +++ /dev/null @@ -1,1546 +0,0 @@ -"use strict"; -/** - * @module LRUCache - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.LRUCache = void 0; -const perf = typeof performance === 'object' && - performance && - typeof performance.now === 'function' - ? performance - : Date; -const warned = new Set(); -/* c8 ignore start */ -const PROCESS = (typeof process === 'object' && !!process ? process : {}); -/* c8 ignore start */ -const emitWarning = (msg, type, code, fn) => { - typeof PROCESS.emitWarning === 'function' - ? PROCESS.emitWarning(msg, type, code, fn) - : console.error(`[${code}] ${type}: ${msg}`); -}; -let AC = globalThis.AbortController; -let AS = globalThis.AbortSignal; -/* c8 ignore start */ -if (typeof AC === 'undefined') { - //@ts-ignore - AS = class AbortSignal { - onabort; - _onabort = []; - reason; - aborted = false; - addEventListener(_, fn) { - this._onabort.push(fn); - } - }; - //@ts-ignore - AC = class AbortController { - constructor() { - warnACPolyfill(); - } - signal = new AS(); - abort(reason) { - if (this.signal.aborted) - return; - //@ts-ignore - this.signal.reason = reason; - //@ts-ignore - this.signal.aborted = true; - //@ts-ignore - for (const fn of this.signal._onabort) { - fn(reason); - } - this.signal.onabort?.(reason); - } - }; - let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1'; - const warnACPolyfill = () => { - if (!printACPolyfillWarning) - return; - printACPolyfillWarning = false; - emitWarning('AbortController is not defined. If using lru-cache in ' + - 'node 14, load an AbortController polyfill from the ' + - '`node-abort-controller` package. A minimal polyfill is ' + - 'provided for use by LRUCache.fetch(), but it should not be ' + - 'relied upon in other contexts (eg, passing it to other APIs that ' + - 'use AbortController/AbortSignal might have undesirable effects). ' + - 'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill); - }; -} -/* c8 ignore stop */ -const shouldWarn = (code) => !warned.has(code); -const TYPE = Symbol('type'); -const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n); -/* c8 ignore start */ -// This is a little bit ridiculous, tbh. -// The maximum array length is 2^32-1 or thereabouts on most JS impls. -// And well before that point, you're caching the entire world, I mean, -// that's ~32GB of just integers for the next/prev links, plus whatever -// else to hold that many keys and values. Just filling the memory with -// zeroes at init time is brutal when you get that big. -// But why not be complete? -// Maybe in the future, these limits will have expanded. -const getUintArray = (max) => !isPosInt(max) - ? 
null - : max <= Math.pow(2, 8) - ? Uint8Array - : max <= Math.pow(2, 16) - ? Uint16Array - : max <= Math.pow(2, 32) - ? Uint32Array - : max <= Number.MAX_SAFE_INTEGER - ? ZeroArray - : null; -/* c8 ignore stop */ -class ZeroArray extends Array { - constructor(size) { - super(size); - this.fill(0); - } -} -class Stack { - heap; - length; - // private constructor - static #constructing = false; - static create(max) { - const HeapCls = getUintArray(max); - if (!HeapCls) - return []; - Stack.#constructing = true; - const s = new Stack(max, HeapCls); - Stack.#constructing = false; - return s; - } - constructor(max, HeapCls) { - /* c8 ignore start */ - if (!Stack.#constructing) { - throw new TypeError('instantiate Stack using Stack.create(n)'); - } - /* c8 ignore stop */ - this.heap = new HeapCls(max); - this.length = 0; - } - push(n) { - this.heap[this.length++] = n; - } - pop() { - return this.heap[--this.length]; - } -} -/** - * Default export, the thing you're using this module to get. - * - * The `K` and `V` types define the key and value types, respectively. The - * optional `FC` type defines the type of the `context` object passed to - * `cache.fetch()` and `cache.memo()`. - * - * Keys and values **must not** be `null` or `undefined`. - * - * All properties from the options object (with the exception of `max`, - * `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are - * added as normal public members. (The listed options are read-only getters.) - * - * Changing any of these will alter the defaults for subsequent method calls. - */ -class LRUCache { - // options that cannot be changed without disaster - #max; - #maxSize; - #dispose; - #disposeAfter; - #fetchMethod; - #memoMethod; - /** - * {@link LRUCache.OptionsBase.ttl} - */ - ttl; - /** - * {@link LRUCache.OptionsBase.ttlResolution} - */ - ttlResolution; - /** - * {@link LRUCache.OptionsBase.ttlAutopurge} - */ - ttlAutopurge; - /** - * {@link LRUCache.OptionsBase.updateAgeOnGet} - */ - updateAgeOnGet; - /** - * {@link LRUCache.OptionsBase.updateAgeOnHas} - */ - updateAgeOnHas; - /** - * {@link LRUCache.OptionsBase.allowStale} - */ - allowStale; - /** - * {@link LRUCache.OptionsBase.noDisposeOnSet} - */ - noDisposeOnSet; - /** - * {@link LRUCache.OptionsBase.noUpdateTTL} - */ - noUpdateTTL; - /** - * {@link LRUCache.OptionsBase.maxEntrySize} - */ - maxEntrySize; - /** - * {@link LRUCache.OptionsBase.sizeCalculation} - */ - sizeCalculation; - /** - * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection} - */ - noDeleteOnFetchRejection; - /** - * {@link LRUCache.OptionsBase.noDeleteOnStaleGet} - */ - noDeleteOnStaleGet; - /** - * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort} - */ - allowStaleOnFetchAbort; - /** - * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} - */ - allowStaleOnFetchRejection; - /** - * {@link LRUCache.OptionsBase.ignoreFetchAbort} - */ - ignoreFetchAbort; - // computed properties - #size; - #calculatedSize; - #keyMap; - #keyList; - #valList; - #next; - #prev; - #head; - #tail; - #free; - #disposed; - #sizes; - #starts; - #ttls; - #hasDispose; - #hasFetchMethod; - #hasDisposeAfter; - /** - * Do not call this method unless you need to inspect the - * inner workings of the cache. If anything returned by this - * object is modified in any way, strange breakage may occur. - * - * These fields are private for a reason! 
- * - * @internal - */ - static unsafeExposeInternals(c) { - return { - // properties - starts: c.#starts, - ttls: c.#ttls, - sizes: c.#sizes, - keyMap: c.#keyMap, - keyList: c.#keyList, - valList: c.#valList, - next: c.#next, - prev: c.#prev, - get head() { - return c.#head; - }, - get tail() { - return c.#tail; - }, - free: c.#free, - // methods - isBackgroundFetch: (p) => c.#isBackgroundFetch(p), - backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context), - moveToTail: (index) => c.#moveToTail(index), - indexes: (options) => c.#indexes(options), - rindexes: (options) => c.#rindexes(options), - isStale: (index) => c.#isStale(index), - }; - } - // Protected read-only members - /** - * {@link LRUCache.OptionsBase.max} (read-only) - */ - get max() { - return this.#max; - } - /** - * {@link LRUCache.OptionsBase.maxSize} (read-only) - */ - get maxSize() { - return this.#maxSize; - } - /** - * The total computed size of items in the cache (read-only) - */ - get calculatedSize() { - return this.#calculatedSize; - } - /** - * The number of items stored in the cache (read-only) - */ - get size() { - return this.#size; - } - /** - * {@link LRUCache.OptionsBase.fetchMethod} (read-only) - */ - get fetchMethod() { - return this.#fetchMethod; - } - get memoMethod() { - return this.#memoMethod; - } - /** - * {@link LRUCache.OptionsBase.dispose} (read-only) - */ - get dispose() { - return this.#dispose; - } - /** - * {@link LRUCache.OptionsBase.disposeAfter} (read-only) - */ - get disposeAfter() { - return this.#disposeAfter; - } - constructor(options) { - const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options; - if (max !== 0 && !isPosInt(max)) { - throw new TypeError('max option must be a nonnegative integer'); - } - const UintArray = max ? 
getUintArray(max) : Array; - if (!UintArray) { - throw new Error('invalid max value: ' + max); - } - this.#max = max; - this.#maxSize = maxSize; - this.maxEntrySize = maxEntrySize || this.#maxSize; - this.sizeCalculation = sizeCalculation; - if (this.sizeCalculation) { - if (!this.#maxSize && !this.maxEntrySize) { - throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize'); - } - if (typeof this.sizeCalculation !== 'function') { - throw new TypeError('sizeCalculation set to non-function'); - } - } - if (memoMethod !== undefined && - typeof memoMethod !== 'function') { - throw new TypeError('memoMethod must be a function if defined'); - } - this.#memoMethod = memoMethod; - if (fetchMethod !== undefined && - typeof fetchMethod !== 'function') { - throw new TypeError('fetchMethod must be a function if specified'); - } - this.#fetchMethod = fetchMethod; - this.#hasFetchMethod = !!fetchMethod; - this.#keyMap = new Map(); - this.#keyList = new Array(max).fill(undefined); - this.#valList = new Array(max).fill(undefined); - this.#next = new UintArray(max); - this.#prev = new UintArray(max); - this.#head = 0; - this.#tail = 0; - this.#free = Stack.create(max); - this.#size = 0; - this.#calculatedSize = 0; - if (typeof dispose === 'function') { - this.#dispose = dispose; - } - if (typeof disposeAfter === 'function') { - this.#disposeAfter = disposeAfter; - this.#disposed = []; - } - else { - this.#disposeAfter = undefined; - this.#disposed = undefined; - } - this.#hasDispose = !!this.#dispose; - this.#hasDisposeAfter = !!this.#disposeAfter; - this.noDisposeOnSet = !!noDisposeOnSet; - this.noUpdateTTL = !!noUpdateTTL; - this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection; - this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection; - this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort; - this.ignoreFetchAbort = !!ignoreFetchAbort; - // NB: maxEntrySize is set to maxSize if it's set - if (this.maxEntrySize !== 0) { - if (this.#maxSize !== 0) { - if (!isPosInt(this.#maxSize)) { - throw new TypeError('maxSize must be a positive integer if specified'); - } - } - if (!isPosInt(this.maxEntrySize)) { - throw new TypeError('maxEntrySize must be a positive integer if specified'); - } - this.#initializeSizeTracking(); - } - this.allowStale = !!allowStale; - this.noDeleteOnStaleGet = !!noDeleteOnStaleGet; - this.updateAgeOnGet = !!updateAgeOnGet; - this.updateAgeOnHas = !!updateAgeOnHas; - this.ttlResolution = - isPosInt(ttlResolution) || ttlResolution === 0 - ? ttlResolution - : 1; - this.ttlAutopurge = !!ttlAutopurge; - this.ttl = ttl || 0; - if (this.ttl) { - if (!isPosInt(this.ttl)) { - throw new TypeError('ttl must be a positive integer if specified'); - } - this.#initializeTTLTracking(); - } - // do not allow completely unbounded caches - if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) { - throw new TypeError('At least one of max, maxSize, or ttl is required'); - } - if (!this.ttlAutopurge && !this.#max && !this.#maxSize) { - const code = 'LRU_CACHE_UNBOUNDED'; - if (shouldWarn(code)) { - warned.add(code); - const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' + - 'result in unbounded memory consumption.'; - emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache); - } - } - } - /** - * Return the number of ms left in the item's TTL. If item is not in cache, - * returns `0`. Returns `Infinity` if item is in cache without a defined TTL. - */ - getRemainingTTL(key) { - return this.#keyMap.has(key) ? 
Infinity : 0; - } - #initializeTTLTracking() { - const ttls = new ZeroArray(this.#max); - const starts = new ZeroArray(this.#max); - this.#ttls = ttls; - this.#starts = starts; - this.#setItemTTL = (index, ttl, start = perf.now()) => { - starts[index] = ttl !== 0 ? start : 0; - ttls[index] = ttl; - if (ttl !== 0 && this.ttlAutopurge) { - const t = setTimeout(() => { - if (this.#isStale(index)) { - this.#delete(this.#keyList[index], 'expire'); - } - }, ttl + 1); - // unref() not supported on all platforms - /* c8 ignore start */ - if (t.unref) { - t.unref(); - } - /* c8 ignore stop */ - } - }; - this.#updateItemAge = index => { - starts[index] = ttls[index] !== 0 ? perf.now() : 0; - }; - this.#statusTTL = (status, index) => { - if (ttls[index]) { - const ttl = ttls[index]; - const start = starts[index]; - /* c8 ignore next */ - if (!ttl || !start) - return; - status.ttl = ttl; - status.start = start; - status.now = cachedNow || getNow(); - const age = status.now - start; - status.remainingTTL = ttl - age; - } - }; - // debounce calls to perf.now() to 1s so we're not hitting - // that costly call repeatedly. - let cachedNow = 0; - const getNow = () => { - const n = perf.now(); - if (this.ttlResolution > 0) { - cachedNow = n; - const t = setTimeout(() => (cachedNow = 0), this.ttlResolution); - // not available on all platforms - /* c8 ignore start */ - if (t.unref) { - t.unref(); - } - /* c8 ignore stop */ - } - return n; - }; - this.getRemainingTTL = key => { - const index = this.#keyMap.get(key); - if (index === undefined) { - return 0; - } - const ttl = ttls[index]; - const start = starts[index]; - if (!ttl || !start) { - return Infinity; - } - const age = (cachedNow || getNow()) - start; - return ttl - age; - }; - this.#isStale = index => { - const s = starts[index]; - const t = ttls[index]; - return !!t && !!s && (cachedNow || getNow()) - s > t; - }; - } - // conditionally set private methods related to TTL - #updateItemAge = () => { }; - #statusTTL = () => { }; - #setItemTTL = () => { }; - /* c8 ignore stop */ - #isStale = () => false; - #initializeSizeTracking() { - const sizes = new ZeroArray(this.#max); - this.#calculatedSize = 0; - this.#sizes = sizes; - this.#removeItemSize = index => { - this.#calculatedSize -= sizes[index]; - sizes[index] = 0; - }; - this.#requireSize = (k, v, size, sizeCalculation) => { - // provisionally accept background fetches. - // actual value size will be checked when they return. - if (this.#isBackgroundFetch(v)) { - return 0; - } - if (!isPosInt(size)) { - if (sizeCalculation) { - if (typeof sizeCalculation !== 'function') { - throw new TypeError('sizeCalculation must be a function'); - } - size = sizeCalculation(v, k); - if (!isPosInt(size)) { - throw new TypeError('sizeCalculation return invalid (expect positive integer)'); - } - } - else { - throw new TypeError('invalid size value (must be positive integer). 
' + - 'When maxSize or maxEntrySize is used, sizeCalculation ' + - 'or size must be set.'); - } - } - return size; - }; - this.#addItemSize = (index, size, status) => { - sizes[index] = size; - if (this.#maxSize) { - const maxSize = this.#maxSize - sizes[index]; - while (this.#calculatedSize > maxSize) { - this.#evict(true); - } - } - this.#calculatedSize += sizes[index]; - if (status) { - status.entrySize = size; - status.totalCalculatedSize = this.#calculatedSize; - } - }; - } - #removeItemSize = _i => { }; - #addItemSize = (_i, _s, _st) => { }; - #requireSize = (_k, _v, size, sizeCalculation) => { - if (size || sizeCalculation) { - throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache'); - } - return 0; - }; - *#indexes({ allowStale = this.allowStale } = {}) { - if (this.#size) { - for (let i = this.#tail; true;) { - if (!this.#isValidIndex(i)) { - break; - } - if (allowStale || !this.#isStale(i)) { - yield i; - } - if (i === this.#head) { - break; - } - else { - i = this.#prev[i]; - } - } - } - } - *#rindexes({ allowStale = this.allowStale } = {}) { - if (this.#size) { - for (let i = this.#head; true;) { - if (!this.#isValidIndex(i)) { - break; - } - if (allowStale || !this.#isStale(i)) { - yield i; - } - if (i === this.#tail) { - break; - } - else { - i = this.#next[i]; - } - } - } - } - #isValidIndex(index) { - return (index !== undefined && - this.#keyMap.get(this.#keyList[index]) === index); - } - /** - * Return a generator yielding `[key, value]` pairs, - * in order from most recently used to least recently used. - */ - *entries() { - for (const i of this.#indexes()) { - if (this.#valList[i] !== undefined && - this.#keyList[i] !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield [this.#keyList[i], this.#valList[i]]; - } - } - } - /** - * Inverse order version of {@link LRUCache.entries} - * - * Return a generator yielding `[key, value]` pairs, - * in order from least recently used to most recently used. - */ - *rentries() { - for (const i of this.#rindexes()) { - if (this.#valList[i] !== undefined && - this.#keyList[i] !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield [this.#keyList[i], this.#valList[i]]; - } - } - } - /** - * Return a generator yielding the keys in the cache, - * in order from most recently used to least recently used. - */ - *keys() { - for (const i of this.#indexes()) { - const k = this.#keyList[i]; - if (k !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield k; - } - } - } - /** - * Inverse order version of {@link LRUCache.keys} - * - * Return a generator yielding the keys in the cache, - * in order from least recently used to most recently used. - */ - *rkeys() { - for (const i of this.#rindexes()) { - const k = this.#keyList[i]; - if (k !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield k; - } - } - } - /** - * Return a generator yielding the values in the cache, - * in order from most recently used to least recently used. - */ - *values() { - for (const i of this.#indexes()) { - const v = this.#valList[i]; - if (v !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield this.#valList[i]; - } - } - } - /** - * Inverse order version of {@link LRUCache.values} - * - * Return a generator yielding the values in the cache, - * in order from least recently used to most recently used. 
- */ - *rvalues() { - for (const i of this.#rindexes()) { - const v = this.#valList[i]; - if (v !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield this.#valList[i]; - } - } - } - /** - * Iterating over the cache itself yields the same results as - * {@link LRUCache.entries} - */ - [Symbol.iterator]() { - return this.entries(); - } - /** - * A String value that is used in the creation of the default string - * description of an object. Called by the built-in method - * `Object.prototype.toString`. - */ - [Symbol.toStringTag] = 'LRUCache'; - /** - * Find a value for which the supplied fn method returns a truthy value, - * similar to `Array.find()`. fn is called as `fn(value, key, cache)`. - */ - find(fn, getOptions = {}) { - for (const i of this.#indexes()) { - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined) - continue; - if (fn(value, this.#keyList[i], this)) { - return this.get(this.#keyList[i], getOptions); - } - } - } - /** - * Call the supplied function on each item in the cache, in order from most - * recently used to least recently used. - * - * `fn` is called as `fn(value, key, cache)`. - * - * If `thisp` is provided, function will be called in the `this`-context of - * the provided object, or the cache if no `thisp` object is provided. - * - * Does not update age or recenty of use, or iterate over stale values. - */ - forEach(fn, thisp = this) { - for (const i of this.#indexes()) { - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined) - continue; - fn.call(thisp, value, this.#keyList[i], this); - } - } - /** - * The same as {@link LRUCache.forEach} but items are iterated over in - * reverse order. (ie, less recently used items are iterated over first.) - */ - rforEach(fn, thisp = this) { - for (const i of this.#rindexes()) { - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined) - continue; - fn.call(thisp, value, this.#keyList[i], this); - } - } - /** - * Delete any stale entries. Returns true if anything was removed, - * false otherwise. - */ - purgeStale() { - let deleted = false; - for (const i of this.#rindexes({ allowStale: true })) { - if (this.#isStale(i)) { - this.#delete(this.#keyList[i], 'expire'); - deleted = true; - } - } - return deleted; - } - /** - * Get the extended info about a given entry, to get its value, size, and - * TTL info simultaneously. Returns `undefined` if the key is not present. - * - * Unlike {@link LRUCache#dump}, which is designed to be portable and survive - * serialization, the `start` value is always the current timestamp, and the - * `ttl` is a calculated remaining time to live (negative if expired). - * - * Always returns stale values, if their info is found in the cache, so be - * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl}) - * if relevant. - */ - info(key) { - const i = this.#keyMap.get(key); - if (i === undefined) - return undefined; - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? 
v.__staleWhileFetching - : v; - if (value === undefined) - return undefined; - const entry = { value }; - if (this.#ttls && this.#starts) { - const ttl = this.#ttls[i]; - const start = this.#starts[i]; - if (ttl && start) { - const remain = ttl - (perf.now() - start); - entry.ttl = remain; - entry.start = Date.now(); - } - } - if (this.#sizes) { - entry.size = this.#sizes[i]; - } - return entry; - } - /** - * Return an array of [key, {@link LRUCache.Entry}] tuples which can be - * passed to {@link LRLUCache#load}. - * - * The `start` fields are calculated relative to a portable `Date.now()` - * timestamp, even if `performance.now()` is available. - * - * Stale entries are always included in the `dump`, even if - * {@link LRUCache.OptionsBase.allowStale} is false. - * - * Note: this returns an actual array, not a generator, so it can be more - * easily passed around. - */ - dump() { - const arr = []; - for (const i of this.#indexes({ allowStale: true })) { - const key = this.#keyList[i]; - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined || key === undefined) - continue; - const entry = { value }; - if (this.#ttls && this.#starts) { - entry.ttl = this.#ttls[i]; - // always dump the start relative to a portable timestamp - // it's ok for this to be a bit slow, it's a rare operation. - const age = perf.now() - this.#starts[i]; - entry.start = Math.floor(Date.now() - age); - } - if (this.#sizes) { - entry.size = this.#sizes[i]; - } - arr.unshift([key, entry]); - } - return arr; - } - /** - * Reset the cache and load in the items in entries in the order listed. - * - * The shape of the resulting cache may be different if the same options are - * not used in both caches. - * - * The `start` fields are assumed to be calculated relative to a portable - * `Date.now()` timestamp, even if `performance.now()` is available. - */ - load(arr) { - this.clear(); - for (const [key, entry] of arr) { - if (entry.start) { - // entry.start is a portable timestamp, but we may be using - // node's performance.now(), so calculate the offset, so that - // we get the intended remaining TTL, no matter how long it's - // been on ice. - // - // it's ok for this to be a bit slow, it's a rare operation. - const age = Date.now() - entry.start; - entry.start = perf.now() - age; - } - this.set(key, entry.value, entry); - } - } - /** - * Add a value to the cache. - * - * Note: if `undefined` is specified as a value, this is an alias for - * {@link LRUCache#delete} - * - * Fields on the {@link LRUCache.SetOptions} options param will override - * their corresponding values in the constructor options for the scope - * of this single `set()` operation. - * - * If `start` is provided, then that will set the effective start - * time for the TTL calculation. Note that this must be a previous - * value of `performance.now()` if supported, or a previous value of - * `Date.now()` if not. - * - * Options object may also include `size`, which will prevent - * calling the `sizeCalculation` function and just use the specified - * number if it is a positive integer, and `noDisposeOnSet` which - * will prevent calling a `dispose` function in the case of - * overwrites. - * - * If the `size` (or return value of `sizeCalculation`) for a given - * entry is greater than `maxEntrySize`, then the item will not be - * added to the cache. - * - * Will update the recency of the entry. 
- * - * If the value is `undefined`, then this is an alias for - * `cache.delete(key)`. `undefined` is never stored in the cache. - */ - set(k, v, setOptions = {}) { - if (v === undefined) { - this.delete(k); - return this; - } - const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions; - let { noUpdateTTL = this.noUpdateTTL } = setOptions; - const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation); - // if the item doesn't fit, don't do anything - // NB: maxEntrySize set to maxSize by default - if (this.maxEntrySize && size > this.maxEntrySize) { - if (status) { - status.set = 'miss'; - status.maxEntrySizeExceeded = true; - } - // have to delete, in case something is there already. - this.#delete(k, 'set'); - return this; - } - let index = this.#size === 0 ? undefined : this.#keyMap.get(k); - if (index === undefined) { - // addition - index = (this.#size === 0 - ? this.#tail - : this.#free.length !== 0 - ? this.#free.pop() - : this.#size === this.#max - ? this.#evict(false) - : this.#size); - this.#keyList[index] = k; - this.#valList[index] = v; - this.#keyMap.set(k, index); - this.#next[this.#tail] = index; - this.#prev[index] = this.#tail; - this.#tail = index; - this.#size++; - this.#addItemSize(index, size, status); - if (status) - status.set = 'add'; - noUpdateTTL = false; - } - else { - // update - this.#moveToTail(index); - const oldVal = this.#valList[index]; - if (v !== oldVal) { - if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) { - oldVal.__abortController.abort(new Error('replaced')); - const { __staleWhileFetching: s } = oldVal; - if (s !== undefined && !noDisposeOnSet) { - if (this.#hasDispose) { - this.#dispose?.(s, k, 'set'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([s, k, 'set']); - } - } - } - else if (!noDisposeOnSet) { - if (this.#hasDispose) { - this.#dispose?.(oldVal, k, 'set'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([oldVal, k, 'set']); - } - } - this.#removeItemSize(index); - this.#addItemSize(index, size, status); - this.#valList[index] = v; - if (status) { - status.set = 'replace'; - const oldValue = oldVal && this.#isBackgroundFetch(oldVal) - ? oldVal.__staleWhileFetching - : oldVal; - if (oldValue !== undefined) - status.oldValue = oldValue; - } - } - else if (status) { - status.set = 'update'; - } - } - if (ttl !== 0 && !this.#ttls) { - this.#initializeTTLTracking(); - } - if (this.#ttls) { - if (!noUpdateTTL) { - this.#setItemTTL(index, ttl, start); - } - if (status) - this.#statusTTL(status, index); - } - if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - return this; - } - /** - * Evict the least recently used item, returning its value or - * `undefined` if cache is empty. 
- */ - pop() { - try { - while (this.#size) { - const val = this.#valList[this.#head]; - this.#evict(true); - if (this.#isBackgroundFetch(val)) { - if (val.__staleWhileFetching) { - return val.__staleWhileFetching; - } - } - else if (val !== undefined) { - return val; - } - } - } - finally { - if (this.#hasDisposeAfter && this.#disposed) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - } - } - #evict(free) { - const head = this.#head; - const k = this.#keyList[head]; - const v = this.#valList[head]; - if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) { - v.__abortController.abort(new Error('evicted')); - } - else if (this.#hasDispose || this.#hasDisposeAfter) { - if (this.#hasDispose) { - this.#dispose?.(v, k, 'evict'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([v, k, 'evict']); - } - } - this.#removeItemSize(head); - // if we aren't about to use the index, then null these out - if (free) { - this.#keyList[head] = undefined; - this.#valList[head] = undefined; - this.#free.push(head); - } - if (this.#size === 1) { - this.#head = this.#tail = 0; - this.#free.length = 0; - } - else { - this.#head = this.#next[head]; - } - this.#keyMap.delete(k); - this.#size--; - return head; - } - /** - * Check if a key is in the cache, without updating the recency of use. - * Will return false if the item is stale, even though it is technically - * in the cache. - * - * Check if a key is in the cache, without updating the recency of - * use. Age is updated if {@link LRUCache.OptionsBase.updateAgeOnHas} is set - * to `true` in either the options or the constructor. - * - * Will return `false` if the item is stale, even though it is technically in - * the cache. The difference can be determined (if it matters) by using a - * `status` argument, and inspecting the `has` field. - * - * Will not update item age unless - * {@link LRUCache.OptionsBase.updateAgeOnHas} is set. - */ - has(k, hasOptions = {}) { - const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions; - const index = this.#keyMap.get(k); - if (index !== undefined) { - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v) && - v.__staleWhileFetching === undefined) { - return false; - } - if (!this.#isStale(index)) { - if (updateAgeOnHas) { - this.#updateItemAge(index); - } - if (status) { - status.has = 'hit'; - this.#statusTTL(status, index); - } - return true; - } - else if (status) { - status.has = 'stale'; - this.#statusTTL(status, index); - } - } - else if (status) { - status.has = 'miss'; - } - return false; - } - /** - * Like {@link LRUCache#get} but doesn't update recency or delete stale - * items. - * - * Returns `undefined` if the item is stale, unless - * {@link LRUCache.OptionsBase.allowStale} is set. - */ - peek(k, peekOptions = {}) { - const { allowStale = this.allowStale } = peekOptions; - const index = this.#keyMap.get(k); - if (index === undefined || - (!allowStale && this.#isStale(index))) { - return; - } - const v = this.#valList[index]; - // either stale and allowed, or forcing a refresh of non-stale value - return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; - } - #backgroundFetch(k, index, options, context) { - const v = index === undefined ? undefined : this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - return v; - } - const ac = new AC(); - const { signal } = options; - // when/if our AC signals, then stop listening to theirs. 
- signal?.addEventListener('abort', () => ac.abort(signal.reason), { - signal: ac.signal, - }); - const fetchOpts = { - signal: ac.signal, - options, - context, - }; - const cb = (v, updateCache = false) => { - const { aborted } = ac.signal; - const ignoreAbort = options.ignoreFetchAbort && v !== undefined; - if (options.status) { - if (aborted && !updateCache) { - options.status.fetchAborted = true; - options.status.fetchError = ac.signal.reason; - if (ignoreAbort) - options.status.fetchAbortIgnored = true; - } - else { - options.status.fetchResolved = true; - } - } - if (aborted && !ignoreAbort && !updateCache) { - return fetchFail(ac.signal.reason); - } - // either we didn't abort, and are still here, or we did, and ignored - const bf = p; - if (this.#valList[index] === p) { - if (v === undefined) { - if (bf.__staleWhileFetching) { - this.#valList[index] = bf.__staleWhileFetching; - } - else { - this.#delete(k, 'fetch'); - } - } - else { - if (options.status) - options.status.fetchUpdated = true; - this.set(k, v, fetchOpts.options); - } - } - return v; - }; - const eb = (er) => { - if (options.status) { - options.status.fetchRejected = true; - options.status.fetchError = er; - } - return fetchFail(er); - }; - const fetchFail = (er) => { - const { aborted } = ac.signal; - const allowStaleAborted = aborted && options.allowStaleOnFetchAbort; - const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection; - const noDelete = allowStale || options.noDeleteOnFetchRejection; - const bf = p; - if (this.#valList[index] === p) { - // if we allow stale on fetch rejections, then we need to ensure that - // the stale value is not removed from the cache when the fetch fails. - const del = !noDelete || bf.__staleWhileFetching === undefined; - if (del) { - this.#delete(k, 'fetch'); - } - else if (!allowStaleAborted) { - // still replace the *promise* with the stale value, - // since we are done with the promise at this point. - // leave it untouched if we're still waiting for an - // aborted background fetch that hasn't yet returned. - this.#valList[index] = bf.__staleWhileFetching; - } - } - if (allowStale) { - if (options.status && bf.__staleWhileFetching !== undefined) { - options.status.returnedStale = true; - } - return bf.__staleWhileFetching; - } - else if (bf.__returned === bf) { - throw er; - } - }; - const pcall = (res, rej) => { - const fmp = this.#fetchMethod?.(k, v, fetchOpts); - if (fmp && fmp instanceof Promise) { - fmp.then(v => res(v === undefined ? undefined : v), rej); - } - // ignored, we go until we finish, regardless. - // defer check until we are actually aborting, - // so fetchMethod can override. - ac.signal.addEventListener('abort', () => { - if (!options.ignoreFetchAbort || - options.allowStaleOnFetchAbort) { - res(undefined); - // when it eventually resolves, update the cache. - if (options.allowStaleOnFetchAbort) { - res = v => cb(v, true); - } - } - }); - }; - if (options.status) - options.status.fetchDispatched = true; - const p = new Promise(pcall).then(cb, eb); - const bf = Object.assign(p, { - __abortController: ac, - __staleWhileFetching: v, - __returned: undefined, - }); - if (index === undefined) { - // internal, don't expose status. 
- this.set(k, bf, { ...fetchOpts.options, status: undefined }); - index = this.#keyMap.get(k); - } - else { - this.#valList[index] = bf; - } - return bf; - } - #isBackgroundFetch(p) { - if (!this.#hasFetchMethod) - return false; - const b = p; - return (!!b && - b instanceof Promise && - b.hasOwnProperty('__staleWhileFetching') && - b.__abortController instanceof AC); - } - async fetch(k, fetchOptions = {}) { - const { - // get options - allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, - // set options - ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, - // fetch exclusive options - noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions; - if (!this.#hasFetchMethod) { - if (status) - status.fetch = 'get'; - return this.get(k, { - allowStale, - updateAgeOnGet, - noDeleteOnStaleGet, - status, - }); - } - const options = { - allowStale, - updateAgeOnGet, - noDeleteOnStaleGet, - ttl, - noDisposeOnSet, - size, - sizeCalculation, - noUpdateTTL, - noDeleteOnFetchRejection, - allowStaleOnFetchRejection, - allowStaleOnFetchAbort, - ignoreFetchAbort, - status, - signal, - }; - let index = this.#keyMap.get(k); - if (index === undefined) { - if (status) - status.fetch = 'miss'; - const p = this.#backgroundFetch(k, index, options, context); - return (p.__returned = p); - } - else { - // in cache, maybe already fetching - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - const stale = allowStale && v.__staleWhileFetching !== undefined; - if (status) { - status.fetch = 'inflight'; - if (stale) - status.returnedStale = true; - } - return stale ? v.__staleWhileFetching : (v.__returned = v); - } - // if we force a refresh, that means do NOT serve the cached value, - // unless we are already in the process of refreshing the cache. - const isStale = this.#isStale(index); - if (!forceRefresh && !isStale) { - if (status) - status.fetch = 'hit'; - this.#moveToTail(index); - if (updateAgeOnGet) { - this.#updateItemAge(index); - } - if (status) - this.#statusTTL(status, index); - return v; - } - // ok, it is stale or a forced refresh, and not already fetching. - // refresh the cache. - const p = this.#backgroundFetch(k, index, options, context); - const hasStale = p.__staleWhileFetching !== undefined; - const staleVal = hasStale && allowStale; - if (status) { - status.fetch = isStale ? 'stale' : 'refresh'; - if (staleVal && isStale) - status.returnedStale = true; - } - return staleVal ? p.__staleWhileFetching : (p.__returned = p); - } - } - async forceFetch(k, fetchOptions = {}) { - const v = await this.fetch(k, fetchOptions); - if (v === undefined) - throw new Error('fetch() returned undefined'); - return v; - } - memo(k, memoOptions = {}) { - const memoMethod = this.#memoMethod; - if (!memoMethod) { - throw new Error('no memoMethod provided to constructor'); - } - const { context, forceRefresh, ...options } = memoOptions; - const v = this.get(k, options); - if (!forceRefresh && v !== undefined) - return v; - const vv = memoMethod(k, v, { - options, - context, - }); - this.set(k, vv, options); - return vv; - } - /** - * Return a value from the cache. Will update the recency of the cache - * entry found. 
- * - * If the key is not found, get() will return `undefined`. - */ - get(k, getOptions = {}) { - const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions; - const index = this.#keyMap.get(k); - if (index !== undefined) { - const value = this.#valList[index]; - const fetching = this.#isBackgroundFetch(value); - if (status) - this.#statusTTL(status, index); - if (this.#isStale(index)) { - if (status) - status.get = 'stale'; - // delete only if not an in-flight background fetch - if (!fetching) { - if (!noDeleteOnStaleGet) { - this.#delete(k, 'expire'); - } - if (status && allowStale) - status.returnedStale = true; - return allowStale ? value : undefined; - } - else { - if (status && - allowStale && - value.__staleWhileFetching !== undefined) { - status.returnedStale = true; - } - return allowStale ? value.__staleWhileFetching : undefined; - } - } - else { - if (status) - status.get = 'hit'; - // if we're currently fetching it, we don't actually have it yet - // it's not stale, which means this isn't a staleWhileRefetching. - // If it's not stale, and fetching, AND has a __staleWhileFetching - // value, then that means the user fetched with {forceRefresh:true}, - // so it's safe to return that value. - if (fetching) { - return value.__staleWhileFetching; - } - this.#moveToTail(index); - if (updateAgeOnGet) { - this.#updateItemAge(index); - } - return value; - } - } - else if (status) { - status.get = 'miss'; - } - } - #connect(p, n) { - this.#prev[n] = p; - this.#next[p] = n; - } - #moveToTail(index) { - // if tail already, nothing to do - // if head, move head to next[index] - // else - // move next[prev[index]] to next[index] (head has no prev) - // move prev[next[index]] to prev[index] - // prev[index] = tail - // next[tail] = index - // tail = index - if (index !== this.#tail) { - if (index === this.#head) { - this.#head = this.#next[index]; - } - else { - this.#connect(this.#prev[index], this.#next[index]); - } - this.#connect(this.#tail, index); - this.#tail = index; - } - } - /** - * Deletes a key out of the cache. - * - * Returns true if the key was deleted, false otherwise. - */ - delete(k) { - return this.#delete(k, 'delete'); - } - #delete(k, reason) { - let deleted = false; - if (this.#size !== 0) { - const index = this.#keyMap.get(k); - if (index !== undefined) { - deleted = true; - if (this.#size === 1) { - this.#clear(reason); - } - else { - this.#removeItemSize(index); - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - v.__abortController.abort(new Error('deleted')); - } - else if (this.#hasDispose || this.#hasDisposeAfter) { - if (this.#hasDispose) { - this.#dispose?.(v, k, reason); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([v, k, reason]); - } - } - this.#keyMap.delete(k); - this.#keyList[index] = undefined; - this.#valList[index] = undefined; - if (index === this.#tail) { - this.#tail = this.#prev[index]; - } - else if (index === this.#head) { - this.#head = this.#next[index]; - } - else { - const pi = this.#prev[index]; - this.#next[pi] = this.#next[index]; - const ni = this.#next[index]; - this.#prev[ni] = this.#prev[index]; - } - this.#size--; - this.#free.push(index); - } - } - } - if (this.#hasDisposeAfter && this.#disposed?.length) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - return deleted; - } - /** - * Clear the cache entirely, throwing away all values. 
- */ - clear() { - return this.#clear('delete'); - } - #clear(reason) { - for (const index of this.#rindexes({ allowStale: true })) { - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - v.__abortController.abort(new Error('deleted')); - } - else { - const k = this.#keyList[index]; - if (this.#hasDispose) { - this.#dispose?.(v, k, reason); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([v, k, reason]); - } - } - } - this.#keyMap.clear(); - this.#valList.fill(undefined); - this.#keyList.fill(undefined); - if (this.#ttls && this.#starts) { - this.#ttls.fill(0); - this.#starts.fill(0); - } - if (this.#sizes) { - this.#sizes.fill(0); - } - this.#head = 0; - this.#tail = 0; - this.#free.length = 0; - this.#calculatedSize = 0; - this.#size = 0; - if (this.#hasDisposeAfter && this.#disposed) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - } -} -exports.LRUCache = LRUCache; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/lru-cache/dist/commonjs/index.min.js b/node_modules/node-gyp/node_modules/lru-cache/dist/commonjs/index.min.js deleted file mode 100644 index ad643b0badc90..0000000000000 --- a/node_modules/node-gyp/node_modules/lru-cache/dist/commonjs/index.min.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict";var G=(l,t,e)=>{if(!t.has(l))throw TypeError("Cannot "+e)};var j=(l,t,e)=>(G(l,t,"read from private field"),e?e.call(l):t.get(l)),I=(l,t,e)=>{if(t.has(l))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(l):t.set(l,e)},x=(l,t,e,i)=>(G(l,t,"write to private field"),i?i.call(l,e):t.set(l,e),e);Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var T=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,P=new Set,U=typeof process=="object"&&process?process:{},H=(l,t,e,i)=>{typeof U.emitWarning=="function"?U.emitWarning(l,t,e,i):console.error(`[${e}] ${t}: ${l}`)},D=globalThis.AbortController,N=globalThis.AbortSignal;if(typeof D>"u"){N=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},D=class{constructor(){t()}signal=new N;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let l=U.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{l&&(l=!1,H("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). 
You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=l=>!P.has(l),Y=Symbol("type"),A=l=>l&&l===Math.floor(l)&&l>0&&isFinite(l),k=l=>A(l)?l<=Math.pow(2,8)?Uint8Array:l<=Math.pow(2,16)?Uint16Array:l<=Math.pow(2,32)?Uint32Array:l<=Number.MAX_SAFE_INTEGER?E:null:null,E=class extends Array{constructor(t){super(t),this.fill(0)}},v,O=class{heap;length;static create(t){let e=k(t);if(!e)return[];x(O,v,!0);let i=new O(t,e);return x(O,v,!1),i}constructor(t,e){if(!j(O,v))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},W=O;v=new WeakMap,I(W,v,!1);var C=class{#g;#f;#p;#w;#R;#W;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#S;#s;#i;#t;#l;#c;#o;#h;#_;#r;#b;#m;#u;#y;#E;#a;static unsafeExposeInternals(t){return{starts:t.#m,ttls:t.#u,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#_,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#x(e,i,s,n),moveToTail:e=>t.#C(e),indexes:e=>t.#A(e),rindexes:e=>t.#F(e),isStale:e=>t.#d(e)}}get max(){return this.#g}get maxSize(){return this.#f}get calculatedSize(){return this.#S}get size(){return this.#n}get fetchMethod(){return this.#R}get memoMethod(){return this.#W}get dispose(){return this.#p}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,memoMethod:a,noDeleteOnFetchRejection:w,noDeleteOnStaleGet:m,allowStaleOnFetchRejection:p,allowStaleOnFetchAbort:_,ignoreFetchAbort:z}=t;if(e!==0&&!A(e))throw new TypeError("max option must be a nonnegative integer");let y=e?k(e):Array;if(!y)throw new Error("invalid max value: "+e);if(this.#g=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(a!==void 0&&typeof a!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#W=a,S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#R=S,this.#E=!!S,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new y(e),this.#c=new y(e),this.#o=0,this.#h=0,this.#_=W.create(e),this.#n=0,this.#S=0,typeof g=="function"&&(this.#p=g),typeof b=="function"?(this.#w=b,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#y=!!this.#p,this.#a=!!this.#w,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!w,this.allowStaleOnFetchRejection=!!p,this.allowStaleOnFetchAbort=!!_,this.ignoreFetchAbort=!!z,this.maxEntrySize!==0){if(this.#f!==0&&!A(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!A(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if 
specified");this.#P()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!m,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=A(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!A(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#U()}if(this.#g===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#g&&!this.#f){let R="LRU_CACHE_UNBOUNDED";V(R)&&(P.add(R),H("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",R,C))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#U(){let t=new E(this.#g),e=new E(this.#g);this.#u=t,this.#m=e,this.#M=(n,h,o=T.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#d(n)&&this.#T(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#v=n=>{e[n]=t[n]!==0?T.now():0},this.#O=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=T.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#d=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#v=()=>{};#O=()=>{};#M=()=>{};#d=()=>!1;#P(){let t=new E(this.#g);this.#S=0,this.#b=t,this.#z=e=>{this.#S-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!A(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!A(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#D=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#S>n;)this.#L(!0)}this.#S+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#S)}}#z=t=>{};#D=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#A({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#j(e)||((t||!this.#d(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#j(e)||((t||!this.#d(e))&&(yield e),e===this.#h));)e=this.#l[e]}#j(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#F({allowStale:!0}))this.#d(e)&&(this.#T(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#u&&this.#m){let h=this.#u[e],o=this.#m[e];if(h&&o){let r=h-(T.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#A({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#u&&this.#m){h.ttl=this.#u[e];let o=T.now()-this.#m[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=T.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,o);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#T(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#_.length!==0?this.#_.pop():this.#n===this.#g?this.#L(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#n++,this.#D(f,b,r),r&&(r.set="add"),g=!1;else{this.#C(f);let u=this.#t[f];if(e!==u){if(this.#E&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 
0&&!h&&(this.#y&&this.#p?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#y&&this.#p?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#z(f),this.#D(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else r&&(r.set="update")}if(s!==0&&!this.#u&&this.#U(),this.#u&&(g||this.#M(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#w?.(...c)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#L(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#L(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#E&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#y||this.#a)&&(this.#y&&this.#p?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#z(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#_.push(e)),this.#n===1?(this.#o=this.#h=0,this.#_.length=0):this.#o=this.#l[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#d(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#v(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#d(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#x(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new D,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:a}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(a&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),a&&!w&&!S)return f(h.signal.reason);let m=c;return this.#t[e]===c&&(d===void 0?m.__staleWhileFetching?this.#t[e]=m.__staleWhileFetching:this.#T(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,a=S&&i.allowStaleOnFetchAbort,w=a||i.allowStaleOnFetchRejection,m=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!m||p.__staleWhileFetching===void 0?this.#T(t,"fetch"):a||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let a=this.#R?.(t,n,r);a&&a instanceof Promise&&a.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=F,F}#e(t){if(!this.#E)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof D}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:a,signal:w}=e;if(!this.#E)return a&&(a.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:a});let m={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:a,signal:w},p=this.#s.get(t);if(p===void 0){a&&(a.fetch="miss");let _=this.#x(t,p,m,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let M=i&&_.__staleWhileFetching!==void 0;return a&&(a.fetch="inflight",M&&(a.returnedStale=!0)),M?_.__staleWhileFetching:_.__returned=_}let z=this.#d(p);if(!S&&!z)return a&&(a.fetch="hit"),this.#C(p),s&&this.#v(p),a&&this.#O(a,p),_;let y=this.#x(t,p,m,d),L=y.__staleWhileFetching!==void 0&&i;return a&&(a.fetch=z?"stale":"refresh",L&&z&&(a.returnedStale=!0)),L?y.__staleWhileFetching:y.__returned=y}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#W;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#O(h,o),this.#d(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#T(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#C(o),s&&this.#v(o),r))}else h&&(h.get="miss")}#I(t,e){this.#c[e]=t,this.#l[t]=e}#C(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#I(this.#c[t],this.#l[t]),this.#I(this.#h,t),this.#h=t)}delete(t){return this.#T(t,"delete")}#T(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#N(e);else{this.#z(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#y||this.#a)&&(this.#y&&this.#p?.(n,t,e),this.#a&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#c[s];else if(s===this.#o)this.#o=this.#l[s];else{let h=this.#c[s];this.#l[h]=this.#l[s];let o=this.#l[s];this.#c[o]=this.#c[s]}this.#n--,this.#_.push(s)}}if(this.#a&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#N("delete")}#N(t){for(let e of this.#F({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#y&&this.#p?.(i,s,t),this.#a&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#u&&this.#m&&(this.#u.fill(0),this.#m.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#_.length=0,this.#S=0,this.#n=0,this.#a&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};exports.LRUCache=C; -//# sourceMappingURL=index.min.js.map diff --git a/node_modules/node-gyp/node_modules/lru-cache/dist/commonjs/package.json b/node_modules/node-gyp/node_modules/lru-cache/dist/commonjs/package.json deleted file mode 100644 index 5bbefffbabee3..0000000000000 --- a/node_modules/node-gyp/node_modules/lru-cache/dist/commonjs/package.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "type": "commonjs" -} diff --git a/node_modules/node-gyp/node_modules/lru-cache/dist/esm/index.js b/node_modules/node-gyp/node_modules/lru-cache/dist/esm/index.js deleted file mode 100644 index 555654a57c4d7..0000000000000 --- a/node_modules/node-gyp/node_modules/lru-cache/dist/esm/index.js +++ /dev/null @@ -1,1542 +0,0 @@ -/** - * @module LRUCache - */ -const perf = typeof performance === 'object' && - performance && - typeof performance.now === 'function' - ? performance - : Date; -const warned = new Set(); -/* c8 ignore start */ -const PROCESS = (typeof process === 'object' && !!process ? process : {}); -/* c8 ignore start */ -const emitWarning = (msg, type, code, fn) => { - typeof PROCESS.emitWarning === 'function' - ? PROCESS.emitWarning(msg, type, code, fn) - : console.error(`[${code}] ${type}: ${msg}`); -}; -let AC = globalThis.AbortController; -let AS = globalThis.AbortSignal; -/* c8 ignore start */ -if (typeof AC === 'undefined') { - //@ts-ignore - AS = class AbortSignal { - onabort; - _onabort = []; - reason; - aborted = false; - addEventListener(_, fn) { - this._onabort.push(fn); - } - }; - //@ts-ignore - AC = class AbortController { - constructor() { - warnACPolyfill(); - } - signal = new AS(); - abort(reason) { - if (this.signal.aborted) - return; - //@ts-ignore - this.signal.reason = reason; - //@ts-ignore - this.signal.aborted = true; - //@ts-ignore - for (const fn of this.signal._onabort) { - fn(reason); - } - this.signal.onabort?.(reason); - } - }; - let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1'; - const warnACPolyfill = () => { - if (!printACPolyfillWarning) - return; - printACPolyfillWarning = false; - emitWarning('AbortController is not defined. If using lru-cache in ' + - 'node 14, load an AbortController polyfill from the ' + - '`node-abort-controller` package. A minimal polyfill is ' + - 'provided for use by LRUCache.fetch(), but it should not be ' + - 'relied upon in other contexts (eg, passing it to other APIs that ' + - 'use AbortController/AbortSignal might have undesirable effects). ' + - 'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill); - }; -} -/* c8 ignore stop */ -const shouldWarn = (code) => !warned.has(code); -const TYPE = Symbol('type'); -const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n); -/* c8 ignore start */ -// This is a little bit ridiculous, tbh. -// The maximum array length is 2^32-1 or thereabouts on most JS impls. -// And well before that point, you're caching the entire world, I mean, -// that's ~32GB of just integers for the next/prev links, plus whatever -// else to hold that many keys and values. Just filling the memory with -// zeroes at init time is brutal when you get that big. -// But why not be complete? -// Maybe in the future, these limits will have expanded. -const getUintArray = (max) => !isPosInt(max) - ? null - : max <= Math.pow(2, 8) - ? Uint8Array - : max <= Math.pow(2, 16) - ? Uint16Array - : max <= Math.pow(2, 32) - ? Uint32Array - : max <= Number.MAX_SAFE_INTEGER - ? 
ZeroArray - : null; -/* c8 ignore stop */ -class ZeroArray extends Array { - constructor(size) { - super(size); - this.fill(0); - } -} -class Stack { - heap; - length; - // private constructor - static #constructing = false; - static create(max) { - const HeapCls = getUintArray(max); - if (!HeapCls) - return []; - Stack.#constructing = true; - const s = new Stack(max, HeapCls); - Stack.#constructing = false; - return s; - } - constructor(max, HeapCls) { - /* c8 ignore start */ - if (!Stack.#constructing) { - throw new TypeError('instantiate Stack using Stack.create(n)'); - } - /* c8 ignore stop */ - this.heap = new HeapCls(max); - this.length = 0; - } - push(n) { - this.heap[this.length++] = n; - } - pop() { - return this.heap[--this.length]; - } -} -/** - * Default export, the thing you're using this module to get. - * - * The `K` and `V` types define the key and value types, respectively. The - * optional `FC` type defines the type of the `context` object passed to - * `cache.fetch()` and `cache.memo()`. - * - * Keys and values **must not** be `null` or `undefined`. - * - * All properties from the options object (with the exception of `max`, - * `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are - * added as normal public members. (The listed options are read-only getters.) - * - * Changing any of these will alter the defaults for subsequent method calls. - */ -export class LRUCache { - // options that cannot be changed without disaster - #max; - #maxSize; - #dispose; - #disposeAfter; - #fetchMethod; - #memoMethod; - /** - * {@link LRUCache.OptionsBase.ttl} - */ - ttl; - /** - * {@link LRUCache.OptionsBase.ttlResolution} - */ - ttlResolution; - /** - * {@link LRUCache.OptionsBase.ttlAutopurge} - */ - ttlAutopurge; - /** - * {@link LRUCache.OptionsBase.updateAgeOnGet} - */ - updateAgeOnGet; - /** - * {@link LRUCache.OptionsBase.updateAgeOnHas} - */ - updateAgeOnHas; - /** - * {@link LRUCache.OptionsBase.allowStale} - */ - allowStale; - /** - * {@link LRUCache.OptionsBase.noDisposeOnSet} - */ - noDisposeOnSet; - /** - * {@link LRUCache.OptionsBase.noUpdateTTL} - */ - noUpdateTTL; - /** - * {@link LRUCache.OptionsBase.maxEntrySize} - */ - maxEntrySize; - /** - * {@link LRUCache.OptionsBase.sizeCalculation} - */ - sizeCalculation; - /** - * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection} - */ - noDeleteOnFetchRejection; - /** - * {@link LRUCache.OptionsBase.noDeleteOnStaleGet} - */ - noDeleteOnStaleGet; - /** - * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort} - */ - allowStaleOnFetchAbort; - /** - * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} - */ - allowStaleOnFetchRejection; - /** - * {@link LRUCache.OptionsBase.ignoreFetchAbort} - */ - ignoreFetchAbort; - // computed properties - #size; - #calculatedSize; - #keyMap; - #keyList; - #valList; - #next; - #prev; - #head; - #tail; - #free; - #disposed; - #sizes; - #starts; - #ttls; - #hasDispose; - #hasFetchMethod; - #hasDisposeAfter; - /** - * Do not call this method unless you need to inspect the - * inner workings of the cache. If anything returned by this - * object is modified in any way, strange breakage may occur. - * - * These fields are private for a reason! 
- * - * @internal - */ - static unsafeExposeInternals(c) { - return { - // properties - starts: c.#starts, - ttls: c.#ttls, - sizes: c.#sizes, - keyMap: c.#keyMap, - keyList: c.#keyList, - valList: c.#valList, - next: c.#next, - prev: c.#prev, - get head() { - return c.#head; - }, - get tail() { - return c.#tail; - }, - free: c.#free, - // methods - isBackgroundFetch: (p) => c.#isBackgroundFetch(p), - backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context), - moveToTail: (index) => c.#moveToTail(index), - indexes: (options) => c.#indexes(options), - rindexes: (options) => c.#rindexes(options), - isStale: (index) => c.#isStale(index), - }; - } - // Protected read-only members - /** - * {@link LRUCache.OptionsBase.max} (read-only) - */ - get max() { - return this.#max; - } - /** - * {@link LRUCache.OptionsBase.maxSize} (read-only) - */ - get maxSize() { - return this.#maxSize; - } - /** - * The total computed size of items in the cache (read-only) - */ - get calculatedSize() { - return this.#calculatedSize; - } - /** - * The number of items stored in the cache (read-only) - */ - get size() { - return this.#size; - } - /** - * {@link LRUCache.OptionsBase.fetchMethod} (read-only) - */ - get fetchMethod() { - return this.#fetchMethod; - } - get memoMethod() { - return this.#memoMethod; - } - /** - * {@link LRUCache.OptionsBase.dispose} (read-only) - */ - get dispose() { - return this.#dispose; - } - /** - * {@link LRUCache.OptionsBase.disposeAfter} (read-only) - */ - get disposeAfter() { - return this.#disposeAfter; - } - constructor(options) { - const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options; - if (max !== 0 && !isPosInt(max)) { - throw new TypeError('max option must be a nonnegative integer'); - } - const UintArray = max ? 
getUintArray(max) : Array; - if (!UintArray) { - throw new Error('invalid max value: ' + max); - } - this.#max = max; - this.#maxSize = maxSize; - this.maxEntrySize = maxEntrySize || this.#maxSize; - this.sizeCalculation = sizeCalculation; - if (this.sizeCalculation) { - if (!this.#maxSize && !this.maxEntrySize) { - throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize'); - } - if (typeof this.sizeCalculation !== 'function') { - throw new TypeError('sizeCalculation set to non-function'); - } - } - if (memoMethod !== undefined && - typeof memoMethod !== 'function') { - throw new TypeError('memoMethod must be a function if defined'); - } - this.#memoMethod = memoMethod; - if (fetchMethod !== undefined && - typeof fetchMethod !== 'function') { - throw new TypeError('fetchMethod must be a function if specified'); - } - this.#fetchMethod = fetchMethod; - this.#hasFetchMethod = !!fetchMethod; - this.#keyMap = new Map(); - this.#keyList = new Array(max).fill(undefined); - this.#valList = new Array(max).fill(undefined); - this.#next = new UintArray(max); - this.#prev = new UintArray(max); - this.#head = 0; - this.#tail = 0; - this.#free = Stack.create(max); - this.#size = 0; - this.#calculatedSize = 0; - if (typeof dispose === 'function') { - this.#dispose = dispose; - } - if (typeof disposeAfter === 'function') { - this.#disposeAfter = disposeAfter; - this.#disposed = []; - } - else { - this.#disposeAfter = undefined; - this.#disposed = undefined; - } - this.#hasDispose = !!this.#dispose; - this.#hasDisposeAfter = !!this.#disposeAfter; - this.noDisposeOnSet = !!noDisposeOnSet; - this.noUpdateTTL = !!noUpdateTTL; - this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection; - this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection; - this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort; - this.ignoreFetchAbort = !!ignoreFetchAbort; - // NB: maxEntrySize is set to maxSize if it's set - if (this.maxEntrySize !== 0) { - if (this.#maxSize !== 0) { - if (!isPosInt(this.#maxSize)) { - throw new TypeError('maxSize must be a positive integer if specified'); - } - } - if (!isPosInt(this.maxEntrySize)) { - throw new TypeError('maxEntrySize must be a positive integer if specified'); - } - this.#initializeSizeTracking(); - } - this.allowStale = !!allowStale; - this.noDeleteOnStaleGet = !!noDeleteOnStaleGet; - this.updateAgeOnGet = !!updateAgeOnGet; - this.updateAgeOnHas = !!updateAgeOnHas; - this.ttlResolution = - isPosInt(ttlResolution) || ttlResolution === 0 - ? ttlResolution - : 1; - this.ttlAutopurge = !!ttlAutopurge; - this.ttl = ttl || 0; - if (this.ttl) { - if (!isPosInt(this.ttl)) { - throw new TypeError('ttl must be a positive integer if specified'); - } - this.#initializeTTLTracking(); - } - // do not allow completely unbounded caches - if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) { - throw new TypeError('At least one of max, maxSize, or ttl is required'); - } - if (!this.ttlAutopurge && !this.#max && !this.#maxSize) { - const code = 'LRU_CACHE_UNBOUNDED'; - if (shouldWarn(code)) { - warned.add(code); - const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' + - 'result in unbounded memory consumption.'; - emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache); - } - } - } - /** - * Return the number of ms left in the item's TTL. If item is not in cache, - * returns `0`. Returns `Infinity` if item is in cache without a defined TTL. - */ - getRemainingTTL(key) { - return this.#keyMap.has(key) ? 
Infinity : 0; - } - #initializeTTLTracking() { - const ttls = new ZeroArray(this.#max); - const starts = new ZeroArray(this.#max); - this.#ttls = ttls; - this.#starts = starts; - this.#setItemTTL = (index, ttl, start = perf.now()) => { - starts[index] = ttl !== 0 ? start : 0; - ttls[index] = ttl; - if (ttl !== 0 && this.ttlAutopurge) { - const t = setTimeout(() => { - if (this.#isStale(index)) { - this.#delete(this.#keyList[index], 'expire'); - } - }, ttl + 1); - // unref() not supported on all platforms - /* c8 ignore start */ - if (t.unref) { - t.unref(); - } - /* c8 ignore stop */ - } - }; - this.#updateItemAge = index => { - starts[index] = ttls[index] !== 0 ? perf.now() : 0; - }; - this.#statusTTL = (status, index) => { - if (ttls[index]) { - const ttl = ttls[index]; - const start = starts[index]; - /* c8 ignore next */ - if (!ttl || !start) - return; - status.ttl = ttl; - status.start = start; - status.now = cachedNow || getNow(); - const age = status.now - start; - status.remainingTTL = ttl - age; - } - }; - // debounce calls to perf.now() to 1s so we're not hitting - // that costly call repeatedly. - let cachedNow = 0; - const getNow = () => { - const n = perf.now(); - if (this.ttlResolution > 0) { - cachedNow = n; - const t = setTimeout(() => (cachedNow = 0), this.ttlResolution); - // not available on all platforms - /* c8 ignore start */ - if (t.unref) { - t.unref(); - } - /* c8 ignore stop */ - } - return n; - }; - this.getRemainingTTL = key => { - const index = this.#keyMap.get(key); - if (index === undefined) { - return 0; - } - const ttl = ttls[index]; - const start = starts[index]; - if (!ttl || !start) { - return Infinity; - } - const age = (cachedNow || getNow()) - start; - return ttl - age; - }; - this.#isStale = index => { - const s = starts[index]; - const t = ttls[index]; - return !!t && !!s && (cachedNow || getNow()) - s > t; - }; - } - // conditionally set private methods related to TTL - #updateItemAge = () => { }; - #statusTTL = () => { }; - #setItemTTL = () => { }; - /* c8 ignore stop */ - #isStale = () => false; - #initializeSizeTracking() { - const sizes = new ZeroArray(this.#max); - this.#calculatedSize = 0; - this.#sizes = sizes; - this.#removeItemSize = index => { - this.#calculatedSize -= sizes[index]; - sizes[index] = 0; - }; - this.#requireSize = (k, v, size, sizeCalculation) => { - // provisionally accept background fetches. - // actual value size will be checked when they return. - if (this.#isBackgroundFetch(v)) { - return 0; - } - if (!isPosInt(size)) { - if (sizeCalculation) { - if (typeof sizeCalculation !== 'function') { - throw new TypeError('sizeCalculation must be a function'); - } - size = sizeCalculation(v, k); - if (!isPosInt(size)) { - throw new TypeError('sizeCalculation return invalid (expect positive integer)'); - } - } - else { - throw new TypeError('invalid size value (must be positive integer). 
' + - 'When maxSize or maxEntrySize is used, sizeCalculation ' + - 'or size must be set.'); - } - } - return size; - }; - this.#addItemSize = (index, size, status) => { - sizes[index] = size; - if (this.#maxSize) { - const maxSize = this.#maxSize - sizes[index]; - while (this.#calculatedSize > maxSize) { - this.#evict(true); - } - } - this.#calculatedSize += sizes[index]; - if (status) { - status.entrySize = size; - status.totalCalculatedSize = this.#calculatedSize; - } - }; - } - #removeItemSize = _i => { }; - #addItemSize = (_i, _s, _st) => { }; - #requireSize = (_k, _v, size, sizeCalculation) => { - if (size || sizeCalculation) { - throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache'); - } - return 0; - }; - *#indexes({ allowStale = this.allowStale } = {}) { - if (this.#size) { - for (let i = this.#tail; true;) { - if (!this.#isValidIndex(i)) { - break; - } - if (allowStale || !this.#isStale(i)) { - yield i; - } - if (i === this.#head) { - break; - } - else { - i = this.#prev[i]; - } - } - } - } - *#rindexes({ allowStale = this.allowStale } = {}) { - if (this.#size) { - for (let i = this.#head; true;) { - if (!this.#isValidIndex(i)) { - break; - } - if (allowStale || !this.#isStale(i)) { - yield i; - } - if (i === this.#tail) { - break; - } - else { - i = this.#next[i]; - } - } - } - } - #isValidIndex(index) { - return (index !== undefined && - this.#keyMap.get(this.#keyList[index]) === index); - } - /** - * Return a generator yielding `[key, value]` pairs, - * in order from most recently used to least recently used. - */ - *entries() { - for (const i of this.#indexes()) { - if (this.#valList[i] !== undefined && - this.#keyList[i] !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield [this.#keyList[i], this.#valList[i]]; - } - } - } - /** - * Inverse order version of {@link LRUCache.entries} - * - * Return a generator yielding `[key, value]` pairs, - * in order from least recently used to most recently used. - */ - *rentries() { - for (const i of this.#rindexes()) { - if (this.#valList[i] !== undefined && - this.#keyList[i] !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield [this.#keyList[i], this.#valList[i]]; - } - } - } - /** - * Return a generator yielding the keys in the cache, - * in order from most recently used to least recently used. - */ - *keys() { - for (const i of this.#indexes()) { - const k = this.#keyList[i]; - if (k !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield k; - } - } - } - /** - * Inverse order version of {@link LRUCache.keys} - * - * Return a generator yielding the keys in the cache, - * in order from least recently used to most recently used. - */ - *rkeys() { - for (const i of this.#rindexes()) { - const k = this.#keyList[i]; - if (k !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield k; - } - } - } - /** - * Return a generator yielding the values in the cache, - * in order from most recently used to least recently used. - */ - *values() { - for (const i of this.#indexes()) { - const v = this.#valList[i]; - if (v !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield this.#valList[i]; - } - } - } - /** - * Inverse order version of {@link LRUCache.values} - * - * Return a generator yielding the values in the cache, - * in order from least recently used to most recently used. 
- */ - *rvalues() { - for (const i of this.#rindexes()) { - const v = this.#valList[i]; - if (v !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield this.#valList[i]; - } - } - } - /** - * Iterating over the cache itself yields the same results as - * {@link LRUCache.entries} - */ - [Symbol.iterator]() { - return this.entries(); - } - /** - * A String value that is used in the creation of the default string - * description of an object. Called by the built-in method - * `Object.prototype.toString`. - */ - [Symbol.toStringTag] = 'LRUCache'; - /** - * Find a value for which the supplied fn method returns a truthy value, - * similar to `Array.find()`. fn is called as `fn(value, key, cache)`. - */ - find(fn, getOptions = {}) { - for (const i of this.#indexes()) { - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined) - continue; - if (fn(value, this.#keyList[i], this)) { - return this.get(this.#keyList[i], getOptions); - } - } - } - /** - * Call the supplied function on each item in the cache, in order from most - * recently used to least recently used. - * - * `fn` is called as `fn(value, key, cache)`. - * - * If `thisp` is provided, function will be called in the `this`-context of - * the provided object, or the cache if no `thisp` object is provided. - * - * Does not update age or recenty of use, or iterate over stale values. - */ - forEach(fn, thisp = this) { - for (const i of this.#indexes()) { - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined) - continue; - fn.call(thisp, value, this.#keyList[i], this); - } - } - /** - * The same as {@link LRUCache.forEach} but items are iterated over in - * reverse order. (ie, less recently used items are iterated over first.) - */ - rforEach(fn, thisp = this) { - for (const i of this.#rindexes()) { - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined) - continue; - fn.call(thisp, value, this.#keyList[i], this); - } - } - /** - * Delete any stale entries. Returns true if anything was removed, - * false otherwise. - */ - purgeStale() { - let deleted = false; - for (const i of this.#rindexes({ allowStale: true })) { - if (this.#isStale(i)) { - this.#delete(this.#keyList[i], 'expire'); - deleted = true; - } - } - return deleted; - } - /** - * Get the extended info about a given entry, to get its value, size, and - * TTL info simultaneously. Returns `undefined` if the key is not present. - * - * Unlike {@link LRUCache#dump}, which is designed to be portable and survive - * serialization, the `start` value is always the current timestamp, and the - * `ttl` is a calculated remaining time to live (negative if expired). - * - * Always returns stale values, if their info is found in the cache, so be - * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl}) - * if relevant. - */ - info(key) { - const i = this.#keyMap.get(key); - if (i === undefined) - return undefined; - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? 
v.__staleWhileFetching - : v; - if (value === undefined) - return undefined; - const entry = { value }; - if (this.#ttls && this.#starts) { - const ttl = this.#ttls[i]; - const start = this.#starts[i]; - if (ttl && start) { - const remain = ttl - (perf.now() - start); - entry.ttl = remain; - entry.start = Date.now(); - } - } - if (this.#sizes) { - entry.size = this.#sizes[i]; - } - return entry; - } - /** - * Return an array of [key, {@link LRUCache.Entry}] tuples which can be - * passed to {@link LRLUCache#load}. - * - * The `start` fields are calculated relative to a portable `Date.now()` - * timestamp, even if `performance.now()` is available. - * - * Stale entries are always included in the `dump`, even if - * {@link LRUCache.OptionsBase.allowStale} is false. - * - * Note: this returns an actual array, not a generator, so it can be more - * easily passed around. - */ - dump() { - const arr = []; - for (const i of this.#indexes({ allowStale: true })) { - const key = this.#keyList[i]; - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined || key === undefined) - continue; - const entry = { value }; - if (this.#ttls && this.#starts) { - entry.ttl = this.#ttls[i]; - // always dump the start relative to a portable timestamp - // it's ok for this to be a bit slow, it's a rare operation. - const age = perf.now() - this.#starts[i]; - entry.start = Math.floor(Date.now() - age); - } - if (this.#sizes) { - entry.size = this.#sizes[i]; - } - arr.unshift([key, entry]); - } - return arr; - } - /** - * Reset the cache and load in the items in entries in the order listed. - * - * The shape of the resulting cache may be different if the same options are - * not used in both caches. - * - * The `start` fields are assumed to be calculated relative to a portable - * `Date.now()` timestamp, even if `performance.now()` is available. - */ - load(arr) { - this.clear(); - for (const [key, entry] of arr) { - if (entry.start) { - // entry.start is a portable timestamp, but we may be using - // node's performance.now(), so calculate the offset, so that - // we get the intended remaining TTL, no matter how long it's - // been on ice. - // - // it's ok for this to be a bit slow, it's a rare operation. - const age = Date.now() - entry.start; - entry.start = perf.now() - age; - } - this.set(key, entry.value, entry); - } - } - /** - * Add a value to the cache. - * - * Note: if `undefined` is specified as a value, this is an alias for - * {@link LRUCache#delete} - * - * Fields on the {@link LRUCache.SetOptions} options param will override - * their corresponding values in the constructor options for the scope - * of this single `set()` operation. - * - * If `start` is provided, then that will set the effective start - * time for the TTL calculation. Note that this must be a previous - * value of `performance.now()` if supported, or a previous value of - * `Date.now()` if not. - * - * Options object may also include `size`, which will prevent - * calling the `sizeCalculation` function and just use the specified - * number if it is a positive integer, and `noDisposeOnSet` which - * will prevent calling a `dispose` function in the case of - * overwrites. - * - * If the `size` (or return value of `sizeCalculation`) for a given - * entry is greater than `maxEntrySize`, then the item will not be - * added to the cache. - * - * Will update the recency of the entry. 
- * - * If the value is `undefined`, then this is an alias for - * `cache.delete(key)`. `undefined` is never stored in the cache. - */ - set(k, v, setOptions = {}) { - if (v === undefined) { - this.delete(k); - return this; - } - const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions; - let { noUpdateTTL = this.noUpdateTTL } = setOptions; - const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation); - // if the item doesn't fit, don't do anything - // NB: maxEntrySize set to maxSize by default - if (this.maxEntrySize && size > this.maxEntrySize) { - if (status) { - status.set = 'miss'; - status.maxEntrySizeExceeded = true; - } - // have to delete, in case something is there already. - this.#delete(k, 'set'); - return this; - } - let index = this.#size === 0 ? undefined : this.#keyMap.get(k); - if (index === undefined) { - // addition - index = (this.#size === 0 - ? this.#tail - : this.#free.length !== 0 - ? this.#free.pop() - : this.#size === this.#max - ? this.#evict(false) - : this.#size); - this.#keyList[index] = k; - this.#valList[index] = v; - this.#keyMap.set(k, index); - this.#next[this.#tail] = index; - this.#prev[index] = this.#tail; - this.#tail = index; - this.#size++; - this.#addItemSize(index, size, status); - if (status) - status.set = 'add'; - noUpdateTTL = false; - } - else { - // update - this.#moveToTail(index); - const oldVal = this.#valList[index]; - if (v !== oldVal) { - if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) { - oldVal.__abortController.abort(new Error('replaced')); - const { __staleWhileFetching: s } = oldVal; - if (s !== undefined && !noDisposeOnSet) { - if (this.#hasDispose) { - this.#dispose?.(s, k, 'set'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([s, k, 'set']); - } - } - } - else if (!noDisposeOnSet) { - if (this.#hasDispose) { - this.#dispose?.(oldVal, k, 'set'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([oldVal, k, 'set']); - } - } - this.#removeItemSize(index); - this.#addItemSize(index, size, status); - this.#valList[index] = v; - if (status) { - status.set = 'replace'; - const oldValue = oldVal && this.#isBackgroundFetch(oldVal) - ? oldVal.__staleWhileFetching - : oldVal; - if (oldValue !== undefined) - status.oldValue = oldValue; - } - } - else if (status) { - status.set = 'update'; - } - } - if (ttl !== 0 && !this.#ttls) { - this.#initializeTTLTracking(); - } - if (this.#ttls) { - if (!noUpdateTTL) { - this.#setItemTTL(index, ttl, start); - } - if (status) - this.#statusTTL(status, index); - } - if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - return this; - } - /** - * Evict the least recently used item, returning its value or - * `undefined` if cache is empty. 
- */ - pop() { - try { - while (this.#size) { - const val = this.#valList[this.#head]; - this.#evict(true); - if (this.#isBackgroundFetch(val)) { - if (val.__staleWhileFetching) { - return val.__staleWhileFetching; - } - } - else if (val !== undefined) { - return val; - } - } - } - finally { - if (this.#hasDisposeAfter && this.#disposed) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - } - } - #evict(free) { - const head = this.#head; - const k = this.#keyList[head]; - const v = this.#valList[head]; - if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) { - v.__abortController.abort(new Error('evicted')); - } - else if (this.#hasDispose || this.#hasDisposeAfter) { - if (this.#hasDispose) { - this.#dispose?.(v, k, 'evict'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([v, k, 'evict']); - } - } - this.#removeItemSize(head); - // if we aren't about to use the index, then null these out - if (free) { - this.#keyList[head] = undefined; - this.#valList[head] = undefined; - this.#free.push(head); - } - if (this.#size === 1) { - this.#head = this.#tail = 0; - this.#free.length = 0; - } - else { - this.#head = this.#next[head]; - } - this.#keyMap.delete(k); - this.#size--; - return head; - } - /** - * Check if a key is in the cache, without updating the recency of use. - * Will return false if the item is stale, even though it is technically - * in the cache. - * - * Check if a key is in the cache, without updating the recency of - * use. Age is updated if {@link LRUCache.OptionsBase.updateAgeOnHas} is set - * to `true` in either the options or the constructor. - * - * Will return `false` if the item is stale, even though it is technically in - * the cache. The difference can be determined (if it matters) by using a - * `status` argument, and inspecting the `has` field. - * - * Will not update item age unless - * {@link LRUCache.OptionsBase.updateAgeOnHas} is set. - */ - has(k, hasOptions = {}) { - const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions; - const index = this.#keyMap.get(k); - if (index !== undefined) { - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v) && - v.__staleWhileFetching === undefined) { - return false; - } - if (!this.#isStale(index)) { - if (updateAgeOnHas) { - this.#updateItemAge(index); - } - if (status) { - status.has = 'hit'; - this.#statusTTL(status, index); - } - return true; - } - else if (status) { - status.has = 'stale'; - this.#statusTTL(status, index); - } - } - else if (status) { - status.has = 'miss'; - } - return false; - } - /** - * Like {@link LRUCache#get} but doesn't update recency or delete stale - * items. - * - * Returns `undefined` if the item is stale, unless - * {@link LRUCache.OptionsBase.allowStale} is set. - */ - peek(k, peekOptions = {}) { - const { allowStale = this.allowStale } = peekOptions; - const index = this.#keyMap.get(k); - if (index === undefined || - (!allowStale && this.#isStale(index))) { - return; - } - const v = this.#valList[index]; - // either stale and allowed, or forcing a refresh of non-stale value - return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; - } - #backgroundFetch(k, index, options, context) { - const v = index === undefined ? undefined : this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - return v; - } - const ac = new AC(); - const { signal } = options; - // when/if our AC signals, then stop listening to theirs. 
- signal?.addEventListener('abort', () => ac.abort(signal.reason), { - signal: ac.signal, - }); - const fetchOpts = { - signal: ac.signal, - options, - context, - }; - const cb = (v, updateCache = false) => { - const { aborted } = ac.signal; - const ignoreAbort = options.ignoreFetchAbort && v !== undefined; - if (options.status) { - if (aborted && !updateCache) { - options.status.fetchAborted = true; - options.status.fetchError = ac.signal.reason; - if (ignoreAbort) - options.status.fetchAbortIgnored = true; - } - else { - options.status.fetchResolved = true; - } - } - if (aborted && !ignoreAbort && !updateCache) { - return fetchFail(ac.signal.reason); - } - // either we didn't abort, and are still here, or we did, and ignored - const bf = p; - if (this.#valList[index] === p) { - if (v === undefined) { - if (bf.__staleWhileFetching) { - this.#valList[index] = bf.__staleWhileFetching; - } - else { - this.#delete(k, 'fetch'); - } - } - else { - if (options.status) - options.status.fetchUpdated = true; - this.set(k, v, fetchOpts.options); - } - } - return v; - }; - const eb = (er) => { - if (options.status) { - options.status.fetchRejected = true; - options.status.fetchError = er; - } - return fetchFail(er); - }; - const fetchFail = (er) => { - const { aborted } = ac.signal; - const allowStaleAborted = aborted && options.allowStaleOnFetchAbort; - const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection; - const noDelete = allowStale || options.noDeleteOnFetchRejection; - const bf = p; - if (this.#valList[index] === p) { - // if we allow stale on fetch rejections, then we need to ensure that - // the stale value is not removed from the cache when the fetch fails. - const del = !noDelete || bf.__staleWhileFetching === undefined; - if (del) { - this.#delete(k, 'fetch'); - } - else if (!allowStaleAborted) { - // still replace the *promise* with the stale value, - // since we are done with the promise at this point. - // leave it untouched if we're still waiting for an - // aborted background fetch that hasn't yet returned. - this.#valList[index] = bf.__staleWhileFetching; - } - } - if (allowStale) { - if (options.status && bf.__staleWhileFetching !== undefined) { - options.status.returnedStale = true; - } - return bf.__staleWhileFetching; - } - else if (bf.__returned === bf) { - throw er; - } - }; - const pcall = (res, rej) => { - const fmp = this.#fetchMethod?.(k, v, fetchOpts); - if (fmp && fmp instanceof Promise) { - fmp.then(v => res(v === undefined ? undefined : v), rej); - } - // ignored, we go until we finish, regardless. - // defer check until we are actually aborting, - // so fetchMethod can override. - ac.signal.addEventListener('abort', () => { - if (!options.ignoreFetchAbort || - options.allowStaleOnFetchAbort) { - res(undefined); - // when it eventually resolves, update the cache. - if (options.allowStaleOnFetchAbort) { - res = v => cb(v, true); - } - } - }); - }; - if (options.status) - options.status.fetchDispatched = true; - const p = new Promise(pcall).then(cb, eb); - const bf = Object.assign(p, { - __abortController: ac, - __staleWhileFetching: v, - __returned: undefined, - }); - if (index === undefined) { - // internal, don't expose status. 
- this.set(k, bf, { ...fetchOpts.options, status: undefined }); - index = this.#keyMap.get(k); - } - else { - this.#valList[index] = bf; - } - return bf; - } - #isBackgroundFetch(p) { - if (!this.#hasFetchMethod) - return false; - const b = p; - return (!!b && - b instanceof Promise && - b.hasOwnProperty('__staleWhileFetching') && - b.__abortController instanceof AC); - } - async fetch(k, fetchOptions = {}) { - const { - // get options - allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, - // set options - ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, - // fetch exclusive options - noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions; - if (!this.#hasFetchMethod) { - if (status) - status.fetch = 'get'; - return this.get(k, { - allowStale, - updateAgeOnGet, - noDeleteOnStaleGet, - status, - }); - } - const options = { - allowStale, - updateAgeOnGet, - noDeleteOnStaleGet, - ttl, - noDisposeOnSet, - size, - sizeCalculation, - noUpdateTTL, - noDeleteOnFetchRejection, - allowStaleOnFetchRejection, - allowStaleOnFetchAbort, - ignoreFetchAbort, - status, - signal, - }; - let index = this.#keyMap.get(k); - if (index === undefined) { - if (status) - status.fetch = 'miss'; - const p = this.#backgroundFetch(k, index, options, context); - return (p.__returned = p); - } - else { - // in cache, maybe already fetching - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - const stale = allowStale && v.__staleWhileFetching !== undefined; - if (status) { - status.fetch = 'inflight'; - if (stale) - status.returnedStale = true; - } - return stale ? v.__staleWhileFetching : (v.__returned = v); - } - // if we force a refresh, that means do NOT serve the cached value, - // unless we are already in the process of refreshing the cache. - const isStale = this.#isStale(index); - if (!forceRefresh && !isStale) { - if (status) - status.fetch = 'hit'; - this.#moveToTail(index); - if (updateAgeOnGet) { - this.#updateItemAge(index); - } - if (status) - this.#statusTTL(status, index); - return v; - } - // ok, it is stale or a forced refresh, and not already fetching. - // refresh the cache. - const p = this.#backgroundFetch(k, index, options, context); - const hasStale = p.__staleWhileFetching !== undefined; - const staleVal = hasStale && allowStale; - if (status) { - status.fetch = isStale ? 'stale' : 'refresh'; - if (staleVal && isStale) - status.returnedStale = true; - } - return staleVal ? p.__staleWhileFetching : (p.__returned = p); - } - } - async forceFetch(k, fetchOptions = {}) { - const v = await this.fetch(k, fetchOptions); - if (v === undefined) - throw new Error('fetch() returned undefined'); - return v; - } - memo(k, memoOptions = {}) { - const memoMethod = this.#memoMethod; - if (!memoMethod) { - throw new Error('no memoMethod provided to constructor'); - } - const { context, forceRefresh, ...options } = memoOptions; - const v = this.get(k, options); - if (!forceRefresh && v !== undefined) - return v; - const vv = memoMethod(k, v, { - options, - context, - }); - this.set(k, vv, options); - return vv; - } - /** - * Return a value from the cache. Will update the recency of the cache - * entry found. 
- * - * If the key is not found, get() will return `undefined`. - */ - get(k, getOptions = {}) { - const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions; - const index = this.#keyMap.get(k); - if (index !== undefined) { - const value = this.#valList[index]; - const fetching = this.#isBackgroundFetch(value); - if (status) - this.#statusTTL(status, index); - if (this.#isStale(index)) { - if (status) - status.get = 'stale'; - // delete only if not an in-flight background fetch - if (!fetching) { - if (!noDeleteOnStaleGet) { - this.#delete(k, 'expire'); - } - if (status && allowStale) - status.returnedStale = true; - return allowStale ? value : undefined; - } - else { - if (status && - allowStale && - value.__staleWhileFetching !== undefined) { - status.returnedStale = true; - } - return allowStale ? value.__staleWhileFetching : undefined; - } - } - else { - if (status) - status.get = 'hit'; - // if we're currently fetching it, we don't actually have it yet - // it's not stale, which means this isn't a staleWhileRefetching. - // If it's not stale, and fetching, AND has a __staleWhileFetching - // value, then that means the user fetched with {forceRefresh:true}, - // so it's safe to return that value. - if (fetching) { - return value.__staleWhileFetching; - } - this.#moveToTail(index); - if (updateAgeOnGet) { - this.#updateItemAge(index); - } - return value; - } - } - else if (status) { - status.get = 'miss'; - } - } - #connect(p, n) { - this.#prev[n] = p; - this.#next[p] = n; - } - #moveToTail(index) { - // if tail already, nothing to do - // if head, move head to next[index] - // else - // move next[prev[index]] to next[index] (head has no prev) - // move prev[next[index]] to prev[index] - // prev[index] = tail - // next[tail] = index - // tail = index - if (index !== this.#tail) { - if (index === this.#head) { - this.#head = this.#next[index]; - } - else { - this.#connect(this.#prev[index], this.#next[index]); - } - this.#connect(this.#tail, index); - this.#tail = index; - } - } - /** - * Deletes a key out of the cache. - * - * Returns true if the key was deleted, false otherwise. - */ - delete(k) { - return this.#delete(k, 'delete'); - } - #delete(k, reason) { - let deleted = false; - if (this.#size !== 0) { - const index = this.#keyMap.get(k); - if (index !== undefined) { - deleted = true; - if (this.#size === 1) { - this.#clear(reason); - } - else { - this.#removeItemSize(index); - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - v.__abortController.abort(new Error('deleted')); - } - else if (this.#hasDispose || this.#hasDisposeAfter) { - if (this.#hasDispose) { - this.#dispose?.(v, k, reason); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([v, k, reason]); - } - } - this.#keyMap.delete(k); - this.#keyList[index] = undefined; - this.#valList[index] = undefined; - if (index === this.#tail) { - this.#tail = this.#prev[index]; - } - else if (index === this.#head) { - this.#head = this.#next[index]; - } - else { - const pi = this.#prev[index]; - this.#next[pi] = this.#next[index]; - const ni = this.#next[index]; - this.#prev[ni] = this.#prev[index]; - } - this.#size--; - this.#free.push(index); - } - } - } - if (this.#hasDisposeAfter && this.#disposed?.length) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - return deleted; - } - /** - * Clear the cache entirely, throwing away all values. 
- */ - clear() { - return this.#clear('delete'); - } - #clear(reason) { - for (const index of this.#rindexes({ allowStale: true })) { - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - v.__abortController.abort(new Error('deleted')); - } - else { - const k = this.#keyList[index]; - if (this.#hasDispose) { - this.#dispose?.(v, k, reason); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([v, k, reason]); - } - } - } - this.#keyMap.clear(); - this.#valList.fill(undefined); - this.#keyList.fill(undefined); - if (this.#ttls && this.#starts) { - this.#ttls.fill(0); - this.#starts.fill(0); - } - if (this.#sizes) { - this.#sizes.fill(0); - } - this.#head = 0; - this.#tail = 0; - this.#free.length = 0; - this.#calculatedSize = 0; - this.#size = 0; - if (this.#hasDisposeAfter && this.#disposed) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - } -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/lru-cache/dist/esm/index.min.js b/node_modules/node-gyp/node_modules/lru-cache/dist/esm/index.min.js deleted file mode 100644 index 4571d0254e27d..0000000000000 --- a/node_modules/node-gyp/node_modules/lru-cache/dist/esm/index.min.js +++ /dev/null @@ -1,2 +0,0 @@ -var G=(l,t,e)=>{if(!t.has(l))throw TypeError("Cannot "+e)};var I=(l,t,e)=>(G(l,t,"read from private field"),e?e.call(l):t.get(l)),j=(l,t,e)=>{if(t.has(l))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(l):t.set(l,e)},x=(l,t,e,i)=>(G(l,t,"write to private field"),i?i.call(l,e):t.set(l,e),e);var T=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,P=new Set,M=typeof process=="object"&&process?process:{},H=(l,t,e,i)=>{typeof M.emitWarning=="function"?M.emitWarning(l,t,e,i):console.error(`[${e}] ${t}: ${l}`)},W=globalThis.AbortController,N=globalThis.AbortSignal;if(typeof W>"u"){N=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new N;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let l=M.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{l&&(l=!1,H("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). 
You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=l=>!P.has(l),Y=Symbol("type"),A=l=>l&&l===Math.floor(l)&&l>0&&isFinite(l),k=l=>A(l)?l<=Math.pow(2,8)?Uint8Array:l<=Math.pow(2,16)?Uint16Array:l<=Math.pow(2,32)?Uint32Array:l<=Number.MAX_SAFE_INTEGER?O:null:null,O=class extends Array{constructor(t){super(t),this.fill(0)}},z,E=class{heap;length;static create(t){let e=k(t);if(!e)return[];x(E,z,!0);let i=new E(t,e);return x(E,z,!1),i}constructor(t,e){if(!I(E,z))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},R=E;z=new WeakMap,j(R,z,!1);var D=class{#g;#f;#p;#w;#R;#W;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#S;#s;#i;#t;#l;#c;#o;#h;#_;#r;#m;#b;#u;#y;#O;#a;static unsafeExposeInternals(t){return{starts:t.#b,ttls:t.#u,sizes:t.#m,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#_,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#x(e,i,s,n),moveToTail:e=>t.#C(e),indexes:e=>t.#A(e),rindexes:e=>t.#F(e),isStale:e=>t.#d(e)}}get max(){return this.#g}get maxSize(){return this.#f}get calculatedSize(){return this.#S}get size(){return this.#n}get fetchMethod(){return this.#R}get memoMethod(){return this.#W}get dispose(){return this.#p}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,disposeAfter:m,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,memoMethod:a,noDeleteOnFetchRejection:w,noDeleteOnStaleGet:b,allowStaleOnFetchRejection:p,allowStaleOnFetchAbort:_,ignoreFetchAbort:v}=t;if(e!==0&&!A(e))throw new TypeError("max option must be a nonnegative integer");let y=e?k(e):Array;if(!y)throw new Error("invalid max value: "+e);if(this.#g=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(a!==void 0&&typeof a!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#W=a,S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#R=S,this.#O=!!S,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new y(e),this.#c=new y(e),this.#o=0,this.#h=0,this.#_=R.create(e),this.#n=0,this.#S=0,typeof g=="function"&&(this.#p=g),typeof m=="function"?(this.#w=m,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#y=!!this.#p,this.#a=!!this.#w,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!w,this.allowStaleOnFetchRejection=!!p,this.allowStaleOnFetchAbort=!!_,this.ignoreFetchAbort=!!v,this.maxEntrySize!==0){if(this.#f!==0&&!A(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!A(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if 
specified");this.#P()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!b,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=A(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!A(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#M()}if(this.#g===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#g&&!this.#f){let C="LRU_CACHE_UNBOUNDED";V(C)&&(P.add(C),H("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",C,D))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#M(){let t=new O(this.#g),e=new O(this.#g);this.#u=t,this.#b=e,this.#U=(n,h,o=T.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#d(n)&&this.#T(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?T.now():0},this.#E=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=T.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#d=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#z=()=>{};#E=()=>{};#U=()=>{};#d=()=>!1;#P(){let t=new O(this.#g);this.#S=0,this.#m=t,this.#v=e=>{this.#S-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!A(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!A(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#D=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#S>n;)this.#L(!0)}this.#S+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#S)}}#v=t=>{};#D=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#A({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#I(e)||((t||!this.#d(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#I(e)||((t||!this.#d(e))&&(yield e),e===this.#h));)e=this.#l[e]}#I(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#F({allowStale:!0}))this.#d(e)&&(this.#T(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#u&&this.#b){let h=this.#u[e],o=this.#b[e];if(h&&o){let r=h-(T.now()-o);n.ttl=r,n.start=Date.now()}}return this.#m&&(n.size=this.#m[e]),n}dump(){let t=[];for(let e of this.#A({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#u&&this.#b){h.ttl=this.#u[e];let o=T.now()-this.#b[e];h.start=Math.floor(Date.now()-o)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=T.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,m=this.#G(t,e,i.size||0,o);if(this.maxEntrySize&&m>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#T(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#_.length!==0?this.#_.pop():this.#n===this.#g?this.#L(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#n++,this.#D(f,m,r),r&&(r.set="add"),g=!1;else{this.#C(f);let u=this.#t[f];if(e!==u){if(this.#O&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 
0&&!h&&(this.#y&&this.#p?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#y&&this.#p?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#v(f),this.#D(f,m,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else r&&(r.set="update")}if(s!==0&&!this.#u&&this.#M(),this.#u&&(g||this.#U(f,s,n),r&&this.#E(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#w?.(...c)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#L(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#L(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#O&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#y||this.#a)&&(this.#y&&this.#p?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#v(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#_.push(e)),this.#n===1?(this.#o=this.#h=0,this.#_.length=0):this.#o=this.#l[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#d(n))s&&(s.has="stale",this.#E(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#E(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#d(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#x(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:a}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(a&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),a&&!w&&!S)return f(h.signal.reason);let b=c;return this.#t[e]===c&&(d===void 0?b.__staleWhileFetching?this.#t[e]=b.__staleWhileFetching:this.#T(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},m=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,a=S&&i.allowStaleOnFetchAbort,w=a||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!b||p.__staleWhileFetching===void 0?this.#T(t,"fetch"):a||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let a=this.#R?.(t,n,r);a&&a instanceof Promise&&a.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,m),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=F,F}#e(t){if(!this.#O)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:m=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:a,signal:w}=e;if(!this.#O)return a&&(a.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:a});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:m,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:a,signal:w},p=this.#s.get(t);if(p===void 0){a&&(a.fetch="miss");let _=this.#x(t,p,b,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let U=i&&_.__staleWhileFetching!==void 0;return a&&(a.fetch="inflight",U&&(a.returnedStale=!0)),U?_.__staleWhileFetching:_.__returned=_}let v=this.#d(p);if(!S&&!v)return a&&(a.fetch="hit"),this.#C(p),s&&this.#z(p),a&&this.#E(a,p),_;let y=this.#x(t,p,b,d),L=y.__staleWhileFetching!==void 0&&i;return a&&(a.fetch=v?"stale":"refresh",L&&v&&(a.returnedStale=!0)),L?y.__staleWhileFetching:y.__returned=y}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#W;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#E(h,o),this.#d(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#T(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#C(o),s&&this.#z(o),r))}else h&&(h.get="miss")}#j(t,e){this.#c[e]=t,this.#l[t]=e}#C(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#j(this.#c[t],this.#l[t]),this.#j(this.#h,t),this.#h=t)}delete(t){return this.#T(t,"delete")}#T(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#N(e);else{this.#v(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#y||this.#a)&&(this.#y&&this.#p?.(n,t,e),this.#a&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#c[s];else if(s===this.#o)this.#o=this.#l[s];else{let h=this.#c[s];this.#l[h]=this.#l[s];let o=this.#l[s];this.#c[o]=this.#c[s]}this.#n--,this.#_.push(s)}}if(this.#a&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#N("delete")}#N(t){for(let e of this.#F({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#y&&this.#p?.(i,s,t),this.#a&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#u&&this.#b&&(this.#u.fill(0),this.#b.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#_.length=0,this.#S=0,this.#n=0,this.#a&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};export{D as LRUCache};
-//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/node-gyp/node_modules/lru-cache/dist/esm/package.json b/node_modules/node-gyp/node_modules/lru-cache/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/node-gyp/node_modules/lru-cache/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/node-gyp/node_modules/lru-cache/package.json b/node_modules/node-gyp/node_modules/lru-cache/package.json
deleted file mode 100644
index f3cd4c0cc53f7..0000000000000
--- a/node_modules/node-gyp/node_modules/lru-cache/package.json
+++ /dev/null
@@ -1,116 +0,0 @@
-{
-  "name": "lru-cache",
-  "publishConfig": {
-    "tag": "legacy-v10"
-  },
-  "description": "A cache object that deletes the least-recently-used items.",
-  "version": "10.4.3",
-  "author": "Isaac Z. Schlueter ",
-  "keywords": [
-    "mru",
-    "lru",
-    "cache"
-  ],
-  "sideEffects": false,
-  "scripts": {
-    "build": "npm run prepare",
-    "prepare": "tshy && bash fixup.sh",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "test": "tap",
-    "snap": "tap",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "format": "prettier --write .",
-    "typedoc": "typedoc --tsconfig ./.tshy/esm.json ./src/*.ts",
-    "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh",
-    "prebenchmark": "npm run prepare",
-    "benchmark": "make -C benchmark",
-    "preprofile": "npm run prepare",
-    "profile": "make -C benchmark profile"
-  },
-  "main": "./dist/commonjs/index.js",
-  "types": "./dist/commonjs/index.d.ts",
-  "tshy": {
-    "exports": {
-      ".": "./src/index.ts",
-      "./min": {
-        "import": {
-          "types": "./dist/esm/index.d.ts",
-          "default": "./dist/esm/index.min.js"
-        },
-        "require": {
-          "types": "./dist/commonjs/index.d.ts",
-          "default": "./dist/commonjs/index.min.js"
-        }
-      }
-    }
-  },
-  "repository": {
-    "type": "git",
-    "url": "git://github.com/isaacs/node-lru-cache.git"
-  },
-  "devDependencies": {
-    "@types/node": "^20.2.5",
-    "@types/tap": "^15.0.6",
-    "benchmark": "^2.1.4",
-    "esbuild": "^0.17.11",
-    "eslint-config-prettier": "^8.5.0",
-    "marked": "^4.2.12",
-    "mkdirp": "^2.1.5",
-    "prettier": "^2.6.2",
-    "tap": "^20.0.3",
-    "tshy": "^2.0.0",
-    "tslib": "^2.4.0",
-    "typedoc": "^0.25.3",
-    "typescript": "^5.2.2"
-  },
-  "license": "ISC",
-  "files": [
-    "dist"
-  ],
-  "prettier": {
-    "semi": false,
-    "printWidth": 70,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "tap": {
-    "node-arg": [
-      "--expose-gc"
-    ],
-    "plugin": [
-      "@tapjs/clock"
-    ]
-  },
-  "exports": {
-    ".": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.js"
-      }
-    },
-    "./min": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.min.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.min.js"
-      }
-    }
-  },
-  "type": "module",
-  "module": "./dist/esm/index.js"
-}
diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE b/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE
deleted file mode 100644
index 1808eb2844231..0000000000000
--- a/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE
+++ /dev/null @@ -1,16 +0,0 @@ -ISC License - -Copyright 2017-2022 (c) npm, Inc. - -Permission to use, copy, modify, and/or distribute this software for -any purpose with or without fee is hereby granted, provided that the -above copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS -ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE -COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE -USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js deleted file mode 100644 index bfcfacbcc95e1..0000000000000 --- a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js +++ /dev/null @@ -1,471 +0,0 @@ -const { Request, Response } = require('minipass-fetch') -const { Minipass } = require('minipass') -const MinipassFlush = require('minipass-flush') -const cacache = require('cacache') -const url = require('url') - -const CachingMinipassPipeline = require('../pipeline.js') -const CachePolicy = require('./policy.js') -const cacheKey = require('./key.js') -const remote = require('../remote.js') - -const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop) - -// allow list for request headers that will be written to the cache index -// note: we will also store any request headers -// that are named in a response's vary header -const KEEP_REQUEST_HEADERS = [ - 'accept-charset', - 'accept-encoding', - 'accept-language', - 'accept', - 'cache-control', -] - -// allow list for response headers that will be written to the cache index -// note: we must not store the real response's age header, or when we load -// a cache policy based on the metadata it will think the cached response -// is always stale -const KEEP_RESPONSE_HEADERS = [ - 'cache-control', - 'content-encoding', - 'content-language', - 'content-type', - 'date', - 'etag', - 'expires', - 'last-modified', - 'link', - 'location', - 'pragma', - 'vary', -] - -// return an object containing all metadata to be written to the index -const getMetadata = (request, response, options) => { - const metadata = { - time: Date.now(), - url: request.url, - reqHeaders: {}, - resHeaders: {}, - - // options on which we must match the request and vary the response - options: { - compress: options.compress != null ? 
options.compress : request.compress, - }, - } - - // only save the status if it's not a 200 or 304 - if (response.status !== 200 && response.status !== 304) { - metadata.status = response.status - } - - for (const name of KEEP_REQUEST_HEADERS) { - if (request.headers.has(name)) { - metadata.reqHeaders[name] = request.headers.get(name) - } - } - - // if the request's host header differs from the host in the url - // we need to keep it, otherwise it's just noise and we ignore it - const host = request.headers.get('host') - const parsedUrl = new url.URL(request.url) - if (host && parsedUrl.host !== host) { - metadata.reqHeaders.host = host - } - - // if the response has a vary header, make sure - // we store the relevant request headers too - if (response.headers.has('vary')) { - const vary = response.headers.get('vary') - // a vary of "*" means every header causes a different response. - // in that scenario, we do not include any additional headers - // as the freshness check will always fail anyway and we don't - // want to bloat the cache indexes - if (vary !== '*') { - // copy any other request headers that will vary the response - const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/) - for (const name of varyHeaders) { - if (request.headers.has(name)) { - metadata.reqHeaders[name] = request.headers.get(name) - } - } - } - } - - for (const name of KEEP_RESPONSE_HEADERS) { - if (response.headers.has(name)) { - metadata.resHeaders[name] = response.headers.get(name) - } - } - - for (const name of options.cacheAdditionalHeaders) { - if (response.headers.has(name)) { - metadata.resHeaders[name] = response.headers.get(name) - } - } - - return metadata -} - -// symbols used to hide objects that may be lazily evaluated in a getter -const _request = Symbol('request') -const _response = Symbol('response') -const _policy = Symbol('policy') - -class CacheEntry { - constructor ({ entry, request, response, options }) { - if (entry) { - this.key = entry.key - this.entry = entry - // previous versions of this module didn't write an explicit timestamp in - // the metadata, so fall back to the entry's timestamp. 
we can't use the - // entry timestamp to determine staleness because cacache will update it - // when it verifies its data - this.entry.metadata.time = this.entry.metadata.time || this.entry.time - } else { - this.key = cacheKey(request) - } - - this.options = options - - // these properties are behind getters that lazily evaluate - this[_request] = request - this[_response] = response - this[_policy] = null - } - - // returns a CacheEntry instance that satisfies the given request - // or undefined if no existing entry satisfies - static async find (request, options) { - try { - // compacts the index and returns an array of unique entries - var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => { - const entryA = new CacheEntry({ entry: A, options }) - const entryB = new CacheEntry({ entry: B, options }) - return entryA.policy.satisfies(entryB.request) - }, { - validateEntry: (entry) => { - // clean out entries with a buggy content-encoding value - if (entry.metadata && - entry.metadata.resHeaders && - entry.metadata.resHeaders['content-encoding'] === null) { - return false - } - - // if an integrity is null, it needs to have a status specified - if (entry.integrity === null) { - return !!(entry.metadata && entry.metadata.status) - } - - return true - }, - }) - } catch (err) { - // if the compact request fails, ignore the error and return - return - } - - // a cache mode of 'reload' means to behave as though we have no cache - // on the way to the network. return undefined to allow cacheFetch to - // create a brand new request no matter what. - if (options.cache === 'reload') { - return - } - - // find the specific entry that satisfies the request - let match - for (const entry of matches) { - const _entry = new CacheEntry({ - entry, - options, - }) - - if (_entry.policy.satisfies(request)) { - match = _entry - break - } - } - - return match - } - - // if the user made a PUT/POST/PATCH then we invalidate our - // cache for the same url by deleting the index entirely - static async invalidate (request, options) { - const key = cacheKey(request) - try { - await cacache.rm.entry(options.cachePath, key, { removeFully: true }) - } catch (err) { - // ignore errors - } - } - - get request () { - if (!this[_request]) { - this[_request] = new Request(this.entry.metadata.url, { - method: 'GET', - headers: this.entry.metadata.reqHeaders, - ...this.entry.metadata.options, - }) - } - - return this[_request] - } - - get response () { - if (!this[_response]) { - this[_response] = new Response(null, { - url: this.entry.metadata.url, - counter: this.options.counter, - status: this.entry.metadata.status || 200, - headers: { - ...this.entry.metadata.resHeaders, - 'content-length': this.entry.size, - }, - }) - } - - return this[_response] - } - - get policy () { - if (!this[_policy]) { - this[_policy] = new CachePolicy({ - entry: this.entry, - request: this.request, - response: this.response, - options: this.options, - }) - } - - return this[_policy] - } - - // wraps the response in a pipeline that stores the data - // in the cache while the user consumes it - async store (status) { - // if we got a status other than 200, 301, or 308, - // or the CachePolicy forbid storage, append the - // cache status header and return it untouched - if ( - this.request.method !== 'GET' || - ![200, 301, 308].includes(this.response.status) || - !this.policy.storable() - ) { - this.response.headers.set('x-local-cache-status', 'skip') - return this.response - } - - const size = 
this.response.headers.get('content-length') - const cacheOpts = { - algorithms: this.options.algorithms, - metadata: getMetadata(this.request, this.response, this.options), - size, - integrity: this.options.integrity, - integrityEmitter: this.response.body.hasIntegrityEmitter && this.response.body, - } - - let body = null - // we only set a body if the status is a 200, redirects are - // stored as metadata only - if (this.response.status === 200) { - let cacheWriteResolve, cacheWriteReject - const cacheWritePromise = new Promise((resolve, reject) => { - cacheWriteResolve = resolve - cacheWriteReject = reject - }).catch((err) => { - body.emit('error', err) - }) - - body = new CachingMinipassPipeline({ events: ['integrity', 'size'] }, new MinipassFlush({ - flush () { - return cacheWritePromise - }, - })) - // this is always true since if we aren't reusing the one from the remote fetch, we - // are using the one from cacache - body.hasIntegrityEmitter = true - - const onResume = () => { - const tee = new Minipass() - const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts) - // re-emit the integrity and size events on our new response body so they can be reused - cacheStream.on('integrity', i => body.emit('integrity', i)) - cacheStream.on('size', s => body.emit('size', s)) - // stick a flag on here so downstream users will know if they can expect integrity events - tee.pipe(cacheStream) - // TODO if the cache write fails, log a warning but return the response anyway - // eslint-disable-next-line promise/catch-or-return - cacheStream.promise().then(cacheWriteResolve, cacheWriteReject) - body.unshift(tee) - body.unshift(this.response.body) - } - - body.once('resume', onResume) - body.once('end', () => body.removeListener('resume', onResume)) - } else { - await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts) - } - - // note: we do not set the x-local-cache-hash header because we do not know - // the hash value until after the write to the cache completes, which doesn't - // happen until after the response has been sent and it's too late to write - // the header anyway - this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) - this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) - this.response.headers.set('x-local-cache-mode', 'stream') - this.response.headers.set('x-local-cache-status', status) - this.response.headers.set('x-local-cache-time', new Date().toISOString()) - const newResponse = new Response(body, { - url: this.response.url, - status: this.response.status, - headers: this.response.headers, - counter: this.options.counter, - }) - return newResponse - } - - // use the cached data to create a response and return it - async respond (method, options, status) { - let response - if (method === 'HEAD' || [301, 308].includes(this.response.status)) { - // if the request is a HEAD, or the response is a redirect, - // then the metadata in the entry already includes everything - // we need to build a response - response = this.response - } else { - // we're responding with a full cached response, so create a body - // that reads from cacache and attach it to a new Response - const body = new Minipass() - const headers = { ...this.policy.responseHeaders() } - - const onResume = () => { - const cacheStream = cacache.get.stream.byDigest( - this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } - ) - cacheStream.on('error', async (err) => { - cacheStream.pause() - if 
(err.code === 'EINTEGRITY') { - await cacache.rm.content( - this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } - ) - } - if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') { - await CacheEntry.invalidate(this.request, this.options) - } - body.emit('error', err) - cacheStream.resume() - }) - // emit the integrity and size events based on our metadata so we're consistent - body.emit('integrity', this.entry.integrity) - body.emit('size', Number(headers['content-length'])) - cacheStream.pipe(body) - } - - body.once('resume', onResume) - body.once('end', () => body.removeListener('resume', onResume)) - response = new Response(body, { - url: this.entry.metadata.url, - counter: options.counter, - status: 200, - headers, - }) - } - - response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) - response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity)) - response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) - response.headers.set('x-local-cache-mode', 'stream') - response.headers.set('x-local-cache-status', status) - response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString()) - return response - } - - // use the provided request along with this cache entry to - // revalidate the stored response. returns a response, either - // from the cache or from the update - async revalidate (request, options) { - const revalidateRequest = new Request(request, { - headers: this.policy.revalidationHeaders(request), - }) - - try { - // NOTE: be sure to remove the headers property from the - // user supplied options, since we have already defined - // them on the new request object. if they're still in the - // options then those will overwrite the ones from the policy - var response = await remote(revalidateRequest, { - ...options, - headers: undefined, - }) - } catch (err) { - // if the network fetch fails, return the stale - // cached response unless it has a cache-control - // of 'must-revalidate' - if (!this.policy.mustRevalidate) { - return this.respond(request.method, options, 'stale') - } - - throw err - } - - if (this.policy.revalidated(revalidateRequest, response)) { - // we got a 304, write a new index to the cache and respond from cache - const metadata = getMetadata(request, response, options) - // 304 responses do not include headers that are specific to the response data - // since they do not include a body, so we copy values for headers that were - // in the old cache entry to the new one, if the new metadata does not already - // include that header - for (const name of KEEP_RESPONSE_HEADERS) { - if ( - !hasOwnProperty(metadata.resHeaders, name) && - hasOwnProperty(this.entry.metadata.resHeaders, name) - ) { - metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] - } - } - - for (const name of options.cacheAdditionalHeaders) { - const inMeta = hasOwnProperty(metadata.resHeaders, name) - const inEntry = hasOwnProperty(this.entry.metadata.resHeaders, name) - const inPolicy = hasOwnProperty(this.policy.response.headers, name) - - // if the header is in the existing entry, but it is not in the metadata - // then we need to write it to the metadata as this will refresh the on-disk cache - if (!inMeta && inEntry) { - metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] - } - // if the header is in the metadata, but not in the policy, then we need to set - // it in the policy so that it's included in the immediate response. 
future - // responses will load a new cache entry, so we don't need to change that - if (!inPolicy && inMeta) { - this.policy.response.headers[name] = metadata.resHeaders[name] - } - } - - try { - await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, { - size: this.entry.size, - metadata, - }) - } catch (err) { - // if updating the cache index fails, we ignore it and - // respond anyway - } - return this.respond(request.method, options, 'revalidated') - } - - // if we got a modified response, create a new entry based on it - const newEntry = new CacheEntry({ - request, - response, - options, - }) - - // respond with the new entry while writing it to the cache - return newEntry.store('updated') - } -} - -module.exports = CacheEntry diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js deleted file mode 100644 index 67a66573bebe6..0000000000000 --- a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js +++ /dev/null @@ -1,11 +0,0 @@ -class NotCachedError extends Error { - constructor (url) { - /* eslint-disable-next-line max-len */ - super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`) - this.code = 'ENOTCACHED' - } -} - -module.exports = { - NotCachedError, -} diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js deleted file mode 100644 index 0de49d23fb933..0000000000000 --- a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js +++ /dev/null @@ -1,49 +0,0 @@ -const { NotCachedError } = require('./errors.js') -const CacheEntry = require('./entry.js') -const remote = require('../remote.js') - -// do whatever is necessary to get a Response and return it -const cacheFetch = async (request, options) => { - // try to find a cached entry that satisfies this request - const entry = await CacheEntry.find(request, options) - if (!entry) { - // no cached result, if the cache mode is 'only-if-cached' that's a failure - if (options.cache === 'only-if-cached') { - throw new NotCachedError(request.url) - } - - // otherwise, we make a request, store it and return it - const response = await remote(request, options) - const newEntry = new CacheEntry({ request, response, options }) - return newEntry.store('miss') - } - - // we have a cached response that satisfies this request, however if the cache - // mode is 'no-cache' then we send the revalidation request no matter what - if (options.cache === 'no-cache') { - return entry.revalidate(request, options) - } - - // if the cached entry is not stale, or if the cache mode is 'force-cache' or - // 'only-if-cached' we can respond with the cached entry. set the status - // based on the result of needsRevalidation and respond - const _needsRevalidation = entry.policy.needsRevalidation(request) - if (options.cache === 'force-cache' || - options.cache === 'only-if-cached' || - !_needsRevalidation) { - return entry.respond(request.method, options, _needsRevalidation ? 
'stale' : 'hit') - } - - // if we got here, the cache entry is stale so revalidate it - return entry.revalidate(request, options) -} - -cacheFetch.invalidate = async (request, options) => { - if (!options.cachePath) { - return - } - - return CacheEntry.invalidate(request, options) -} - -module.exports = cacheFetch diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js deleted file mode 100644 index f7684d562b7fa..0000000000000 --- a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js +++ /dev/null @@ -1,17 +0,0 @@ -const { URL, format } = require('url') - -// options passed to url.format() when generating a key -const formatOptions = { - auth: false, - fragment: false, - search: true, - unicode: false, -} - -// returns a string to be used as the cache key for the Request -const cacheKey = (request) => { - const parsed = new URL(request.url) - return `make-fetch-happen:request-cache:${format(parsed, formatOptions)}` -} - -module.exports = cacheKey diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js deleted file mode 100644 index ada3c8600dae9..0000000000000 --- a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js +++ /dev/null @@ -1,161 +0,0 @@ -const CacheSemantics = require('http-cache-semantics') -const Negotiator = require('negotiator') -const ssri = require('ssri') - -// options passed to http-cache-semantics constructor -const policyOptions = { - shared: false, - ignoreCargoCult: true, -} - -// a fake empty response, used when only testing the -// request for storability -const emptyResponse = { status: 200, headers: {} } - -// returns a plain object representation of the Request -const requestObject = (request) => { - const _obj = { - method: request.method, - url: request.url, - headers: {}, - compress: request.compress, - } - - request.headers.forEach((value, key) => { - _obj.headers[key] = value - }) - - return _obj -} - -// returns a plain object representation of the Response -const responseObject = (response) => { - const _obj = { - status: response.status, - headers: {}, - } - - response.headers.forEach((value, key) => { - _obj.headers[key] = value - }) - - return _obj -} - -class CachePolicy { - constructor ({ entry, request, response, options }) { - this.entry = entry - this.request = requestObject(request) - this.response = responseObject(response) - this.options = options - this.policy = new CacheSemantics(this.request, this.response, policyOptions) - - if (this.entry) { - // if we have an entry, copy the timestamp to the _responseTime - // this is necessary because the CacheSemantics constructor forces - // the value to Date.now() which means a policy created from a - // cache entry is likely to always identify itself as stale - this.policy._responseTime = this.entry.metadata.time - } - } - - // static method to quickly determine if a request alone is storable - static storable (request, options) { - // no cachePath means no caching - if (!options.cachePath) { - return false - } - - // user explicitly asked not to cache - if (options.cache === 'no-store') { - return false - } - - // we only cache GET and HEAD requests - if (!['GET', 'HEAD'].includes(request.method)) { - return false - } - - // otherwise, let http-cache-semantics make the decision - // based on the request's headers - const policy = new 
CacheSemantics(requestObject(request), emptyResponse, policyOptions) - return policy.storable() - } - - // returns true if the policy satisfies the request - satisfies (request) { - const _req = requestObject(request) - if (this.request.headers.host !== _req.headers.host) { - return false - } - - if (this.request.compress !== _req.compress) { - return false - } - - const negotiatorA = new Negotiator(this.request) - const negotiatorB = new Negotiator(_req) - - if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) { - return false - } - - if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) { - return false - } - - if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) { - return false - } - - if (this.options.integrity) { - return ssri.parse(this.options.integrity).match(this.entry.integrity) - } - - return true - } - - // returns true if the request and response allow caching - storable () { - return this.policy.storable() - } - - // NOTE: this is a hack to avoid parsing the cache-control - // header ourselves, it returns true if the response's - // cache-control contains must-revalidate - get mustRevalidate () { - return !!this.policy._rescc['must-revalidate'] - } - - // returns true if the cached response requires revalidation - // for the given request - needsRevalidation (request) { - const _req = requestObject(request) - // force method to GET because we only cache GETs - // but can serve a HEAD from a cached GET - _req.method = 'GET' - return !this.policy.satisfiesWithoutRevalidation(_req) - } - - responseHeaders () { - return this.policy.responseHeaders() - } - - // returns a new object containing the appropriate headers - // to send a revalidation request - revalidationHeaders (request) { - const _req = requestObject(request) - return this.policy.revalidationHeaders(_req) - } - - // returns true if the request/response was revalidated - // successfully. returns false if a new response was received - revalidated (request, response) { - const _req = requestObject(request) - const _res = responseObject(response) - const policy = this.policy.revalidatedPolicy(_req, _res) - return !policy.modified - } -} - -module.exports = CachePolicy diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js deleted file mode 100644 index 233ba67e16550..0000000000000 --- a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js +++ /dev/null @@ -1,118 +0,0 @@ -'use strict' - -const { FetchError, Request, isRedirect } = require('minipass-fetch') -const url = require('url') - -const CachePolicy = require('./cache/policy.js') -const cache = require('./cache/index.js') -const remote = require('./remote.js') - -// given a Request, a Response and user options -// return true if the response is a redirect that -// can be followed. 
we throw errors that will result -// in the fetch being rejected if the redirect is -// possible but invalid for some reason -const canFollowRedirect = (request, response, options) => { - if (!isRedirect(response.status)) { - return false - } - - if (options.redirect === 'manual') { - return false - } - - if (options.redirect === 'error') { - throw new FetchError(`redirect mode is set to error: ${request.url}`, - 'no-redirect', { code: 'ENOREDIRECT' }) - } - - if (!response.headers.has('location')) { - throw new FetchError(`redirect location header missing for: ${request.url}`, - 'no-location', { code: 'EINVALIDREDIRECT' }) - } - - if (request.counter >= request.follow) { - throw new FetchError(`maximum redirect reached at: ${request.url}`, - 'max-redirect', { code: 'EMAXREDIRECT' }) - } - - return true -} - -// given a Request, a Response, and the user's options return an object -// with a new Request and a new options object that will be used for -// following the redirect -const getRedirect = (request, response, options) => { - const _opts = { ...options } - const location = response.headers.get('location') - const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url) - // Comment below is used under the following license: - /** - * @license - * Copyright (c) 2010-2012 Mikeal Rogers - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an "AS - * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language - * governing permissions and limitations under the License. - */ - - // Remove authorization if changing hostnames (but not if just - // changing ports or protocols). This matches the behavior of request: - // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138 - if (new url.URL(request.url).hostname !== redirectUrl.hostname) { - request.headers.delete('authorization') - request.headers.delete('cookie') - } - - // for POST request with 301/302 response, or any request with 303 response, - // use GET when following redirect - if ( - response.status === 303 || - (request.method === 'POST' && [301, 302].includes(response.status)) - ) { - _opts.method = 'GET' - _opts.body = null - request.headers.delete('content-length') - } - - _opts.headers = {} - request.headers.forEach((value, key) => { - _opts.headers[key] = value - }) - - _opts.counter = ++request.counter - const redirectReq = new Request(url.format(redirectUrl), _opts) - return { - request: redirectReq, - options: _opts, - } -} - -const fetch = async (request, options) => { - const response = CachePolicy.storable(request, options) - ? 
await cache(request, options) - : await remote(request, options) - - // if the request wasn't a GET or HEAD, and the response - // status is between 200 and 399 inclusive, invalidate the - // request url - if (!['GET', 'HEAD'].includes(request.method) && - response.status >= 200 && - response.status <= 399) { - await cache.invalidate(request, options) - } - - if (!canFollowRedirect(request, response, options)) { - return response - } - - const redirect = getRedirect(request, response, options) - return fetch(redirect.request, redirect.options) -} - -module.exports = fetch diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js deleted file mode 100644 index 2f12e8e1b6113..0000000000000 --- a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js +++ /dev/null @@ -1,41 +0,0 @@ -const { FetchError, Headers, Request, Response } = require('minipass-fetch') - -const configureOptions = require('./options.js') -const fetch = require('./fetch.js') - -const makeFetchHappen = (url, opts) => { - const options = configureOptions(opts) - - const request = new Request(url, options) - return fetch(request, options) -} - -makeFetchHappen.defaults = (defaultUrl, defaultOptions = {}, wrappedFetch = makeFetchHappen) => { - if (typeof defaultUrl === 'object') { - defaultOptions = defaultUrl - defaultUrl = null - } - - const defaultedFetch = (url, options = {}) => { - const finalUrl = url || defaultUrl - const finalOptions = { - ...defaultOptions, - ...options, - headers: { - ...defaultOptions.headers, - ...options.headers, - }, - } - return wrappedFetch(finalUrl, finalOptions) - } - - defaultedFetch.defaults = (defaultUrl1, defaultOptions1 = {}) => - makeFetchHappen.defaults(defaultUrl1, defaultOptions1, defaultedFetch) - return defaultedFetch -} - -module.exports = makeFetchHappen -module.exports.FetchError = FetchError -module.exports.Headers = Headers -module.exports.Request = Request -module.exports.Response = Response diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js deleted file mode 100644 index db51cc6324817..0000000000000 --- a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js +++ /dev/null @@ -1,59 +0,0 @@ -const dns = require('dns') - -const conditionalHeaders = [ - 'if-modified-since', - 'if-none-match', - 'if-unmodified-since', - 'if-match', - 'if-range', -] - -const configureOptions = (opts) => { - const { strictSSL, ...options } = { ...opts } - options.method = options.method ? 
options.method.toUpperCase() : 'GET' - - if (strictSSL === undefined || strictSSL === null) { - options.rejectUnauthorized = process.env.NODE_TLS_REJECT_UNAUTHORIZED !== '0' - } else { - options.rejectUnauthorized = strictSSL !== false - } - - if (!options.retry) { - options.retry = { retries: 0 } - } else if (typeof options.retry === 'string') { - const retries = parseInt(options.retry, 10) - if (isFinite(retries)) { - options.retry = { retries } - } else { - options.retry = { retries: 0 } - } - } else if (typeof options.retry === 'number') { - options.retry = { retries: options.retry } - } else { - options.retry = { retries: 0, ...options.retry } - } - - options.dns = { ttl: 5 * 60 * 1000, lookup: dns.lookup, ...options.dns } - - options.cache = options.cache || 'default' - if (options.cache === 'default') { - const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => { - return conditionalHeaders.includes(name.toLowerCase()) - }) - if (hasConditionalHeader) { - options.cache = 'no-store' - } - } - - options.cacheAdditionalHeaders = options.cacheAdditionalHeaders || [] - - // cacheManager is deprecated, but if it's set and - // cachePath is not we should copy it to the new field - if (options.cacheManager && !options.cachePath) { - options.cachePath = options.cacheManager - } - - return options -} - -module.exports = configureOptions diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/pipeline.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/pipeline.js deleted file mode 100644 index b1d221b2d0ce3..0000000000000 --- a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/pipeline.js +++ /dev/null @@ -1,41 +0,0 @@ -'use strict' - -const MinipassPipeline = require('minipass-pipeline') - -class CachingMinipassPipeline extends MinipassPipeline { - #events = [] - #data = new Map() - - constructor (opts, ...streams) { - // CRITICAL: do NOT pass the streams to the call to super(), this will start - // the flow of data and potentially cause the events we need to catch to emit - // before we've finished our own setup. 
instead we call super() with no args, - // finish our setup, and then push the streams into ourselves to start the - // data flow - super() - this.#events = opts.events - - /* istanbul ignore next - coverage disabled because this is pointless to test here */ - if (streams.length) { - this.push(...streams) - } - } - - on (event, handler) { - if (this.#events.includes(event) && this.#data.has(event)) { - return handler(...this.#data.get(event)) - } - - return super.on(event, handler) - } - - emit (event, ...data) { - if (this.#events.includes(event)) { - this.#data.set(event, data) - } - - return super.emit(event, ...data) - } -} - -module.exports = CachingMinipassPipeline diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js deleted file mode 100644 index 1d640e5380baa..0000000000000 --- a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js +++ /dev/null @@ -1,132 +0,0 @@ -const { Minipass } = require('minipass') -const fetch = require('minipass-fetch') -const promiseRetry = require('promise-retry') -const ssri = require('ssri') -const { log } = require('proc-log') - -const CachingMinipassPipeline = require('./pipeline.js') -const { getAgent } = require('@npmcli/agent') -const pkg = require('../package.json') - -const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})` - -const RETRY_ERRORS = [ - 'ECONNRESET', // remote socket closed on us - 'ECONNREFUSED', // remote host refused to open connection - 'EADDRINUSE', // failed to bind to a local port (proxy?) - 'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW - // from @npmcli/agent - 'ECONNECTIONTIMEOUT', - 'EIDLETIMEOUT', - 'ERESPONSETIMEOUT', - 'ETRANSFERTIMEOUT', - // Known codes we do NOT retry on: - // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline) - // EINVALIDPROXY // invalid protocol from @npmcli/agent - // EINVALIDRESPONSE // invalid status code from @npmcli/agent -] - -const RETRY_TYPES = [ - 'request-timeout', -] - -// make a request directly to the remote source, -// retrying certain classes of errors as well as -// following redirects (through the cache if necessary) -// and verifying response integrity -const remoteFetch = (request, options) => { - // options.signal is intended for the fetch itself, not the agent. Attaching it to the agent will re-use that signal across multiple requests, which prevents any connections beyond the first one. - const agent = getAgent(request.url, { ...options, signal: undefined }) - if (!request.headers.has('connection')) { - request.headers.set('connection', agent ? 
'keep-alive' : 'close') - } - - if (!request.headers.has('user-agent')) { - request.headers.set('user-agent', USER_AGENT) - } - - // keep our own options since we're overriding the agent - // and the redirect mode - const _opts = { - ...options, - agent, - redirect: 'manual', - } - - return promiseRetry(async (retryHandler, attemptNum) => { - const req = new fetch.Request(request, _opts) - try { - let res = await fetch(req, _opts) - if (_opts.integrity && res.status === 200) { - // we got a 200 response and the user has specified an expected - // integrity value, so wrap the response in an ssri stream to verify it - const integrityStream = ssri.integrityStream({ - algorithms: _opts.algorithms, - integrity: _opts.integrity, - size: _opts.size, - }) - const pipeline = new CachingMinipassPipeline({ - events: ['integrity', 'size'], - }, res.body, integrityStream) - // we also propagate the integrity and size events out to the pipeline so we can use - // this new response body as an integrityEmitter for cacache - integrityStream.on('integrity', i => pipeline.emit('integrity', i)) - integrityStream.on('size', s => pipeline.emit('size', s)) - res = new fetch.Response(pipeline, res) - // set an explicit flag so we know if our response body will emit integrity and size - res.body.hasIntegrityEmitter = true - } - - res.headers.set('x-fetch-attempts', attemptNum) - - // do not retry POST requests, or requests with a streaming body - // do retry requests with a 408, 420, 429 or 500+ status in the response - const isStream = Minipass.isStream(req.body) - const isRetriable = req.method !== 'POST' && - !isStream && - ([408, 420, 429].includes(res.status) || res.status >= 500) - - if (isRetriable) { - if (typeof options.onRetry === 'function') { - options.onRetry(res) - } - - /* eslint-disable-next-line max-len */ - log.http('fetch', `${req.method} ${req.url} attempt ${attemptNum} failed with ${res.status}`) - return retryHandler(res) - } - - return res - } catch (err) { - const code = (err.code === 'EPROMISERETRY') - ? 
err.retried.code - : err.code - - // err.retried will be the thing that was thrown from above - // if it's a response, we just got a bad status code and we - // can re-throw to allow the retry - const isRetryError = err.retried instanceof fetch.Response || - (RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type)) - - if (req.method === 'POST' || isRetryError) { - throw err - } - - if (typeof options.onRetry === 'function') { - options.onRetry(err) - } - - log.http('fetch', `${req.method} ${req.url} attempt ${attemptNum} failed with ${err.code}`) - return retryHandler(err) - } - }, options.retry).catch((err) => { - // don't reject for http errors, just return them - if (err.status >= 400 && err.type !== 'system') { - return err - } - - throw err - }) -} - -module.exports = remoteFetch diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/package.json b/node_modules/node-gyp/node_modules/make-fetch-happen/package.json deleted file mode 100644 index 054fe841f13b7..0000000000000 --- a/node_modules/node-gyp/node_modules/make-fetch-happen/package.json +++ /dev/null @@ -1,74 +0,0 @@ -{ - "name": "make-fetch-happen", - "version": "14.0.3", - "description": "Opinionated, caching, retrying fetch client", - "main": "lib/index.js", - "files": [ - "bin/", - "lib/" - ], - "scripts": { - "test": "tap", - "posttest": "npm run lint", - "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "lint": "npm run eslint", - "lintfix": "npm run eslint -- --fix", - "postlint": "template-oss-check", - "snap": "tap", - "template-oss-apply": "template-oss-apply --force" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/npm/make-fetch-happen.git" - }, - "keywords": [ - "http", - "request", - "fetch", - "mean girls", - "caching", - "cache", - "subresource integrity" - ], - "author": "GitHub Inc.", - "license": "ISC", - "dependencies": { - "@npmcli/agent": "^3.0.0", - "cacache": "^19.0.1", - "http-cache-semantics": "^4.1.1", - "minipass": "^7.0.2", - "minipass-fetch": "^4.0.0", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "negotiator": "^1.0.0", - "proc-log": "^5.0.0", - "promise-retry": "^2.0.1", - "ssri": "^12.0.0" - }, - "devDependencies": { - "@npmcli/eslint-config": "^5.0.0", - "@npmcli/template-oss": "4.23.4", - "nock": "^13.2.4", - "safe-buffer": "^5.2.1", - "standard-version": "^9.3.2", - "tap": "^16.0.0" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - }, - "tap": { - "color": 1, - "files": "test/*.js", - "check-coverage": true, - "timeout": 60, - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.23.4", - "publish": "true" - } -} diff --git a/node_modules/node-gyp/node_modules/minimatch/LICENSE b/node_modules/node-gyp/node_modules/minimatch/LICENSE deleted file mode 100644 index 1493534e60dce..0000000000000 --- a/node_modules/node-gyp/node_modules/minimatch/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) 2011-2023 Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/node-gyp/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js b/node_modules/node-gyp/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js deleted file mode 100644 index 5fc86bbd0116c..0000000000000 --- a/node_modules/node-gyp/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js +++ /dev/null @@ -1,14 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.assertValidPattern = void 0; -const MAX_PATTERN_LENGTH = 1024 * 64; -const assertValidPattern = (pattern) => { - if (typeof pattern !== 'string') { - throw new TypeError('invalid pattern'); - } - if (pattern.length > MAX_PATTERN_LENGTH) { - throw new TypeError('pattern is too long'); - } -}; -exports.assertValidPattern = assertValidPattern; -//# sourceMappingURL=assert-valid-pattern.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/minimatch/dist/commonjs/ast.js b/node_modules/node-gyp/node_modules/minimatch/dist/commonjs/ast.js deleted file mode 100644 index 7b2109625eaeb..0000000000000 --- a/node_modules/node-gyp/node_modules/minimatch/dist/commonjs/ast.js +++ /dev/null @@ -1,592 +0,0 @@ -"use strict"; -// parse a single path portion -Object.defineProperty(exports, "__esModule", { value: true }); -exports.AST = void 0; -const brace_expressions_js_1 = require("./brace-expressions.js"); -const unescape_js_1 = require("./unescape.js"); -const types = new Set(['!', '?', '+', '*', '@']); -const isExtglobType = (c) => types.has(c); -// Patterns that get prepended to bind to the start of either the -// entire string, or just a single path portion, to prevent dots -// and/or traversal patterns, when needed. -// Exts don't need the ^ or / bit, because the root binds that already. -const startNoTraversal = '(?!(?:^|/)\\.\\.?(?:$|/))'; -const startNoDot = '(?!\\.)'; -// characters that indicate a start of pattern needs the "no dots" bit, -// because a dot *might* be matched. ( is not in the list, because in -// the case of a child extglob, it will handle the prevention itself. -const addPatternStart = new Set(['[', '.']); -// cases where traversal is A-OK, no dot prevention needed -const justDots = new Set(['..', '.']); -const reSpecials = new Set('().*{}+?[]^$\\!'); -const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&'); -// any single thing other than / -const qmark = '[^/]'; -// * => any number of characters -const star = qmark + '*?'; -// use + when we need to ensure that *something* matches, because the * is -// the only thing in the path portion. -const starNoEmpty = qmark + '+?'; -// remove the \ chars that we added if we end up doing a nonmagic compare -// const deslash = (s: string) => s.replace(/\\(.)/g, '$1') -class AST { - type; - #root; - #hasMagic; - #uflag = false; - #parts = []; - #parent; - #parentIndex; - #negs; - #filledNegs = false; - #options; - #toString; - // set to true if it's an extglob with no children - // (which really means one child of '') - #emptyExt = false; - constructor(type, parent, options = {}) { - this.type = type; - // extglobs are inherently magical - if (type) - this.#hasMagic = true; - this.#parent = parent; - this.#root = this.#parent ? 
this.#parent.#root : this; - this.#options = this.#root === this ? options : this.#root.#options; - this.#negs = this.#root === this ? [] : this.#root.#negs; - if (type === '!' && !this.#root.#filledNegs) - this.#negs.push(this); - this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0; - } - get hasMagic() { - /* c8 ignore start */ - if (this.#hasMagic !== undefined) - return this.#hasMagic; - /* c8 ignore stop */ - for (const p of this.#parts) { - if (typeof p === 'string') - continue; - if (p.type || p.hasMagic) - return (this.#hasMagic = true); - } - // note: will be undefined until we generate the regexp src and find out - return this.#hasMagic; - } - // reconstructs the pattern - toString() { - if (this.#toString !== undefined) - return this.#toString; - if (!this.type) { - return (this.#toString = this.#parts.map(p => String(p)).join('')); - } - else { - return (this.#toString = - this.type + '(' + this.#parts.map(p => String(p)).join('|') + ')'); - } - } - #fillNegs() { - /* c8 ignore start */ - if (this !== this.#root) - throw new Error('should only call on root'); - if (this.#filledNegs) - return this; - /* c8 ignore stop */ - // call toString() once to fill this out - this.toString(); - this.#filledNegs = true; - let n; - while ((n = this.#negs.pop())) { - if (n.type !== '!') - continue; - // walk up the tree, appending everthing that comes AFTER parentIndex - let p = n; - let pp = p.#parent; - while (pp) { - for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) { - for (const part of n.#parts) { - /* c8 ignore start */ - if (typeof part === 'string') { - throw new Error('string part in extglob AST??'); - } - /* c8 ignore stop */ - part.copyIn(pp.#parts[i]); - } - } - p = pp; - pp = p.#parent; - } - } - return this; - } - push(...parts) { - for (const p of parts) { - if (p === '') - continue; - /* c8 ignore start */ - if (typeof p !== 'string' && !(p instanceof AST && p.#parent === this)) { - throw new Error('invalid part: ' + p); - } - /* c8 ignore stop */ - this.#parts.push(p); - } - } - toJSON() { - const ret = this.type === null - ? this.#parts.slice().map(p => (typeof p === 'string' ? p : p.toJSON())) - : [this.type, ...this.#parts.map(p => p.toJSON())]; - if (this.isStart() && !this.type) - ret.unshift([]); - if (this.isEnd() && - (this === this.#root || - (this.#root.#filledNegs && this.#parent?.type === '!'))) { - ret.push({}); - } - return ret; - } - isStart() { - if (this.#root === this) - return true; - // if (this.type) return !!this.#parent?.isStart() - if (!this.#parent?.isStart()) - return false; - if (this.#parentIndex === 0) - return true; - // if everything AHEAD of this is a negation, then it's still the "start" - const p = this.#parent; - for (let i = 0; i < this.#parentIndex; i++) { - const pp = p.#parts[i]; - if (!(pp instanceof AST && pp.type === '!')) { - return false; - } - } - return true; - } - isEnd() { - if (this.#root === this) - return true; - if (this.#parent?.type === '!') - return true; - if (!this.#parent?.isEnd()) - return false; - if (!this.type) - return this.#parent?.isEnd(); - // if not root, it'll always have a parent - /* c8 ignore start */ - const pl = this.#parent ? 
this.#parent.#parts.length : 0; - /* c8 ignore stop */ - return this.#parentIndex === pl - 1; - } - copyIn(part) { - if (typeof part === 'string') - this.push(part); - else - this.push(part.clone(this)); - } - clone(parent) { - const c = new AST(this.type, parent); - for (const p of this.#parts) { - c.copyIn(p); - } - return c; - } - static #parseAST(str, ast, pos, opt) { - let escaping = false; - let inBrace = false; - let braceStart = -1; - let braceNeg = false; - if (ast.type === null) { - // outside of a extglob, append until we find a start - let i = pos; - let acc = ''; - while (i < str.length) { - const c = str.charAt(i++); - // still accumulate escapes at this point, but we do ignore - // starts that are escaped - if (escaping || c === '\\') { - escaping = !escaping; - acc += c; - continue; - } - if (inBrace) { - if (i === braceStart + 1) { - if (c === '^' || c === '!') { - braceNeg = true; - } - } - else if (c === ']' && !(i === braceStart + 2 && braceNeg)) { - inBrace = false; - } - acc += c; - continue; - } - else if (c === '[') { - inBrace = true; - braceStart = i; - braceNeg = false; - acc += c; - continue; - } - if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') { - ast.push(acc); - acc = ''; - const ext = new AST(c, ast); - i = AST.#parseAST(str, ext, i, opt); - ast.push(ext); - continue; - } - acc += c; - } - ast.push(acc); - return i; - } - // some kind of extglob, pos is at the ( - // find the next | or ) - let i = pos + 1; - let part = new AST(null, ast); - const parts = []; - let acc = ''; - while (i < str.length) { - const c = str.charAt(i++); - // still accumulate escapes at this point, but we do ignore - // starts that are escaped - if (escaping || c === '\\') { - escaping = !escaping; - acc += c; - continue; - } - if (inBrace) { - if (i === braceStart + 1) { - if (c === '^' || c === '!') { - braceNeg = true; - } - } - else if (c === ']' && !(i === braceStart + 2 && braceNeg)) { - inBrace = false; - } - acc += c; - continue; - } - else if (c === '[') { - inBrace = true; - braceStart = i; - braceNeg = false; - acc += c; - continue; - } - if (isExtglobType(c) && str.charAt(i) === '(') { - part.push(acc); - acc = ''; - const ext = new AST(c, part); - part.push(ext); - i = AST.#parseAST(str, ext, i, opt); - continue; - } - if (c === '|') { - part.push(acc); - acc = ''; - parts.push(part); - part = new AST(null, ast); - continue; - } - if (c === ')') { - if (acc === '' && ast.#parts.length === 0) { - ast.#emptyExt = true; - } - part.push(acc); - acc = ''; - ast.push(...parts, part); - return i; - } - acc += c; - } - // unfinished extglob - // if we got here, it was a malformed extglob! not an extglob, but - // maybe something else in there. - ast.type = null; - ast.#hasMagic = undefined; - ast.#parts = [str.substring(pos - 1)]; - return i; - } - static fromGlob(pattern, options = {}) { - const ast = new AST(null, undefined, options); - AST.#parseAST(pattern, ast, 0, options); - return ast; - } - // returns the regular expression if there's magic, or the unescaped - // string if not. - toMMPattern() { - // should only be called on root - /* c8 ignore start */ - if (this !== this.#root) - return this.#root.toMMPattern(); - /* c8 ignore stop */ - const glob = this.toString(); - const [re, body, hasMagic, uflag] = this.toRegExpSource(); - // if we're in nocase mode, and not nocaseMagicOnly, then we do - // still need a regular expression if we have to case-insensitively - // match capital/lowercase characters. 
- const anyMagic = hasMagic || - this.#hasMagic || - (this.#options.nocase && - !this.#options.nocaseMagicOnly && - glob.toUpperCase() !== glob.toLowerCase()); - if (!anyMagic) { - return body; - } - const flags = (this.#options.nocase ? 'i' : '') + (uflag ? 'u' : ''); - return Object.assign(new RegExp(`^${re}$`, flags), { - _src: re, - _glob: glob, - }); - } - get options() { - return this.#options; - } - // returns the string match, the regexp source, whether there's magic - // in the regexp (so a regular expression is required) and whether or - // not the uflag is needed for the regular expression (for posix classes) - // TODO: instead of injecting the start/end at this point, just return - // the BODY of the regexp, along with the start/end portions suitable - // for binding the start/end in either a joined full-path makeRe context - // (where we bind to (^|/), or a standalone matchPart context (where - // we bind to ^, and not /). Otherwise slashes get duped! - // - // In part-matching mode, the start is: - // - if not isStart: nothing - // - if traversal possible, but not allowed: ^(?!\.\.?$) - // - if dots allowed or not possible: ^ - // - if dots possible and not allowed: ^(?!\.) - // end is: - // - if not isEnd(): nothing - // - else: $ - // - // In full-path matching mode, we put the slash at the START of the - // pattern, so start is: - // - if first pattern: same as part-matching mode - // - if not isStart(): nothing - // - if traversal possible, but not allowed: /(?!\.\.?(?:$|/)) - // - if dots allowed or not possible: / - // - if dots possible and not allowed: /(?!\.) - // end is: - // - if last pattern, same as part-matching mode - // - else nothing - // - // Always put the (?:$|/) on negated tails, though, because that has to be - // there to bind the end of the negated pattern portion, and it's easier to - // just stick it in now rather than try to inject it later in the middle of - // the pattern. - // - // We can just always return the same end, and leave it up to the caller - // to know whether it's going to be used joined or in parts. - // And, if the start is adjusted slightly, can do the same there: - // - if not isStart: nothing - // - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$) - // - if dots allowed or not possible: (?:/|^) - // - if dots possible and not allowed: (?:/|^)(?!\.) - // - // But it's better to have a simpler binding without a conditional, for - // performance, so probably better to return both start options. - // - // Then the caller just ignores the end if it's not the first pattern, - // and the start always gets applied. - // - // But that's always going to be $ if it's the ending pattern, or nothing, - // so the caller can just attach $ at the end of the pattern when building. - // - // So the todo is: - // - better detect what kind of start is needed - // - return both flavors of starting pattern - // - attach $ at the end of the pattern when creating the actual RegExp - // - // Ah, but wait, no, that all only applies to the root when the first pattern - // is not an extglob. If the first pattern IS an extglob, then we need all - // that dot prevention biz to live in the extglob portions, because eg - // +(*|.x*) can match .xy but not .yx. - // - // So, return the two flavors if it's #root and the first child is not an - // AST, otherwise leave it to the child AST to handle it, and there, - // use the (?:^|/) style of start binding. - // - // Even simplified further: - // - Since the start for a join is eg /(?!\.) 
and the start for a part - // is ^(?!\.), we can just prepend (?!\.) to the pattern (either root - // or start or whatever) and prepend ^ or / at the Regexp construction. - toRegExpSource(allowDot) { - const dot = allowDot ?? !!this.#options.dot; - if (this.#root === this) - this.#fillNegs(); - if (!this.type) { - const noEmpty = this.isStart() && this.isEnd(); - const src = this.#parts - .map(p => { - const [re, _, hasMagic, uflag] = typeof p === 'string' - ? AST.#parseGlob(p, this.#hasMagic, noEmpty) - : p.toRegExpSource(allowDot); - this.#hasMagic = this.#hasMagic || hasMagic; - this.#uflag = this.#uflag || uflag; - return re; - }) - .join(''); - let start = ''; - if (this.isStart()) { - if (typeof this.#parts[0] === 'string') { - // this is the string that will match the start of the pattern, - // so we need to protect against dots and such. - // '.' and '..' cannot match unless the pattern is that exactly, - // even if it starts with . or dot:true is set. - const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]); - if (!dotTravAllowed) { - const aps = addPatternStart; - // check if we have a possibility of matching . or .., - // and prevent that. - const needNoTrav = - // dots are allowed, and the pattern starts with [ or . - (dot && aps.has(src.charAt(0))) || - // the pattern starts with \., and then [ or . - (src.startsWith('\\.') && aps.has(src.charAt(2))) || - // the pattern starts with \.\., and then [ or . - (src.startsWith('\\.\\.') && aps.has(src.charAt(4))); - // no need to prevent dots if it can't match a dot, or if a - // sub-pattern will be preventing it anyway. - const needNoDot = !dot && !allowDot && aps.has(src.charAt(0)); - start = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : ''; - } - } - } - // append the "end of path portion" pattern to negation tails - let end = ''; - if (this.isEnd() && - this.#root.#filledNegs && - this.#parent?.type === '!') { - end = '(?:$|\\/)'; - } - const final = start + src + end; - return [ - final, - (0, unescape_js_1.unescape)(src), - (this.#hasMagic = !!this.#hasMagic), - this.#uflag, - ]; - } - // We need to calculate the body *twice* if it's a repeat pattern - // at the start, once in nodot mode, then again in dot mode, so a - // pattern like *(?) can match 'x.y' - const repeated = this.type === '*' || this.type === '+'; - // some kind of extglob - const start = this.type === '!' ? '(?:(?!(?:' : '(?:'; - let body = this.#partsToRegExp(dot); - if (this.isStart() && this.isEnd() && !body && this.type !== '!') { - // invalid extglob, has to at least be *something* present, if it's - // the entire path portion. - const s = this.toString(); - this.#parts = [s]; - this.type = null; - this.#hasMagic = undefined; - return [s, (0, unescape_js_1.unescape)(this.toString()), false, false]; - } - // XXX abstract out this map method - let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot - ? '' - : this.#partsToRegExp(true); - if (bodyDotAllowed === body) { - bodyDotAllowed = ''; - } - if (bodyDotAllowed) { - body = `(?:${body})(?:${bodyDotAllowed})*?`; - } - // an empty !() is exactly equivalent to a starNoEmpty - let final = ''; - if (this.type === '!' && this.#emptyExt) { - final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty; - } - else { - const close = this.type === '!' - ? // !() must match something,but !(x) can match '' - '))' + - (this.isStart() && !dot && !allowDot ? startNoDot : '') + - star + - ')' - : this.type === '@' - ? ')' - : this.type === '?' - ? ')?' 
- : this.type === '+' && bodyDotAllowed - ? ')' - : this.type === '*' && bodyDotAllowed - ? `)?` - : `)${this.type}`; - final = start + body + close; - } - return [ - final, - (0, unescape_js_1.unescape)(body), - (this.#hasMagic = !!this.#hasMagic), - this.#uflag, - ]; - } - #partsToRegExp(dot) { - return this.#parts - .map(p => { - // extglob ASTs should only contain parent ASTs - /* c8 ignore start */ - if (typeof p === 'string') { - throw new Error('string type in extglob ast??'); - } - /* c8 ignore stop */ - // can ignore hasMagic, because extglobs are already always magic - const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot); - this.#uflag = this.#uflag || uflag; - return re; - }) - .filter(p => !(this.isStart() && this.isEnd()) || !!p) - .join('|'); - } - static #parseGlob(glob, hasMagic, noEmpty = false) { - let escaping = false; - let re = ''; - let uflag = false; - for (let i = 0; i < glob.length; i++) { - const c = glob.charAt(i); - if (escaping) { - escaping = false; - re += (reSpecials.has(c) ? '\\' : '') + c; - continue; - } - if (c === '\\') { - if (i === glob.length - 1) { - re += '\\\\'; - } - else { - escaping = true; - } - continue; - } - if (c === '[') { - const [src, needUflag, consumed, magic] = (0, brace_expressions_js_1.parseClass)(glob, i); - if (consumed) { - re += src; - uflag = uflag || needUflag; - i += consumed - 1; - hasMagic = hasMagic || magic; - continue; - } - } - if (c === '*') { - if (noEmpty && glob === '*') - re += starNoEmpty; - else - re += star; - hasMagic = true; - continue; - } - if (c === '?') { - re += qmark; - hasMagic = true; - continue; - } - re += regExpEscape(c); - } - return [re, (0, unescape_js_1.unescape)(glob), !!hasMagic, uflag]; - } -} -exports.AST = AST; -//# sourceMappingURL=ast.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/minimatch/dist/commonjs/brace-expressions.js b/node_modules/node-gyp/node_modules/minimatch/dist/commonjs/brace-expressions.js deleted file mode 100644 index 0e13eefc4cfee..0000000000000 --- a/node_modules/node-gyp/node_modules/minimatch/dist/commonjs/brace-expressions.js +++ /dev/null @@ -1,152 +0,0 @@ -"use strict"; -// translate the various posix character classes into unicode properties -// this works across all unicode locales -Object.defineProperty(exports, "__esModule", { value: true }); -exports.parseClass = void 0; -// { : [, /u flag required, negated] -const posixClasses = { - '[:alnum:]': ['\\p{L}\\p{Nl}\\p{Nd}', true], - '[:alpha:]': ['\\p{L}\\p{Nl}', true], - '[:ascii:]': ['\\x' + '00-\\x' + '7f', false], - '[:blank:]': ['\\p{Zs}\\t', true], - '[:cntrl:]': ['\\p{Cc}', true], - '[:digit:]': ['\\p{Nd}', true], - '[:graph:]': ['\\p{Z}\\p{C}', true, true], - '[:lower:]': ['\\p{Ll}', true], - '[:print:]': ['\\p{C}', true], - '[:punct:]': ['\\p{P}', true], - '[:space:]': ['\\p{Z}\\t\\r\\n\\v\\f', true], - '[:upper:]': ['\\p{Lu}', true], - '[:word:]': ['\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}', true], - '[:xdigit:]': ['A-Fa-f0-9', false], -}; -// only need to escape a few things inside of brace expressions -// escapes: [ \ ] - -const braceEscape = (s) => s.replace(/[[\]\\-]/g, '\\$&'); -// escape all regexp magic characters -const regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&'); -// everything has already been escaped, we just have to join -const rangesToString = (ranges) => ranges.join(''); -// takes a glob string at a posix brace expression, and returns -// an equivalent regular expression source, and boolean indicating -// whether the /u flag needs to be 
applied, and the number of chars -// consumed to parse the character class. -// This also removes out of order ranges, and returns ($.) if the -// entire class just no good. -const parseClass = (glob, position) => { - const pos = position; - /* c8 ignore start */ - if (glob.charAt(pos) !== '[') { - throw new Error('not in a brace expression'); - } - /* c8 ignore stop */ - const ranges = []; - const negs = []; - let i = pos + 1; - let sawStart = false; - let uflag = false; - let escaping = false; - let negate = false; - let endPos = pos; - let rangeStart = ''; - WHILE: while (i < glob.length) { - const c = glob.charAt(i); - if ((c === '!' || c === '^') && i === pos + 1) { - negate = true; - i++; - continue; - } - if (c === ']' && sawStart && !escaping) { - endPos = i + 1; - break; - } - sawStart = true; - if (c === '\\') { - if (!escaping) { - escaping = true; - i++; - continue; - } - // escaped \ char, fall through and treat like normal char - } - if (c === '[' && !escaping) { - // either a posix class, a collation equivalent, or just a [ - for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) { - if (glob.startsWith(cls, i)) { - // invalid, [a-[] is fine, but not [a-[:alpha]] - if (rangeStart) { - return ['$.', false, glob.length - pos, true]; - } - i += cls.length; - if (neg) - negs.push(unip); - else - ranges.push(unip); - uflag = uflag || u; - continue WHILE; - } - } - } - // now it's just a normal character, effectively - escaping = false; - if (rangeStart) { - // throw this range away if it's not valid, but others - // can still match. - if (c > rangeStart) { - ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c)); - } - else if (c === rangeStart) { - ranges.push(braceEscape(c)); - } - rangeStart = ''; - i++; - continue; - } - // now might be the start of a range. - // can be either c-d or c-] or c] or c] at this point - if (glob.startsWith('-]', i + 1)) { - ranges.push(braceEscape(c + '-')); - i += 2; - continue; - } - if (glob.startsWith('-', i + 1)) { - rangeStart = c; - i += 2; - continue; - } - // not the start of a range, just a single character - ranges.push(braceEscape(c)); - i++; - } - if (endPos < i) { - // didn't see the end of the class, not a valid class, - // but might still be valid as a literal match. - return ['', false, 0, false]; - } - // if we got no ranges and no negates, then we have a range that - // cannot possibly match anything, and that poisons the whole glob - if (!ranges.length && !negs.length) { - return ['$.', false, glob.length - pos, true]; - } - // if we got one positive range, and it's a single character, then that's - // not actually a magic pattern, it's just that one literal character. - // we should not treat that as "magic", we should just return the literal - // character. [_] is a perfectly valid way to escape glob magic chars. - if (negs.length === 0 && - ranges.length === 1 && - /^\\?.$/.test(ranges[0]) && - !negate) { - const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0]; - return [regexpEscape(r), false, endPos - pos, false]; - } - const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']'; - const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']'; - const comb = ranges.length && negs.length - ? '(' + sranges + '|' + snegs + ')' - : ranges.length - ? 
sranges - : snegs; - return [comb, uflag, endPos - pos, true]; -}; -exports.parseClass = parseClass; -//# sourceMappingURL=brace-expressions.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/minimatch/dist/commonjs/escape.js b/node_modules/node-gyp/node_modules/minimatch/dist/commonjs/escape.js deleted file mode 100644 index 02a4f8a8e0a58..0000000000000 --- a/node_modules/node-gyp/node_modules/minimatch/dist/commonjs/escape.js +++ /dev/null @@ -1,22 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.escape = void 0; -/** - * Escape all magic characters in a glob pattern. - * - * If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape} - * option is used, then characters are escaped by wrapping in `[]`, because - * a magic character wrapped in a character class can only be satisfied by - * that exact character. In this mode, `\` is _not_ escaped, because it is - * not interpreted as a magic character, but instead as a path separator. - */ -const escape = (s, { windowsPathsNoEscape = false, } = {}) => { - // don't need to escape +@! because we escape the parens - // that make those magic, and escaping ! as [!] isn't valid, - // because [!]] is a valid glob class meaning not ']'. - return windowsPathsNoEscape - ? s.replace(/[?*()[\]]/g, '[$&]') - : s.replace(/[?*()[\]\\]/g, '\\$&'); -}; -exports.escape = escape; -//# sourceMappingURL=escape.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/minimatch/dist/commonjs/index.js b/node_modules/node-gyp/node_modules/minimatch/dist/commonjs/index.js deleted file mode 100644 index 64a0f1f833222..0000000000000 --- a/node_modules/node-gyp/node_modules/minimatch/dist/commonjs/index.js +++ /dev/null @@ -1,1017 +0,0 @@ -"use strict"; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.unescape = exports.escape = exports.AST = exports.Minimatch = exports.match = exports.makeRe = exports.braceExpand = exports.defaults = exports.filter = exports.GLOBSTAR = exports.sep = exports.minimatch = void 0; -const brace_expansion_1 = __importDefault(require("brace-expansion")); -const assert_valid_pattern_js_1 = require("./assert-valid-pattern.js"); -const ast_js_1 = require("./ast.js"); -const escape_js_1 = require("./escape.js"); -const unescape_js_1 = require("./unescape.js"); -const minimatch = (p, pattern, options = {}) => { - (0, assert_valid_pattern_js_1.assertValidPattern)(pattern); - // shortcut: comments match nothing. - if (!options.nocomment && pattern.charAt(0) === '#') { - return false; - } - return new Minimatch(pattern, options).match(p); -}; -exports.minimatch = minimatch; -// Optimized checking for the most common glob patterns. -const starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/; -const starDotExtTest = (ext) => (f) => !f.startsWith('.') && f.endsWith(ext); -const starDotExtTestDot = (ext) => (f) => f.endsWith(ext); -const starDotExtTestNocase = (ext) => { - ext = ext.toLowerCase(); - return (f) => !f.startsWith('.') && f.toLowerCase().endsWith(ext); -}; -const starDotExtTestNocaseDot = (ext) => { - ext = ext.toLowerCase(); - return (f) => f.toLowerCase().endsWith(ext); -}; -const starDotStarRE = /^\*+\.\*+$/; -const starDotStarTest = (f) => !f.startsWith('.') && f.includes('.'); -const starDotStarTestDot = (f) => f !== '.' && f !== '..' 
&& f.includes('.'); -const dotStarRE = /^\.\*+$/; -const dotStarTest = (f) => f !== '.' && f !== '..' && f.startsWith('.'); -const starRE = /^\*+$/; -const starTest = (f) => f.length !== 0 && !f.startsWith('.'); -const starTestDot = (f) => f.length !== 0 && f !== '.' && f !== '..'; -const qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/; -const qmarksTestNocase = ([$0, ext = '']) => { - const noext = qmarksTestNoExt([$0]); - if (!ext) - return noext; - ext = ext.toLowerCase(); - return (f) => noext(f) && f.toLowerCase().endsWith(ext); -}; -const qmarksTestNocaseDot = ([$0, ext = '']) => { - const noext = qmarksTestNoExtDot([$0]); - if (!ext) - return noext; - ext = ext.toLowerCase(); - return (f) => noext(f) && f.toLowerCase().endsWith(ext); -}; -const qmarksTestDot = ([$0, ext = '']) => { - const noext = qmarksTestNoExtDot([$0]); - return !ext ? noext : (f) => noext(f) && f.endsWith(ext); -}; -const qmarksTest = ([$0, ext = '']) => { - const noext = qmarksTestNoExt([$0]); - return !ext ? noext : (f) => noext(f) && f.endsWith(ext); -}; -const qmarksTestNoExt = ([$0]) => { - const len = $0.length; - return (f) => f.length === len && !f.startsWith('.'); -}; -const qmarksTestNoExtDot = ([$0]) => { - const len = $0.length; - return (f) => f.length === len && f !== '.' && f !== '..'; -}; -/* c8 ignore start */ -const defaultPlatform = (typeof process === 'object' && process - ? (typeof process.env === 'object' && - process.env && - process.env.__MINIMATCH_TESTING_PLATFORM__) || - process.platform - : 'posix'); -const path = { - win32: { sep: '\\' }, - posix: { sep: '/' }, -}; -/* c8 ignore stop */ -exports.sep = defaultPlatform === 'win32' ? path.win32.sep : path.posix.sep; -exports.minimatch.sep = exports.sep; -exports.GLOBSTAR = Symbol('globstar **'); -exports.minimatch.GLOBSTAR = exports.GLOBSTAR; -// any single thing other than / -// don't need to escape / when using new RegExp() -const qmark = '[^/]'; -// * => any number of characters -const star = qmark + '*?'; -// ** when dots are allowed. Anything goes, except .. and . -// not (^ or / followed by one or two dots followed by $ or /), -// followed by anything, any number of times. -const twoStarDot = '(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?'; -// not a ^ or / followed by a dot, -// followed by anything, any number of times. 
-const twoStarNoDot = '(?:(?!(?:\\/|^)\\.).)*?'; -const filter = (pattern, options = {}) => (p) => (0, exports.minimatch)(p, pattern, options); -exports.filter = filter; -exports.minimatch.filter = exports.filter; -const ext = (a, b = {}) => Object.assign({}, a, b); -const defaults = (def) => { - if (!def || typeof def !== 'object' || !Object.keys(def).length) { - return exports.minimatch; - } - const orig = exports.minimatch; - const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options)); - return Object.assign(m, { - Minimatch: class Minimatch extends orig.Minimatch { - constructor(pattern, options = {}) { - super(pattern, ext(def, options)); - } - static defaults(options) { - return orig.defaults(ext(def, options)).Minimatch; - } - }, - AST: class AST extends orig.AST { - /* c8 ignore start */ - constructor(type, parent, options = {}) { - super(type, parent, ext(def, options)); - } - /* c8 ignore stop */ - static fromGlob(pattern, options = {}) { - return orig.AST.fromGlob(pattern, ext(def, options)); - } - }, - unescape: (s, options = {}) => orig.unescape(s, ext(def, options)), - escape: (s, options = {}) => orig.escape(s, ext(def, options)), - filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)), - defaults: (options) => orig.defaults(ext(def, options)), - makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)), - braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)), - match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)), - sep: orig.sep, - GLOBSTAR: exports.GLOBSTAR, - }); -}; -exports.defaults = defaults; -exports.minimatch.defaults = exports.defaults; -// Brace expansion: -// a{b,c}d -> abd acd -// a{b,}c -> abc ac -// a{0..3}d -> a0d a1d a2d a3d -// a{b,c{d,e}f}g -> abg acdfg acefg -// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg -// -// Invalid sets are not expanded. -// a{2..}b -> a{2..}b -// a{b}c -> a{b}c -const braceExpand = (pattern, options = {}) => { - (0, assert_valid_pattern_js_1.assertValidPattern)(pattern); - // Thanks to Yeting Li for - // improving this regexp to avoid a ReDOS vulnerability. - if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) { - // shortcut. no need to expand. - return [pattern]; - } - return (0, brace_expansion_1.default)(pattern); -}; -exports.braceExpand = braceExpand; -exports.minimatch.braceExpand = exports.braceExpand; -// parse a component of the expanded set. -// At this point, no pattern may contain "/" in it -// so we're going to return a 2d array, where each entry is the full -// pattern, split on '/', and then turned into a regular expression. -// A regexp is made at the end which joins each array with an -// escaped /, and another full one which joins each regexp with |. -// -// Following the lead of Bash 4.1, note that "**" only has special meaning -// when it is the *only* thing in a path portion. Otherwise, any series -// of * is equivalent to a single *. Globstar behavior is enabled by -// default, and can be disabled by setting options.noglobstar. 
-const makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe(); -exports.makeRe = makeRe; -exports.minimatch.makeRe = exports.makeRe; -const match = (list, pattern, options = {}) => { - const mm = new Minimatch(pattern, options); - list = list.filter(f => mm.match(f)); - if (mm.options.nonull && !list.length) { - list.push(pattern); - } - return list; -}; -exports.match = match; -exports.minimatch.match = exports.match; -// replace stuff like \* with * -const globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/; -const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&'); -class Minimatch { - options; - set; - pattern; - windowsPathsNoEscape; - nonegate; - negate; - comment; - empty; - preserveMultipleSlashes; - partial; - globSet; - globParts; - nocase; - isWindows; - platform; - windowsNoMagicRoot; - regexp; - constructor(pattern, options = {}) { - (0, assert_valid_pattern_js_1.assertValidPattern)(pattern); - options = options || {}; - this.options = options; - this.pattern = pattern; - this.platform = options.platform || defaultPlatform; - this.isWindows = this.platform === 'win32'; - this.windowsPathsNoEscape = - !!options.windowsPathsNoEscape || options.allowWindowsEscape === false; - if (this.windowsPathsNoEscape) { - this.pattern = this.pattern.replace(/\\/g, '/'); - } - this.preserveMultipleSlashes = !!options.preserveMultipleSlashes; - this.regexp = null; - this.negate = false; - this.nonegate = !!options.nonegate; - this.comment = false; - this.empty = false; - this.partial = !!options.partial; - this.nocase = !!this.options.nocase; - this.windowsNoMagicRoot = - options.windowsNoMagicRoot !== undefined - ? options.windowsNoMagicRoot - : !!(this.isWindows && this.nocase); - this.globSet = []; - this.globParts = []; - this.set = []; - // make the set of regexps etc. - this.make(); - } - hasMagic() { - if (this.options.magicalBraces && this.set.length > 1) { - return true; - } - for (const pattern of this.set) { - for (const part of pattern) { - if (typeof part !== 'string') - return true; - } - } - return false; - } - debug(..._) { } - make() { - const pattern = this.pattern; - const options = this.options; - // empty patterns and comments match nothing. - if (!options.nocomment && pattern.charAt(0) === '#') { - this.comment = true; - return; - } - if (!pattern) { - this.empty = true; - return; - } - // step 1: figure out negation, etc. - this.parseNegate(); - // step 2: expand braces - this.globSet = [...new Set(this.braceExpand())]; - if (options.debug) { - this.debug = (...args) => console.error(...args); - } - this.debug(this.pattern, this.globSet); - // step 3: now we have a set, so turn each one into a series of - // path-portion matching patterns. - // These will be regexps, except in the case of "**", which is - // set to the GLOBSTAR object for globstar behavior, - // and will not contain any / characters - // - // First, we preprocess to make the glob pattern sets a bit simpler - // and deduped. There are some perf-killing patterns that can cause - // problems with a glob walk, but we can simplify them down a bit. - const rawGlobParts = this.globSet.map(s => this.slashSplit(s)); - this.globParts = this.preprocess(rawGlobParts); - this.debug(this.pattern, this.globParts); - // glob --> regexps - let set = this.globParts.map((s, _, __) => { - if (this.isWindows && this.windowsNoMagicRoot) { - // check if it's a drive or unc path. - const isUNC = s[0] === '' && - s[1] === '' && - (s[2] === '?' 
|| !globMagic.test(s[2])) && - !globMagic.test(s[3]); - const isDrive = /^[a-z]:/i.test(s[0]); - if (isUNC) { - return [...s.slice(0, 4), ...s.slice(4).map(ss => this.parse(ss))]; - } - else if (isDrive) { - return [s[0], ...s.slice(1).map(ss => this.parse(ss))]; - } - } - return s.map(ss => this.parse(ss)); - }); - this.debug(this.pattern, set); - // filter out everything that didn't compile properly. - this.set = set.filter(s => s.indexOf(false) === -1); - // do not treat the ? in UNC paths as magic - if (this.isWindows) { - for (let i = 0; i < this.set.length; i++) { - const p = this.set[i]; - if (p[0] === '' && - p[1] === '' && - this.globParts[i][2] === '?' && - typeof p[3] === 'string' && - /^[a-z]:$/i.test(p[3])) { - p[2] = '?'; - } - } - } - this.debug(this.pattern, this.set); - } - // various transforms to equivalent pattern sets that are - // faster to process in a filesystem walk. The goal is to - // eliminate what we can, and push all ** patterns as far - // to the right as possible, even if it increases the number - // of patterns that we have to process. - preprocess(globParts) { - // if we're not in globstar mode, then turn all ** into * - if (this.options.noglobstar) { - for (let i = 0; i < globParts.length; i++) { - for (let j = 0; j < globParts[i].length; j++) { - if (globParts[i][j] === '**') { - globParts[i][j] = '*'; - } - } - } - } - const { optimizationLevel = 1 } = this.options; - if (optimizationLevel >= 2) { - // aggressive optimization for the purpose of fs walking - globParts = this.firstPhasePreProcess(globParts); - globParts = this.secondPhasePreProcess(globParts); - } - else if (optimizationLevel >= 1) { - // just basic optimizations to remove some .. parts - globParts = this.levelOneOptimize(globParts); - } - else { - // just collapse multiple ** portions into one - globParts = this.adjascentGlobstarOptimize(globParts); - } - return globParts; - } - // just get rid of adjascent ** portions - adjascentGlobstarOptimize(globParts) { - return globParts.map(parts => { - let gs = -1; - while (-1 !== (gs = parts.indexOf('**', gs + 1))) { - let i = gs; - while (parts[i + 1] === '**') { - i++; - } - if (i !== gs) { - parts.splice(gs, i - gs); - } - } - return parts; - }); - } - // get rid of adjascent ** and resolve .. portions - levelOneOptimize(globParts) { - return globParts.map(parts => { - parts = parts.reduce((set, part) => { - const prev = set[set.length - 1]; - if (part === '**' && prev === '**') { - return set; - } - if (part === '..') { - if (prev && prev !== '..' && prev !== '.' && prev !== '**') { - set.pop(); - return set; - } - } - set.push(part); - return set; - }, []); - return parts.length === 0 ? [''] : parts; - }); - } - levelTwoFileOptimize(parts) { - if (!Array.isArray(parts)) { - parts = this.slashSplit(parts); - } - let didSomething = false; - do { - didSomething = false; - //
-            // <pre>/<e>/<rest> -> <pre>/<rest>
-            if (!this.preserveMultipleSlashes) {
-                for (let i = 1; i < parts.length - 1; i++) {
-                    const p = parts[i];
-                    // don't squeeze out UNC patterns
-                    if (i === 1 && p === '' && parts[0] === '')
-                        continue;
-                    if (p === '.' || p === '') {
-                        didSomething = true;
-                        parts.splice(i, 1);
-                        i--;
-                    }
-                }
-                if (parts[0] === '.' &&
-                    parts.length === 2 &&
-                    (parts[1] === '.' || parts[1] === '')) {
-                    didSomething = true;
-                    parts.pop();
-                }
-            }
-            // <pre>/<p>/../<rest> -> <pre>/<rest>
-            let dd = 0;
-            while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
-                const p = parts[dd - 1];
-                if (p && p !== '.' && p !== '..' && p !== '**') {
-                    didSomething = true;
-                    parts.splice(dd - 1, 2);
-                    dd -= 2;
-                }
-            }
-        } while (didSomething);
-        return parts.length === 0 ? [''] : parts;
-    }
-    // First phase: single-pattern processing
-    // <pre> is 1 or more portions
-    // <rest> is 1 or more portions
-    // <p> is any portion other than ., .., '', or **
-    // <e> is . or ''
-    //
-    // **/.. is *brutal* for filesystem walking performance, because
-    // it effectively resets the recursive walk each time it occurs,
-    // and ** cannot be reduced out by a .. pattern part like a regexp
-    // or most strings (other than .., ., and '') can be.
-    //
-    // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
-    // <pre>/<e>/<rest> -> <pre>/<rest>
-    // <pre>/<p>/../<rest> -> <pre>/<rest>
-    // **/**/<rest> -> **/<rest>
-    //
-    // **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow
-    // this WOULD be allowed if ** did follow symlinks, or * didn't
-    firstPhasePreProcess(globParts) {
-        let didSomething = false;
-        do {
-            didSomething = false;
-            // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
-            for (let parts of globParts) {
-                let gs = -1;
-                while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
-                    let gss = gs;
-                    while (parts[gss + 1] === '**') {
-                        // <pre>/**/**/<rest> -> <pre>/**/<rest>
-                        gss++;
-                    }
-                    // eg, if gs is 2 and gss is 4, that means we have 3 **
-                    // parts, and can remove 2 of them.
-                    if (gss > gs) {
-                        parts.splice(gs + 1, gss - gs);
-                    }
-                    let next = parts[gs + 1];
-                    const p = parts[gs + 2];
-                    const p2 = parts[gs + 3];
-                    if (next !== '..')
-                        continue;
-                    if (!p ||
-                        p === '.' ||
-                        p === '..' ||
-                        !p2 ||
-                        p2 === '.' ||
-                        p2 === '..') {
-                        continue;
-                    }
-                    didSomething = true;
-                    // edit parts in place, and push the new one
-                    parts.splice(gs, 1);
-                    const other = parts.slice(0);
-                    other[gs] = '**';
-                    globParts.push(other);
-                    gs--;
-                }
-                // <pre>/<e>/<rest> -> <pre>/<rest>
-                if (!this.preserveMultipleSlashes) {
-                    for (let i = 1; i < parts.length - 1; i++) {
-                        const p = parts[i];
-                        // don't squeeze out UNC patterns
-                        if (i === 1 && p === '' && parts[0] === '')
-                            continue;
-                        if (p === '.' || p === '') {
-                            didSomething = true;
-                            parts.splice(i, 1);
-                            i--;
-                        }
-                    }
-                    if (parts[0] === '.' &&
-                        parts.length === 2 &&
-                        (parts[1] === '.' || parts[1] === '')) {
-                        didSomething = true;
-                        parts.pop();
-                    }
-                }
-                // <pre>/<p>/../<rest> -> <pre>/<rest>
-                let dd = 0;
-                while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
-                    const p = parts[dd - 1];
-                    if (p && p !== '.' && p !== '..' && p !== '**') {
-                        didSomething = true;
-                        const needDot = dd === 1 && parts[dd + 1] === '**';
-                        const splin = needDot ? ['.'] : [];
-                        parts.splice(dd - 1, 2, ...splin);
-                        if (parts.length === 0)
-                            parts.push('');
-                        dd -= 2;
-                    }
-                }
-            }
-        } while (didSomething);
-        return globParts;
-    }
-    // second phase: multi-pattern dedupes
-    // {<pre>/*/<rest>,<pre>/<rest>} -> <pre>/*/<rest>
-    // {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>
-    // {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>
-    //
-    // {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>
-    // ^-- not valid because ** doens't follow symlinks
-    secondPhasePreProcess(globParts) {
-        for (let i = 0; i < globParts.length - 1; i++) {
-            for (let j = i + 1; j < globParts.length; j++) {
-                const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
-                if (matched) {
-                    globParts[i] = [];
-                    globParts[j] = matched;
-                    break;
-                }
-            }
-        }
-        return globParts.filter(gs => gs.length);
-    }
-    partsMatch(a, b, emptyGSMatch = false) {
-        let ai = 0;
-        let bi = 0;
-        let result = [];
-        let which = '';
-        while (ai < a.length && bi < b.length) {
-            if (a[ai] === b[bi]) {
-                result.push(which === 'b' ? b[bi] : a[ai]);
-                ai++;
-                bi++;
-            }
-            else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {
-                result.push(a[ai]);
-                ai++;
-            }
-            else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {
-                result.push(b[bi]);
-                bi++;
-            }
-            else if (a[ai] === '*' &&
-                b[bi] &&
-                (this.options.dot || !b[bi].startsWith('.')) &&
-                b[bi] !== '**') {
-                if (which === 'b')
-                    return false;
-                which = 'a';
-                result.push(a[ai]);
-                ai++;
-                bi++;
-            }
-            else if (b[bi] === '*' &&
-                a[ai] &&
-                (this.options.dot || !a[ai].startsWith('.')) &&
-                a[ai] !== '**') {
-                if (which === 'a')
-                    return false;
-                which = 'b';
-                result.push(b[bi]);
-                ai++;
-                bi++;
-            }
-            else {
-                return false;
-            }
-        }
-        // if we fall out of the loop, it means they two are identical
-        // as long as their lengths match
-        return a.length === b.length && result;
-    }
-    parseNegate() {
-        if (this.nonegate)
-            return;
-        const pattern = this.pattern;
-        let negate = false;
-        let negateOffset = 0;
-        for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
-            negate = !negate;
-            negateOffset++;
-        }
-        if (negateOffset)
-            this.pattern = pattern.slice(negateOffset);
-        this.negate = negate;
-    }
-    // set partial to true to test if, for example,
-    // "/a/b" matches the start of "/*/b/*/d"
-    // Partial means, if you run out of file before you run
-    // out of pattern, then that's fine, as long as all
-    // the parts match.
-    matchOne(file, pattern, partial = false) {
-        const options = this.options;
-        // UNC paths like //?/X:/... can match X:/... and vice versa
-        // Drive letters in absolute drive or unc paths are always compared
-        // case-insensitively.
-        if (this.isWindows) {
-            const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]);
-            const fileUNC = !fileDrive &&
-                file[0] === '' &&
-                file[1] === '' &&
-                file[2] === '?' &&
-                /^[a-z]:$/i.test(file[3]);
-            const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]);
-            const patternUNC = !patternDrive &&
-                pattern[0] === '' &&
-                pattern[1] === '' &&
-                pattern[2] === '?' &&
-                typeof pattern[3] === 'string' &&
-                /^[a-z]:$/i.test(pattern[3]);
-            const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;
-            const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;
-            if (typeof fdi === 'number' && typeof pdi === 'number') {
-                const [fd, pd] = [file[fdi], pattern[pdi]];
-                if (fd.toLowerCase() === pd.toLowerCase()) {
-                    pattern[pdi] = fd;
-                    if (pdi > fdi) {
-                        pattern = pattern.slice(pdi);
-                    }
-                    else if (fdi > pdi) {
-                        file = file.slice(fdi);
-                    }
-                }
-            }
-        }
-        // resolve and reduce . and .. portions in the file as well.
-        // dont' need to do the second phase, because it's only one string[]
-        const { optimizationLevel = 1 } = this.options;
-        if (optimizationLevel >= 2) {
-            file = this.levelTwoFileOptimize(file);
-        }
-        this.debug('matchOne', this, { file, pattern });
-        this.debug('matchOne', file.length, pattern.length);
-        for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
-            this.debug('matchOne loop');
-            var p = pattern[pi];
-            var f = file[fi];
-            this.debug(pattern, p, f);
-            // should be impossible.
-            // some invalid regexp stuff in the set.
-            /* c8 ignore start */
-            if (p === false) {
-                return false;
-            }
-            /* c8 ignore stop */
-            if (p === exports.GLOBSTAR) {
-                this.debug('GLOBSTAR', [pattern, p, f]);
-                // "**"
-                // a/**/b/**/c would match the following:
-                // a/b/x/y/z/c
-                // a/x/y/z/b/c
-                // a/b/x/b/x/c
-                // a/b/c
-                // To do this, take the rest of the pattern after
-                // the **, and see if it would match the file remainder.
-                // If so, return success.
-                // If not, the ** "swallows" a segment, and try again.
-                // This is recursively awful.
-                //
-                // a/**/b/**/c matching a/b/x/y/z/c
-                // - a matches a
-                // - doublestar
-                //   - matchOne(b/x/y/z/c, b/**/c)
-                //     - b matches b
-                //     - doublestar
-                //       - matchOne(x/y/z/c, c) -> no
-                //       - matchOne(y/z/c, c) -> no
-                //       - matchOne(z/c, c) -> no
-                //       - matchOne(c, c) yes, hit
-                var fr = fi;
-                var pr = pi + 1;
-                if (pr === pl) {
-                    this.debug('** at the end');
-                    // a ** at the end will just swallow the rest.
-                    // We have found a match.
-                    // however, it will not swallow /.x, unless
-                    // options.dot is set.
-                    // . and .. are *never* matched by **, for explosively
-                    // exponential reasons.
-                    for (; fi < fl; fi++) {
-                        if (file[fi] === '.' ||
-                            file[fi] === '..' ||
-                            (!options.dot && file[fi].charAt(0) === '.'))
-                            return false;
-                    }
-                    return true;
-                }
-                // ok, let's see if we can swallow whatever we can.
-                while (fr < fl) {
-                    var swallowee = file[fr];
-                    this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);
-                    // XXX remove this slice.  Just pass the start index.
-                    if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
-                        this.debug('globstar found match!', fr, fl, swallowee);
-                        // found a match.
-                        return true;
-                    }
-                    else {
-                        // can't swallow "." or ".." ever.
-                        // can only swallow ".foo" when explicitly asked.
-                        if (swallowee === '.' ||
-                            swallowee === '..' ||
-                            (!options.dot && swallowee.charAt(0) === '.')) {
-                            this.debug('dot detected!', file, fr, pattern, pr);
-                            break;
-                        }
-                        // ** swallows a segment, and continue.
-                        this.debug('globstar swallow a segment, and continue');
-                        fr++;
-                    }
-                }
-                // no match was found.
-                // However, in partial mode, we can't say this is necessarily over.
-                /* c8 ignore start */
-                if (partial) {
-                    // ran out of file
-                    this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
-                    if (fr === fl) {
-                        return true;
-                    }
-                }
-                /* c8 ignore stop */
-                return false;
-            }
-            // something other than **
-            // non-magic patterns just have to match exactly
-            // patterns with magic have been turned into regexps.
-            let hit;
-            if (typeof p === 'string') {
-                hit = f === p;
-                this.debug('string match', p, f, hit);
-            }
-            else {
-                hit = p.test(f);
-                this.debug('pattern match', p, f, hit);
-            }
-            if (!hit)
-                return false;
-        }
-        // Note: ending in / means that we'll get a final ""
-        // at the end of the pattern.  This can only match a
-        // corresponding "" at the end of the file.
-        // If the file ends in /, then it can only match a
-        // a pattern that ends in /, unless the pattern just
-        // doesn't have any more for it. But, a/b/ should *not*
-        // match "a/b/*", even though "" matches against the
-        // [^/]*? pattern, except in partial mode, where it might
-        // simply not be reached yet.
-        // However, a/b/ should still satisfy a/*
-        // now either we fell off the end of the pattern, or we're done.
-        if (fi === fl && pi === pl) {
-            // ran out of pattern and filename at the same time.
-            // an exact hit!
-            return true;
-        }
-        else if (fi === fl) {
-            // ran out of file, but still had pattern left.
-            // this is ok if we're doing the match as part of
-            // a glob fs traversal.
-            return partial;
-        }
-        else if (pi === pl) {
-            // ran out of pattern, still have file left.
-            // this is only acceptable if we're on the very last
-            // empty segment of a file with a trailing slash.
-            // a/* should match a/b/
-            return fi === fl - 1 && file[fi] === '';
-            /* c8 ignore start */
-        }
-        else {
-            // should be unreachable.
-            throw new Error('wtf?');
-        }
-        /* c8 ignore stop */
-    }
-    braceExpand() {
-        return (0, exports.braceExpand)(this.pattern, this.options);
-    }
-    parse(pattern) {
-        (0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
-        const options = this.options;
-        // shortcuts
-        if (pattern === '**')
-            return exports.GLOBSTAR;
-        if (pattern === '')
-            return '';
-        // far and away, the most common glob pattern parts are
-        // *, *.*, and *.  Add a fast check method for those.
-        let m;
-        let fastTest = null;
-        if ((m = pattern.match(starRE))) {
-            fastTest = options.dot ? starTestDot : starTest;
-        }
-        else if ((m = pattern.match(starDotExtRE))) {
-            fastTest = (options.nocase
-                ? options.dot
-                    ? starDotExtTestNocaseDot
-                    : starDotExtTestNocase
-                : options.dot
-                    ? starDotExtTestDot
-                    : starDotExtTest)(m[1]);
-        }
-        else if ((m = pattern.match(qmarksRE))) {
-            fastTest = (options.nocase
-                ? options.dot
-                    ? qmarksTestNocaseDot
-                    : qmarksTestNocase
-                : options.dot
-                    ? qmarksTestDot
-                    : qmarksTest)(m);
-        }
-        else if ((m = pattern.match(starDotStarRE))) {
-            fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
-        }
-        else if ((m = pattern.match(dotStarRE))) {
-            fastTest = dotStarTest;
-        }
-        const re = ast_js_1.AST.fromGlob(pattern, this.options).toMMPattern();
-        if (fastTest && typeof re === 'object') {
-            // Avoids overriding in frozen environments
-            Reflect.defineProperty(re, 'test', { value: fastTest });
-        }
-        return re;
-    }
-    makeRe() {
-        if (this.regexp || this.regexp === false)
-            return this.regexp;
-        // at this point, this.set is a 2d array of partial
-        // pattern strings, or "**".
-        //
-        // It's better to use .match().  This function shouldn't
-        // be used, really, but it's pretty convenient sometimes,
-        // when you just want to work with a regex.
-        const set = this.set;
-        if (!set.length) {
-            this.regexp = false;
-            return this.regexp;
-        }
-        const options = this.options;
-        const twoStar = options.noglobstar
-            ? star
-            : options.dot
-                ? twoStarDot
-                : twoStarNoDot;
-        const flags = new Set(options.nocase ? ['i'] : []);
-        // regexpify non-globstar patterns
-        // if ** is only item, then we just do one twoStar
-        // if ** is first, and there are more, prepend (\/|twoStar\/)? to next
-        // if ** is last, append (\/twoStar|) to previous
-        // if ** is in the middle, append (\/|\/twoStar\/) to previous
-        // then filter out GLOBSTAR symbols
-        let re = set
-            .map(pattern => {
-            const pp = pattern.map(p => {
-                if (p instanceof RegExp) {
-                    for (const f of p.flags.split(''))
-                        flags.add(f);
-                }
-                return typeof p === 'string'
-                    ? regExpEscape(p)
-                    : p === exports.GLOBSTAR
-                        ? exports.GLOBSTAR
-                        : p._src;
-            });
-            pp.forEach((p, i) => {
-                const next = pp[i + 1];
-                const prev = pp[i - 1];
-                if (p !== exports.GLOBSTAR || prev === exports.GLOBSTAR) {
-                    return;
-                }
-                if (prev === undefined) {
-                    if (next !== undefined && next !== exports.GLOBSTAR) {
-                        pp[i + 1] = '(?:\\/|' + twoStar + '\\/)?' + next;
-                    }
-                    else {
-                        pp[i] = twoStar;
-                    }
-                }
-                else if (next === undefined) {
-                    pp[i - 1] = prev + '(?:\\/|' + twoStar + ')?';
-                }
-                else if (next !== exports.GLOBSTAR) {
-                    pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + '\\/)' + next;
-                    pp[i + 1] = exports.GLOBSTAR;
-                }
-            });
-            return pp.filter(p => p !== exports.GLOBSTAR).join('/');
-        })
-            .join('|');
-        // need to wrap in parens if we had more than one thing with |,
-        // otherwise only the first will be anchored to ^ and the last to $
-        const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', ''];
-        // must match entire pattern
-        // ending in a * or ** will make it less strict.
-        re = '^' + open + re + close + '$';
-        // can match anything, as long as it's not this.
-        if (this.negate)
-            re = '^(?!' + re + ').+$';
-        try {
-            this.regexp = new RegExp(re, [...flags].join(''));
-            /* c8 ignore start */
-        }
-        catch (ex) {
-            // should be impossible
-            this.regexp = false;
-        }
-        /* c8 ignore stop */
-        return this.regexp;
-    }
-    slashSplit(p) {
-        // if p starts with // on windows, we preserve that
-        // so that UNC paths aren't broken.  Otherwise, any number of
-        // / characters are coalesced into one, unless
-        // preserveMultipleSlashes is set to true.
-        if (this.preserveMultipleSlashes) {
-            return p.split('/');
-        }
-        else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
-            // add an extra '' for the one we lose
-            return ['', ...p.split(/\/+/)];
-        }
-        else {
-            return p.split(/\/+/);
-        }
-    }
-    match(f, partial = this.partial) {
-        this.debug('match', f, this.pattern);
-        // short-circuit in the case of busted things.
-        // comments, etc.
-        if (this.comment) {
-            return false;
-        }
-        if (this.empty) {
-            return f === '';
-        }
-        if (f === '/' && partial) {
-            return true;
-        }
-        const options = this.options;
-        // windows: need to use /, not \
-        if (this.isWindows) {
-            f = f.split('\\').join('/');
-        }
-        // treat the test path as a set of pathparts.
-        const ff = this.slashSplit(f);
-        this.debug(this.pattern, 'split', ff);
-        // just ONE of the pattern sets in this.set needs to match
-        // in order for it to be valid.  If negating, then just one
-        // match means that we have failed.
-        // Either way, return on the first hit.
-        const set = this.set;
-        this.debug(this.pattern, 'set', set);
-        // Find the basename of the path by looking for the last non-empty segment
-        let filename = ff[ff.length - 1];
-        if (!filename) {
-            for (let i = ff.length - 2; !filename && i >= 0; i--) {
-                filename = ff[i];
-            }
-        }
-        for (let i = 0; i < set.length; i++) {
-            const pattern = set[i];
-            let file = ff;
-            if (options.matchBase && pattern.length === 1) {
-                file = [filename];
-            }
-            const hit = this.matchOne(file, pattern, partial);
-            if (hit) {
-                if (options.flipNegate) {
-                    return true;
-                }
-                return !this.negate;
-            }
-        }
-        // didn't get any hits.  this is success if it's a negative
-        // pattern, failure otherwise.
-        if (options.flipNegate) {
-            return false;
-        }
-        return this.negate;
-    }
-    static defaults(def) {
-        return exports.minimatch.defaults(def).Minimatch;
-    }
-}
-exports.Minimatch = Minimatch;
-/* c8 ignore start */
-var ast_js_2 = require("./ast.js");
-Object.defineProperty(exports, "AST", { enumerable: true, get: function () { return ast_js_2.AST; } });
-var escape_js_2 = require("./escape.js");
-Object.defineProperty(exports, "escape", { enumerable: true, get: function () { return escape_js_2.escape; } });
-var unescape_js_2 = require("./unescape.js");
-Object.defineProperty(exports, "unescape", { enumerable: true, get: function () { return unescape_js_2.unescape; } });
-/* c8 ignore stop */
-exports.minimatch.AST = ast_js_1.AST;
-exports.minimatch.Minimatch = Minimatch;
-exports.minimatch.escape = escape_js_1.escape;
-exports.minimatch.unescape = unescape_js_1.unescape;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/node-gyp/node_modules/minimatch/dist/commonjs/package.json b/node_modules/node-gyp/node_modules/minimatch/dist/commonjs/package.json
deleted file mode 100644
index 5bbefffbabee3..0000000000000
--- a/node_modules/node-gyp/node_modules/minimatch/dist/commonjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
diff --git a/node_modules/node-gyp/node_modules/minimatch/dist/commonjs/unescape.js b/node_modules/node-gyp/node_modules/minimatch/dist/commonjs/unescape.js
deleted file mode 100644
index 47c36bcee5a02..0000000000000
--- a/node_modules/node-gyp/node_modules/minimatch/dist/commonjs/unescape.js
+++ /dev/null
@@ -1,24 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.unescape = void 0;
-/**
- * Un-escape a string that has been escaped with {@link escape}.
- *
- * If the {@link windowsPathsNoEscape} option is used, then square-brace
- * escapes are removed, but not backslash escapes.  For example, it will turn
- * the string `'[*]'` into `*`, but it will not turn `'\\*'` into `'*'`,
- * becuase `\` is a path separator in `windowsPathsNoEscape` mode.
- *
- * When `windowsPathsNoEscape` is not set, then both brace escapes and
- * backslash escapes are removed.
- *
- * Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped
- * or unescaped.
- */
-const unescape = (s, { windowsPathsNoEscape = false, } = {}) => {
-    return windowsPathsNoEscape
-        ? s.replace(/\[([^\/\\])\]/g, '$1')
-        : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2').replace(/\\([^\/])/g, '$1');
-};
-exports.unescape = unescape;
-//# sourceMappingURL=unescape.js.map
\ No newline at end of file
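
For context, the doc comment in the removed file spells out how unescape() treats square-bracket escapes versus backslash escapes. A minimal sketch of that documented behavior, assuming the top-level minimatch package (which exports the same helper) is still installed; the file name is hypothetical:

// unescape-demo.js (illustrative only)
const { unescape } = require('minimatch');

console.log(unescape('[*]'));                                  // *   (brace escape removed)
console.log(unescape('\\*'));                                  // *   (backslash escape removed)
console.log(unescape('[*]', { windowsPathsNoEscape: true }));  // *   (brace escapes still removed)
console.log(unescape('\\*', { windowsPathsNoEscape: true }));  // \*  (backslash kept: it is a path separator here)
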
diff --git a/node_modules/node-gyp/node_modules/minimatch/dist/esm/assert-valid-pattern.js b/node_modules/node-gyp/node_modules/minimatch/dist/esm/assert-valid-pattern.js
deleted file mode 100644
index 7b534fc30200b..0000000000000
--- a/node_modules/node-gyp/node_modules/minimatch/dist/esm/assert-valid-pattern.js
+++ /dev/null
@@ -1,10 +0,0 @@
-const MAX_PATTERN_LENGTH = 1024 * 64;
-export const assertValidPattern = (pattern) => {
-    if (typeof pattern !== 'string') {
-        throw new TypeError('invalid pattern');
-    }
-    if (pattern.length > MAX_PATTERN_LENGTH) {
-        throw new TypeError('pattern is too long');
-    }
-};
-//# sourceMappingURL=assert-valid-pattern.js.map
\ No newline at end of file
diff --git a/node_modules/node-gyp/node_modules/minimatch/dist/esm/ast.js b/node_modules/node-gyp/node_modules/minimatch/dist/esm/ast.js
deleted file mode 100644
index 2d2bced6533de..0000000000000
--- a/node_modules/node-gyp/node_modules/minimatch/dist/esm/ast.js
+++ /dev/null
@@ -1,588 +0,0 @@
-// parse a single path portion
-import { parseClass } from './brace-expressions.js';
-import { unescape } from './unescape.js';
-const types = new Set(['!', '?', '+', '*', '@']);
-const isExtglobType = (c) => types.has(c);
-// Patterns that get prepended to bind to the start of either the
-// entire string, or just a single path portion, to prevent dots
-// and/or traversal patterns, when needed.
-// Exts don't need the ^ or / bit, because the root binds that already.
-const startNoTraversal = '(?!(?:^|/)\\.\\.?(?:$|/))';
-const startNoDot = '(?!\\.)';
-// characters that indicate a start of pattern needs the "no dots" bit,
-// because a dot *might* be matched. ( is not in the list, because in
-// the case of a child extglob, it will handle the prevention itself.
-const addPatternStart = new Set(['[', '.']);
-// cases where traversal is A-OK, no dot prevention needed
-const justDots = new Set(['..', '.']);
-const reSpecials = new Set('().*{}+?[]^$\\!');
-const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
-// any single thing other than /
-const qmark = '[^/]';
-// * => any number of characters
-const star = qmark + '*?';
-// use + when we need to ensure that *something* matches, because the * is
-// the only thing in the path portion.
-const starNoEmpty = qmark + '+?';
-// remove the \ chars that we added if we end up doing a nonmagic compare
-// const deslash = (s: string) => s.replace(/\\(.)/g, '$1')
-export class AST {
-    type;
-    #root;
-    #hasMagic;
-    #uflag = false;
-    #parts = [];
-    #parent;
-    #parentIndex;
-    #negs;
-    #filledNegs = false;
-    #options;
-    #toString;
-    // set to true if it's an extglob with no children
-    // (which really means one child of '')
-    #emptyExt = false;
-    constructor(type, parent, options = {}) {
-        this.type = type;
-        // extglobs are inherently magical
-        if (type)
-            this.#hasMagic = true;
-        this.#parent = parent;
-        this.#root = this.#parent ? this.#parent.#root : this;
-        this.#options = this.#root === this ? options : this.#root.#options;
-        this.#negs = this.#root === this ? [] : this.#root.#negs;
-        if (type === '!' && !this.#root.#filledNegs)
-            this.#negs.push(this);
-        this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0;
-    }
-    get hasMagic() {
-        /* c8 ignore start */
-        if (this.#hasMagic !== undefined)
-            return this.#hasMagic;
-        /* c8 ignore stop */
-        for (const p of this.#parts) {
-            if (typeof p === 'string')
-                continue;
-            if (p.type || p.hasMagic)
-                return (this.#hasMagic = true);
-        }
-        // note: will be undefined until we generate the regexp src and find out
-        return this.#hasMagic;
-    }
-    // reconstructs the pattern
-    toString() {
-        if (this.#toString !== undefined)
-            return this.#toString;
-        if (!this.type) {
-            return (this.#toString = this.#parts.map(p => String(p)).join(''));
-        }
-        else {
-            return (this.#toString =
-                this.type + '(' + this.#parts.map(p => String(p)).join('|') + ')');
-        }
-    }
-    #fillNegs() {
-        /* c8 ignore start */
-        if (this !== this.#root)
-            throw new Error('should only call on root');
-        if (this.#filledNegs)
-            return this;
-        /* c8 ignore stop */
-        // call toString() once to fill this out
-        this.toString();
-        this.#filledNegs = true;
-        let n;
-        while ((n = this.#negs.pop())) {
-            if (n.type !== '!')
-                continue;
-            // walk up the tree, appending everthing that comes AFTER parentIndex
-            let p = n;
-            let pp = p.#parent;
-            while (pp) {
-                for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) {
-                    for (const part of n.#parts) {
-                        /* c8 ignore start */
-                        if (typeof part === 'string') {
-                            throw new Error('string part in extglob AST??');
-                        }
-                        /* c8 ignore stop */
-                        part.copyIn(pp.#parts[i]);
-                    }
-                }
-                p = pp;
-                pp = p.#parent;
-            }
-        }
-        return this;
-    }
-    push(...parts) {
-        for (const p of parts) {
-            if (p === '')
-                continue;
-            /* c8 ignore start */
-            if (typeof p !== 'string' && !(p instanceof AST && p.#parent === this)) {
-                throw new Error('invalid part: ' + p);
-            }
-            /* c8 ignore stop */
-            this.#parts.push(p);
-        }
-    }
-    toJSON() {
-        const ret = this.type === null
-            ? this.#parts.slice().map(p => (typeof p === 'string' ? p : p.toJSON()))
-            : [this.type, ...this.#parts.map(p => p.toJSON())];
-        if (this.isStart() && !this.type)
-            ret.unshift([]);
-        if (this.isEnd() &&
-            (this === this.#root ||
-                (this.#root.#filledNegs && this.#parent?.type === '!'))) {
-            ret.push({});
-        }
-        return ret;
-    }
-    isStart() {
-        if (this.#root === this)
-            return true;
-        // if (this.type) return !!this.#parent?.isStart()
-        if (!this.#parent?.isStart())
-            return false;
-        if (this.#parentIndex === 0)
-            return true;
-        // if everything AHEAD of this is a negation, then it's still the "start"
-        const p = this.#parent;
-        for (let i = 0; i < this.#parentIndex; i++) {
-            const pp = p.#parts[i];
-            if (!(pp instanceof AST && pp.type === '!')) {
-                return false;
-            }
-        }
-        return true;
-    }
-    isEnd() {
-        if (this.#root === this)
-            return true;
-        if (this.#parent?.type === '!')
-            return true;
-        if (!this.#parent?.isEnd())
-            return false;
-        if (!this.type)
-            return this.#parent?.isEnd();
-        // if not root, it'll always have a parent
-        /* c8 ignore start */
-        const pl = this.#parent ? this.#parent.#parts.length : 0;
-        /* c8 ignore stop */
-        return this.#parentIndex === pl - 1;
-    }
-    copyIn(part) {
-        if (typeof part === 'string')
-            this.push(part);
-        else
-            this.push(part.clone(this));
-    }
-    clone(parent) {
-        const c = new AST(this.type, parent);
-        for (const p of this.#parts) {
-            c.copyIn(p);
-        }
-        return c;
-    }
-    static #parseAST(str, ast, pos, opt) {
-        let escaping = false;
-        let inBrace = false;
-        let braceStart = -1;
-        let braceNeg = false;
-        if (ast.type === null) {
-            // outside of a extglob, append until we find a start
-            let i = pos;
-            let acc = '';
-            while (i < str.length) {
-                const c = str.charAt(i++);
-                // still accumulate escapes at this point, but we do ignore
-                // starts that are escaped
-                if (escaping || c === '\\') {
-                    escaping = !escaping;
-                    acc += c;
-                    continue;
-                }
-                if (inBrace) {
-                    if (i === braceStart + 1) {
-                        if (c === '^' || c === '!') {
-                            braceNeg = true;
-                        }
-                    }
-                    else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
-                        inBrace = false;
-                    }
-                    acc += c;
-                    continue;
-                }
-                else if (c === '[') {
-                    inBrace = true;
-                    braceStart = i;
-                    braceNeg = false;
-                    acc += c;
-                    continue;
-                }
-                if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') {
-                    ast.push(acc);
-                    acc = '';
-                    const ext = new AST(c, ast);
-                    i = AST.#parseAST(str, ext, i, opt);
-                    ast.push(ext);
-                    continue;
-                }
-                acc += c;
-            }
-            ast.push(acc);
-            return i;
-        }
-        // some kind of extglob, pos is at the (
-        // find the next | or )
-        let i = pos + 1;
-        let part = new AST(null, ast);
-        const parts = [];
-        let acc = '';
-        while (i < str.length) {
-            const c = str.charAt(i++);
-            // still accumulate escapes at this point, but we do ignore
-            // starts that are escaped
-            if (escaping || c === '\\') {
-                escaping = !escaping;
-                acc += c;
-                continue;
-            }
-            if (inBrace) {
-                if (i === braceStart + 1) {
-                    if (c === '^' || c === '!') {
-                        braceNeg = true;
-                    }
-                }
-                else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
-                    inBrace = false;
-                }
-                acc += c;
-                continue;
-            }
-            else if (c === '[') {
-                inBrace = true;
-                braceStart = i;
-                braceNeg = false;
-                acc += c;
-                continue;
-            }
-            if (isExtglobType(c) && str.charAt(i) === '(') {
-                part.push(acc);
-                acc = '';
-                const ext = new AST(c, part);
-                part.push(ext);
-                i = AST.#parseAST(str, ext, i, opt);
-                continue;
-            }
-            if (c === '|') {
-                part.push(acc);
-                acc = '';
-                parts.push(part);
-                part = new AST(null, ast);
-                continue;
-            }
-            if (c === ')') {
-                if (acc === '' && ast.#parts.length === 0) {
-                    ast.#emptyExt = true;
-                }
-                part.push(acc);
-                acc = '';
-                ast.push(...parts, part);
-                return i;
-            }
-            acc += c;
-        }
-        // unfinished extglob
-        // if we got here, it was a malformed extglob! not an extglob, but
-        // maybe something else in there.
-        ast.type = null;
-        ast.#hasMagic = undefined;
-        ast.#parts = [str.substring(pos - 1)];
-        return i;
-    }
-    static fromGlob(pattern, options = {}) {
-        const ast = new AST(null, undefined, options);
-        AST.#parseAST(pattern, ast, 0, options);
-        return ast;
-    }
-    // returns the regular expression if there's magic, or the unescaped
-    // string if not.
-    toMMPattern() {
-        // should only be called on root
-        /* c8 ignore start */
-        if (this !== this.#root)
-            return this.#root.toMMPattern();
-        /* c8 ignore stop */
-        const glob = this.toString();
-        const [re, body, hasMagic, uflag] = this.toRegExpSource();
-        // if we're in nocase mode, and not nocaseMagicOnly, then we do
-        // still need a regular expression if we have to case-insensitively
-        // match capital/lowercase characters.
-        const anyMagic = hasMagic ||
-            this.#hasMagic ||
-            (this.#options.nocase &&
-                !this.#options.nocaseMagicOnly &&
-                glob.toUpperCase() !== glob.toLowerCase());
-        if (!anyMagic) {
-            return body;
-        }
-        const flags = (this.#options.nocase ? 'i' : '') + (uflag ? 'u' : '');
-        return Object.assign(new RegExp(`^${re}$`, flags), {
-            _src: re,
-            _glob: glob,
-        });
-    }
-    get options() {
-        return this.#options;
-    }
-    // returns the string match, the regexp source, whether there's magic
-    // in the regexp (so a regular expression is required) and whether or
-    // not the uflag is needed for the regular expression (for posix classes)
-    // TODO: instead of injecting the start/end at this point, just return
-    // the BODY of the regexp, along with the start/end portions suitable
-    // for binding the start/end in either a joined full-path makeRe context
-    // (where we bind to (^|/), or a standalone matchPart context (where
-    // we bind to ^, and not /).  Otherwise slashes get duped!
-    //
-    // In part-matching mode, the start is:
-    // - if not isStart: nothing
-    // - if traversal possible, but not allowed: ^(?!\.\.?$)
-    // - if dots allowed or not possible: ^
-    // - if dots possible and not allowed: ^(?!\.)
-    // end is:
-    // - if not isEnd(): nothing
-    // - else: $
-    //
-    // In full-path matching mode, we put the slash at the START of the
-    // pattern, so start is:
-    // - if first pattern: same as part-matching mode
-    // - if not isStart(): nothing
-    // - if traversal possible, but not allowed: /(?!\.\.?(?:$|/))
-    // - if dots allowed or not possible: /
-    // - if dots possible and not allowed: /(?!\.)
-    // end is:
-    // - if last pattern, same as part-matching mode
-    // - else nothing
-    //
-    // Always put the (?:$|/) on negated tails, though, because that has to be
-    // there to bind the end of the negated pattern portion, and it's easier to
-    // just stick it in now rather than try to inject it later in the middle of
-    // the pattern.
-    //
-    // We can just always return the same end, and leave it up to the caller
-    // to know whether it's going to be used joined or in parts.
-    // And, if the start is adjusted slightly, can do the same there:
-    // - if not isStart: nothing
-    // - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$)
-    // - if dots allowed or not possible: (?:/|^)
-    // - if dots possible and not allowed: (?:/|^)(?!\.)
-    //
-    // But it's better to have a simpler binding without a conditional, for
-    // performance, so probably better to return both start options.
-    //
-    // Then the caller just ignores the end if it's not the first pattern,
-    // and the start always gets applied.
-    //
-    // But that's always going to be $ if it's the ending pattern, or nothing,
-    // so the caller can just attach $ at the end of the pattern when building.
-    //
-    // So the todo is:
-    // - better detect what kind of start is needed
-    // - return both flavors of starting pattern
-    // - attach $ at the end of the pattern when creating the actual RegExp
-    //
-    // Ah, but wait, no, that all only applies to the root when the first pattern
-    // is not an extglob. If the first pattern IS an extglob, then we need all
-    // that dot prevention biz to live in the extglob portions, because eg
-    // +(*|.x*) can match .xy but not .yx.
-    //
-    // So, return the two flavors if it's #root and the first child is not an
-    // AST, otherwise leave it to the child AST to handle it, and there,
-    // use the (?:^|/) style of start binding.
-    //
-    // Even simplified further:
-    // - Since the start for a join is eg /(?!\.) and the start for a part
-    // is ^(?!\.), we can just prepend (?!\.) to the pattern (either root
-    // or start or whatever) and prepend ^ or / at the Regexp construction.
-    toRegExpSource(allowDot) {
-        const dot = allowDot ?? !!this.#options.dot;
-        if (this.#root === this)
-            this.#fillNegs();
-        if (!this.type) {
-            const noEmpty = this.isStart() && this.isEnd();
-            const src = this.#parts
-                .map(p => {
-                const [re, _, hasMagic, uflag] = typeof p === 'string'
-                    ? AST.#parseGlob(p, this.#hasMagic, noEmpty)
-                    : p.toRegExpSource(allowDot);
-                this.#hasMagic = this.#hasMagic || hasMagic;
-                this.#uflag = this.#uflag || uflag;
-                return re;
-            })
-                .join('');
-            let start = '';
-            if (this.isStart()) {
-                if (typeof this.#parts[0] === 'string') {
-                    // this is the string that will match the start of the pattern,
-                    // so we need to protect against dots and such.
-                    // '.' and '..' cannot match unless the pattern is that exactly,
-                    // even if it starts with . or dot:true is set.
-                    const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]);
-                    if (!dotTravAllowed) {
-                        const aps = addPatternStart;
-                        // check if we have a possibility of matching . or ..,
-                        // and prevent that.
-                        const needNoTrav = 
-                        // dots are allowed, and the pattern starts with [ or .
-                        (dot && aps.has(src.charAt(0))) ||
-                            // the pattern starts with \., and then [ or .
-                            (src.startsWith('\\.') && aps.has(src.charAt(2))) ||
-                            // the pattern starts with \.\., and then [ or .
-                            (src.startsWith('\\.\\.') && aps.has(src.charAt(4)));
-                        // no need to prevent dots if it can't match a dot, or if a
-                        // sub-pattern will be preventing it anyway.
-                        const needNoDot = !dot && !allowDot && aps.has(src.charAt(0));
-                        start = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : '';
-                    }
-                }
-            }
-            // append the "end of path portion" pattern to negation tails
-            let end = '';
-            if (this.isEnd() &&
-                this.#root.#filledNegs &&
-                this.#parent?.type === '!') {
-                end = '(?:$|\\/)';
-            }
-            const final = start + src + end;
-            return [
-                final,
-                unescape(src),
-                (this.#hasMagic = !!this.#hasMagic),
-                this.#uflag,
-            ];
-        }
-        // We need to calculate the body *twice* if it's a repeat pattern
-        // at the start, once in nodot mode, then again in dot mode, so a
-        // pattern like *(?) can match 'x.y'
-        const repeated = this.type === '*' || this.type === '+';
-        // some kind of extglob
-        const start = this.type === '!' ? '(?:(?!(?:' : '(?:';
-        let body = this.#partsToRegExp(dot);
-        if (this.isStart() && this.isEnd() && !body && this.type !== '!') {
-            // invalid extglob, has to at least be *something* present, if it's
-            // the entire path portion.
-            const s = this.toString();
-            this.#parts = [s];
-            this.type = null;
-            this.#hasMagic = undefined;
-            return [s, unescape(this.toString()), false, false];
-        }
-        // XXX abstract out this map method
-        let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot
-            ? ''
-            : this.#partsToRegExp(true);
-        if (bodyDotAllowed === body) {
-            bodyDotAllowed = '';
-        }
-        if (bodyDotAllowed) {
-            body = `(?:${body})(?:${bodyDotAllowed})*?`;
-        }
-        // an empty !() is exactly equivalent to a starNoEmpty
-        let final = '';
-        if (this.type === '!' && this.#emptyExt) {
-            final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty;
-        }
-        else {
-            const close = this.type === '!'
-                ? // !() must match something,but !(x) can match ''
-                    '))' +
-                        (this.isStart() && !dot && !allowDot ? startNoDot : '') +
-                        star +
-                        ')'
-                : this.type === '@'
-                    ? ')'
-                    : this.type === '?'
-                        ? ')?'
-                        : this.type === '+' && bodyDotAllowed
-                            ? ')'
-                            : this.type === '*' && bodyDotAllowed
-                                ? `)?`
-                                : `)${this.type}`;
-            final = start + body + close;
-        }
-        return [
-            final,
-            unescape(body),
-            (this.#hasMagic = !!this.#hasMagic),
-            this.#uflag,
-        ];
-    }
-    #partsToRegExp(dot) {
-        return this.#parts
-            .map(p => {
-            // extglob ASTs should only contain parent ASTs
-            /* c8 ignore start */
-            if (typeof p === 'string') {
-                throw new Error('string type in extglob ast??');
-            }
-            /* c8 ignore stop */
-            // can ignore hasMagic, because extglobs are already always magic
-            const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot);
-            this.#uflag = this.#uflag || uflag;
-            return re;
-        })
-            .filter(p => !(this.isStart() && this.isEnd()) || !!p)
-            .join('|');
-    }
-    static #parseGlob(glob, hasMagic, noEmpty = false) {
-        let escaping = false;
-        let re = '';
-        let uflag = false;
-        for (let i = 0; i < glob.length; i++) {
-            const c = glob.charAt(i);
-            if (escaping) {
-                escaping = false;
-                re += (reSpecials.has(c) ? '\\' : '') + c;
-                continue;
-            }
-            if (c === '\\') {
-                if (i === glob.length - 1) {
-                    re += '\\\\';
-                }
-                else {
-                    escaping = true;
-                }
-                continue;
-            }
-            if (c === '[') {
-                const [src, needUflag, consumed, magic] = parseClass(glob, i);
-                if (consumed) {
-                    re += src;
-                    uflag = uflag || needUflag;
-                    i += consumed - 1;
-                    hasMagic = hasMagic || magic;
-                    continue;
-                }
-            }
-            if (c === '*') {
-                if (noEmpty && glob === '*')
-                    re += starNoEmpty;
-                else
-                    re += star;
-                hasMagic = true;
-                continue;
-            }
-            if (c === '?') {
-                re += qmark;
-                hasMagic = true;
-                continue;
-            }
-            re += regExpEscape(c);
-        }
-        return [re, unescape(glob), !!hasMagic, uflag];
-    }
-}
-//# sourceMappingURL=ast.js.map
\ No newline at end of file
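
For context, the removed ast.js implements extglob parsing and the "magic vs. literal" distinction. A minimal sketch of that behavior through the exported AST class and minimatch(), assuming the top-level minimatch package is still installed; the extglob expectations mirror the comments in the file above, and the file name is hypothetical:

// ast-demo.js (illustrative only)
const { AST, minimatch } = require('minimatch');

// A pattern containing magic compiles to a RegExp; a plain literal comes back as a string.
console.log(AST.fromGlob('src/*.js').toMMPattern() instanceof RegExp);  // true
console.log(AST.fromGlob('src/index.js').toMMPattern());                // src/index.js

// Dot handling inside extglobs, as noted in the comments above:
// +(*|.x*) can match '.xy' but not '.yx'.
console.log(minimatch('.xy', '+(*|.x*)'));  // true
console.log(minimatch('.yx', '+(*|.x*)'));  // false
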
diff --git a/node_modules/node-gyp/node_modules/minimatch/dist/esm/brace-expressions.js b/node_modules/node-gyp/node_modules/minimatch/dist/esm/brace-expressions.js
deleted file mode 100644
index c629d6ae816e2..0000000000000
--- a/node_modules/node-gyp/node_modules/minimatch/dist/esm/brace-expressions.js
+++ /dev/null
@@ -1,148 +0,0 @@
-// translate the various posix character classes into unicode properties
-// this works across all unicode locales
-// { <posix class>: [<translation>, /u flag required, negated]
-const posixClasses = {
-    '[:alnum:]': ['\\p{L}\\p{Nl}\\p{Nd}', true],
-    '[:alpha:]': ['\\p{L}\\p{Nl}', true],
-    '[:ascii:]': ['\\x' + '00-\\x' + '7f', false],
-    '[:blank:]': ['\\p{Zs}\\t', true],
-    '[:cntrl:]': ['\\p{Cc}', true],
-    '[:digit:]': ['\\p{Nd}', true],
-    '[:graph:]': ['\\p{Z}\\p{C}', true, true],
-    '[:lower:]': ['\\p{Ll}', true],
-    '[:print:]': ['\\p{C}', true],
-    '[:punct:]': ['\\p{P}', true],
-    '[:space:]': ['\\p{Z}\\t\\r\\n\\v\\f', true],
-    '[:upper:]': ['\\p{Lu}', true],
-    '[:word:]': ['\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}', true],
-    '[:xdigit:]': ['A-Fa-f0-9', false],
-};
-// only need to escape a few things inside of brace expressions
-// escapes: [ \ ] -
-const braceEscape = (s) => s.replace(/[[\]\\-]/g, '\\$&');
-// escape all regexp magic characters
-const regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
-// everything has already been escaped, we just have to join
-const rangesToString = (ranges) => ranges.join('');
-// takes a glob string at a posix brace expression, and returns
-// an equivalent regular expression source, and boolean indicating
-// whether the /u flag needs to be applied, and the number of chars
-// consumed to parse the character class.
-// This also removes out of order ranges, and returns ($.) if the
-// entire class just no good.
-export const parseClass = (glob, position) => {
-    const pos = position;
-    /* c8 ignore start */
-    if (glob.charAt(pos) !== '[') {
-        throw new Error('not in a brace expression');
-    }
-    /* c8 ignore stop */
-    const ranges = [];
-    const negs = [];
-    let i = pos + 1;
-    let sawStart = false;
-    let uflag = false;
-    let escaping = false;
-    let negate = false;
-    let endPos = pos;
-    let rangeStart = '';
-    WHILE: while (i < glob.length) {
-        const c = glob.charAt(i);
-        if ((c === '!' || c === '^') && i === pos + 1) {
-            negate = true;
-            i++;
-            continue;
-        }
-        if (c === ']' && sawStart && !escaping) {
-            endPos = i + 1;
-            break;
-        }
-        sawStart = true;
-        if (c === '\\') {
-            if (!escaping) {
-                escaping = true;
-                i++;
-                continue;
-            }
-            // escaped \ char, fall through and treat like normal char
-        }
-        if (c === '[' && !escaping) {
-            // either a posix class, a collation equivalent, or just a [
-            for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) {
-                if (glob.startsWith(cls, i)) {
-                    // invalid, [a-[] is fine, but not [a-[:alpha]]
-                    if (rangeStart) {
-                        return ['$.', false, glob.length - pos, true];
-                    }
-                    i += cls.length;
-                    if (neg)
-                        negs.push(unip);
-                    else
-                        ranges.push(unip);
-                    uflag = uflag || u;
-                    continue WHILE;
-                }
-            }
-        }
-        // now it's just a normal character, effectively
-        escaping = false;
-        if (rangeStart) {
-            // throw this range away if it's not valid, but others
-            // can still match.
-            if (c > rangeStart) {
-                ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c));
-            }
-            else if (c === rangeStart) {
-                ranges.push(braceEscape(c));
-            }
-            rangeStart = '';
-            i++;
-            continue;
-        }
-        // now might be the start of a range.
-        // can be either c-d or c-] or c<more...>] or c] at this point
-        if (glob.startsWith('-]', i + 1)) {
-            ranges.push(braceEscape(c + '-'));
-            i += 2;
-            continue;
-        }
-        if (glob.startsWith('-', i + 1)) {
-            rangeStart = c;
-            i += 2;
-            continue;
-        }
-        // not the start of a range, just a single character
-        ranges.push(braceEscape(c));
-        i++;
-    }
-    if (endPos < i) {
-        // didn't see the end of the class, not a valid class,
-        // but might still be valid as a literal match.
-        return ['', false, 0, false];
-    }
-    // if we got no ranges and no negates, then we have a range that
-    // cannot possibly match anything, and that poisons the whole glob
-    if (!ranges.length && !negs.length) {
-        return ['$.', false, glob.length - pos, true];
-    }
-    // if we got one positive range, and it's a single character, then that's
-    // not actually a magic pattern, it's just that one literal character.
-    // we should not treat that as "magic", we should just return the literal
-    // character. [_] is a perfectly valid way to escape glob magic chars.
-    if (negs.length === 0 &&
-        ranges.length === 1 &&
-        /^\\?.$/.test(ranges[0]) &&
-        !negate) {
-        const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0];
-        return [regexpEscape(r), false, endPos - pos, false];
-    }
-    const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']';
-    const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']';
-    const comb = ranges.length && negs.length
-        ? '(' + sranges + '|' + snegs + ')'
-        : ranges.length
-            ? sranges
-            : snegs;
-    return [comb, uflag, endPos - pos, true];
-};
-//# sourceMappingURL=brace-expressions.js.map
\ No newline at end of file
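
For context, the posixClasses table in the removed file translates POSIX character classes into Unicode property escapes (which need the /u flag). A minimal sketch of the effect through the public minimatch() entry point, assuming the top-level minimatch package is still installed; the file name is hypothetical:

// posix-class-demo.js (illustrative only)
const { minimatch } = require('minimatch');

// [:alpha:] maps to \p{L}\p{Nl}, so non-ASCII letters match as well.
console.log(minimatch('Émile', '[[:alpha:]]*'));  // true
console.log(minimatch('42', '[[:alpha:]]*'));     // false
console.log(minimatch('42', '[[:digit:]]*'));     // true
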
diff --git a/node_modules/node-gyp/node_modules/minimatch/dist/esm/escape.js b/node_modules/node-gyp/node_modules/minimatch/dist/esm/escape.js
deleted file mode 100644
index 16f7c8c7bdc64..0000000000000
--- a/node_modules/node-gyp/node_modules/minimatch/dist/esm/escape.js
+++ /dev/null
@@ -1,18 +0,0 @@
-/**
- * Escape all magic characters in a glob pattern.
- *
- * If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape}
- * option is used, then characters are escaped by wrapping in `[]`, because
- * a magic character wrapped in a character class can only be satisfied by
- * that exact character.  In this mode, `\` is _not_ escaped, because it is
- * not interpreted as a magic character, but instead as a path separator.
- */
-export const escape = (s, { windowsPathsNoEscape = false, } = {}) => {
-    // don't need to escape +@! because we escape the parens
-    // that make those magic, and escaping ! as [!] isn't valid,
-    // because [!]] is a valid glob class meaning not ']'.
-    return windowsPathsNoEscape
-        ? s.replace(/[?*()[\]]/g, '[$&]')
-        : s.replace(/[?*()[\]\\]/g, '\\$&');
-};
-//# sourceMappingURL=escape.js.map
\ No newline at end of file
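
For context, the removed escape() either backslash-escapes glob magic characters or, with windowsPathsNoEscape, wraps them in a character class. A minimal sketch, again assuming the top-level minimatch package is still installed; the file name is hypothetical:

// escape-demo.js (illustrative only)
const { escape, minimatch } = require('minimatch');

console.log(escape('what is this?'));                         // what is this\?
console.log(escape('*.js', { windowsPathsNoEscape: true }));  // [*].js

// An escaped pattern matches only the literal string it was built from:
console.log(minimatch('what is this?', escape('what is this?')));  // true
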
diff --git a/node_modules/node-gyp/node_modules/minimatch/dist/esm/index.js b/node_modules/node-gyp/node_modules/minimatch/dist/esm/index.js
deleted file mode 100644
index 84b577b0472cb..0000000000000
--- a/node_modules/node-gyp/node_modules/minimatch/dist/esm/index.js
+++ /dev/null
@@ -1,1001 +0,0 @@
-import expand from 'brace-expansion';
-import { assertValidPattern } from './assert-valid-pattern.js';
-import { AST } from './ast.js';
-import { escape } from './escape.js';
-import { unescape } from './unescape.js';
-export const minimatch = (p, pattern, options = {}) => {
-    assertValidPattern(pattern);
-    // shortcut: comments match nothing.
-    if (!options.nocomment && pattern.charAt(0) === '#') {
-        return false;
-    }
-    return new Minimatch(pattern, options).match(p);
-};
-// Optimized checking for the most common glob patterns.
-const starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/;
-const starDotExtTest = (ext) => (f) => !f.startsWith('.') && f.endsWith(ext);
-const starDotExtTestDot = (ext) => (f) => f.endsWith(ext);
-const starDotExtTestNocase = (ext) => {
-    ext = ext.toLowerCase();
-    return (f) => !f.startsWith('.') && f.toLowerCase().endsWith(ext);
-};
-const starDotExtTestNocaseDot = (ext) => {
-    ext = ext.toLowerCase();
-    return (f) => f.toLowerCase().endsWith(ext);
-};
-const starDotStarRE = /^\*+\.\*+$/;
-const starDotStarTest = (f) => !f.startsWith('.') && f.includes('.');
-const starDotStarTestDot = (f) => f !== '.' && f !== '..' && f.includes('.');
-const dotStarRE = /^\.\*+$/;
-const dotStarTest = (f) => f !== '.' && f !== '..' && f.startsWith('.');
-const starRE = /^\*+$/;
-const starTest = (f) => f.length !== 0 && !f.startsWith('.');
-const starTestDot = (f) => f.length !== 0 && f !== '.' && f !== '..';
-const qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/;
-const qmarksTestNocase = ([$0, ext = '']) => {
-    const noext = qmarksTestNoExt([$0]);
-    if (!ext)
-        return noext;
-    ext = ext.toLowerCase();
-    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
-};
-const qmarksTestNocaseDot = ([$0, ext = '']) => {
-    const noext = qmarksTestNoExtDot([$0]);
-    if (!ext)
-        return noext;
-    ext = ext.toLowerCase();
-    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
-};
-const qmarksTestDot = ([$0, ext = '']) => {
-    const noext = qmarksTestNoExtDot([$0]);
-    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
-};
-const qmarksTest = ([$0, ext = '']) => {
-    const noext = qmarksTestNoExt([$0]);
-    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
-};
-const qmarksTestNoExt = ([$0]) => {
-    const len = $0.length;
-    return (f) => f.length === len && !f.startsWith('.');
-};
-const qmarksTestNoExtDot = ([$0]) => {
-    const len = $0.length;
-    return (f) => f.length === len && f !== '.' && f !== '..';
-};
-/* c8 ignore start */
-const defaultPlatform = (typeof process === 'object' && process
-    ? (typeof process.env === 'object' &&
-        process.env &&
-        process.env.__MINIMATCH_TESTING_PLATFORM__) ||
-        process.platform
-    : 'posix');
-const path = {
-    win32: { sep: '\\' },
-    posix: { sep: '/' },
-};
-/* c8 ignore stop */
-export const sep = defaultPlatform === 'win32' ? path.win32.sep : path.posix.sep;
-minimatch.sep = sep;
-export const GLOBSTAR = Symbol('globstar **');
-minimatch.GLOBSTAR = GLOBSTAR;
-// any single thing other than /
-// don't need to escape / when using new RegExp()
-const qmark = '[^/]';
-// * => any number of characters
-const star = qmark + '*?';
-// ** when dots are allowed.  Anything goes, except .. and .
-// not (^ or / followed by one or two dots followed by $ or /),
-// followed by anything, any number of times.
-const twoStarDot = '(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?';
-// not a ^ or / followed by a dot,
-// followed by anything, any number of times.
-const twoStarNoDot = '(?:(?!(?:\\/|^)\\.).)*?';
-export const filter = (pattern, options = {}) => (p) => minimatch(p, pattern, options);
-minimatch.filter = filter;
-const ext = (a, b = {}) => Object.assign({}, a, b);
-export const defaults = (def) => {
-    if (!def || typeof def !== 'object' || !Object.keys(def).length) {
-        return minimatch;
-    }
-    const orig = minimatch;
-    const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options));
-    return Object.assign(m, {
-        Minimatch: class Minimatch extends orig.Minimatch {
-            constructor(pattern, options = {}) {
-                super(pattern, ext(def, options));
-            }
-            static defaults(options) {
-                return orig.defaults(ext(def, options)).Minimatch;
-            }
-        },
-        AST: class AST extends orig.AST {
-            /* c8 ignore start */
-            constructor(type, parent, options = {}) {
-                super(type, parent, ext(def, options));
-            }
-            /* c8 ignore stop */
-            static fromGlob(pattern, options = {}) {
-                return orig.AST.fromGlob(pattern, ext(def, options));
-            }
-        },
-        unescape: (s, options = {}) => orig.unescape(s, ext(def, options)),
-        escape: (s, options = {}) => orig.escape(s, ext(def, options)),
-        filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)),
-        defaults: (options) => orig.defaults(ext(def, options)),
-        makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)),
-        braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)),
-        match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)),
-        sep: orig.sep,
-        GLOBSTAR: GLOBSTAR,
-    });
-};
-minimatch.defaults = defaults;
-// Brace expansion:
-// a{b,c}d -> abd acd
-// a{b,}c -> abc ac
-// a{0..3}d -> a0d a1d a2d a3d
-// a{b,c{d,e}f}g -> abg acdfg acefg
-// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg
-//
-// Invalid sets are not expanded.
-// a{2..}b -> a{2..}b
-// a{b}c -> a{b}c
-export const braceExpand = (pattern, options = {}) => {
-    assertValidPattern(pattern);
-    // Thanks to Yeting Li <https://github.com/yetingli> for
-    // improving this regexp to avoid a ReDOS vulnerability.
-    if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
-        // shortcut. no need to expand.
-        return [pattern];
-    }
-    return expand(pattern);
-};
-minimatch.braceExpand = braceExpand;
-// parse a component of the expanded set.
-// At this point, no pattern may contain "/" in it
-// so we're going to return a 2d array, where each entry is the full
-// pattern, split on '/', and then turned into a regular expression.
-// A regexp is made at the end which joins each array with an
-// escaped /, and another full one which joins each regexp with |.
-//
-// Following the lead of Bash 4.1, note that "**" only has special meaning
-// when it is the *only* thing in a path portion.  Otherwise, any series
-// of * is equivalent to a single *.  Globstar behavior is enabled by
-// default, and can be disabled by setting options.noglobstar.
-export const makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe();
-minimatch.makeRe = makeRe;
-export const match = (list, pattern, options = {}) => {
-    const mm = new Minimatch(pattern, options);
-    list = list.filter(f => mm.match(f));
-    if (mm.options.nonull && !list.length) {
-        list.push(pattern);
-    }
-    return list;
-};
-minimatch.match = match;
-// replace stuff like \* with *
-const globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/;
-const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
-export class Minimatch {
-    options;
-    set;
-    pattern;
-    windowsPathsNoEscape;
-    nonegate;
-    negate;
-    comment;
-    empty;
-    preserveMultipleSlashes;
-    partial;
-    globSet;
-    globParts;
-    nocase;
-    isWindows;
-    platform;
-    windowsNoMagicRoot;
-    regexp;
-    constructor(pattern, options = {}) {
-        assertValidPattern(pattern);
-        options = options || {};
-        this.options = options;
-        this.pattern = pattern;
-        this.platform = options.platform || defaultPlatform;
-        this.isWindows = this.platform === 'win32';
-        this.windowsPathsNoEscape =
-            !!options.windowsPathsNoEscape || options.allowWindowsEscape === false;
-        if (this.windowsPathsNoEscape) {
-            this.pattern = this.pattern.replace(/\\/g, '/');
-        }
-        this.preserveMultipleSlashes = !!options.preserveMultipleSlashes;
-        this.regexp = null;
-        this.negate = false;
-        this.nonegate = !!options.nonegate;
-        this.comment = false;
-        this.empty = false;
-        this.partial = !!options.partial;
-        this.nocase = !!this.options.nocase;
-        this.windowsNoMagicRoot =
-            options.windowsNoMagicRoot !== undefined
-                ? options.windowsNoMagicRoot
-                : !!(this.isWindows && this.nocase);
-        this.globSet = [];
-        this.globParts = [];
-        this.set = [];
-        // make the set of regexps etc.
-        this.make();
-    }
-    hasMagic() {
-        if (this.options.magicalBraces && this.set.length > 1) {
-            return true;
-        }
-        for (const pattern of this.set) {
-            for (const part of pattern) {
-                if (typeof part !== 'string')
-                    return true;
-            }
-        }
-        return false;
-    }
-    debug(..._) { }
-    make() {
-        const pattern = this.pattern;
-        const options = this.options;
-        // empty patterns and comments match nothing.
-        if (!options.nocomment && pattern.charAt(0) === '#') {
-            this.comment = true;
-            return;
-        }
-        if (!pattern) {
-            this.empty = true;
-            return;
-        }
-        // step 1: figure out negation, etc.
-        this.parseNegate();
-        // step 2: expand braces
-        this.globSet = [...new Set(this.braceExpand())];
-        if (options.debug) {
-            this.debug = (...args) => console.error(...args);
-        }
-        this.debug(this.pattern, this.globSet);
-        // step 3: now we have a set, so turn each one into a series of
-        // path-portion matching patterns.
-        // These will be regexps, except in the case of "**", which is
-        // set to the GLOBSTAR object for globstar behavior,
-        // and will not contain any / characters
-        //
-        // First, we preprocess to make the glob pattern sets a bit simpler
-        // and deduped.  There are some perf-killing patterns that can cause
-        // problems with a glob walk, but we can simplify them down a bit.
-        const rawGlobParts = this.globSet.map(s => this.slashSplit(s));
-        this.globParts = this.preprocess(rawGlobParts);
-        this.debug(this.pattern, this.globParts);
-        // glob --> regexps
-        let set = this.globParts.map((s, _, __) => {
-            if (this.isWindows && this.windowsNoMagicRoot) {
-                // check if it's a drive or unc path.
-                const isUNC = s[0] === '' &&
-                    s[1] === '' &&
-                    (s[2] === '?' || !globMagic.test(s[2])) &&
-                    !globMagic.test(s[3]);
-                const isDrive = /^[a-z]:/i.test(s[0]);
-                if (isUNC) {
-                    return [...s.slice(0, 4), ...s.slice(4).map(ss => this.parse(ss))];
-                }
-                else if (isDrive) {
-                    return [s[0], ...s.slice(1).map(ss => this.parse(ss))];
-                }
-            }
-            return s.map(ss => this.parse(ss));
-        });
-        this.debug(this.pattern, set);
-        // filter out everything that didn't compile properly.
-        this.set = set.filter(s => s.indexOf(false) === -1);
-        // do not treat the ? in UNC paths as magic
-        if (this.isWindows) {
-            for (let i = 0; i < this.set.length; i++) {
-                const p = this.set[i];
-                if (p[0] === '' &&
-                    p[1] === '' &&
-                    this.globParts[i][2] === '?' &&
-                    typeof p[3] === 'string' &&
-                    /^[a-z]:$/i.test(p[3])) {
-                    p[2] = '?';
-                }
-            }
-        }
-        this.debug(this.pattern, this.set);
-    }
-    // various transforms to equivalent pattern sets that are
-    // faster to process in a filesystem walk.  The goal is to
-    // eliminate what we can, and push all ** patterns as far
-    // to the right as possible, even if it increases the number
-    // of patterns that we have to process.
-    preprocess(globParts) {
-        // if we're not in globstar mode, then turn all ** into *
-        if (this.options.noglobstar) {
-            for (let i = 0; i < globParts.length; i++) {
-                for (let j = 0; j < globParts[i].length; j++) {
-                    if (globParts[i][j] === '**') {
-                        globParts[i][j] = '*';
-                    }
-                }
-            }
-        }
-        const { optimizationLevel = 1 } = this.options;
-        if (optimizationLevel >= 2) {
-            // aggressive optimization for the purpose of fs walking
-            globParts = this.firstPhasePreProcess(globParts);
-            globParts = this.secondPhasePreProcess(globParts);
-        }
-        else if (optimizationLevel >= 1) {
-            // just basic optimizations to remove some .. parts
-            globParts = this.levelOneOptimize(globParts);
-        }
-        else {
-            // just collapse multiple ** portions into one
-            globParts = this.adjascentGlobstarOptimize(globParts);
-        }
-        return globParts;
-    }
-    // just get rid of adjascent ** portions
-    adjascentGlobstarOptimize(globParts) {
-        return globParts.map(parts => {
-            let gs = -1;
-            while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
-                let i = gs;
-                while (parts[i + 1] === '**') {
-                    i++;
-                }
-                if (i !== gs) {
-                    parts.splice(gs, i - gs);
-                }
-            }
-            return parts;
-        });
-    }
-    // get rid of adjascent ** and resolve .. portions
-    levelOneOptimize(globParts) {
-        return globParts.map(parts => {
-            parts = parts.reduce((set, part) => {
-                const prev = set[set.length - 1];
-                if (part === '**' && prev === '**') {
-                    return set;
-                }
-                if (part === '..') {
-                    if (prev && prev !== '..' && prev !== '.' && prev !== '**') {
-                        set.pop();
-                        return set;
-                    }
-                }
-                set.push(part);
-                return set;
-            }, []);
-            return parts.length === 0 ? [''] : parts;
-        });
-    }
-    levelTwoFileOptimize(parts) {
-        if (!Array.isArray(parts)) {
-            parts = this.slashSplit(parts);
-        }
-        let didSomething = false;
-        do {
-            didSomething = false;
-            // <pre>/<e>/<rest> -> <pre>/<rest>
-            if (!this.preserveMultipleSlashes) {
-                for (let i = 1; i < parts.length - 1; i++) {
-                    const p = parts[i];
-                    // don't squeeze out UNC patterns
-                    if (i === 1 && p === '' && parts[0] === '')
-                        continue;
-                    if (p === '.' || p === '') {
-                        didSomething = true;
-                        parts.splice(i, 1);
-                        i--;
-                    }
-                }
-                if (parts[0] === '.' &&
-                    parts.length === 2 &&
-                    (parts[1] === '.' || parts[1] === '')) {
-                    didSomething = true;
-                    parts.pop();
-                }
-            }
-            // <pre>/<p>/../<rest> -> <pre>/<rest>
-            let dd = 0;
-            while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
-                const p = parts[dd - 1];
-                if (p && p !== '.' && p !== '..' && p !== '**') {
-                    didSomething = true;
-                    parts.splice(dd - 1, 2);
-                    dd -= 2;
-                }
-            }
-        } while (didSomething);
-        return parts.length === 0 ? [''] : parts;
-    }
-    // First phase: single-pattern processing
-    // <pre> is 1 or more portions
-    // <rest> is 1 or more portions
-    // <p> is any portion other than ., .., '', or **
-    // <e> is . or ''
-    //
-    // **/.. is *brutal* for filesystem walking performance, because
-    // it effectively resets the recursive walk each time it occurs,
-    // and ** cannot be reduced out by a .. pattern part like a regexp
-    // or most strings (other than .., ., and '') can be.
-    //
-    // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
-    // <pre>/<e>/<rest> -> <pre>/<rest>
-    // <pre>/<p>/../<rest> -> <pre>/<rest>
-    // **/**/<rest> -> **/<rest>
-    //
-    // **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow
-    // this WOULD be allowed if ** did follow symlinks, or * didn't
-    firstPhasePreProcess(globParts) {
-        let didSomething = false;
-        do {
-            didSomething = false;
-            // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
-            for (let parts of globParts) {
-                let gs = -1;
-                while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
-                    let gss = gs;
-                    while (parts[gss + 1] === '**') {
-                        // <pre>/**/**/<rest> -> <pre>/**/<rest>
-                        gss++;
-                    }
-                    // eg, if gs is 2 and gss is 4, that means we have 3 **
-                    // parts, and can remove 2 of them.
-                    if (gss > gs) {
-                        parts.splice(gs + 1, gss - gs);
-                    }
-                    let next = parts[gs + 1];
-                    const p = parts[gs + 2];
-                    const p2 = parts[gs + 3];
-                    if (next !== '..')
-                        continue;
-                    if (!p ||
-                        p === '.' ||
-                        p === '..' ||
-                        !p2 ||
-                        p2 === '.' ||
-                        p2 === '..') {
-                        continue;
-                    }
-                    didSomething = true;
-                    // edit parts in place, and push the new one
-                    parts.splice(gs, 1);
-                    const other = parts.slice(0);
-                    other[gs] = '**';
-                    globParts.push(other);
-                    gs--;
-                }
-                // <pre>/<e>/<rest> -> <pre>/<rest>
-                if (!this.preserveMultipleSlashes) {
-                    for (let i = 1; i < parts.length - 1; i++) {
-                        const p = parts[i];
-                        // don't squeeze out UNC patterns
-                        if (i === 1 && p === '' && parts[0] === '')
-                            continue;
-                        if (p === '.' || p === '') {
-                            didSomething = true;
-                            parts.splice(i, 1);
-                            i--;
-                        }
-                    }
-                    if (parts[0] === '.' &&
-                        parts.length === 2 &&
-                        (parts[1] === '.' || parts[1] === '')) {
-                        didSomething = true;
-                        parts.pop();
-                    }
-                }
-                // <pre>/<p>/../<rest> -> <pre>/<rest>
-                let dd = 0;
-                while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
-                    const p = parts[dd - 1];
-                    if (p && p !== '.' && p !== '..' && p !== '**') {
-                        didSomething = true;
-                        const needDot = dd === 1 && parts[dd + 1] === '**';
-                        const splin = needDot ? ['.'] : [];
-                        parts.splice(dd - 1, 2, ...splin);
-                        if (parts.length === 0)
-                            parts.push('');
-                        dd -= 2;
-                    }
-                }
-            }
-        } while (didSomething);
-        return globParts;
-    }
-    // second phase: multi-pattern dedupes
-    // {<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>
-    // {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>
-    // {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>
-    //
-    // {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>
-    // ^-- not valid because ** doens't follow symlinks
-    secondPhasePreProcess(globParts) {
-        for (let i = 0; i < globParts.length - 1; i++) {
-            for (let j = i + 1; j < globParts.length; j++) {
-                const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
-                if (matched) {
-                    globParts[i] = [];
-                    globParts[j] = matched;
-                    break;
-                }
-            }
-        }
-        return globParts.filter(gs => gs.length);
-    }
-    partsMatch(a, b, emptyGSMatch = false) {
-        let ai = 0;
-        let bi = 0;
-        let result = [];
-        let which = '';
-        while (ai < a.length && bi < b.length) {
-            if (a[ai] === b[bi]) {
-                result.push(which === 'b' ? b[bi] : a[ai]);
-                ai++;
-                bi++;
-            }
-            else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {
-                result.push(a[ai]);
-                ai++;
-            }
-            else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {
-                result.push(b[bi]);
-                bi++;
-            }
-            else if (a[ai] === '*' &&
-                b[bi] &&
-                (this.options.dot || !b[bi].startsWith('.')) &&
-                b[bi] !== '**') {
-                if (which === 'b')
-                    return false;
-                which = 'a';
-                result.push(a[ai]);
-                ai++;
-                bi++;
-            }
-            else if (b[bi] === '*' &&
-                a[ai] &&
-                (this.options.dot || !a[ai].startsWith('.')) &&
-                a[ai] !== '**') {
-                if (which === 'a')
-                    return false;
-                which = 'b';
-                result.push(b[bi]);
-                ai++;
-                bi++;
-            }
-            else {
-                return false;
-            }
-        }
-        // if we fall out of the loop, it means they two are identical
-        // as long as their lengths match
-        return a.length === b.length && result;
-    }
-    parseNegate() {
-        if (this.nonegate)
-            return;
-        const pattern = this.pattern;
-        let negate = false;
-        let negateOffset = 0;
-        for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
-            negate = !negate;
-            negateOffset++;
-        }
-        if (negateOffset)
-            this.pattern = pattern.slice(negateOffset);
-        this.negate = negate;
-    }
-    // set partial to true to test if, for example,
-    // "/a/b" matches the start of "/*/b/*/d"
-    // Partial means, if you run out of file before you run
-    // out of pattern, then that's fine, as long as all
-    // the parts match.
-    matchOne(file, pattern, partial = false) {
-        const options = this.options;
-        // UNC paths like //?/X:/... can match X:/... and vice versa
-        // Drive letters in absolute drive or unc paths are always compared
-        // case-insensitively.
-        if (this.isWindows) {
-            const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]);
-            const fileUNC = !fileDrive &&
-                file[0] === '' &&
-                file[1] === '' &&
-                file[2] === '?' &&
-                /^[a-z]:$/i.test(file[3]);
-            const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]);
-            const patternUNC = !patternDrive &&
-                pattern[0] === '' &&
-                pattern[1] === '' &&
-                pattern[2] === '?' &&
-                typeof pattern[3] === 'string' &&
-                /^[a-z]:$/i.test(pattern[3]);
-            const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;
-            const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;
-            if (typeof fdi === 'number' && typeof pdi === 'number') {
-                const [fd, pd] = [file[fdi], pattern[pdi]];
-                if (fd.toLowerCase() === pd.toLowerCase()) {
-                    pattern[pdi] = fd;
-                    if (pdi > fdi) {
-                        pattern = pattern.slice(pdi);
-                    }
-                    else if (fdi > pdi) {
-                        file = file.slice(fdi);
-                    }
-                }
-            }
-        }
-        // resolve and reduce . and .. portions in the file as well.
-        // don't need to do the second phase, because it's only one string[]
-        const { optimizationLevel = 1 } = this.options;
-        if (optimizationLevel >= 2) {
-            file = this.levelTwoFileOptimize(file);
-        }
-        this.debug('matchOne', this, { file, pattern });
-        this.debug('matchOne', file.length, pattern.length);
-        for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
-            this.debug('matchOne loop');
-            var p = pattern[pi];
-            var f = file[fi];
-            this.debug(pattern, p, f);
-            // should be impossible.
-            // some invalid regexp stuff in the set.
-            /* c8 ignore start */
-            if (p === false) {
-                return false;
-            }
-            /* c8 ignore stop */
-            if (p === GLOBSTAR) {
-                this.debug('GLOBSTAR', [pattern, p, f]);
-                // "**"
-                // a/**/b/**/c would match the following:
-                // a/b/x/y/z/c
-                // a/x/y/z/b/c
-                // a/b/x/b/x/c
-                // a/b/c
-                // To do this, take the rest of the pattern after
-                // the **, and see if it would match the file remainder.
-                // If so, return success.
-                // If not, the ** "swallows" a segment, and try again.
-                // This is recursively awful.
-                //
-                // a/**/b/**/c matching a/b/x/y/z/c
-                // - a matches a
-                // - doublestar
-                //   - matchOne(b/x/y/z/c, b/**/c)
-                //     - b matches b
-                //     - doublestar
-                //       - matchOne(x/y/z/c, c) -> no
-                //       - matchOne(y/z/c, c) -> no
-                //       - matchOne(z/c, c) -> no
-                //       - matchOne(c, c) yes, hit
-                var fr = fi;
-                var pr = pi + 1;
-                if (pr === pl) {
-                    this.debug('** at the end');
-                    // a ** at the end will just swallow the rest.
-                    // We have found a match.
-                    // however, it will not swallow /.x, unless
-                    // options.dot is set.
-                    // . and .. are *never* matched by **, for explosively
-                    // exponential reasons.
-                    for (; fi < fl; fi++) {
-                        if (file[fi] === '.' ||
-                            file[fi] === '..' ||
-                            (!options.dot && file[fi].charAt(0) === '.'))
-                            return false;
-                    }
-                    return true;
-                }
-                // ok, let's see if we can swallow whatever we can.
-                while (fr < fl) {
-                    var swallowee = file[fr];
-                    this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);
-                    // XXX remove this slice.  Just pass the start index.
-                    if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
-                        this.debug('globstar found match!', fr, fl, swallowee);
-                        // found a match.
-                        return true;
-                    }
-                    else {
-                        // can't swallow "." or ".." ever.
-                        // can only swallow ".foo" when explicitly asked.
-                        if (swallowee === '.' ||
-                            swallowee === '..' ||
-                            (!options.dot && swallowee.charAt(0) === '.')) {
-                            this.debug('dot detected!', file, fr, pattern, pr);
-                            break;
-                        }
-                        // ** swallows a segment, and continue.
-                        this.debug('globstar swallow a segment, and continue');
-                        fr++;
-                    }
-                }
-                // no match was found.
-                // However, in partial mode, we can't say this is necessarily over.
-                /* c8 ignore start */
-                if (partial) {
-                    // ran out of file
-                    this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
-                    if (fr === fl) {
-                        return true;
-                    }
-                }
-                /* c8 ignore stop */
-                return false;
-            }
-            // something other than **
-            // non-magic patterns just have to match exactly
-            // patterns with magic have been turned into regexps.
-            let hit;
-            if (typeof p === 'string') {
-                hit = f === p;
-                this.debug('string match', p, f, hit);
-            }
-            else {
-                hit = p.test(f);
-                this.debug('pattern match', p, f, hit);
-            }
-            if (!hit)
-                return false;
-        }
-        // Note: ending in / means that we'll get a final ""
-        // at the end of the pattern.  This can only match a
-        // corresponding "" at the end of the file.
-        // If the file ends in /, then it can only match a
-        // a pattern that ends in /, unless the pattern just
-        // doesn't have any more for it. But, a/b/ should *not*
-        // match "a/b/*", even though "" matches against the
-        // [^/]*? pattern, except in partial mode, where it might
-        // simply not be reached yet.
-        // However, a/b/ should still satisfy a/*
-        // now either we fell off the end of the pattern, or we're done.
-        if (fi === fl && pi === pl) {
-            // ran out of pattern and filename at the same time.
-            // an exact hit!
-            return true;
-        }
-        else if (fi === fl) {
-            // ran out of file, but still had pattern left.
-            // this is ok if we're doing the match as part of
-            // a glob fs traversal.
-            return partial;
-        }
-        else if (pi === pl) {
-            // ran out of pattern, still have file left.
-            // this is only acceptable if we're on the very last
-            // empty segment of a file with a trailing slash.
-            // a/* should match a/b/
-            return fi === fl - 1 && file[fi] === '';
-            /* c8 ignore start */
-        }
-        else {
-            // should be unreachable.
-            throw new Error('wtf?');
-        }
-        /* c8 ignore stop */
-    }
-    braceExpand() {
-        return braceExpand(this.pattern, this.options);
-    }
-    parse(pattern) {
-        assertValidPattern(pattern);
-        const options = this.options;
-        // shortcuts
-        if (pattern === '**')
-            return GLOBSTAR;
-        if (pattern === '')
-            return '';
-        // far and away, the most common glob pattern parts are
-        // *, *.*, and *.  Add a fast check method for those.
-        let m;
-        let fastTest = null;
-        if ((m = pattern.match(starRE))) {
-            fastTest = options.dot ? starTestDot : starTest;
-        }
-        else if ((m = pattern.match(starDotExtRE))) {
-            fastTest = (options.nocase
-                ? options.dot
-                    ? starDotExtTestNocaseDot
-                    : starDotExtTestNocase
-                : options.dot
-                    ? starDotExtTestDot
-                    : starDotExtTest)(m[1]);
-        }
-        else if ((m = pattern.match(qmarksRE))) {
-            fastTest = (options.nocase
-                ? options.dot
-                    ? qmarksTestNocaseDot
-                    : qmarksTestNocase
-                : options.dot
-                    ? qmarksTestDot
-                    : qmarksTest)(m);
-        }
-        else if ((m = pattern.match(starDotStarRE))) {
-            fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
-        }
-        else if ((m = pattern.match(dotStarRE))) {
-            fastTest = dotStarTest;
-        }
-        const re = AST.fromGlob(pattern, this.options).toMMPattern();
-        if (fastTest && typeof re === 'object') {
-            // Avoids overriding in frozen environments
-            Reflect.defineProperty(re, 'test', { value: fastTest });
-        }
-        return re;
-    }
-    makeRe() {
-        if (this.regexp || this.regexp === false)
-            return this.regexp;
-        // at this point, this.set is a 2d array of partial
-        // pattern strings, or "**".
-        //
-        // It's better to use .match().  This function shouldn't
-        // be used, really, but it's pretty convenient sometimes,
-        // when you just want to work with a regex.
-        const set = this.set;
-        if (!set.length) {
-            this.regexp = false;
-            return this.regexp;
-        }
-        const options = this.options;
-        const twoStar = options.noglobstar
-            ? star
-            : options.dot
-                ? twoStarDot
-                : twoStarNoDot;
-        const flags = new Set(options.nocase ? ['i'] : []);
-        // regexpify non-globstar patterns
-        // if ** is only item, then we just do one twoStar
-        // if ** is first, and there are more, prepend (\/|twoStar\/)? to next
-        // if ** is last, append (\/twoStar|) to previous
-        // if ** is in the middle, append (\/|\/twoStar\/) to previous
-        // then filter out GLOBSTAR symbols
-        let re = set
-            .map(pattern => {
-            const pp = pattern.map(p => {
-                if (p instanceof RegExp) {
-                    for (const f of p.flags.split(''))
-                        flags.add(f);
-                }
-                return typeof p === 'string'
-                    ? regExpEscape(p)
-                    : p === GLOBSTAR
-                        ? GLOBSTAR
-                        : p._src;
-            });
-            pp.forEach((p, i) => {
-                const next = pp[i + 1];
-                const prev = pp[i - 1];
-                if (p !== GLOBSTAR || prev === GLOBSTAR) {
-                    return;
-                }
-                if (prev === undefined) {
-                    if (next !== undefined && next !== GLOBSTAR) {
-                        pp[i + 1] = '(?:\\/|' + twoStar + '\\/)?' + next;
-                    }
-                    else {
-                        pp[i] = twoStar;
-                    }
-                }
-                else if (next === undefined) {
-                    pp[i - 1] = prev + '(?:\\/|' + twoStar + ')?';
-                }
-                else if (next !== GLOBSTAR) {
-                    pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + '\\/)' + next;
-                    pp[i + 1] = GLOBSTAR;
-                }
-            });
-            return pp.filter(p => p !== GLOBSTAR).join('/');
-        })
-            .join('|');
-        // need to wrap in parens if we had more than one thing with |,
-        // otherwise only the first will be anchored to ^ and the last to $
-        const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', ''];
-        // must match entire pattern
-        // ending in a * or ** will make it less strict.
-        re = '^' + open + re + close + '$';
-        // can match anything, as long as it's not this.
-        if (this.negate)
-            re = '^(?!' + re + ').+$';
-        try {
-            this.regexp = new RegExp(re, [...flags].join(''));
-            /* c8 ignore start */
-        }
-        catch (ex) {
-            // should be impossible
-            this.regexp = false;
-        }
-        /* c8 ignore stop */
-        return this.regexp;
-    }
-    slashSplit(p) {
-        // if p starts with // on windows, we preserve that
-        // so that UNC paths aren't broken.  Otherwise, any number of
-        // / characters are coalesced into one, unless
-        // preserveMultipleSlashes is set to true.
-        if (this.preserveMultipleSlashes) {
-            return p.split('/');
-        }
-        else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
-            // add an extra '' for the one we lose
-            return ['', ...p.split(/\/+/)];
-        }
-        else {
-            return p.split(/\/+/);
-        }
-    }
-    match(f, partial = this.partial) {
-        this.debug('match', f, this.pattern);
-        // short-circuit in the case of busted things.
-        // comments, etc.
-        if (this.comment) {
-            return false;
-        }
-        if (this.empty) {
-            return f === '';
-        }
-        if (f === '/' && partial) {
-            return true;
-        }
-        const options = this.options;
-        // windows: need to use /, not \
-        if (this.isWindows) {
-            f = f.split('\\').join('/');
-        }
-        // treat the test path as a set of pathparts.
-        const ff = this.slashSplit(f);
-        this.debug(this.pattern, 'split', ff);
-        // just ONE of the pattern sets in this.set needs to match
-        // in order for it to be valid.  If negating, then just one
-        // match means that we have failed.
-        // Either way, return on the first hit.
-        const set = this.set;
-        this.debug(this.pattern, 'set', set);
-        // Find the basename of the path by looking for the last non-empty segment
-        let filename = ff[ff.length - 1];
-        if (!filename) {
-            for (let i = ff.length - 2; !filename && i >= 0; i--) {
-                filename = ff[i];
-            }
-        }
-        for (let i = 0; i < set.length; i++) {
-            const pattern = set[i];
-            let file = ff;
-            if (options.matchBase && pattern.length === 1) {
-                file = [filename];
-            }
-            const hit = this.matchOne(file, pattern, partial);
-            if (hit) {
-                if (options.flipNegate) {
-                    return true;
-                }
-                return !this.negate;
-            }
-        }
-        // didn't get any hits.  this is success if it's a negative
-        // pattern, failure otherwise.
-        if (options.flipNegate) {
-            return false;
-        }
-        return this.negate;
-    }
-    static defaults(def) {
-        return minimatch.defaults(def).Minimatch;
-    }
-}
-/* c8 ignore start */
-export { AST } from './ast.js';
-export { escape } from './escape.js';
-export { unescape } from './unescape.js';
-/* c8 ignore stop */
-minimatch.AST = AST;
-minimatch.Minimatch = Minimatch;
-minimatch.escape = escape;
-minimatch.unescape = unescape;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
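
For quick reference, the deleted comments above (the GLOBSTAR handling, parseNegate, and matchOne) describe minimatch's globstar, dotfile, and negation semantics. A small usage sketch against the published minimatch API (an editorial illustration, not part of the diff) behaves as follows:

    import { minimatch, Minimatch } from 'minimatch'

    // ** may swallow any number of segments (the a/**/b/**/c walkthrough above)
    console.log(minimatch('a/b/x/y/z/c', 'a/**/b/**/c'))       // true

    // dotfiles are only matched by * and ** when { dot: true } is set
    console.log(minimatch('a/.y/c', 'a/**/c'))                  // false
    console.log(minimatch('a/.y/c', 'a/**/c', { dot: true }))   // true

    // a leading '!' negates the whole pattern (see parseNegate)
    console.log(minimatch('readme.txt', '!*.md'))               // true

    // Minimatch instances expose match() and makeRe()
    const mm = new Minimatch('src/**/*.js')
    console.log(mm.match('src/lib/index.js'))                   // true
    console.log(mm.makeRe() instanceof RegExp)                  // true
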
diff --git a/node_modules/node-gyp/node_modules/minimatch/dist/esm/package.json b/node_modules/node-gyp/node_modules/minimatch/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/node-gyp/node_modules/minimatch/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/node-gyp/node_modules/minimatch/dist/esm/unescape.js b/node_modules/node-gyp/node_modules/minimatch/dist/esm/unescape.js
deleted file mode 100644
index 0faf9a2b7306f..0000000000000
--- a/node_modules/node-gyp/node_modules/minimatch/dist/esm/unescape.js
+++ /dev/null
@@ -1,20 +0,0 @@
-/**
- * Un-escape a string that has been escaped with {@link escape}.
- *
- * If the {@link windowsPathsNoEscape} option is used, then square-brace
- * escapes are removed, but not backslash escapes.  For example, it will turn
- * the string `'[*]'` into `*`, but it will not turn `'\\*'` into `'*'`,
- * because `\` is a path separator in `windowsPathsNoEscape` mode,
- *
- * When `windowsPathsNoEscape` is not set, then both brace escapes and
- * backslash escapes are removed.
- *
- * Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped
- * or unescaped.
- */
-export const unescape = (s, { windowsPathsNoEscape = false, } = {}) => {
-    return windowsPathsNoEscape
-        ? s.replace(/\[([^\/\\])\]/g, '$1')
-        : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2').replace(/\\([^\/])/g, '$1');
-};
-//# sourceMappingURL=unescape.js.map
\ No newline at end of file
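
The JSDoc of the deleted unescape.js above fully specifies its behaviour. As an editorial illustration (not part of the diff), exercising the published package, which re-exports unescape from its root, gives:

    import { unescape } from 'minimatch'

    console.log(unescape('a[*]b'))  // 'a*b' (square-brace escape removed)
    console.log(unescape('\\*'))    // '*'   (backslash escape removed by default)
    // with windowsPathsNoEscape, backslashes are path separators and are kept
    console.log(unescape('\\*', { windowsPathsNoEscape: true }))    // '\*'
    console.log(unescape('a[*]b', { windowsPathsNoEscape: true }))  // 'a*b'
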
diff --git a/node_modules/node-gyp/node_modules/minimatch/package.json b/node_modules/node-gyp/node_modules/minimatch/package.json
deleted file mode 100644
index 01fc48ecfd6a9..0000000000000
--- a/node_modules/node-gyp/node_modules/minimatch/package.json
+++ /dev/null
@@ -1,82 +0,0 @@
-{
-  "author": "Isaac Z. Schlueter  (http://blog.izs.me)",
-  "name": "minimatch",
-  "description": "a glob matcher in javascript",
-  "version": "9.0.5",
-  "repository": {
-    "type": "git",
-    "url": "git://github.com/isaacs/minimatch.git"
-  },
-  "main": "./dist/commonjs/index.js",
-  "types": "./dist/commonjs/index.d.ts",
-  "exports": {
-    "./package.json": "./package.json",
-    ".": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.js"
-      }
-    }
-  },
-  "files": [
-    "dist"
-  ],
-  "scripts": {
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "prepare": "tshy",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "test": "tap",
-    "snap": "tap",
-    "format": "prettier --write . --loglevel warn",
-    "benchmark": "node benchmark/index.js",
-    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
-  },
-  "prettier": {
-    "semi": false,
-    "printWidth": 80,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "engines": {
-    "node": ">=16 || 14 >=14.17"
-  },
-  "dependencies": {
-    "brace-expansion": "^2.0.1"
-  },
-  "devDependencies": {
-    "@types/brace-expansion": "^1.1.0",
-    "@types/node": "^18.15.11",
-    "@types/tap": "^15.0.8",
-    "eslint-config-prettier": "^8.6.0",
-    "mkdirp": "1",
-    "prettier": "^2.8.2",
-    "tap": "^18.7.2",
-    "ts-node": "^10.9.1",
-    "tshy": "^1.12.0",
-    "typedoc": "^0.23.21",
-    "typescript": "^4.9.3"
-  },
-  "funding": {
-    "url": "https://github.com/sponsors/isaacs"
-  },
-  "license": "ISC",
-  "tshy": {
-    "exports": {
-      "./package.json": "./package.json",
-      ".": "./src/index.ts"
-    }
-  },
-  "type": "module"
-}
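
The deleted package.json above is a tshy-built dual package: Node's conditional "exports" route import to ./dist/esm and require to ./dist/commonjs, and the nested dist/esm/package.json removed earlier keeps that folder explicitly marked as ESM. A minimal routing check (an editorial illustration, not part of the diff):

    import { createRequire } from 'node:module'
    import { minimatch as esmMinimatch } from 'minimatch'     // "import" -> ./dist/esm/index.js

    const require = createRequire(import.meta.url)
    const { minimatch: cjsMinimatch } = require('minimatch')  // "require" -> ./dist/commonjs/index.js

    console.log(esmMinimatch('foo.js', '*.js'))  // true
    console.log(cjsMinimatch('foo.js', '*.js'))  // true
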
diff --git a/node_modules/node-gyp/node_modules/path-scurry/dist/commonjs/index.js b/node_modules/node-gyp/node_modules/path-scurry/dist/commonjs/index.js
deleted file mode 100644
index 555de62f04c90..0000000000000
--- a/node_modules/node-gyp/node_modules/path-scurry/dist/commonjs/index.js
+++ /dev/null
@@ -1,2014 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.PathScurry = exports.Path = exports.PathScurryDarwin = exports.PathScurryPosix = exports.PathScurryWin32 = exports.PathScurryBase = exports.PathPosix = exports.PathWin32 = exports.PathBase = exports.ChildrenCache = exports.ResolveCache = void 0;
-const lru_cache_1 = require("lru-cache");
-const node_path_1 = require("node:path");
-const node_url_1 = require("node:url");
-const fs_1 = require("fs");
-const actualFS = __importStar(require("node:fs"));
-const realpathSync = fs_1.realpathSync.native;
-// TODO: test perf of fs/promises realpath vs realpathCB,
-// since the promises one uses realpath.native
-const promises_1 = require("node:fs/promises");
-const minipass_1 = require("minipass");
-const defaultFS = {
-    lstatSync: fs_1.lstatSync,
-    readdir: fs_1.readdir,
-    readdirSync: fs_1.readdirSync,
-    readlinkSync: fs_1.readlinkSync,
-    realpathSync,
-    promises: {
-        lstat: promises_1.lstat,
-        readdir: promises_1.readdir,
-        readlink: promises_1.readlink,
-        realpath: promises_1.realpath,
-    },
-};
-// if they just gave us require('fs') then use our default
-const fsFromOption = (fsOption) => !fsOption || fsOption === defaultFS || fsOption === actualFS ?
-    defaultFS
-    : {
-        ...defaultFS,
-        ...fsOption,
-        promises: {
-            ...defaultFS.promises,
-            ...(fsOption.promises || {}),
-        },
-    };
-// turn something like //?/c:/ into c:\
-const uncDriveRegexp = /^\\\\\?\\([a-z]:)\\?$/i;
-const uncToDrive = (rootPath) => rootPath.replace(/\//g, '\\').replace(uncDriveRegexp, '$1\\');
-// windows paths are separated by either / or \
-const eitherSep = /[\\\/]/;
-const UNKNOWN = 0; // may not even exist, for all we know
-const IFIFO = 0b0001;
-const IFCHR = 0b0010;
-const IFDIR = 0b0100;
-const IFBLK = 0b0110;
-const IFREG = 0b1000;
-const IFLNK = 0b1010;
-const IFSOCK = 0b1100;
-const IFMT = 0b1111;
-// mask to unset low 4 bits
-const IFMT_UNKNOWN = ~IFMT;
-// set after successfully calling readdir() and getting entries.
-const READDIR_CALLED = 0b0000_0001_0000;
-// set after a successful lstat()
-const LSTAT_CALLED = 0b0000_0010_0000;
-// set if an entry (or one of its parents) is definitely not a dir
-const ENOTDIR = 0b0000_0100_0000;
-// set if an entry (or one of its parents) does not exist
-// (can also be set on lstat errors like EACCES or ENAMETOOLONG)
-const ENOENT = 0b0000_1000_0000;
-// cannot have child entries -- also verify &IFMT is either IFDIR or IFLNK
-// set if we fail to readlink
-const ENOREADLINK = 0b0001_0000_0000;
-// set if we know realpath() will fail
-const ENOREALPATH = 0b0010_0000_0000;
-const ENOCHILD = ENOTDIR | ENOENT | ENOREALPATH;
-const TYPEMASK = 0b0011_1111_1111;
-const entToType = (s) => s.isFile() ? IFREG
-    : s.isDirectory() ? IFDIR
-        : s.isSymbolicLink() ? IFLNK
-            : s.isCharacterDevice() ? IFCHR
-                : s.isBlockDevice() ? IFBLK
-                    : s.isSocket() ? IFSOCK
-                        : s.isFIFO() ? IFIFO
-                            : UNKNOWN;
-// normalize unicode path names
-const normalizeCache = new Map();
-const normalize = (s) => {
-    const c = normalizeCache.get(s);
-    if (c)
-        return c;
-    const n = s.normalize('NFKD');
-    normalizeCache.set(s, n);
-    return n;
-};
-const normalizeNocaseCache = new Map();
-const normalizeNocase = (s) => {
-    const c = normalizeNocaseCache.get(s);
-    if (c)
-        return c;
-    const n = normalize(s.toLowerCase());
-    normalizeNocaseCache.set(s, n);
-    return n;
-};
-/**
- * An LRUCache for storing resolved path strings or Path objects.
- * @internal
- */
-class ResolveCache extends lru_cache_1.LRUCache {
-    constructor() {
-        super({ max: 256 });
-    }
-}
-exports.ResolveCache = ResolveCache;
-// In order to prevent blowing out the js heap by allocating hundreds of
-// thousands of Path entries when walking extremely large trees, the "children"
-// in this tree are represented by storing an array of Path entries in an
-// LRUCache, indexed by the parent.  At any time, Path.children() may return an
-// empty array, indicating that it doesn't know about any of its children, and
-// thus has to rebuild that cache.  This is fine, it just means that we don't
-// benefit as much from having the cached entries, but huge directory walks
-// don't blow out the stack, and smaller ones are still as fast as possible.
-//
-//It does impose some complexity when building up the readdir data, because we
-//need to pass a reference to the children array that we started with.
-/**
- * an LRUCache for storing child entries.
- * @internal
- */
-class ChildrenCache extends lru_cache_1.LRUCache {
-    constructor(maxSize = 16 * 1024) {
-        super({
-            maxSize,
-            // parent + children
-            sizeCalculation: a => a.length + 1,
-        });
-    }
-}
-exports.ChildrenCache = ChildrenCache;
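// Editor's sketch, not part of the deleted file: the comment block above explains
// why child listings live in a size-aware LRU rather than being pinned on each
// Path. The same idea stand-alone, using the lru-cache API this file already
// requires (the names below are illustrative only):
const { LRUCache: IllustrativeLRU } = require('lru-cache');
const childListings = new IllustrativeLRU({
    maxSize: 16 * 1024,
    // each cached listing is charged "parent + children" units, as in ChildrenCache
    sizeCalculation: a => a.length + 1,
});
childListings.set('/some/dir', ['a.txt', 'b.txt']);
childListings.get('/some/dir'); // ['a.txt', 'b.txt'], until evicted under size pressure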
-const setAsCwd = Symbol('PathScurry setAsCwd');
-/**
- * Path objects are sort of like a super-powered
- * {@link https://nodejs.org/docs/latest/api/fs.html#class-fsdirent fs.Dirent}
- *
- * Each one represents a single filesystem entry on disk, which may or may not
- * exist. It includes methods for reading various types of information via
- * lstat, readlink, and readdir, and caches all information to the greatest
- * degree possible.
- *
- * Note that fs operations that would normally throw will instead return an
- * "empty" value. This is in order to prevent excessive overhead from error
- * stack traces.
- */
-class PathBase {
-    /**
-     * the basename of this path
-     *
-     * **Important**: *always* test the path name against any test string
-     * using the {@link isNamed} method, and not by directly comparing this
-     * string. Otherwise, unicode path strings that the system sees as identical
-     * will not be properly treated as the same path, leading to incorrect
-     * behavior and possible security issues.
-     */
-    name;
-    /**
-     * the Path entry corresponding to the path root.
-     *
-     * @internal
-     */
-    root;
-    /**
-     * All roots found within the current PathScurry family
-     *
-     * @internal
-     */
-    roots;
-    /**
-     * a reference to the parent path, or undefined in the case of root entries
-     *
-     * @internal
-     */
-    parent;
-    /**
-     * boolean indicating whether paths are compared case-insensitively
-     * @internal
-     */
-    nocase;
-    /**
-     * boolean indicating that this path is the current working directory
-     * of the PathScurry collection that contains it.
-     */
-    isCWD = false;
-    // potential default fs override
-    #fs;
-    // Stats fields
-    #dev;
-    get dev() {
-        return this.#dev;
-    }
-    #mode;
-    get mode() {
-        return this.#mode;
-    }
-    #nlink;
-    get nlink() {
-        return this.#nlink;
-    }
-    #uid;
-    get uid() {
-        return this.#uid;
-    }
-    #gid;
-    get gid() {
-        return this.#gid;
-    }
-    #rdev;
-    get rdev() {
-        return this.#rdev;
-    }
-    #blksize;
-    get blksize() {
-        return this.#blksize;
-    }
-    #ino;
-    get ino() {
-        return this.#ino;
-    }
-    #size;
-    get size() {
-        return this.#size;
-    }
-    #blocks;
-    get blocks() {
-        return this.#blocks;
-    }
-    #atimeMs;
-    get atimeMs() {
-        return this.#atimeMs;
-    }
-    #mtimeMs;
-    get mtimeMs() {
-        return this.#mtimeMs;
-    }
-    #ctimeMs;
-    get ctimeMs() {
-        return this.#ctimeMs;
-    }
-    #birthtimeMs;
-    get birthtimeMs() {
-        return this.#birthtimeMs;
-    }
-    #atime;
-    get atime() {
-        return this.#atime;
-    }
-    #mtime;
-    get mtime() {
-        return this.#mtime;
-    }
-    #ctime;
-    get ctime() {
-        return this.#ctime;
-    }
-    #birthtime;
-    get birthtime() {
-        return this.#birthtime;
-    }
-    #matchName;
-    #depth;
-    #fullpath;
-    #fullpathPosix;
-    #relative;
-    #relativePosix;
-    #type;
-    #children;
-    #linkTarget;
-    #realpath;
-    /**
-     * This property is for compatibility with the Dirent class as of
-     * Node v20, where Dirent['parentPath'] refers to the path of the
-     * directory that was passed to readdir. For root entries, it's the path
-     * to the entry itself.
-     */
-    get parentPath() {
-        return (this.parent || this).fullpath();
-    }
-    /**
-     * Deprecated alias for Dirent['parentPath'] Somewhat counterintuitively,
-     * this property refers to the *parent* path, not the path object itself.
-     */
-    get path() {
-        return this.parentPath;
-    }
-    /**
-     * Do not create new Path objects directly.  They should always be accessed
-     * via the PathScurry class or other methods on the Path class.
-     *
-     * @internal
-     */
-    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
-        this.name = name;
-        this.#matchName = nocase ? normalizeNocase(name) : normalize(name);
-        this.#type = type & TYPEMASK;
-        this.nocase = nocase;
-        this.roots = roots;
-        this.root = root || this;
-        this.#children = children;
-        this.#fullpath = opts.fullpath;
-        this.#relative = opts.relative;
-        this.#relativePosix = opts.relativePosix;
-        this.parent = opts.parent;
-        if (this.parent) {
-            this.#fs = this.parent.#fs;
-        }
-        else {
-            this.#fs = fsFromOption(opts.fs);
-        }
-    }
-    /**
-     * Returns the depth of the Path object from its root.
-     *
-     * For example, a path at `/foo/bar` would have a depth of 2.
-     */
-    depth() {
-        if (this.#depth !== undefined)
-            return this.#depth;
-        if (!this.parent)
-            return (this.#depth = 0);
-        return (this.#depth = this.parent.depth() + 1);
-    }
-    /**
-     * @internal
-     */
-    childrenCache() {
-        return this.#children;
-    }
-    /**
-     * Get the Path object referenced by the string path, resolved from this Path
-     */
-    resolve(path) {
-        if (!path) {
-            return this;
-        }
-        const rootPath = this.getRootString(path);
-        const dir = path.substring(rootPath.length);
-        const dirParts = dir.split(this.splitSep);
-        const result = rootPath ?
-            this.getRoot(rootPath).#resolveParts(dirParts)
-            : this.#resolveParts(dirParts);
-        return result;
-    }
-    #resolveParts(dirParts) {
-        let p = this;
-        for (const part of dirParts) {
-            p = p.child(part);
-        }
-        return p;
-    }
-    /**
-     * Returns the cached children Path objects, if still available.  If they
-     * have fallen out of the cache, then returns an empty array, and resets the
-     * READDIR_CALLED bit, so that future calls to readdir() will require an fs
-     * lookup.
-     *
-     * @internal
-     */
-    children() {
-        const cached = this.#children.get(this);
-        if (cached) {
-            return cached;
-        }
-        const children = Object.assign([], { provisional: 0 });
-        this.#children.set(this, children);
-        this.#type &= ~READDIR_CALLED;
-        return children;
-    }
-    /**
-     * Resolves a path portion and returns or creates the child Path.
-     *
-     * Returns `this` if pathPart is `''` or `'.'`, or `parent` if pathPart is
-     * `'..'`.
-     *
-     * This should not be called directly.  If `pathPart` contains any path
-     * separators, it will lead to unsafe undefined behavior.
-     *
-     * Use `Path.resolve()` instead.
-     *
-     * @internal
-     */
-    child(pathPart, opts) {
-        if (pathPart === '' || pathPart === '.') {
-            return this;
-        }
-        if (pathPart === '..') {
-            return this.parent || this;
-        }
-        // find the child
-        const children = this.children();
-        const name = this.nocase ? normalizeNocase(pathPart) : normalize(pathPart);
-        for (const p of children) {
-            if (p.#matchName === name) {
-                return p;
-            }
-        }
-        // didn't find it, create provisional child, since it might not
-        // actually exist.  If we know the parent isn't a dir, then
-        // in fact it CAN'T exist.
-        const s = this.parent ? this.sep : '';
-        const fullpath = this.#fullpath ? this.#fullpath + s + pathPart : undefined;
-        const pchild = this.newChild(pathPart, UNKNOWN, {
-            ...opts,
-            parent: this,
-            fullpath,
-        });
-        if (!this.canReaddir()) {
-            pchild.#type |= ENOENT;
-        }
-        // don't have to update provisional, because if we have real children,
-        // then provisional is set to children.length, otherwise a lower number
-        children.push(pchild);
-        return pchild;
-    }
-    /**
-     * The relative path from the cwd. If it does not share an ancestor with
-     * the cwd, then this ends up being equivalent to the fullpath()
-     */
-    relative() {
-        if (this.isCWD)
-            return '';
-        if (this.#relative !== undefined) {
-            return this.#relative;
-        }
-        const name = this.name;
-        const p = this.parent;
-        if (!p) {
-            return (this.#relative = this.name);
-        }
-        const pv = p.relative();
-        return pv + (!pv || !p.parent ? '' : this.sep) + name;
-    }
-    /**
-     * The relative path from the cwd, using / as the path separator.
-     * If it does not share an ancestor with
-     * the cwd, then this ends up being equivalent to the fullpathPosix()
-     * On posix systems, this is identical to relative().
-     */
-    relativePosix() {
-        if (this.sep === '/')
-            return this.relative();
-        if (this.isCWD)
-            return '';
-        if (this.#relativePosix !== undefined)
-            return this.#relativePosix;
-        const name = this.name;
-        const p = this.parent;
-        if (!p) {
-            return (this.#relativePosix = this.fullpathPosix());
-        }
-        const pv = p.relativePosix();
-        return pv + (!pv || !p.parent ? '' : '/') + name;
-    }
-    /**
-     * The fully resolved path string for this Path entry
-     */
-    fullpath() {
-        if (this.#fullpath !== undefined) {
-            return this.#fullpath;
-        }
-        const name = this.name;
-        const p = this.parent;
-        if (!p) {
-            return (this.#fullpath = this.name);
-        }
-        const pv = p.fullpath();
-        const fp = pv + (!p.parent ? '' : this.sep) + name;
-        return (this.#fullpath = fp);
-    }
-    /**
-     * On platforms other than windows, this is identical to fullpath.
-     *
-     * On windows, this is overridden to return the forward-slash form of the
-     * full UNC path.
-     */
-    fullpathPosix() {
-        if (this.#fullpathPosix !== undefined)
-            return this.#fullpathPosix;
-        if (this.sep === '/')
-            return (this.#fullpathPosix = this.fullpath());
-        if (!this.parent) {
-            const p = this.fullpath().replace(/\\/g, '/');
-            if (/^[a-z]:\//i.test(p)) {
-                return (this.#fullpathPosix = `//?/${p}`);
-            }
-            else {
-                return (this.#fullpathPosix = p);
-            }
-        }
-        const p = this.parent;
-        const pfpp = p.fullpathPosix();
-        const fpp = pfpp + (!pfpp || !p.parent ? '' : '/') + this.name;
-        return (this.#fullpathPosix = fpp);
-    }
-    /**
-     * Is the Path of an unknown type?
-     *
-     * Note that we might know *something* about it if there has been a previous
-     * filesystem operation, for example that it does not exist, or is not a
-     * link, or whether it has child entries.
-     */
-    isUnknown() {
-        return (this.#type & IFMT) === UNKNOWN;
-    }
-    isType(type) {
-        return this[`is${type}`]();
-    }
-    getType() {
-        return (this.isUnknown() ? 'Unknown'
-            : this.isDirectory() ? 'Directory'
-                : this.isFile() ? 'File'
-                    : this.isSymbolicLink() ? 'SymbolicLink'
-                        : this.isFIFO() ? 'FIFO'
-                            : this.isCharacterDevice() ? 'CharacterDevice'
-                                : this.isBlockDevice() ? 'BlockDevice'
-                                    : /* c8 ignore start */ this.isSocket() ? 'Socket'
-                                        : 'Unknown');
-        /* c8 ignore stop */
-    }
-    /**
-     * Is the Path a regular file?
-     */
-    isFile() {
-        return (this.#type & IFMT) === IFREG;
-    }
-    /**
-     * Is the Path a directory?
-     */
-    isDirectory() {
-        return (this.#type & IFMT) === IFDIR;
-    }
-    /**
-     * Is the path a character device?
-     */
-    isCharacterDevice() {
-        return (this.#type & IFMT) === IFCHR;
-    }
-    /**
-     * Is the path a block device?
-     */
-    isBlockDevice() {
-        return (this.#type & IFMT) === IFBLK;
-    }
-    /**
-     * Is the path a FIFO pipe?
-     */
-    isFIFO() {
-        return (this.#type & IFMT) === IFIFO;
-    }
-    /**
-     * Is the path a socket?
-     */
-    isSocket() {
-        return (this.#type & IFMT) === IFSOCK;
-    }
-    /**
-     * Is the path a symbolic link?
-     */
-    isSymbolicLink() {
-        return (this.#type & IFLNK) === IFLNK;
-    }
-    /**
-     * Return the entry if it has been subject of a successful lstat, or
-     * undefined otherwise.
-     *
-     * Does not read the filesystem, so an undefined result *could* simply
-     * mean that we haven't called lstat on it.
-     */
-    lstatCached() {
-        return this.#type & LSTAT_CALLED ? this : undefined;
-    }
-    /**
-     * Return the cached link target if the entry has been the subject of a
-     * successful readlink, or undefined otherwise.
-     *
-     * Does not read the filesystem, so an undefined result *could* just mean we
-     * don't have any cached data. Only use it if you are very sure that a
-     * readlink() has been called at some point.
-     */
-    readlinkCached() {
-        return this.#linkTarget;
-    }
-    /**
-     * Returns the cached realpath target if the entry has been the subject
-     * of a successful realpath, or undefined otherwise.
-     *
-     * Does not read the filesystem, so an undefined result *could* just mean we
-     * don't have any cached data. Only use it if you are very sure that a
-     * realpath() has been called at some point.
-     */
-    realpathCached() {
-        return this.#realpath;
-    }
-    /**
-     * Returns the cached child Path entries array if the entry has been the
-     * subject of a successful readdir(), or [] otherwise.
-     *
-     * Does not read the filesystem, so an empty array *could* just mean we
-     * don't have any cached data. Only use it if you are very sure that a
-     * readdir() has been called recently enough to still be valid.
-     */
-    readdirCached() {
-        const children = this.children();
-        return children.slice(0, children.provisional);
-    }
-    /**
-     * Return true if it's worth trying to readlink.  Ie, we don't (yet) have
-     * any indication that readlink will definitely fail.
-     *
-     * Returns false if the path is known to not be a symlink, if a previous
-     * readlink failed, or if the entry does not exist.
-     */
-    canReadlink() {
-        if (this.#linkTarget)
-            return true;
-        if (!this.parent)
-            return false;
-        // cases where it cannot possibly succeed
-        const ifmt = this.#type & IFMT;
-        return !((ifmt !== UNKNOWN && ifmt !== IFLNK) ||
-            this.#type & ENOREADLINK ||
-            this.#type & ENOENT);
-    }
-    /**
-     * Return true if readdir has previously been successfully called on this
-     * path, indicating that cachedReaddir() is likely valid.
-     */
-    calledReaddir() {
-        return !!(this.#type & READDIR_CALLED);
-    }
-    /**
-     * Returns true if the path is known to not exist. That is, a previous lstat
-     * or readdir failed to verify its existence when that would have been
-     * expected, or a parent entry was marked either enoent or enotdir.
-     */
-    isENOENT() {
-        return !!(this.#type & ENOENT);
-    }
-    /**
-     * Return true if the path is a match for the given path name.  This handles
-     * case sensitivity and unicode normalization.
-     *
-     * Note: even on case-sensitive systems, it is **not** safe to test the
-     * equality of the `.name` property to determine whether a given pathname
-     * matches, due to unicode normalization mismatches.
-     *
-     * Always use this method instead of testing the `path.name` property
-     * directly.
-     */
-    isNamed(n) {
-        return !this.nocase ?
-            this.#matchName === normalize(n)
-            : this.#matchName === normalizeNocase(n);
-    }
-    /**
-     * Return the Path object corresponding to the target of a symbolic link.
-     *
-     * If the Path is not a symbolic link, or if the readlink call fails for any
-     * reason, `undefined` is returned.
-     *
-     * Result is cached, and thus may be outdated if the filesystem is mutated.
-     */
-    async readlink() {
-        const target = this.#linkTarget;
-        if (target) {
-            return target;
-        }
-        if (!this.canReadlink()) {
-            return undefined;
-        }
-        /* c8 ignore start */
-        // already covered by the canReadlink test, here for ts grumples
-        if (!this.parent) {
-            return undefined;
-        }
-        /* c8 ignore stop */
-        try {
-            const read = await this.#fs.promises.readlink(this.fullpath());
-            const linkTarget = (await this.parent.realpath())?.resolve(read);
-            if (linkTarget) {
-                return (this.#linkTarget = linkTarget);
-            }
-        }
-        catch (er) {
-            this.#readlinkFail(er.code);
-            return undefined;
-        }
-    }
-    /**
-     * Synchronous {@link PathBase.readlink}
-     */
-    readlinkSync() {
-        const target = this.#linkTarget;
-        if (target) {
-            return target;
-        }
-        if (!this.canReadlink()) {
-            return undefined;
-        }
-        /* c8 ignore start */
-        // already covered by the canReadlink test, here for ts grumples
-        if (!this.parent) {
-            return undefined;
-        }
-        /* c8 ignore stop */
-        try {
-            const read = this.#fs.readlinkSync(this.fullpath());
-            const linkTarget = this.parent.realpathSync()?.resolve(read);
-            if (linkTarget) {
-                return (this.#linkTarget = linkTarget);
-            }
-        }
-        catch (er) {
-            this.#readlinkFail(er.code);
-            return undefined;
-        }
-    }
-    #readdirSuccess(children) {
-        // succeeded, mark readdir called bit
-        this.#type |= READDIR_CALLED;
-        // mark all remaining provisional children as ENOENT
-        for (let p = children.provisional; p < children.length; p++) {
-            const c = children[p];
-            if (c)
-                c.#markENOENT();
-        }
-    }
-    #markENOENT() {
-        // mark as UNKNOWN and ENOENT
-        if (this.#type & ENOENT)
-            return;
-        this.#type = (this.#type | ENOENT) & IFMT_UNKNOWN;
-        this.#markChildrenENOENT();
-    }
-    #markChildrenENOENT() {
-        // all children are provisional and do not exist
-        const children = this.children();
-        children.provisional = 0;
-        for (const p of children) {
-            p.#markENOENT();
-        }
-    }
-    #markENOREALPATH() {
-        this.#type |= ENOREALPATH;
-        this.#markENOTDIR();
-    }
-    // save the information when we know the entry is not a dir
-    #markENOTDIR() {
-        // entry is not a directory, so any children can't exist.
-        // this *should* be impossible, since any children created
-        // after it's been marked ENOTDIR should be marked ENOENT,
-        // so it won't even get to this point.
-        /* c8 ignore start */
-        if (this.#type & ENOTDIR)
-            return;
-        /* c8 ignore stop */
-        let t = this.#type;
-        // this could happen if we stat a dir, then delete it,
-        // then try to read it or one of its children.
-        if ((t & IFMT) === IFDIR)
-            t &= IFMT_UNKNOWN;
-        this.#type = t | ENOTDIR;
-        this.#markChildrenENOENT();
-    }
-    #readdirFail(code = '') {
-        // markENOTDIR and markENOENT also set provisional=0
-        if (code === 'ENOTDIR' || code === 'EPERM') {
-            this.#markENOTDIR();
-        }
-        else if (code === 'ENOENT') {
-            this.#markENOENT();
-        }
-        else {
-            this.children().provisional = 0;
-        }
-    }
-    #lstatFail(code = '') {
-        // Windows just raises ENOENT in this case, disable for win CI
-        /* c8 ignore start */
-        if (code === 'ENOTDIR') {
-            // already know it has a parent by this point
-            const p = this.parent;
-            p.#markENOTDIR();
-        }
-        else if (code === 'ENOENT') {
-            /* c8 ignore stop */
-            this.#markENOENT();
-        }
-    }
-    #readlinkFail(code = '') {
-        let ter = this.#type;
-        ter |= ENOREADLINK;
-        if (code === 'ENOENT')
-            ter |= ENOENT;
-        // windows gets a weird error when you try to readlink a file
-        if (code === 'EINVAL' || code === 'UNKNOWN') {
-            // exists, but not a symlink, we don't know WHAT it is, so remove
-            // all IFMT bits.
-            ter &= IFMT_UNKNOWN;
-        }
-        this.#type = ter;
-        // windows just gets ENOENT in this case.  We do cover the case,
-        // just disabled because it's impossible on Windows CI
-        /* c8 ignore start */
-        if (code === 'ENOTDIR' && this.parent) {
-            this.parent.#markENOTDIR();
-        }
-        /* c8 ignore stop */
-    }
-    #readdirAddChild(e, c) {
-        return (this.#readdirMaybePromoteChild(e, c) ||
-            this.#readdirAddNewChild(e, c));
-    }
-    #readdirAddNewChild(e, c) {
-        // alloc new entry at head, so it's never provisional
-        const type = entToType(e);
-        const child = this.newChild(e.name, type, { parent: this });
-        const ifmt = child.#type & IFMT;
-        if (ifmt !== IFDIR && ifmt !== IFLNK && ifmt !== UNKNOWN) {
-            child.#type |= ENOTDIR;
-        }
-        c.unshift(child);
-        c.provisional++;
-        return child;
-    }
-    #readdirMaybePromoteChild(e, c) {
-        for (let p = c.provisional; p < c.length; p++) {
-            const pchild = c[p];
-            const name = this.nocase ? normalizeNocase(e.name) : normalize(e.name);
-            if (name !== pchild.#matchName) {
-                continue;
-            }
-            return this.#readdirPromoteChild(e, pchild, p, c);
-        }
-    }
-    #readdirPromoteChild(e, p, index, c) {
-        const v = p.name;
-        // retain any other flags, but set ifmt from dirent
-        p.#type = (p.#type & IFMT_UNKNOWN) | entToType(e);
-        // case sensitivity fixing when we learn the true name.
-        if (v !== e.name)
-            p.name = e.name;
-        // just advance provisional index (potentially off the list),
-        // otherwise we have to splice/pop it out and re-insert at head
-        if (index !== c.provisional) {
-            if (index === c.length - 1)
-                c.pop();
-            else
-                c.splice(index, 1);
-            c.unshift(p);
-        }
-        c.provisional++;
-        return p;
-    }
-    /**
-     * Call lstat() on this Path, and update all known information that can be
-     * determined.
-     *
-     * Note that unlike `fs.lstat()`, the returned value does not contain some
-     * information, such as `mode`, `dev`, `nlink`, and `ino`.  If that
-     * information is required, you will need to call `fs.lstat` yourself.
-     *
-     * If the Path refers to a nonexistent file, or if the lstat call fails for
-     * any reason, `undefined` is returned.  Otherwise the updated Path object is
-     * returned.
-     *
-     * Results are cached, and thus may be out of date if the filesystem is
-     * mutated.
-     */
-    async lstat() {
-        if ((this.#type & ENOENT) === 0) {
-            try {
-                this.#applyStat(await this.#fs.promises.lstat(this.fullpath()));
-                return this;
-            }
-            catch (er) {
-                this.#lstatFail(er.code);
-            }
-        }
-    }
-    /**
-     * synchronous {@link PathBase.lstat}
-     */
-    lstatSync() {
-        if ((this.#type & ENOENT) === 0) {
-            try {
-                this.#applyStat(this.#fs.lstatSync(this.fullpath()));
-                return this;
-            }
-            catch (er) {
-                this.#lstatFail(er.code);
-            }
-        }
-    }
-    #applyStat(st) {
-        const { atime, atimeMs, birthtime, birthtimeMs, blksize, blocks, ctime, ctimeMs, dev, gid, ino, mode, mtime, mtimeMs, nlink, rdev, size, uid, } = st;
-        this.#atime = atime;
-        this.#atimeMs = atimeMs;
-        this.#birthtime = birthtime;
-        this.#birthtimeMs = birthtimeMs;
-        this.#blksize = blksize;
-        this.#blocks = blocks;
-        this.#ctime = ctime;
-        this.#ctimeMs = ctimeMs;
-        this.#dev = dev;
-        this.#gid = gid;
-        this.#ino = ino;
-        this.#mode = mode;
-        this.#mtime = mtime;
-        this.#mtimeMs = mtimeMs;
-        this.#nlink = nlink;
-        this.#rdev = rdev;
-        this.#size = size;
-        this.#uid = uid;
-        const ifmt = entToType(st);
-        // retain any other flags, but set the ifmt
-        this.#type = (this.#type & IFMT_UNKNOWN) | ifmt | LSTAT_CALLED;
-        if (ifmt !== UNKNOWN && ifmt !== IFDIR && ifmt !== IFLNK) {
-            this.#type |= ENOTDIR;
-        }
-    }
-    #onReaddirCB = [];
-    #readdirCBInFlight = false;
-    #callOnReaddirCB(children) {
-        this.#readdirCBInFlight = false;
-        const cbs = this.#onReaddirCB.slice();
-        this.#onReaddirCB.length = 0;
-        cbs.forEach(cb => cb(null, children));
-    }
-    /**
-     * Standard node-style callback interface to get list of directory entries.
-     *
-     * If the Path cannot or does not contain any children, then an empty array
-     * is returned.
-     *
-     * Results are cached, and thus may be out of date if the filesystem is
-     * mutated.
-     *
-     * @param cb The callback called with (er, entries).  Note that the `er`
-     * param is somewhat extraneous, as all readdir() errors are handled and
-     * simply result in an empty set of entries being returned.
-     * @param allowZalgo Boolean indicating that immediately known results should
-     * *not* be deferred with `queueMicrotask`. Defaults to `false`. Release
-     * zalgo at your peril, the dark pony lord is devious and unforgiving.
-     */
-    readdirCB(cb, allowZalgo = false) {
-        if (!this.canReaddir()) {
-            if (allowZalgo)
-                cb(null, []);
-            else
-                queueMicrotask(() => cb(null, []));
-            return;
-        }
-        const children = this.children();
-        if (this.calledReaddir()) {
-            const c = children.slice(0, children.provisional);
-            if (allowZalgo)
-                cb(null, c);
-            else
-                queueMicrotask(() => cb(null, c));
-            return;
-        }
-        // don't have to worry about zalgo at this point.
-        this.#onReaddirCB.push(cb);
-        if (this.#readdirCBInFlight) {
-            return;
-        }
-        this.#readdirCBInFlight = true;
-        // else read the directory, fill up children
-        // de-provisionalize any provisional children.
-        const fullpath = this.fullpath();
-        this.#fs.readdir(fullpath, { withFileTypes: true }, (er, entries) => {
-            if (er) {
-                this.#readdirFail(er.code);
-                children.provisional = 0;
-            }
-            else {
-                // if we didn't get an error, we always get entries.
-                //@ts-ignore
-                for (const e of entries) {
-                    this.#readdirAddChild(e, children);
-                }
-                this.#readdirSuccess(children);
-            }
-            this.#callOnReaddirCB(children.slice(0, children.provisional));
-            return;
-        });
-    }
-    #asyncReaddirInFlight;
-    /**
-     * Return an array of known child entries.
-     *
-     * If the Path cannot or does not contain any children, then an empty array
-     * is returned.
-     *
-     * Results are cached, and thus may be out of date if the filesystem is
-     * mutated.
-     */
-    async readdir() {
-        if (!this.canReaddir()) {
-            return [];
-        }
-        const children = this.children();
-        if (this.calledReaddir()) {
-            return children.slice(0, children.provisional);
-        }
-        // else read the directory, fill up children
-        // de-provisionalize any provisional children.
-        const fullpath = this.fullpath();
-        if (this.#asyncReaddirInFlight) {
-            await this.#asyncReaddirInFlight;
-        }
-        else {
-            /* c8 ignore start */
-            let resolve = () => { };
-            /* c8 ignore stop */
-            this.#asyncReaddirInFlight = new Promise(res => (resolve = res));
-            try {
-                for (const e of await this.#fs.promises.readdir(fullpath, {
-                    withFileTypes: true,
-                })) {
-                    this.#readdirAddChild(e, children);
-                }
-                this.#readdirSuccess(children);
-            }
-            catch (er) {
-                this.#readdirFail(er.code);
-                children.provisional = 0;
-            }
-            this.#asyncReaddirInFlight = undefined;
-            resolve();
-        }
-        return children.slice(0, children.provisional);
-    }
-    /**
-     * synchronous {@link PathBase.readdir}
-     */
-    readdirSync() {
-        if (!this.canReaddir()) {
-            return [];
-        }
-        const children = this.children();
-        if (this.calledReaddir()) {
-            return children.slice(0, children.provisional);
-        }
-        // else read the directory, fill up children
-        // de-provisionalize any provisional children.
-        const fullpath = this.fullpath();
-        try {
-            for (const e of this.#fs.readdirSync(fullpath, {
-                withFileTypes: true,
-            })) {
-                this.#readdirAddChild(e, children);
-            }
-            this.#readdirSuccess(children);
-        }
-        catch (er) {
-            this.#readdirFail(er.code);
-            children.provisional = 0;
-        }
-        return children.slice(0, children.provisional);
-    }
-    canReaddir() {
-        if (this.#type & ENOCHILD)
-            return false;
-        const ifmt = IFMT & this.#type;
-        // we always set ENOTDIR when setting IFMT, so should be impossible
-        /* c8 ignore start */
-        if (!(ifmt === UNKNOWN || ifmt === IFDIR || ifmt === IFLNK)) {
-            return false;
-        }
-        /* c8 ignore stop */
-        return true;
-    }
-    shouldWalk(dirs, walkFilter) {
-        return ((this.#type & IFDIR) === IFDIR &&
-            !(this.#type & ENOCHILD) &&
-            !dirs.has(this) &&
-            (!walkFilter || walkFilter(this)));
-    }
-    /**
-     * Return the Path object corresponding to path as resolved
-     * by realpath(3).
-     *
-     * If the realpath call fails for any reason, `undefined` is returned.
-     *
-     * Result is cached, and thus may be outdated if the filesystem is mutated.
-     * On success, returns a Path object.
-     */
-    async realpath() {
-        if (this.#realpath)
-            return this.#realpath;
-        if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
-            return undefined;
-        try {
-            const rp = await this.#fs.promises.realpath(this.fullpath());
-            return (this.#realpath = this.resolve(rp));
-        }
-        catch (_) {
-            this.#markENOREALPATH();
-        }
-    }
-    /**
-     * Synchronous {@link realpath}
-     */
-    realpathSync() {
-        if (this.#realpath)
-            return this.#realpath;
-        if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
-            return undefined;
-        try {
-            const rp = this.#fs.realpathSync(this.fullpath());
-            return (this.#realpath = this.resolve(rp));
-        }
-        catch (_) {
-            this.#markENOREALPATH();
-        }
-    }
-    /**
-     * Internal method to mark this Path object as the scurry cwd,
-     * called by {@link PathScurry#chdir}
-     *
-     * @internal
-     */
-    [setAsCwd](oldCwd) {
-        if (oldCwd === this)
-            return;
-        oldCwd.isCWD = false;
-        this.isCWD = true;
-        const changed = new Set([]);
-        let rp = [];
-        let p = this;
-        while (p && p.parent) {
-            changed.add(p);
-            p.#relative = rp.join(this.sep);
-            p.#relativePosix = rp.join('/');
-            p = p.parent;
-            rp.push('..');
-        }
-        // now un-memoize parents of old cwd
-        p = oldCwd;
-        while (p && p.parent && !changed.has(p)) {
-            p.#relative = undefined;
-            p.#relativePosix = undefined;
-            p = p.parent;
-        }
-    }
-}
-exports.PathBase = PathBase;
-/**
- * Path class used on win32 systems
- *
- * Uses `'\\'` as the path separator for returned paths, either `'\\'` or `'/'`
- * as the path separator for parsing paths.
- */
-class PathWin32 extends PathBase {
-    /**
-     * Separator for generating path strings.
-     */
-    sep = '\\';
-    /**
-     * Separator for parsing path strings.
-     */
-    splitSep = eitherSep;
-    /**
-     * Do not create new Path objects directly.  They should always be accessed
-     * via the PathScurry class or other methods on the Path class.
-     *
-     * @internal
-     */
-    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
-        super(name, type, root, roots, nocase, children, opts);
-    }
-    /**
-     * @internal
-     */
-    newChild(name, type = UNKNOWN, opts = {}) {
-        return new PathWin32(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
-    }
-    /**
-     * @internal
-     */
-    getRootString(path) {
-        return node_path_1.win32.parse(path).root;
-    }
-    /**
-     * @internal
-     */
-    getRoot(rootPath) {
-        rootPath = uncToDrive(rootPath.toUpperCase());
-        if (rootPath === this.root.name) {
-            return this.root;
-        }
-        // ok, not that one, check if it matches another we know about
-        for (const [compare, root] of Object.entries(this.roots)) {
-            if (this.sameRoot(rootPath, compare)) {
-                return (this.roots[rootPath] = root);
-            }
-        }
-        // otherwise, have to create a new one.
-        return (this.roots[rootPath] = new PathScurryWin32(rootPath, this).root);
-    }
-    /**
-     * @internal
-     */
-    sameRoot(rootPath, compare = this.root.name) {
-        // windows can (rarely) have case-sensitive filesystem, but
-        // UNC and drive letters are always case-insensitive, and canonically
-        // represented uppercase.
-        rootPath = rootPath
-            .toUpperCase()
-            .replace(/\//g, '\\')
-            .replace(uncDriveRegexp, '$1\\');
-        return rootPath === compare;
-    }
-}
-exports.PathWin32 = PathWin32;
-/**
- * Path class used on all posix systems.
- *
- * Uses `'/'` as the path separator.
- */
-class PathPosix extends PathBase {
-    /**
-     * separator for parsing path strings
-     */
-    splitSep = '/';
-    /**
-     * separator for generating path strings
-     */
-    sep = '/';
-    /**
-     * Do not create new Path objects directly.  They should always be accessed
-     * via the PathScurry class or other methods on the Path class.
-     *
-     * @internal
-     */
-    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
-        super(name, type, root, roots, nocase, children, opts);
-    }
-    /**
-     * @internal
-     */
-    getRootString(path) {
-        return path.startsWith('/') ? '/' : '';
-    }
-    /**
-     * @internal
-     */
-    getRoot(_rootPath) {
-        return this.root;
-    }
-    /**
-     * @internal
-     */
-    newChild(name, type = UNKNOWN, opts = {}) {
-        return new PathPosix(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
-    }
-}
-exports.PathPosix = PathPosix;
-/**
- * The base class for all PathScurry classes, providing the interface for path
- * resolution and filesystem operations.
- *
- * Typically, you should *not* instantiate this class directly, but rather one
- * of the platform-specific classes, or the exported {@link PathScurry} which
- * defaults to the current platform.
- */
-class PathScurryBase {
-    /**
-     * The root Path entry for the current working directory of this Scurry
-     */
-    root;
-    /**
-     * The string path for the root of this Scurry's current working directory
-     */
-    rootPath;
-    /**
-     * A collection of all roots encountered, referenced by rootPath
-     */
-    roots;
-    /**
-     * The Path entry corresponding to this PathScurry's current working directory.
-     */
-    cwd;
-    #resolveCache;
-    #resolvePosixCache;
-    #children;
-    /**
-     * Perform path comparisons case-insensitively.
-     *
-     * Defaults true on Darwin and Windows systems, false elsewhere.
-     */
-    nocase;
-    #fs;
-    /**
-     * This class should not be instantiated directly.
-     *
-     * Use PathScurryWin32, PathScurryDarwin, PathScurryPosix, or PathScurry
-     *
-     * @internal
-     */
-    constructor(cwd = process.cwd(), pathImpl, sep, { nocase, childrenCacheSize = 16 * 1024, fs = defaultFS, } = {}) {
-        this.#fs = fsFromOption(fs);
-        if (cwd instanceof URL || cwd.startsWith('file://')) {
-            cwd = (0, node_url_1.fileURLToPath)(cwd);
-        }
-        // resolve and split root, and then add to the store.
-        // this is the only time we call path.resolve()
-        const cwdPath = pathImpl.resolve(cwd);
-        this.roots = Object.create(null);
-        this.rootPath = this.parseRootPath(cwdPath);
-        this.#resolveCache = new ResolveCache();
-        this.#resolvePosixCache = new ResolveCache();
-        this.#children = new ChildrenCache(childrenCacheSize);
-        const split = cwdPath.substring(this.rootPath.length).split(sep);
-        // resolve('/') leaves '', splits to [''], we don't want that.
-        if (split.length === 1 && !split[0]) {
-            split.pop();
-        }
-        /* c8 ignore start */
-        if (nocase === undefined) {
-            throw new TypeError('must provide nocase setting to PathScurryBase ctor');
-        }
-        /* c8 ignore stop */
-        this.nocase = nocase;
-        this.root = this.newRoot(this.#fs);
-        this.roots[this.rootPath] = this.root;
-        let prev = this.root;
-        let len = split.length - 1;
-        const joinSep = pathImpl.sep;
-        let abs = this.rootPath;
-        let sawFirst = false;
-        for (const part of split) {
-            const l = len--;
-            prev = prev.child(part, {
-                relative: new Array(l).fill('..').join(joinSep),
-                relativePosix: new Array(l).fill('..').join('/'),
-                fullpath: (abs += (sawFirst ? '' : joinSep) + part),
-            });
-            sawFirst = true;
-        }
-        this.cwd = prev;
-    }
-    /**
-     * Get the depth of a provided path, string, or the cwd
-     */
-    depth(path = this.cwd) {
-        if (typeof path === 'string') {
-            path = this.cwd.resolve(path);
-        }
-        return path.depth();
-    }
-    /**
-     * Return the cache of child entries.  Exposed so subclasses can create
-     * child Path objects in a platform-specific way.
-     *
-     * @internal
-     */
-    childrenCache() {
-        return this.#children;
-    }
-    /**
-     * Resolve one or more path strings to a resolved string
-     *
-     * Same interface as require('path').resolve.
-     *
-     * Much faster than path.resolve() when called multiple times for the same
-     * path, because the resolved Path objects are cached.  Much slower
-     * otherwise.
-     */
-    resolve(...paths) {
-        // first figure out the minimum number of paths we have to test
-        // we always start at cwd, but any absolutes will bump the start
-        let r = '';
-        for (let i = paths.length - 1; i >= 0; i--) {
-            const p = paths[i];
-            if (!p || p === '.')
-                continue;
-            r = r ? `${p}/${r}` : p;
-            if (this.isAbsolute(p)) {
-                break;
-            }
-        }
-        const cached = this.#resolveCache.get(r);
-        if (cached !== undefined) {
-            return cached;
-        }
-        const result = this.cwd.resolve(r).fullpath();
-        this.#resolveCache.set(r, result);
-        return result;
-    }
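// Editor's illustration (not from the removed file): resolve() memoizes its
// result in an LRU cache keyed by the joined argument string, so repeated
// resolution of the same inputs skips the Path walk entirely. Assuming `pw`
// is a PathScurry instance:
//
//   pw.resolve('src', '..', 'lib', 'index.js'); // builds and caches the resolved string
//   pw.resolve('src', '..', 'lib', 'index.js'); // served from #resolveCache
//
// As with path.resolve(), the rightmost absolute segment wins and anything to
// its left is ignored.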
-    /**
-     * Resolve one or more path strings to a resolved string, returning
-     * the posix path.  Identical to .resolve() on posix systems, but on
-     * windows will return a forward-slash separated UNC path.
-     *
-     * Same interface as require('path').resolve.
-     *
-     * Much faster than path.resolve() when called multiple times for the same
-     * path, because the resolved Path objects are cached.  Much slower
-     * otherwise.
-     */
-    resolvePosix(...paths) {
-        // first figure out the minimum number of paths we have to test
-        // we always start at cwd, but any absolutes will bump the start
-        let r = '';
-        for (let i = paths.length - 1; i >= 0; i--) {
-            const p = paths[i];
-            if (!p || p === '.')
-                continue;
-            r = r ? `${p}/${r}` : p;
-            if (this.isAbsolute(p)) {
-                break;
-            }
-        }
-        const cached = this.#resolvePosixCache.get(r);
-        if (cached !== undefined) {
-            return cached;
-        }
-        const result = this.cwd.resolve(r).fullpathPosix();
-        this.#resolvePosixCache.set(r, result);
-        return result;
-    }
-    /**
-     * find the relative path from the cwd to the supplied path string or entry
-     */
-    relative(entry = this.cwd) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        return entry.relative();
-    }
-    /**
-     * find the relative path from the cwd to the supplied path string or
-     * entry, using / as the path delimiter, even on Windows.
-     */
-    relativePosix(entry = this.cwd) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        return entry.relativePosix();
-    }
-    /**
-     * Return the basename for the provided string or Path object
-     */
-    basename(entry = this.cwd) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        return entry.name;
-    }
-    /**
-     * Return the dirname for the provided string or Path object
-     */
-    dirname(entry = this.cwd) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        return (entry.parent || entry).fullpath();
-    }
-    async readdir(entry = this.cwd, opts = {
-        withFileTypes: true,
-    }) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes } = opts;
-        if (!entry.canReaddir()) {
-            return [];
-        }
-        else {
-            const p = await entry.readdir();
-            return withFileTypes ? p : p.map(e => e.name);
-        }
-    }
-    readdirSync(entry = this.cwd, opts = {
-        withFileTypes: true,
-    }) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes = true } = opts;
-        if (!entry.canReaddir()) {
-            return [];
-        }
-        else if (withFileTypes) {
-            return entry.readdirSync();
-        }
-        else {
-            return entry.readdirSync().map(e => e.name);
-        }
-    }
-    /**
-     * Call lstat() on the string or Path object, and update all known
-     * information that can be determined.
-     *
-     * Note that unlike `fs.lstat()`, the returned value does not contain some
-     * information, such as `mode`, `dev`, `nlink`, and `ino`.  If that
-     * information is required, you will need to call `fs.lstat` yourself.
-     *
-     * If the Path refers to a nonexistent file, or if the lstat call fails for
-     * any reason, `undefined` is returned.  Otherwise the updated Path object is
-     * returned.
-     *
-     * Results are cached, and thus may be out of date if the filesystem is
-     * mutated.
-     */
-    async lstat(entry = this.cwd) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        return entry.lstat();
-    }
-    /**
-     * synchronous {@link PathScurryBase.lstat}
-     */
-    lstatSync(entry = this.cwd) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        return entry.lstatSync();
-    }
-    async readlink(entry = this.cwd, { withFileTypes } = {
-        withFileTypes: false,
-    }) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            withFileTypes = entry.withFileTypes;
-            entry = this.cwd;
-        }
-        const e = await entry.readlink();
-        return withFileTypes ? e : e?.fullpath();
-    }
-    readlinkSync(entry = this.cwd, { withFileTypes } = {
-        withFileTypes: false,
-    }) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            withFileTypes = entry.withFileTypes;
-            entry = this.cwd;
-        }
-        const e = entry.readlinkSync();
-        return withFileTypes ? e : e?.fullpath();
-    }
-    async realpath(entry = this.cwd, { withFileTypes } = {
-        withFileTypes: false,
-    }) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            withFileTypes = entry.withFileTypes;
-            entry = this.cwd;
-        }
-        const e = await entry.realpath();
-        return withFileTypes ? e : e?.fullpath();
-    }
-    realpathSync(entry = this.cwd, { withFileTypes } = {
-        withFileTypes: false,
-    }) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            withFileTypes = entry.withFileTypes;
-            entry = this.cwd;
-        }
-        const e = entry.realpathSync();
-        return withFileTypes ? e : e?.fullpath();
-    }
-    async walk(entry = this.cwd, opts = {}) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
-        const results = [];
-        if (!filter || filter(entry)) {
-            results.push(withFileTypes ? entry : entry.fullpath());
-        }
-        const dirs = new Set();
-        const walk = (dir, cb) => {
-            dirs.add(dir);
-            dir.readdirCB((er, entries) => {
-                /* c8 ignore start */
-                if (er) {
-                    return cb(er);
-                }
-                /* c8 ignore stop */
-                let len = entries.length;
-                if (!len)
-                    return cb();
-                const next = () => {
-                    if (--len === 0) {
-                        cb();
-                    }
-                };
-                for (const e of entries) {
-                    if (!filter || filter(e)) {
-                        results.push(withFileTypes ? e : e.fullpath());
-                    }
-                    if (follow && e.isSymbolicLink()) {
-                        e.realpath()
-                            .then(r => (r?.isUnknown() ? r.lstat() : r))
-                            .then(r => r?.shouldWalk(dirs, walkFilter) ? walk(r, next) : next());
-                    }
-                    else {
-                        if (e.shouldWalk(dirs, walkFilter)) {
-                            walk(e, next);
-                        }
-                        else {
-                            next();
-                        }
-                    }
-                }
-            }, true); // zalgooooooo
-        };
-        const start = entry;
-        return new Promise((res, rej) => {
-            walk(start, er => {
-                /* c8 ignore start */
-                if (er)
-                    return rej(er);
-                /* c8 ignore stop */
-                res(results);
-            });
-        });
-    }
-    walkSync(entry = this.cwd, opts = {}) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
-        const results = [];
-        if (!filter || filter(entry)) {
-            results.push(withFileTypes ? entry : entry.fullpath());
-        }
-        const dirs = new Set([entry]);
-        for (const dir of dirs) {
-            const entries = dir.readdirSync();
-            for (const e of entries) {
-                if (!filter || filter(e)) {
-                    results.push(withFileTypes ? e : e.fullpath());
-                }
-                let r = e;
-                if (e.isSymbolicLink()) {
-                    if (!(follow && (r = e.realpathSync())))
-                        continue;
-                    if (r.isUnknown())
-                        r.lstatSync();
-                }
-                if (r.shouldWalk(dirs, walkFilter)) {
-                    dirs.add(r);
-                }
-            }
-        }
-        return results;
-    }
-    /**
-     * Support for `for await`
-     *
-     * Alias for {@link PathScurryBase.iterate}
-     *
-     * Note: As of Node 19, this is very slow, compared to other methods of
-     * walking.  Consider using {@link PathScurryBase.stream} if memory overhead
-     * and backpressure are concerns, or {@link PathScurryBase.walk} if not.
-     */
-    [Symbol.asyncIterator]() {
-        return this.iterate();
-    }
-    iterate(entry = this.cwd, options = {}) {
-        // iterating async over the stream is significantly more performant,
-        // especially in the warm-cache scenario, because it buffers up directory
-        // entries in the background instead of waiting for a yield for each one.
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            options = entry;
-            entry = this.cwd;
-        }
-        return this.stream(entry, options)[Symbol.asyncIterator]();
-    }
-    /**
-     * Iterating over a PathScurry performs a synchronous walk.
-     *
-     * Alias for {@link PathScurryBase.iterateSync}
-     */
-    [Symbol.iterator]() {
-        return this.iterateSync();
-    }
-    *iterateSync(entry = this.cwd, opts = {}) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
-        if (!filter || filter(entry)) {
-            yield withFileTypes ? entry : entry.fullpath();
-        }
-        const dirs = new Set([entry]);
-        for (const dir of dirs) {
-            const entries = dir.readdirSync();
-            for (const e of entries) {
-                if (!filter || filter(e)) {
-                    yield withFileTypes ? e : e.fullpath();
-                }
-                let r = e;
-                if (e.isSymbolicLink()) {
-                    if (!(follow && (r = e.realpathSync())))
-                        continue;
-                    if (r.isUnknown())
-                        r.lstatSync();
-                }
-                if (r.shouldWalk(dirs, walkFilter)) {
-                    dirs.add(r);
-                }
-            }
-        }
-    }
-    stream(entry = this.cwd, opts = {}) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
-        const results = new minipass_1.Minipass({ objectMode: true });
-        if (!filter || filter(entry)) {
-            results.write(withFileTypes ? entry : entry.fullpath());
-        }
-        const dirs = new Set();
-        const queue = [entry];
-        let processing = 0;
-        const process = () => {
-            let paused = false;
-            while (!paused) {
-                const dir = queue.shift();
-                if (!dir) {
-                    if (processing === 0)
-                        results.end();
-                    return;
-                }
-                processing++;
-                dirs.add(dir);
-                const onReaddir = (er, entries, didRealpaths = false) => {
-                    /* c8 ignore start */
-                    if (er)
-                        return results.emit('error', er);
-                    /* c8 ignore stop */
-                    if (follow && !didRealpaths) {
-                        const promises = [];
-                        for (const e of entries) {
-                            if (e.isSymbolicLink()) {
-                                promises.push(e
-                                    .realpath()
-                                    .then((r) => r?.isUnknown() ? r.lstat() : r));
-                            }
-                        }
-                        if (promises.length) {
-                            Promise.all(promises).then(() => onReaddir(null, entries, true));
-                            return;
-                        }
-                    }
-                    for (const e of entries) {
-                        if (e && (!filter || filter(e))) {
-                            if (!results.write(withFileTypes ? e : e.fullpath())) {
-                                paused = true;
-                            }
-                        }
-                    }
-                    processing--;
-                    for (const e of entries) {
-                        const r = e.realpathCached() || e;
-                        if (r.shouldWalk(dirs, walkFilter)) {
-                            queue.push(r);
-                        }
-                    }
-                    if (paused && !results.flowing) {
-                        results.once('drain', process);
-                    }
-                    else if (!sync) {
-                        process();
-                    }
-                };
-                // zalgo containment
-                let sync = true;
-                dir.readdirCB(onReaddir, true);
-                sync = false;
-            }
-        };
-        process();
-        return results;
-    }
-    streamSync(entry = this.cwd, opts = {}) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
-        const results = new minipass_1.Minipass({ objectMode: true });
-        const dirs = new Set();
-        if (!filter || filter(entry)) {
-            results.write(withFileTypes ? entry : entry.fullpath());
-        }
-        const queue = [entry];
-        let processing = 0;
-        const process = () => {
-            let paused = false;
-            while (!paused) {
-                const dir = queue.shift();
-                if (!dir) {
-                    if (processing === 0)
-                        results.end();
-                    return;
-                }
-                processing++;
-                dirs.add(dir);
-                const entries = dir.readdirSync();
-                for (const e of entries) {
-                    if (!filter || filter(e)) {
-                        if (!results.write(withFileTypes ? e : e.fullpath())) {
-                            paused = true;
-                        }
-                    }
-                }
-                processing--;
-                for (const e of entries) {
-                    let r = e;
-                    if (e.isSymbolicLink()) {
-                        if (!(follow && (r = e.realpathSync())))
-                            continue;
-                        if (r.isUnknown())
-                            r.lstatSync();
-                    }
-                    if (r.shouldWalk(dirs, walkFilter)) {
-                        queue.push(r);
-                    }
-                }
-            }
-            if (paused && !results.flowing)
-                results.once('drain', process);
-        };
-        process();
-        return results;
-    }
-    chdir(path = this.cwd) {
-        const oldCwd = this.cwd;
-        this.cwd = typeof path === 'string' ? this.cwd.resolve(path) : path;
-        this.cwd[setAsCwd](oldCwd);
-    }
-}
-exports.PathScurryBase = PathScurryBase;
-/**
- * Windows implementation of {@link PathScurryBase}
- *
- * Defaults to case insensitive, uses `'\\'` to generate path strings.  Uses
- * {@link PathWin32} for Path objects.
- */
-class PathScurryWin32 extends PathScurryBase {
-    /**
-     * separator for generating path strings
-     */
-    sep = '\\';
-    constructor(cwd = process.cwd(), opts = {}) {
-        const { nocase = true } = opts;
-        super(cwd, node_path_1.win32, '\\', { ...opts, nocase });
-        this.nocase = nocase;
-        for (let p = this.cwd; p; p = p.parent) {
-            p.nocase = this.nocase;
-        }
-    }
-    /**
-     * @internal
-     */
-    parseRootPath(dir) {
-        // if the path starts with a single separator, it's not a UNC, and we'll
-        // just get separator as the root, and driveFromUNC will return \
-        // In that case, mount \ on the root from the cwd.
-        return node_path_1.win32.parse(dir).root.toUpperCase();
-    }
-    /**
-     * @internal
-     */
-    newRoot(fs) {
-        return new PathWin32(this.rootPath, IFDIR, undefined, this.roots, this.nocase, this.childrenCache(), { fs });
-    }
-    /**
-     * Return true if the provided path string is an absolute path
-     */
-    isAbsolute(p) {
-        return (p.startsWith('/') || p.startsWith('\\') || /^[a-z]:(\/|\\)/i.test(p));
-    }
-}
-exports.PathScurryWin32 = PathScurryWin32;
-/**
- * {@link PathScurryBase} implementation for all posix systems other than Darwin.
- *
- * Defaults to case-sensitive matching, uses `'/'` to generate path strings.
- *
- * Uses {@link PathPosix} for Path objects.
- */
-class PathScurryPosix extends PathScurryBase {
-    /**
-     * separator for generating path strings
-     */
-    sep = '/';
-    constructor(cwd = process.cwd(), opts = {}) {
-        const { nocase = false } = opts;
-        super(cwd, node_path_1.posix, '/', { ...opts, nocase });
-        this.nocase = nocase;
-    }
-    /**
-     * @internal
-     */
-    parseRootPath(_dir) {
-        return '/';
-    }
-    /**
-     * @internal
-     */
-    newRoot(fs) {
-        return new PathPosix(this.rootPath, IFDIR, undefined, this.roots, this.nocase, this.childrenCache(), { fs });
-    }
-    /**
-     * Return true if the provided path string is an absolute path
-     */
-    isAbsolute(p) {
-        return p.startsWith('/');
-    }
-}
-exports.PathScurryPosix = PathScurryPosix;
-/**
- * {@link PathScurryBase} implementation for Darwin (macOS) systems.
- *
- * Defaults to case-insensitive matching, uses `'/'` for generating path
- * strings.
- *
- * Uses {@link PathPosix} for Path objects.
- */
-class PathScurryDarwin extends PathScurryPosix {
-    constructor(cwd = process.cwd(), opts = {}) {
-        const { nocase = true } = opts;
-        super(cwd, { ...opts, nocase });
-    }
-}
-exports.PathScurryDarwin = PathScurryDarwin;
-/**
- * Default {@link PathBase} implementation for the current platform.
- *
- * {@link PathWin32} on Windows systems, {@link PathPosix} on all others.
- */
-exports.Path = process.platform === 'win32' ? PathWin32 : PathPosix;
-/**
- * Default {@link PathScurryBase} implementation for the current platform.
- *
- * {@link PathScurryWin32} on Windows systems, {@link PathScurryDarwin} on
- * Darwin (macOS) systems, {@link PathScurryPosix} on all others.
- */
-exports.PathScurry = process.platform === 'win32' ? PathScurryWin32
-    : process.platform === 'darwin' ? PathScurryDarwin
-        : PathScurryPosix;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
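Editor's note: the file removed above is the compiled CommonJS build of path-scurry that was previously nested under node-gyp. A minimal usage sketch of the API it implements follows; it assumes the standalone path-scurry package is available, and the names shown are illustrative rather than taken from this diff.

const { PathScurry } = require('path-scurry');

const pw = new PathScurry(process.cwd());

// resolve() memoizes resolved strings, so repeated lookups are cheap.
console.log(pw.resolve('node_modules', '..', 'package.json'));

// Synchronous iteration yields cached, lstat-aware, Dirent-like Path entries.
for (const entry of pw.iterateSync({ filter: e => !e.isSymbolicLink() })) {
  if (entry.isDirectory()) console.log(entry.relative() || '.');
}

// The async walk resolves to an array; withFileTypes: false yields path strings.
pw.walk({ withFileTypes: false }).then(paths => {
  console.log(`${paths.length} entries under ${pw.cwd.fullpath()}`);
});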
diff --git a/node_modules/node-gyp/node_modules/path-scurry/dist/commonjs/package.json b/node_modules/node-gyp/node_modules/path-scurry/dist/commonjs/package.json
deleted file mode 100644
index 5bbefffbabee3..0000000000000
--- a/node_modules/node-gyp/node_modules/path-scurry/dist/commonjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
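Editor's note: the nested {"type": "commonjs"} marker removed above exists so that Node treats files under dist/commonjs as CommonJS regardless of the root package's "type" setting. A rough sketch of the rule Node applies is below; the helper is hypothetical and for illustration only.

const { readFileSync, existsSync } = require('node:fs');
const { dirname, join } = require('node:path');

// Walk upward from a .js file to the nearest package.json and report the
// module format Node would infer from its "type" field.
function inferredFormat(file) {
  for (let dir = dirname(file); ; dir = dirname(dir)) {
    const pkgPath = join(dir, 'package.json');
    if (existsSync(pkgPath)) {
      const { type } = JSON.parse(readFileSync(pkgPath, 'utf8'));
      return type === 'module' ? 'esm' : 'commonjs';
    }
    if (dirname(dir) === dir) return 'commonjs'; // reached the filesystem root
  }
}

console.log(inferredFormat(join(process.cwd(), 'dist/commonjs/index.js')));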
diff --git a/node_modules/node-gyp/node_modules/path-scurry/dist/esm/index.js b/node_modules/node-gyp/node_modules/path-scurry/dist/esm/index.js
deleted file mode 100644
index 3b11b819faece..0000000000000
--- a/node_modules/node-gyp/node_modules/path-scurry/dist/esm/index.js
+++ /dev/null
@@ -1,1979 +0,0 @@
-import { LRUCache } from 'lru-cache';
-import { posix, win32 } from 'node:path';
-import { fileURLToPath } from 'node:url';
-import { lstatSync, readdir as readdirCB, readdirSync, readlinkSync, realpathSync as rps, } from 'fs';
-import * as actualFS from 'node:fs';
-const realpathSync = rps.native;
-// TODO: test perf of fs/promises realpath vs realpathCB,
-// since the promises one uses realpath.native
-import { lstat, readdir, readlink, realpath } from 'node:fs/promises';
-import { Minipass } from 'minipass';
-const defaultFS = {
-    lstatSync,
-    readdir: readdirCB,
-    readdirSync,
-    readlinkSync,
-    realpathSync,
-    promises: {
-        lstat,
-        readdir,
-        readlink,
-        realpath,
-    },
-};
-// if they just gave us require('fs') then use our default
-const fsFromOption = (fsOption) => !fsOption || fsOption === defaultFS || fsOption === actualFS ?
-    defaultFS
-    : {
-        ...defaultFS,
-        ...fsOption,
-        promises: {
-            ...defaultFS.promises,
-            ...(fsOption.promises || {}),
-        },
-    };
-// turn something like //?/c:/ into c:\
-const uncDriveRegexp = /^\\\\\?\\([a-z]:)\\?$/i;
-const uncToDrive = (rootPath) => rootPath.replace(/\//g, '\\').replace(uncDriveRegexp, '$1\\');
-// windows paths are separated by either / or \
-const eitherSep = /[\\\/]/;
-const UNKNOWN = 0; // may not even exist, for all we know
-const IFIFO = 0b0001;
-const IFCHR = 0b0010;
-const IFDIR = 0b0100;
-const IFBLK = 0b0110;
-const IFREG = 0b1000;
-const IFLNK = 0b1010;
-const IFSOCK = 0b1100;
-const IFMT = 0b1111;
-// mask to unset low 4 bits
-const IFMT_UNKNOWN = ~IFMT;
-// set after successfully calling readdir() and getting entries.
-const READDIR_CALLED = 0b0000_0001_0000;
-// set after a successful lstat()
-const LSTAT_CALLED = 0b0000_0010_0000;
-// set if an entry (or one of its parents) is definitely not a dir
-const ENOTDIR = 0b0000_0100_0000;
-// set if an entry (or one of its parents) does not exist
-// (can also be set on lstat errors like EACCES or ENAMETOOLONG)
-const ENOENT = 0b0000_1000_0000;
-// cannot have child entries -- also verify &IFMT is either IFDIR or IFLNK
-// set if we fail to readlink
-const ENOREADLINK = 0b0001_0000_0000;
-// set if we know realpath() will fail
-const ENOREALPATH = 0b0010_0000_0000;
-const ENOCHILD = ENOTDIR | ENOENT | ENOREALPATH;
-const TYPEMASK = 0b0011_1111_1111;
-const entToType = (s) => s.isFile() ? IFREG
-    : s.isDirectory() ? IFDIR
-        : s.isSymbolicLink() ? IFLNK
-            : s.isCharacterDevice() ? IFCHR
-                : s.isBlockDevice() ? IFBLK
-                    : s.isSocket() ? IFSOCK
-                        : s.isFIFO() ? IFIFO
-                            : UNKNOWN;
-// normalize unicode path names
-const normalizeCache = new Map();
-const normalize = (s) => {
-    const c = normalizeCache.get(s);
-    if (c)
-        return c;
-    const n = s.normalize('NFKD');
-    normalizeCache.set(s, n);
-    return n;
-};
-const normalizeNocaseCache = new Map();
-const normalizeNocase = (s) => {
-    const c = normalizeNocaseCache.get(s);
-    if (c)
-        return c;
-    const n = normalize(s.toLowerCase());
-    normalizeNocaseCache.set(s, n);
-    return n;
-};
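// Editor's illustration (not from the removed file): the NFKD normalization
// above is what lets isNamed() match visually identical names whose code-point
// sequences differ, e.g.:
//
//   'caf\u00e9'.normalize('NFKD') === 'cafe\u0301'.normalize('NFKD'); // true
//   'caf\u00e9' === 'cafe\u0301';                                     // false
//
// which is why the PathBase docs below warn against comparing `.name` directly.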
-/**
- * An LRUCache for storing resolved path strings or Path objects.
- * @internal
- */
-export class ResolveCache extends LRUCache {
-    constructor() {
-        super({ max: 256 });
-    }
-}
-// In order to prevent blowing out the js heap by allocating hundreds of
-// thousands of Path entries when walking extremely large trees, the "children"
-// in this tree are represented by storing an array of Path entries in an
-// LRUCache, indexed by the parent.  At any time, Path.children() may return an
-// empty array, indicating that it doesn't know about any of its children, and
-// thus has to rebuild that cache.  This is fine, it just means that we don't
-// benefit as much from having the cached entries, but huge directory walks
-// don't blow out the stack, and smaller ones are still as fast as possible.
-//
-// It does impose some complexity when building up the readdir data, because we
-// need to pass a reference to the children array that we started with.
-/**
- * an LRUCache for storing child entries.
- * @internal
- */
-export class ChildrenCache extends LRUCache {
-    constructor(maxSize = 16 * 1024) {
-        super({
-            maxSize,
-            // parent + children
-            sizeCalculation: a => a.length + 1,
-        });
-    }
-}
-const setAsCwd = Symbol('PathScurry setAsCwd');
-/**
- * Path objects are sort of like a super-powered
- * {@link https://nodejs.org/docs/latest/api/fs.html#class-fsdirent fs.Dirent}
- *
- * Each one represents a single filesystem entry on disk, which may or may not
- * exist. It includes methods for reading various types of information via
- * lstat, readlink, and readdir, and caches all information to the greatest
- * degree possible.
- *
- * Note that fs operations that would normally throw will instead return an
- * "empty" value. This is in order to prevent excessive overhead from error
- * stack traces.
- */
-export class PathBase {
-    /**
-     * the basename of this path
-     *
-     * **Important**: *always* test the path name against any test string
-     * using the {@link isNamed} method, and not by directly comparing this
-     * string. Otherwise, unicode path strings that the system sees as identical
-     * will not be properly treated as the same path, leading to incorrect
-     * behavior and possible security issues.
-     */
-    name;
-    /**
-     * the Path entry corresponding to the path root.
-     *
-     * @internal
-     */
-    root;
-    /**
-     * All roots found within the current PathScurry family
-     *
-     * @internal
-     */
-    roots;
-    /**
-     * a reference to the parent path, or undefined in the case of root entries
-     *
-     * @internal
-     */
-    parent;
-    /**
-     * boolean indicating whether paths are compared case-insensitively
-     * @internal
-     */
-    nocase;
-    /**
-     * boolean indicating that this path is the current working directory
-     * of the PathScurry collection that contains it.
-     */
-    isCWD = false;
-    // potential default fs override
-    #fs;
-    // Stats fields
-    #dev;
-    get dev() {
-        return this.#dev;
-    }
-    #mode;
-    get mode() {
-        return this.#mode;
-    }
-    #nlink;
-    get nlink() {
-        return this.#nlink;
-    }
-    #uid;
-    get uid() {
-        return this.#uid;
-    }
-    #gid;
-    get gid() {
-        return this.#gid;
-    }
-    #rdev;
-    get rdev() {
-        return this.#rdev;
-    }
-    #blksize;
-    get blksize() {
-        return this.#blksize;
-    }
-    #ino;
-    get ino() {
-        return this.#ino;
-    }
-    #size;
-    get size() {
-        return this.#size;
-    }
-    #blocks;
-    get blocks() {
-        return this.#blocks;
-    }
-    #atimeMs;
-    get atimeMs() {
-        return this.#atimeMs;
-    }
-    #mtimeMs;
-    get mtimeMs() {
-        return this.#mtimeMs;
-    }
-    #ctimeMs;
-    get ctimeMs() {
-        return this.#ctimeMs;
-    }
-    #birthtimeMs;
-    get birthtimeMs() {
-        return this.#birthtimeMs;
-    }
-    #atime;
-    get atime() {
-        return this.#atime;
-    }
-    #mtime;
-    get mtime() {
-        return this.#mtime;
-    }
-    #ctime;
-    get ctime() {
-        return this.#ctime;
-    }
-    #birthtime;
-    get birthtime() {
-        return this.#birthtime;
-    }
-    #matchName;
-    #depth;
-    #fullpath;
-    #fullpathPosix;
-    #relative;
-    #relativePosix;
-    #type;
-    #children;
-    #linkTarget;
-    #realpath;
-    /**
-     * This property is for compatibility with the Dirent class as of
-     * Node v20, where Dirent['parentPath'] refers to the path of the
-     * directory that was passed to readdir. For root entries, it's the path
-     * to the entry itself.
-     */
-    get parentPath() {
-        return (this.parent || this).fullpath();
-    }
-    /**
-     * Deprecated alias for Dirent['parentPath']. Somewhat counterintuitively,
-     * this property refers to the *parent* path, not the path object itself.
-     */
-    get path() {
-        return this.parentPath;
-    }
-    /**
-     * Do not create new Path objects directly.  They should always be accessed
-     * via the PathScurry class or other methods on the Path class.
-     *
-     * @internal
-     */
-    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
-        this.name = name;
-        this.#matchName = nocase ? normalizeNocase(name) : normalize(name);
-        this.#type = type & TYPEMASK;
-        this.nocase = nocase;
-        this.roots = roots;
-        this.root = root || this;
-        this.#children = children;
-        this.#fullpath = opts.fullpath;
-        this.#relative = opts.relative;
-        this.#relativePosix = opts.relativePosix;
-        this.parent = opts.parent;
-        if (this.parent) {
-            this.#fs = this.parent.#fs;
-        }
-        else {
-            this.#fs = fsFromOption(opts.fs);
-        }
-    }
-    /**
-     * Returns the depth of the Path object from its root.
-     *
-     * For example, a path at `/foo/bar` would have a depth of 2.
-     */
-    depth() {
-        if (this.#depth !== undefined)
-            return this.#depth;
-        if (!this.parent)
-            return (this.#depth = 0);
-        return (this.#depth = this.parent.depth() + 1);
-    }
-    /**
-     * @internal
-     */
-    childrenCache() {
-        return this.#children;
-    }
-    /**
-     * Get the Path object referenced by the string path, resolved from this Path
-     */
-    resolve(path) {
-        if (!path) {
-            return this;
-        }
-        const rootPath = this.getRootString(path);
-        const dir = path.substring(rootPath.length);
-        const dirParts = dir.split(this.splitSep);
-        const result = rootPath ?
-            this.getRoot(rootPath).#resolveParts(dirParts)
-            : this.#resolveParts(dirParts);
-        return result;
-    }
-    #resolveParts(dirParts) {
-        let p = this;
-        for (const part of dirParts) {
-            p = p.child(part);
-        }
-        return p;
-    }
-    /**
-     * Returns the cached children Path objects, if still available.  If they
-     * have fallen out of the cache, then returns an empty array, and resets the
-     * READDIR_CALLED bit, so that future calls to readdir() will require an fs
-     * lookup.
-     *
-     * @internal
-     */
-    children() {
-        const cached = this.#children.get(this);
-        if (cached) {
-            return cached;
-        }
-        const children = Object.assign([], { provisional: 0 });
-        this.#children.set(this, children);
-        this.#type &= ~READDIR_CALLED;
-        return children;
-    }
-    /**
-     * Resolves a path portion and returns or creates the child Path.
-     *
-     * Returns `this` if pathPart is `''` or `'.'`, or `parent` if pathPart is
-     * `'..'`.
-     *
-     * This should not be called directly.  If `pathPart` contains any path
-     * separators, it will lead to unsafe undefined behavior.
-     *
-     * Use `Path.resolve()` instead.
-     *
-     * @internal
-     */
-    child(pathPart, opts) {
-        if (pathPart === '' || pathPart === '.') {
-            return this;
-        }
-        if (pathPart === '..') {
-            return this.parent || this;
-        }
-        // find the child
-        const children = this.children();
-        const name = this.nocase ? normalizeNocase(pathPart) : normalize(pathPart);
-        for (const p of children) {
-            if (p.#matchName === name) {
-                return p;
-            }
-        }
-        // didn't find it, create provisional child, since it might not
-        // actually exist.  If we know the parent isn't a dir, then
-        // in fact it CAN'T exist.
-        const s = this.parent ? this.sep : '';
-        const fullpath = this.#fullpath ? this.#fullpath + s + pathPart : undefined;
-        const pchild = this.newChild(pathPart, UNKNOWN, {
-            ...opts,
-            parent: this,
-            fullpath,
-        });
-        if (!this.canReaddir()) {
-            pchild.#type |= ENOENT;
-        }
-        // don't have to update provisional, because if we have real children,
-        // then provisional is set to children.length, otherwise a lower number
-        children.push(pchild);
-        return pchild;
-    }
-    /**
-     * The relative path from the cwd. If it does not share an ancestor with
-     * the cwd, then this ends up being equivalent to the fullpath()
-     */
-    relative() {
-        if (this.isCWD)
-            return '';
-        if (this.#relative !== undefined) {
-            return this.#relative;
-        }
-        const name = this.name;
-        const p = this.parent;
-        if (!p) {
-            return (this.#relative = this.name);
-        }
-        const pv = p.relative();
-        return pv + (!pv || !p.parent ? '' : this.sep) + name;
-    }
-    /**
-     * The relative path from the cwd, using / as the path separator.
-     * If it does not share an ancestor with
-     * the cwd, then this ends up being equivalent to the fullpathPosix()
-     * On posix systems, this is identical to relative().
-     */
-    relativePosix() {
-        if (this.sep === '/')
-            return this.relative();
-        if (this.isCWD)
-            return '';
-        if (this.#relativePosix !== undefined)
-            return this.#relativePosix;
-        const name = this.name;
-        const p = this.parent;
-        if (!p) {
-            return (this.#relativePosix = this.fullpathPosix());
-        }
-        const pv = p.relativePosix();
-        return pv + (!pv || !p.parent ? '' : '/') + name;
-    }
-    /**
-     * The fully resolved path string for this Path entry
-     */
-    fullpath() {
-        if (this.#fullpath !== undefined) {
-            return this.#fullpath;
-        }
-        const name = this.name;
-        const p = this.parent;
-        if (!p) {
-            return (this.#fullpath = this.name);
-        }
-        const pv = p.fullpath();
-        const fp = pv + (!p.parent ? '' : this.sep) + name;
-        return (this.#fullpath = fp);
-    }
-    /**
-     * On platforms other than windows, this is identical to fullpath.
-     *
-     * On windows, this is overridden to return the forward-slash form of the
-     * full UNC path.
-     */
-    fullpathPosix() {
-        if (this.#fullpathPosix !== undefined)
-            return this.#fullpathPosix;
-        if (this.sep === '/')
-            return (this.#fullpathPosix = this.fullpath());
-        if (!this.parent) {
-            const p = this.fullpath().replace(/\\/g, '/');
-            if (/^[a-z]:\//i.test(p)) {
-                return (this.#fullpathPosix = `//?/${p}`);
-            }
-            else {
-                return (this.#fullpathPosix = p);
-            }
-        }
-        const p = this.parent;
-        const pfpp = p.fullpathPosix();
-        const fpp = pfpp + (!pfpp || !p.parent ? '' : '/') + this.name;
-        return (this.#fullpathPosix = fpp);
-    }
-    /**
-     * Is the Path of an unknown type?
-     *
-     * Note that we might know *something* about it if there has been a previous
-     * filesystem operation, for example that it does not exist, or is not a
-     * link, or whether it has child entries.
-     */
-    isUnknown() {
-        return (this.#type & IFMT) === UNKNOWN;
-    }
-    isType(type) {
-        return this[`is${type}`]();
-    }
-    getType() {
-        return (this.isUnknown() ? 'Unknown'
-            : this.isDirectory() ? 'Directory'
-                : this.isFile() ? 'File'
-                    : this.isSymbolicLink() ? 'SymbolicLink'
-                        : this.isFIFO() ? 'FIFO'
-                            : this.isCharacterDevice() ? 'CharacterDevice'
-                                : this.isBlockDevice() ? 'BlockDevice'
-                                    : /* c8 ignore start */ this.isSocket() ? 'Socket'
-                                        : 'Unknown');
-        /* c8 ignore stop */
-    }
-    /**
-     * Is the Path a regular file?
-     */
-    isFile() {
-        return (this.#type & IFMT) === IFREG;
-    }
-    /**
-     * Is the Path a directory?
-     */
-    isDirectory() {
-        return (this.#type & IFMT) === IFDIR;
-    }
-    /**
-     * Is the path a character device?
-     */
-    isCharacterDevice() {
-        return (this.#type & IFMT) === IFCHR;
-    }
-    /**
-     * Is the path a block device?
-     */
-    isBlockDevice() {
-        return (this.#type & IFMT) === IFBLK;
-    }
-    /**
-     * Is the path a FIFO pipe?
-     */
-    isFIFO() {
-        return (this.#type & IFMT) === IFIFO;
-    }
-    /**
-     * Is the path a socket?
-     */
-    isSocket() {
-        return (this.#type & IFMT) === IFSOCK;
-    }
-    /**
-     * Is the path a symbolic link?
-     */
-    isSymbolicLink() {
-        return (this.#type & IFLNK) === IFLNK;
-    }
-    /**
-     * Return the entry if it has been subject of a successful lstat, or
-     * undefined otherwise.
-     *
-     * Does not read the filesystem, so an undefined result *could* simply
-     * mean that we haven't called lstat on it.
-     */
-    lstatCached() {
-        return this.#type & LSTAT_CALLED ? this : undefined;
-    }
-    /**
-     * Return the cached link target if the entry has been the subject of a
-     * successful readlink, or undefined otherwise.
-     *
-     * Does not read the filesystem, so an undefined result *could* just mean we
-     * don't have any cached data. Only use it if you are very sure that a
-     * readlink() has been called at some point.
-     */
-    readlinkCached() {
-        return this.#linkTarget;
-    }
-    /**
-     * Returns the cached realpath target if the entry has been the subject
-     * of a successful realpath, or undefined otherwise.
-     *
-     * Does not read the filesystem, so an undefined result *could* just mean we
-     * don't have any cached data. Only use it if you are very sure that a
-     * realpath() has been called at some point.
-     */
-    realpathCached() {
-        return this.#realpath;
-    }
-    /**
-     * Returns the cached child Path entries array if the entry has been the
-     * subject of a successful readdir(), or [] otherwise.
-     *
-     * Does not read the filesystem, so an empty array *could* just mean we
-     * don't have any cached data. Only use it if you are very sure that a
-     * readdir() has been called recently enough to still be valid.
-     */
-    readdirCached() {
-        const children = this.children();
-        return children.slice(0, children.provisional);
-    }
-    /**
-     * Return true if it's worth trying to readlink.  I.e., we don't (yet) have
-     * any indication that readlink will definitely fail.
-     *
-     * Returns false if the path is known to not be a symlink, if a previous
-     * readlink failed, or if the entry does not exist.
-     */
-    canReadlink() {
-        if (this.#linkTarget)
-            return true;
-        if (!this.parent)
-            return false;
-        // cases where it cannot possibly succeed
-        const ifmt = this.#type & IFMT;
-        return !((ifmt !== UNKNOWN && ifmt !== IFLNK) ||
-            this.#type & ENOREADLINK ||
-            this.#type & ENOENT);
-    }
-    /**
-     * Return true if readdir has previously been successfully called on this
-     * path, indicating that cachedReaddir() is likely valid.
-     */
-    calledReaddir() {
-        return !!(this.#type & READDIR_CALLED);
-    }
-    /**
-     * Returns true if the path is known to not exist. That is, a previous lstat
-     * or readdir failed to verify its existence when that would have been
-     * expected, or a parent entry was marked either enoent or enotdir.
-     */
-    isENOENT() {
-        return !!(this.#type & ENOENT);
-    }
-    /**
-     * Return true if the path is a match for the given path name.  This handles
-     * case sensitivity and unicode normalization.
-     *
-     * Note: even on case-sensitive systems, it is **not** safe to test the
-     * equality of the `.name` property to determine whether a given pathname
-     * matches, due to unicode normalization mismatches.
-     *
-     * Always use this method instead of testing the `path.name` property
-     * directly.
-     */
-    isNamed(n) {
-        return !this.nocase ?
-            this.#matchName === normalize(n)
-            : this.#matchName === normalizeNocase(n);
-    }
-    /**
-     * Return the Path object corresponding to the target of a symbolic link.
-     *
-     * If the Path is not a symbolic link, or if the readlink call fails for any
-     * reason, `undefined` is returned.
-     *
-     * Result is cached, and thus may be outdated if the filesystem is mutated.
-     */
-    async readlink() {
-        const target = this.#linkTarget;
-        if (target) {
-            return target;
-        }
-        if (!this.canReadlink()) {
-            return undefined;
-        }
-        /* c8 ignore start */
-        // already covered by the canReadlink test, here for ts grumples
-        if (!this.parent) {
-            return undefined;
-        }
-        /* c8 ignore stop */
-        try {
-            const read = await this.#fs.promises.readlink(this.fullpath());
-            const linkTarget = (await this.parent.realpath())?.resolve(read);
-            if (linkTarget) {
-                return (this.#linkTarget = linkTarget);
-            }
-        }
-        catch (er) {
-            this.#readlinkFail(er.code);
-            return undefined;
-        }
-    }
-    /**
-     * Synchronous {@link PathBase.readlink}
-     */
-    readlinkSync() {
-        const target = this.#linkTarget;
-        if (target) {
-            return target;
-        }
-        if (!this.canReadlink()) {
-            return undefined;
-        }
-        /* c8 ignore start */
-        // already covered by the canReadlink test, here for ts grumples
-        if (!this.parent) {
-            return undefined;
-        }
-        /* c8 ignore stop */
-        try {
-            const read = this.#fs.readlinkSync(this.fullpath());
-            const linkTarget = this.parent.realpathSync()?.resolve(read);
-            if (linkTarget) {
-                return (this.#linkTarget = linkTarget);
-            }
-        }
-        catch (er) {
-            this.#readlinkFail(er.code);
-            return undefined;
-        }
-    }
-    #readdirSuccess(children) {
-        // succeeded, mark readdir called bit
-        this.#type |= READDIR_CALLED;
-        // mark all remaining provisional children as ENOENT
-        for (let p = children.provisional; p < children.length; p++) {
-            const c = children[p];
-            if (c)
-                c.#markENOENT();
-        }
-    }
-    #markENOENT() {
-        // mark as UNKNOWN and ENOENT
-        if (this.#type & ENOENT)
-            return;
-        this.#type = (this.#type | ENOENT) & IFMT_UNKNOWN;
-        this.#markChildrenENOENT();
-    }
-    #markChildrenENOENT() {
-        // all children are provisional and do not exist
-        const children = this.children();
-        children.provisional = 0;
-        for (const p of children) {
-            p.#markENOENT();
-        }
-    }
-    #markENOREALPATH() {
-        this.#type |= ENOREALPATH;
-        this.#markENOTDIR();
-    }
-    // save the information when we know the entry is not a dir
-    #markENOTDIR() {
-        // entry is not a directory, so any children can't exist.
-        // this *should* be impossible, since any children created
-        // after it's been marked ENOTDIR should be marked ENOENT,
-        // so it won't even get to this point.
-        /* c8 ignore start */
-        if (this.#type & ENOTDIR)
-            return;
-        /* c8 ignore stop */
-        let t = this.#type;
-        // this could happen if we stat a dir, then delete it,
-        // then try to read it or one of its children.
-        if ((t & IFMT) === IFDIR)
-            t &= IFMT_UNKNOWN;
-        this.#type = t | ENOTDIR;
-        this.#markChildrenENOENT();
-    }
-    #readdirFail(code = '') {
-        // markENOTDIR and markENOENT also set provisional=0
-        if (code === 'ENOTDIR' || code === 'EPERM') {
-            this.#markENOTDIR();
-        }
-        else if (code === 'ENOENT') {
-            this.#markENOENT();
-        }
-        else {
-            this.children().provisional = 0;
-        }
-    }
-    #lstatFail(code = '') {
-        // Windows just raises ENOENT in this case, disable for win CI
-        /* c8 ignore start */
-        if (code === 'ENOTDIR') {
-            // already know it has a parent by this point
-            const p = this.parent;
-            p.#markENOTDIR();
-        }
-        else if (code === 'ENOENT') {
-            /* c8 ignore stop */
-            this.#markENOENT();
-        }
-    }
-    #readlinkFail(code = '') {
-        let ter = this.#type;
-        ter |= ENOREADLINK;
-        if (code === 'ENOENT')
-            ter |= ENOENT;
-        // windows gets a weird error when you try to readlink a file
-        if (code === 'EINVAL' || code === 'UNKNOWN') {
-            // exists, but not a symlink, we don't know WHAT it is, so remove
-            // all IFMT bits.
-            ter &= IFMT_UNKNOWN;
-        }
-        this.#type = ter;
-        // windows just gets ENOENT in this case.  We do cover the case,
-        // just disabled because it's impossible on Windows CI
-        /* c8 ignore start */
-        if (code === 'ENOTDIR' && this.parent) {
-            this.parent.#markENOTDIR();
-        }
-        /* c8 ignore stop */
-    }
-    #readdirAddChild(e, c) {
-        return (this.#readdirMaybePromoteChild(e, c) ||
-            this.#readdirAddNewChild(e, c));
-    }
-    #readdirAddNewChild(e, c) {
-        // alloc new entry at head, so it's never provisional
-        const type = entToType(e);
-        const child = this.newChild(e.name, type, { parent: this });
-        const ifmt = child.#type & IFMT;
-        if (ifmt !== IFDIR && ifmt !== IFLNK && ifmt !== UNKNOWN) {
-            child.#type |= ENOTDIR;
-        }
-        c.unshift(child);
-        c.provisional++;
-        return child;
-    }
-    #readdirMaybePromoteChild(e, c) {
-        for (let p = c.provisional; p < c.length; p++) {
-            const pchild = c[p];
-            const name = this.nocase ? normalizeNocase(e.name) : normalize(e.name);
-            if (name !== pchild.#matchName) {
-                continue;
-            }
-            return this.#readdirPromoteChild(e, pchild, p, c);
-        }
-    }
-    #readdirPromoteChild(e, p, index, c) {
-        const v = p.name;
-        // retain any other flags, but set ifmt from dirent
-        p.#type = (p.#type & IFMT_UNKNOWN) | entToType(e);
-        // case sensitivity fixing when we learn the true name.
-        if (v !== e.name)
-            p.name = e.name;
-        // just advance provisional index (potentially off the list),
-        // otherwise we have to splice/pop it out and re-insert at head
-        if (index !== c.provisional) {
-            if (index === c.length - 1)
-                c.pop();
-            else
-                c.splice(index, 1);
-            c.unshift(p);
-        }
-        c.provisional++;
-        return p;
-    }
-    /**
-     * Call lstat() on this Path, and update all known information that can be
-     * determined.
-     *
-     * Note that unlike `fs.lstat()`, the returned value does not contain some
-     * information, such as `mode`, `dev`, `nlink`, and `ino`.  If that
-     * information is required, you will need to call `fs.lstat` yourself.
-     *
-     * If the Path refers to a nonexistent file, or if the lstat call fails for
-     * any reason, `undefined` is returned.  Otherwise the updated Path object is
-     * returned.
-     *
-     * Results are cached, and thus may be out of date if the filesystem is
-     * mutated.
-     */
-    async lstat() {
-        if ((this.#type & ENOENT) === 0) {
-            try {
-                this.#applyStat(await this.#fs.promises.lstat(this.fullpath()));
-                return this;
-            }
-            catch (er) {
-                this.#lstatFail(er.code);
-            }
-        }
-    }
-    /**
-     * synchronous {@link PathBase.lstat}
-     */
-    lstatSync() {
-        if ((this.#type & ENOENT) === 0) {
-            try {
-                this.#applyStat(this.#fs.lstatSync(this.fullpath()));
-                return this;
-            }
-            catch (er) {
-                this.#lstatFail(er.code);
-            }
-        }
-    }
-    #applyStat(st) {
-        const { atime, atimeMs, birthtime, birthtimeMs, blksize, blocks, ctime, ctimeMs, dev, gid, ino, mode, mtime, mtimeMs, nlink, rdev, size, uid, } = st;
-        this.#atime = atime;
-        this.#atimeMs = atimeMs;
-        this.#birthtime = birthtime;
-        this.#birthtimeMs = birthtimeMs;
-        this.#blksize = blksize;
-        this.#blocks = blocks;
-        this.#ctime = ctime;
-        this.#ctimeMs = ctimeMs;
-        this.#dev = dev;
-        this.#gid = gid;
-        this.#ino = ino;
-        this.#mode = mode;
-        this.#mtime = mtime;
-        this.#mtimeMs = mtimeMs;
-        this.#nlink = nlink;
-        this.#rdev = rdev;
-        this.#size = size;
-        this.#uid = uid;
-        const ifmt = entToType(st);
-        // retain any other flags, but set the ifmt
-        this.#type = (this.#type & IFMT_UNKNOWN) | ifmt | LSTAT_CALLED;
-        if (ifmt !== UNKNOWN && ifmt !== IFDIR && ifmt !== IFLNK) {
-            this.#type |= ENOTDIR;
-        }
-    }
-    #onReaddirCB = [];
-    #readdirCBInFlight = false;
-    #callOnReaddirCB(children) {
-        this.#readdirCBInFlight = false;
-        const cbs = this.#onReaddirCB.slice();
-        this.#onReaddirCB.length = 0;
-        cbs.forEach(cb => cb(null, children));
-    }
-    /**
-     * Standard node-style callback interface to get list of directory entries.
-     *
-     * If the Path cannot or does not contain any children, then an empty array
-     * is returned.
-     *
-     * Results are cached, and thus may be out of date if the filesystem is
-     * mutated.
-     *
-     * @param cb The callback called with (er, entries).  Note that the `er`
-     * param is somewhat extraneous, as all readdir() errors are handled and
-     * simply result in an empty set of entries being returned.
-     * @param allowZalgo Boolean indicating that immediately known results should
-     * *not* be deferred with `queueMicrotask`. Defaults to `false`. Release
-     * zalgo at your peril, the dark pony lord is devious and unforgiving.
-     */
-    readdirCB(cb, allowZalgo = false) {
-        if (!this.canReaddir()) {
-            if (allowZalgo)
-                cb(null, []);
-            else
-                queueMicrotask(() => cb(null, []));
-            return;
-        }
-        const children = this.children();
-        if (this.calledReaddir()) {
-            const c = children.slice(0, children.provisional);
-            if (allowZalgo)
-                cb(null, c);
-            else
-                queueMicrotask(() => cb(null, c));
-            return;
-        }
-        // don't have to worry about zalgo at this point.
-        this.#onReaddirCB.push(cb);
-        if (this.#readdirCBInFlight) {
-            return;
-        }
-        this.#readdirCBInFlight = true;
-        // else read the directory, fill up children
-        // de-provisionalize any provisional children.
-        const fullpath = this.fullpath();
-        this.#fs.readdir(fullpath, { withFileTypes: true }, (er, entries) => {
-            if (er) {
-                this.#readdirFail(er.code);
-                children.provisional = 0;
-            }
-            else {
-                // if we didn't get an error, we always get entries.
-                //@ts-ignore
-                for (const e of entries) {
-                    this.#readdirAddChild(e, children);
-                }
-                this.#readdirSuccess(children);
-            }
-            this.#callOnReaddirCB(children.slice(0, children.provisional));
-            return;
-        });
-    }
-    #asyncReaddirInFlight;
-    /**
-     * Return an array of known child entries.
-     *
-     * If the Path cannot or does not contain any children, then an empty array
-     * is returned.
-     *
-     * Results are cached, and thus may be out of date if the filesystem is
-     * mutated.
-     */
-    async readdir() {
-        if (!this.canReaddir()) {
-            return [];
-        }
-        const children = this.children();
-        if (this.calledReaddir()) {
-            return children.slice(0, children.provisional);
-        }
-        // else read the directory, fill up children
-        // de-provisionalize any provisional children.
-        const fullpath = this.fullpath();
-        if (this.#asyncReaddirInFlight) {
-            await this.#asyncReaddirInFlight;
-        }
-        else {
-            /* c8 ignore start */
-            let resolve = () => { };
-            /* c8 ignore stop */
-            this.#asyncReaddirInFlight = new Promise(res => (resolve = res));
-            try {
-                for (const e of await this.#fs.promises.readdir(fullpath, {
-                    withFileTypes: true,
-                })) {
-                    this.#readdirAddChild(e, children);
-                }
-                this.#readdirSuccess(children);
-            }
-            catch (er) {
-                this.#readdirFail(er.code);
-                children.provisional = 0;
-            }
-            this.#asyncReaddirInFlight = undefined;
-            resolve();
-        }
-        return children.slice(0, children.provisional);
-    }
-    /**
-     * synchronous {@link PathBase.readdir}
-     */
-    readdirSync() {
-        if (!this.canReaddir()) {
-            return [];
-        }
-        const children = this.children();
-        if (this.calledReaddir()) {
-            return children.slice(0, children.provisional);
-        }
-        // else read the directory, fill up children
-        // de-provisionalize any provisional children.
-        const fullpath = this.fullpath();
-        try {
-            for (const e of this.#fs.readdirSync(fullpath, {
-                withFileTypes: true,
-            })) {
-                this.#readdirAddChild(e, children);
-            }
-            this.#readdirSuccess(children);
-        }
-        catch (er) {
-            this.#readdirFail(er.code);
-            children.provisional = 0;
-        }
-        return children.slice(0, children.provisional);
-    }
-    canReaddir() {
-        if (this.#type & ENOCHILD)
-            return false;
-        const ifmt = IFMT & this.#type;
-        // we always set ENOTDIR when setting IFMT, so should be impossible
-        /* c8 ignore start */
-        if (!(ifmt === UNKNOWN || ifmt === IFDIR || ifmt === IFLNK)) {
-            return false;
-        }
-        /* c8 ignore stop */
-        return true;
-    }
-    shouldWalk(dirs, walkFilter) {
-        return ((this.#type & IFDIR) === IFDIR &&
-            !(this.#type & ENOCHILD) &&
-            !dirs.has(this) &&
-            (!walkFilter || walkFilter(this)));
-    }
-    /**
-     * Return the Path object corresponding to path as resolved
-     * by realpath(3).
-     *
-     * If the realpath call fails for any reason, `undefined` is returned.
-     *
-     * Result is cached, and thus may be outdated if the filesystem is mutated.
-     * On success, returns a Path object.
-     */
-    async realpath() {
-        if (this.#realpath)
-            return this.#realpath;
-        if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
-            return undefined;
-        try {
-            const rp = await this.#fs.promises.realpath(this.fullpath());
-            return (this.#realpath = this.resolve(rp));
-        }
-        catch (_) {
-            this.#markENOREALPATH();
-        }
-    }
-    /**
-     * Synchronous {@link realpath}
-     */
-    realpathSync() {
-        if (this.#realpath)
-            return this.#realpath;
-        if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
-            return undefined;
-        try {
-            const rp = this.#fs.realpathSync(this.fullpath());
-            return (this.#realpath = this.resolve(rp));
-        }
-        catch (_) {
-            this.#markENOREALPATH();
-        }
-    }
-    /**
-     * Internal method to mark this Path object as the scurry cwd,
-     * called by {@link PathScurry#chdir}
-     *
-     * @internal
-     */
-    [setAsCwd](oldCwd) {
-        if (oldCwd === this)
-            return;
-        oldCwd.isCWD = false;
-        this.isCWD = true;
-        const changed = new Set([]);
-        let rp = [];
-        let p = this;
-        while (p && p.parent) {
-            changed.add(p);
-            p.#relative = rp.join(this.sep);
-            p.#relativePosix = rp.join('/');
-            p = p.parent;
-            rp.push('..');
-        }
-        // now un-memoize parents of old cwd
-        p = oldCwd;
-        while (p && p.parent && !changed.has(p)) {
-            p.#relative = undefined;
-            p.#relativePosix = undefined;
-            p = p.parent;
-        }
-    }
-}
-/**
- * Path class used on win32 systems
- *
- * Uses `'\\'` as the path separator for returned paths, either `'\\'` or `'/'`
- * as the path separator for parsing paths.
- */
-export class PathWin32 extends PathBase {
-    /**
-     * Separator for generating path strings.
-     */
-    sep = '\\';
-    /**
-     * Separator for parsing path strings.
-     */
-    splitSep = eitherSep;
-    /**
-     * Do not create new Path objects directly.  They should always be accessed
-     * via the PathScurry class or other methods on the Path class.
-     *
-     * @internal
-     */
-    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
-        super(name, type, root, roots, nocase, children, opts);
-    }
-    /**
-     * @internal
-     */
-    newChild(name, type = UNKNOWN, opts = {}) {
-        return new PathWin32(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
-    }
-    /**
-     * @internal
-     */
-    getRootString(path) {
-        return win32.parse(path).root;
-    }
-    /**
-     * @internal
-     */
-    getRoot(rootPath) {
-        rootPath = uncToDrive(rootPath.toUpperCase());
-        if (rootPath === this.root.name) {
-            return this.root;
-        }
-        // ok, not that one, check if it matches another we know about
-        for (const [compare, root] of Object.entries(this.roots)) {
-            if (this.sameRoot(rootPath, compare)) {
-                return (this.roots[rootPath] = root);
-            }
-        }
-        // otherwise, have to create a new one.
-        return (this.roots[rootPath] = new PathScurryWin32(rootPath, this).root);
-    }
-    /**
-     * @internal
-     */
-    sameRoot(rootPath, compare = this.root.name) {
-        // windows can (rarely) have case-sensitive filesystem, but
-        // UNC and drive letters are always case-insensitive, and canonically
-        // represented uppercase.
-        rootPath = rootPath
-            .toUpperCase()
-            .replace(/\//g, '\\')
-            .replace(uncDriveRegexp, '$1\\');
-        return rootPath === compare;
-    }
-}
-/**
- * Path class used on all posix systems.
- *
- * Uses `'/'` as the path separator.
- */
-export class PathPosix extends PathBase {
-    /**
-     * separator for parsing path strings
-     */
-    splitSep = '/';
-    /**
-     * separator for generating path strings
-     */
-    sep = '/';
-    /**
-     * Do not create new Path objects directly.  They should always be accessed
-     * via the PathScurry class or other methods on the Path class.
-     *
-     * @internal
-     */
-    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
-        super(name, type, root, roots, nocase, children, opts);
-    }
-    /**
-     * @internal
-     */
-    getRootString(path) {
-        return path.startsWith('/') ? '/' : '';
-    }
-    /**
-     * @internal
-     */
-    getRoot(_rootPath) {
-        return this.root;
-    }
-    /**
-     * @internal
-     */
-    newChild(name, type = UNKNOWN, opts = {}) {
-        return new PathPosix(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
-    }
-}
-/**
- * The base class for all PathScurry classes, providing the interface for path
- * resolution and filesystem operations.
- *
- * Typically, you should *not* instantiate this class directly, but rather one
- * of the platform-specific classes, or the exported {@link PathScurry} which
- * defaults to the current platform.
- */
-export class PathScurryBase {
-    /**
-     * The root Path entry for the current working directory of this Scurry
-     */
-    root;
-    /**
-     * The string path for the root of this Scurry's current working directory
-     */
-    rootPath;
-    /**
-     * A collection of all roots encountered, referenced by rootPath
-     */
-    roots;
-    /**
-     * The Path entry corresponding to this PathScurry's current working directory.
-     */
-    cwd;
-    #resolveCache;
-    #resolvePosixCache;
-    #children;
-    /**
-     * Perform path comparisons case-insensitively.
-     *
-     * Defaults true on Darwin and Windows systems, false elsewhere.
-     */
-    nocase;
-    #fs;
-    /**
-     * This class should not be instantiated directly.
-     *
-     * Use PathScurryWin32, PathScurryDarwin, PathScurryPosix, or PathScurry
-     *
-     * @internal
-     */
-    constructor(cwd = process.cwd(), pathImpl, sep, { nocase, childrenCacheSize = 16 * 1024, fs = defaultFS, } = {}) {
-        this.#fs = fsFromOption(fs);
-        if (cwd instanceof URL || cwd.startsWith('file://')) {
-            cwd = fileURLToPath(cwd);
-        }
-        // resolve and split root, and then add to the store.
-        // this is the only time we call path.resolve()
-        const cwdPath = pathImpl.resolve(cwd);
-        this.roots = Object.create(null);
-        this.rootPath = this.parseRootPath(cwdPath);
-        this.#resolveCache = new ResolveCache();
-        this.#resolvePosixCache = new ResolveCache();
-        this.#children = new ChildrenCache(childrenCacheSize);
-        const split = cwdPath.substring(this.rootPath.length).split(sep);
-        // resolve('/') leaves '', splits to [''], we don't want that.
-        if (split.length === 1 && !split[0]) {
-            split.pop();
-        }
-        /* c8 ignore start */
-        if (nocase === undefined) {
-            throw new TypeError('must provide nocase setting to PathScurryBase ctor');
-        }
-        /* c8 ignore stop */
-        this.nocase = nocase;
-        this.root = this.newRoot(this.#fs);
-        this.roots[this.rootPath] = this.root;
-        let prev = this.root;
-        let len = split.length - 1;
-        const joinSep = pathImpl.sep;
-        let abs = this.rootPath;
-        let sawFirst = false;
-        for (const part of split) {
-            const l = len--;
-            prev = prev.child(part, {
-                relative: new Array(l).fill('..').join(joinSep),
-                relativePosix: new Array(l).fill('..').join('/'),
-                fullpath: (abs += (sawFirst ? '' : joinSep) + part),
-            });
-            sawFirst = true;
-        }
-        this.cwd = prev;
-    }
-    /**
-     * Get the depth of a provided path, string, or the cwd
-     */
-    depth(path = this.cwd) {
-        if (typeof path === 'string') {
-            path = this.cwd.resolve(path);
-        }
-        return path.depth();
-    }
-    /**
-     * Return the cache of child entries.  Exposed so subclasses can create
-     * child Path objects in a platform-specific way.
-     *
-     * @internal
-     */
-    childrenCache() {
-        return this.#children;
-    }
-    /**
-     * Resolve one or more path strings to a resolved string
-     *
-     * Same interface as require('path').resolve.
-     *
-     * Much faster than path.resolve() when called multiple times for the same
-     * path, because the resolved Path objects are cached.  Much slower
-     * otherwise.
-     */
-    resolve(...paths) {
-        // first figure out the minimum number of paths we have to test
-        // we always start at cwd, but any absolutes will bump the start
-        let r = '';
-        for (let i = paths.length - 1; i >= 0; i--) {
-            const p = paths[i];
-            if (!p || p === '.')
-                continue;
-            r = r ? `${p}/${r}` : p;
-            if (this.isAbsolute(p)) {
-                break;
-            }
-        }
-        const cached = this.#resolveCache.get(r);
-        if (cached !== undefined) {
-            return cached;
-        }
-        const result = this.cwd.resolve(r).fullpath();
-        this.#resolveCache.set(r, result);
-        return result;
-    }
-    /**
-     * Resolve one or more path strings to a resolved string, returning
-     * the posix path.  Identical to .resolve() on posix systems, but on
-     * windows will return a forward-slash separated UNC path.
-     *
-     * Same interface as require('path').resolve.
-     *
-     * Much faster than path.resolve() when called multiple times for the same
-     * path, because the resolved Path objects are cached.  Much slower
-     * otherwise.
-     */
-    resolvePosix(...paths) {
-        // first figure out the minimum number of paths we have to test
-        // we always start at cwd, but any absolutes will bump the start
-        let r = '';
-        for (let i = paths.length - 1; i >= 0; i--) {
-            const p = paths[i];
-            if (!p || p === '.')
-                continue;
-            r = r ? `${p}/${r}` : p;
-            if (this.isAbsolute(p)) {
-                break;
-            }
-        }
-        const cached = this.#resolvePosixCache.get(r);
-        if (cached !== undefined) {
-            return cached;
-        }
-        const result = this.cwd.resolve(r).fullpathPosix();
-        this.#resolvePosixCache.set(r, result);
-        return result;
-    }
-    /**
-     * find the relative path from the cwd to the supplied path string or entry
-     */
-    relative(entry = this.cwd) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        return entry.relative();
-    }
-    /**
-     * find the relative path from the cwd to the supplied path string or
-     * entry, using / as the path delimiter, even on Windows.
-     */
-    relativePosix(entry = this.cwd) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        return entry.relativePosix();
-    }
-    /**
-     * Return the basename for the provided string or Path object
-     */
-    basename(entry = this.cwd) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        return entry.name;
-    }
-    /**
-     * Return the dirname for the provided string or Path object
-     */
-    dirname(entry = this.cwd) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        return (entry.parent || entry).fullpath();
-    }
-    async readdir(entry = this.cwd, opts = {
-        withFileTypes: true,
-    }) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes } = opts;
-        if (!entry.canReaddir()) {
-            return [];
-        }
-        else {
-            const p = await entry.readdir();
-            return withFileTypes ? p : p.map(e => e.name);
-        }
-    }
-    readdirSync(entry = this.cwd, opts = {
-        withFileTypes: true,
-    }) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes = true } = opts;
-        if (!entry.canReaddir()) {
-            return [];
-        }
-        else if (withFileTypes) {
-            return entry.readdirSync();
-        }
-        else {
-            return entry.readdirSync().map(e => e.name);
-        }
-    }
-    /**
-     * Call lstat() on the string or Path object, and update all known
-     * information that can be determined.
-     *
-     * Note that unlike `fs.lstat()`, the returned value does not contain some
-     * information, such as `mode`, `dev`, `nlink`, and `ino`.  If that
-     * information is required, you will need to call `fs.lstat` yourself.
-     *
-     * If the Path refers to a nonexistent file, or if the lstat call fails for
-     * any reason, `undefined` is returned.  Otherwise the updated Path object is
-     * returned.
-     *
-     * Results are cached, and thus may be out of date if the filesystem is
-     * mutated.
-     */
-    async lstat(entry = this.cwd) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        return entry.lstat();
-    }
-    /**
-     * synchronous {@link PathScurryBase.lstat}
-     */
-    lstatSync(entry = this.cwd) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        return entry.lstatSync();
-    }
-    async readlink(entry = this.cwd, { withFileTypes } = {
-        withFileTypes: false,
-    }) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            withFileTypes = entry.withFileTypes;
-            entry = this.cwd;
-        }
-        const e = await entry.readlink();
-        return withFileTypes ? e : e?.fullpath();
-    }
-    readlinkSync(entry = this.cwd, { withFileTypes } = {
-        withFileTypes: false,
-    }) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            withFileTypes = entry.withFileTypes;
-            entry = this.cwd;
-        }
-        const e = entry.readlinkSync();
-        return withFileTypes ? e : e?.fullpath();
-    }
-    async realpath(entry = this.cwd, { withFileTypes } = {
-        withFileTypes: false,
-    }) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            withFileTypes = entry.withFileTypes;
-            entry = this.cwd;
-        }
-        const e = await entry.realpath();
-        return withFileTypes ? e : e?.fullpath();
-    }
-    realpathSync(entry = this.cwd, { withFileTypes } = {
-        withFileTypes: false,
-    }) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            withFileTypes = entry.withFileTypes;
-            entry = this.cwd;
-        }
-        const e = entry.realpathSync();
-        return withFileTypes ? e : e?.fullpath();
-    }
-    async walk(entry = this.cwd, opts = {}) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
-        const results = [];
-        if (!filter || filter(entry)) {
-            results.push(withFileTypes ? entry : entry.fullpath());
-        }
-        const dirs = new Set();
-        const walk = (dir, cb) => {
-            dirs.add(dir);
-            dir.readdirCB((er, entries) => {
-                /* c8 ignore start */
-                if (er) {
-                    return cb(er);
-                }
-                /* c8 ignore stop */
-                let len = entries.length;
-                if (!len)
-                    return cb();
-                const next = () => {
-                    if (--len === 0) {
-                        cb();
-                    }
-                };
-                for (const e of entries) {
-                    if (!filter || filter(e)) {
-                        results.push(withFileTypes ? e : e.fullpath());
-                    }
-                    if (follow && e.isSymbolicLink()) {
-                        e.realpath()
-                            .then(r => (r?.isUnknown() ? r.lstat() : r))
-                            .then(r => r?.shouldWalk(dirs, walkFilter) ? walk(r, next) : next());
-                    }
-                    else {
-                        if (e.shouldWalk(dirs, walkFilter)) {
-                            walk(e, next);
-                        }
-                        else {
-                            next();
-                        }
-                    }
-                }
-            }, true); // zalgooooooo
-        };
-        const start = entry;
-        return new Promise((res, rej) => {
-            walk(start, er => {
-                /* c8 ignore start */
-                if (er)
-                    return rej(er);
-                /* c8 ignore stop */
-                res(results);
-            });
-        });
-    }
-    walkSync(entry = this.cwd, opts = {}) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
-        const results = [];
-        if (!filter || filter(entry)) {
-            results.push(withFileTypes ? entry : entry.fullpath());
-        }
-        const dirs = new Set([entry]);
-        for (const dir of dirs) {
-            const entries = dir.readdirSync();
-            for (const e of entries) {
-                if (!filter || filter(e)) {
-                    results.push(withFileTypes ? e : e.fullpath());
-                }
-                let r = e;
-                if (e.isSymbolicLink()) {
-                    if (!(follow && (r = e.realpathSync())))
-                        continue;
-                    if (r.isUnknown())
-                        r.lstatSync();
-                }
-                if (r.shouldWalk(dirs, walkFilter)) {
-                    dirs.add(r);
-                }
-            }
-        }
-        return results;
-    }
-    /**
-     * Support for `for await`
-     *
-     * Alias for {@link PathScurryBase.iterate}
-     *
-     * Note: As of Node 19, this is very slow, compared to other methods of
-     * walking.  Consider using {@link PathScurryBase.stream} if memory overhead
-     * and backpressure are concerns, or {@link PathScurryBase.walk} if not.
-     */
-    [Symbol.asyncIterator]() {
-        return this.iterate();
-    }
-    iterate(entry = this.cwd, options = {}) {
-        // iterating async over the stream is significantly more performant,
-        // especially in the warm-cache scenario, because it buffers up directory
-        // entries in the background instead of waiting for a yield for each one.
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            options = entry;
-            entry = this.cwd;
-        }
-        return this.stream(entry, options)[Symbol.asyncIterator]();
-    }
-    /**
-     * Iterating over a PathScurry performs a synchronous walk.
-     *
-     * Alias for {@link PathScurryBase.iterateSync}
-     */
-    [Symbol.iterator]() {
-        return this.iterateSync();
-    }
-    *iterateSync(entry = this.cwd, opts = {}) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
-        if (!filter || filter(entry)) {
-            yield withFileTypes ? entry : entry.fullpath();
-        }
-        const dirs = new Set([entry]);
-        for (const dir of dirs) {
-            const entries = dir.readdirSync();
-            for (const e of entries) {
-                if (!filter || filter(e)) {
-                    yield withFileTypes ? e : e.fullpath();
-                }
-                let r = e;
-                if (e.isSymbolicLink()) {
-                    if (!(follow && (r = e.realpathSync())))
-                        continue;
-                    if (r.isUnknown())
-                        r.lstatSync();
-                }
-                if (r.shouldWalk(dirs, walkFilter)) {
-                    dirs.add(r);
-                }
-            }
-        }
-    }
-    stream(entry = this.cwd, opts = {}) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
-        const results = new Minipass({ objectMode: true });
-        if (!filter || filter(entry)) {
-            results.write(withFileTypes ? entry : entry.fullpath());
-        }
-        const dirs = new Set();
-        const queue = [entry];
-        let processing = 0;
-        const process = () => {
-            let paused = false;
-            while (!paused) {
-                const dir = queue.shift();
-                if (!dir) {
-                    if (processing === 0)
-                        results.end();
-                    return;
-                }
-                processing++;
-                dirs.add(dir);
-                const onReaddir = (er, entries, didRealpaths = false) => {
-                    /* c8 ignore start */
-                    if (er)
-                        return results.emit('error', er);
-                    /* c8 ignore stop */
-                    if (follow && !didRealpaths) {
-                        const promises = [];
-                        for (const e of entries) {
-                            if (e.isSymbolicLink()) {
-                                promises.push(e
-                                    .realpath()
-                                    .then((r) => r?.isUnknown() ? r.lstat() : r));
-                            }
-                        }
-                        if (promises.length) {
-                            Promise.all(promises).then(() => onReaddir(null, entries, true));
-                            return;
-                        }
-                    }
-                    for (const e of entries) {
-                        if (e && (!filter || filter(e))) {
-                            if (!results.write(withFileTypes ? e : e.fullpath())) {
-                                paused = true;
-                            }
-                        }
-                    }
-                    processing--;
-                    for (const e of entries) {
-                        const r = e.realpathCached() || e;
-                        if (r.shouldWalk(dirs, walkFilter)) {
-                            queue.push(r);
-                        }
-                    }
-                    if (paused && !results.flowing) {
-                        results.once('drain', process);
-                    }
-                    else if (!sync) {
-                        process();
-                    }
-                };
-                // zalgo containment
-                let sync = true;
-                dir.readdirCB(onReaddir, true);
-                sync = false;
-            }
-        };
-        process();
-        return results;
-    }
-    streamSync(entry = this.cwd, opts = {}) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
-        const results = new Minipass({ objectMode: true });
-        const dirs = new Set();
-        if (!filter || filter(entry)) {
-            results.write(withFileTypes ? entry : entry.fullpath());
-        }
-        const queue = [entry];
-        let processing = 0;
-        const process = () => {
-            let paused = false;
-            while (!paused) {
-                const dir = queue.shift();
-                if (!dir) {
-                    if (processing === 0)
-                        results.end();
-                    return;
-                }
-                processing++;
-                dirs.add(dir);
-                const entries = dir.readdirSync();
-                for (const e of entries) {
-                    if (!filter || filter(e)) {
-                        if (!results.write(withFileTypes ? e : e.fullpath())) {
-                            paused = true;
-                        }
-                    }
-                }
-                processing--;
-                for (const e of entries) {
-                    let r = e;
-                    if (e.isSymbolicLink()) {
-                        if (!(follow && (r = e.realpathSync())))
-                            continue;
-                        if (r.isUnknown())
-                            r.lstatSync();
-                    }
-                    if (r.shouldWalk(dirs, walkFilter)) {
-                        queue.push(r);
-                    }
-                }
-            }
-            if (paused && !results.flowing)
-                results.once('drain', process);
-        };
-        process();
-        return results;
-    }
-    chdir(path = this.cwd) {
-        const oldCwd = this.cwd;
-        this.cwd = typeof path === 'string' ? this.cwd.resolve(path) : path;
-        this.cwd[setAsCwd](oldCwd);
-    }
-}
-/**
- * Windows implementation of {@link PathScurryBase}
- *
- * Defaults to case insensitve, uses `'\\'` to generate path strings.  Uses
- * {@link PathWin32} for Path objects.
- */
-export class PathScurryWin32 extends PathScurryBase {
-    /**
-     * separator for generating path strings
-     */
-    sep = '\\';
-    constructor(cwd = process.cwd(), opts = {}) {
-        const { nocase = true } = opts;
-        super(cwd, win32, '\\', { ...opts, nocase });
-        this.nocase = nocase;
-        for (let p = this.cwd; p; p = p.parent) {
-            p.nocase = this.nocase;
-        }
-    }
-    /**
-     * @internal
-     */
-    parseRootPath(dir) {
-        // if the path starts with a single separator, it's not a UNC, and we'll
-        // just get separator as the root, and driveFromUNC will return \
-        // In that case, mount \ on the root from the cwd.
-        return win32.parse(dir).root.toUpperCase();
-    }
-    /**
-     * @internal
-     */
-    newRoot(fs) {
-        return new PathWin32(this.rootPath, IFDIR, undefined, this.roots, this.nocase, this.childrenCache(), { fs });
-    }
-    /**
-     * Return true if the provided path string is an absolute path
-     */
-    isAbsolute(p) {
-        return (p.startsWith('/') || p.startsWith('\\') || /^[a-z]:(\/|\\)/i.test(p));
-    }
-}
-/**
- * {@link PathScurryBase} implementation for all posix systems other than Darwin.
- *
- * Defaults to case-sensitive matching, uses `'/'` to generate path strings.
- *
- * Uses {@link PathPosix} for Path objects.
- */
-export class PathScurryPosix extends PathScurryBase {
-    /**
-     * separator for generating path strings
-     */
-    sep = '/';
-    constructor(cwd = process.cwd(), opts = {}) {
-        const { nocase = false } = opts;
-        super(cwd, posix, '/', { ...opts, nocase });
-        this.nocase = nocase;
-    }
-    /**
-     * @internal
-     */
-    parseRootPath(_dir) {
-        return '/';
-    }
-    /**
-     * @internal
-     */
-    newRoot(fs) {
-        return new PathPosix(this.rootPath, IFDIR, undefined, this.roots, this.nocase, this.childrenCache(), { fs });
-    }
-    /**
-     * Return true if the provided path string is an absolute path
-     */
-    isAbsolute(p) {
-        return p.startsWith('/');
-    }
-}
-/**
- * {@link PathScurryBase} implementation for Darwin (macOS) systems.
- *
- * Defaults to case-insensitive matching, uses `'/'` for generating path
- * strings.
- *
- * Uses {@link PathPosix} for Path objects.
- */
-export class PathScurryDarwin extends PathScurryPosix {
-    constructor(cwd = process.cwd(), opts = {}) {
-        const { nocase = true } = opts;
-        super(cwd, { ...opts, nocase });
-    }
-}
-/**
- * Default {@link PathBase} implementation for the current platform.
- *
- * {@link PathWin32} on Windows systems, {@link PathPosix} on all others.
- */
-export const Path = process.platform === 'win32' ? PathWin32 : PathPosix;
-/**
- * Default {@link PathScurryBase} implementation for the current platform.
- *
- * {@link PathScurryWin32} on Windows systems, {@link PathScurryDarwin} on
- * Darwin (macOS) systems, {@link PathScurryPosix} on all others.
- */
-export const PathScurry = process.platform === 'win32' ? PathScurryWin32
-    : process.platform === 'darwin' ? PathScurryDarwin
-        : PathScurryPosix;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/node-gyp/node_modules/path-scurry/dist/esm/package.json b/node_modules/node-gyp/node_modules/path-scurry/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/node-gyp/node_modules/path-scurry/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/node-gyp/node_modules/path-scurry/package.json b/node_modules/node-gyp/node_modules/path-scurry/package.json
deleted file mode 100644
index e1766157894c8..0000000000000
--- a/node_modules/node-gyp/node_modules/path-scurry/package.json
+++ /dev/null
@@ -1,89 +0,0 @@
-{
-  "name": "path-scurry",
-  "version": "1.11.1",
-  "description": "walk paths fast and efficiently",
-  "author": "Isaac Z. Schlueter  (https://blog.izs.me)",
-  "main": "./dist/commonjs/index.js",
-  "type": "module",
-  "exports": {
-    "./package.json": "./package.json",
-    ".": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.js"
-      }
-    }
-  },
-  "files": [
-    "dist"
-  ],
-  "license": "BlueOak-1.0.0",
-  "scripts": {
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "prepare": "tshy",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "test": "tap",
-    "snap": "tap",
-    "format": "prettier --write . --loglevel warn",
-    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts",
-    "bench": "bash ./scripts/bench.sh"
-  },
-  "prettier": {
-    "experimentalTernaries": true,
-    "semi": false,
-    "printWidth": 75,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "devDependencies": {
-    "@nodelib/fs.walk": "^1.2.8",
-    "@types/node": "^20.12.11",
-    "c8": "^7.12.0",
-    "eslint-config-prettier": "^8.6.0",
-    "mkdirp": "^3.0.0",
-    "prettier": "^3.2.5",
-    "rimraf": "^5.0.1",
-    "tap": "^18.7.2",
-    "ts-node": "^10.9.2",
-    "tshy": "^1.14.0",
-    "typedoc": "^0.25.12",
-    "typescript": "^5.4.3"
-  },
-  "tap": {
-    "typecheck": true
-  },
-  "engines": {
-    "node": ">=16 || 14 >=14.18"
-  },
-  "funding": {
-    "url": "https://github.com/sponsors/isaacs"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/isaacs/path-scurry"
-  },
-  "dependencies": {
-    "lru-cache": "^10.2.0",
-    "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0"
-  },
-  "tshy": {
-    "selfLink": false,
-    "exports": {
-      "./package.json": "./package.json",
-      ".": "./src/index.ts"
-    }
-  },
-  "types": "./dist/commonjs/index.d.ts"
-}
diff --git a/node_modules/node-gyp/package.json b/node_modules/node-gyp/package.json
index 018391bd38c47..ae60687844133 100644
--- a/node_modules/node-gyp/package.json
+++ b/node_modules/node-gyp/package.json
@@ -11,7 +11,7 @@
     "bindings",
     "gyp"
   ],
-  "version": "11.4.2",
+  "version": "12.1.0",
   "installVersion": 11,
   "author": "Nathan Rajlich  (http://tootallnate.net)",
   "repository": {
@@ -25,28 +25,28 @@
     "env-paths": "^2.2.0",
     "exponential-backoff": "^3.1.1",
     "graceful-fs": "^4.2.6",
-    "make-fetch-happen": "^14.0.3",
-    "nopt": "^8.0.0",
-    "proc-log": "^5.0.0",
+    "make-fetch-happen": "^15.0.0",
+    "nopt": "^9.0.0",
+    "proc-log": "^6.0.0",
     "semver": "^7.3.5",
-    "tar": "^7.4.3",
+    "tar": "^7.5.2",
     "tinyglobby": "^0.2.12",
-    "which": "^5.0.0"
+    "which": "^6.0.0"
   },
   "engines": {
-    "node": "^18.17.0 || >=20.5.0"
+    "node": "^20.17.0 || >=22.9.0"
   },
   "devDependencies": {
     "bindings": "^1.5.0",
-    "cross-env": "^7.0.3",
-    "eslint": "^9.16.0",
-    "mocha": "^11.0.1",
-    "nan": "^2.14.2",
-    "neostandard": "^0.11.9",
+    "cross-env": "^10.1.0",
+    "eslint": "^9.39.1",
+    "mocha": "^11.7.5",
+    "nan": "^2.23.1",
+    "neostandard": "^0.12.2",
     "require-inject": "^1.4.4"
   },
   "scripts": {
     "lint": "eslint \"*/*.js\" \"test/**/*.js\" \".github/**/*.js\"",
-    "test": "cross-env NODE_GYP_NULL_LOGGER=true mocha --timeout 15000 test/test-download.js test/test-*"
+    "test": "cross-env NODE_GYP_NULL_LOGGER=true mocha --timeout 30000 test/test-download.js test/test-*"
   }
 }
diff --git a/node_modules/nopt/package.json b/node_modules/nopt/package.json
index 0732ada73c1d0..bb91642931009 100644
--- a/node_modules/nopt/package.json
+++ b/node_modules/nopt/package.json
@@ -1,6 +1,6 @@
 {
   "name": "nopt",
-  "version": "8.1.0",
+  "version": "9.0.0",
   "description": "Option parsing for Node, supporting types, shorthands, etc. Used by npm.",
   "author": "GitHub Inc.",
   "main": "lib/nopt.js",
@@ -23,11 +23,11 @@
   },
   "license": "ISC",
   "dependencies": {
-    "abbrev": "^3.0.0"
+    "abbrev": "^4.0.0"
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.23.6",
+    "@npmcli/template-oss": "4.27.1",
     "tap": "^16.3.0"
   },
   "tap": {
@@ -41,12 +41,12 @@
     "lib/"
   ],
   "engines": {
-    "node": "^18.17.0 || >=20.5.0"
+    "node": "^20.17.0 || >=22.9.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
     "windowsCI": false,
-    "version": "4.23.6",
+    "version": "4.27.1",
     "publish": true
   }
 }
diff --git a/node_modules/npm-bundled/package.json b/node_modules/npm-bundled/package.json
index c5daf35dbaa84..1fa4bdc72c593 100644
--- a/node_modules/npm-bundled/package.json
+++ b/node_modules/npm-bundled/package.json
@@ -1,6 +1,6 @@
 {
   "name": "npm-bundled",
-  "version": "4.0.0",
+  "version": "5.0.0",
   "description": "list things in node_modules that are bundledDependencies, or transitive dependencies thereof",
   "main": "lib/index.js",
   "repository": {
@@ -11,7 +11,7 @@
   "license": "ISC",
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.23.3",
+    "@npmcli/template-oss": "4.27.1",
     "mutate-fs": "^2.1.1",
     "tap": "^16.3.0"
   },
@@ -30,14 +30,14 @@
     "lib/"
   ],
   "dependencies": {
-    "npm-normalize-package-bin": "^4.0.0"
+    "npm-normalize-package-bin": "^5.0.0"
   },
   "engines": {
-    "node": "^18.17.0 || >=20.5.0"
+    "node": "^20.17.0 || >=22.9.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.23.3",
+    "version": "4.27.1",
     "publish": true
   },
   "tap": {
diff --git a/node_modules/npm-install-checks/package.json b/node_modules/npm-install-checks/package.json
index 28a23354bdbfe..aae100c2d4f3c 100644
--- a/node_modules/npm-install-checks/package.json
+++ b/node_modules/npm-install-checks/package.json
@@ -1,6 +1,6 @@
 {
   "name": "npm-install-checks",
-  "version": "7.1.2",
+  "version": "8.0.0",
   "description": "Check the engines and platform fields in package.json",
   "main": "lib/index.js",
   "dependencies": {
@@ -8,7 +8,7 @@
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.25.0",
+    "@npmcli/template-oss": "4.27.1",
     "tap": "^16.0.1"
   },
   "scripts": {
@@ -35,12 +35,12 @@
     "lib/"
   ],
   "engines": {
-    "node": "^18.17.0 || >=20.5.0"
+    "node": "^20.17.0 || >=22.9.0"
   },
   "author": "GitHub Inc.",
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.25.0",
+    "version": "4.27.1",
     "publish": "true"
   },
   "tap": {
diff --git a/node_modules/npm-normalize-package-bin/package.json b/node_modules/npm-normalize-package-bin/package.json
index a1aeef0e1e751..55dc65ad5ee92 100644
--- a/node_modules/npm-normalize-package-bin/package.json
+++ b/node_modules/npm-normalize-package-bin/package.json
@@ -1,6 +1,6 @@
 {
   "name": "npm-normalize-package-bin",
-  "version": "4.0.0",
+  "version": "5.0.0",
   "description": "Turn any flavor of allowable package.json bin into a normalized object",
   "main": "lib/index.js",
   "repository": {
@@ -21,7 +21,7 @@
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.23.3",
+    "@npmcli/template-oss": "4.27.1",
     "tap": "^16.3.0"
   },
   "files": [
@@ -29,11 +29,11 @@
     "lib/"
   ],
   "engines": {
-    "node": "^18.17.0 || >=20.5.0"
+    "node": "^20.17.0 || >=22.9.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.23.3",
+    "version": "4.27.1",
     "publish": "true"
   },
   "tap": {
diff --git a/node_modules/npm-package-arg/lib/npa.js b/node_modules/npm-package-arg/lib/npa.js
index 50121b99efbe3..a25c0a5368941 100644
--- a/node_modules/npm-package-arg/lib/npa.js
+++ b/node_modules/npm-package-arg/lib/npa.js
@@ -66,8 +66,6 @@ function isFileSpec (spec) {
   if (isWindows) {
     return isWindowsFile.test(spec)
   }
-  // We never hit this in windows tests, obviously
-  /* istanbul ignore next */
   return isPosixFile.test(spec)
 }
 
diff --git a/node_modules/npm-package-arg/package.json b/node_modules/npm-package-arg/package.json
index 2d8f91deaeed2..2e2d027f05582 100644
--- a/node_modules/npm-package-arg/package.json
+++ b/node_modules/npm-package-arg/package.json
@@ -1,6 +1,6 @@
 {
   "name": "npm-package-arg",
-  "version": "13.0.1",
+  "version": "13.0.2",
   "description": "Parse the things that can be arguments to `npm install`",
   "main": "./lib/npa.js",
   "directories": {
@@ -12,13 +12,13 @@
   ],
   "dependencies": {
     "hosted-git-info": "^9.0.0",
-    "proc-log": "^5.0.0",
+    "proc-log": "^6.0.0",
     "semver": "^7.3.5",
-    "validate-npm-package-name": "^6.0.0"
+    "validate-npm-package-name": "^7.0.0"
   },
   "devDependencies": {
-    "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.23.5",
+    "@npmcli/eslint-config": "^6.0.0",
+    "@npmcli/template-oss": "4.28.0",
     "tap": "^16.0.1"
   },
   "scripts": {
@@ -47,7 +47,6 @@
     "node": "^20.17.0 || >=22.9.0"
   },
   "tap": {
-    "branches": 97,
     "nyc-arg": [
       "--exclude",
       "tap-snapshots/**"
@@ -55,7 +54,7 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.23.5",
+    "version": "4.28.0",
     "publish": true
   }
 }
diff --git a/node_modules/npm-packlist/package.json b/node_modules/npm-packlist/package.json
index 1ca942a536dbd..30cddb4df2e37 100644
--- a/node_modules/npm-packlist/package.json
+++ b/node_modules/npm-packlist/package.json
@@ -1,6 +1,6 @@
 {
   "name": "npm-packlist",
-  "version": "10.0.2",
+  "version": "10.0.3",
   "description": "Get a list of the files to add from a folder into an npm package",
   "directories": {
     "test": "test"
@@ -8,7 +8,7 @@
   "main": "lib/index.js",
   "dependencies": {
     "ignore-walk": "^8.0.0",
-    "proc-log": "^5.0.0"
+    "proc-log": "^6.0.0"
   },
   "author": "GitHub Inc.",
   "license": "ISC",
@@ -19,7 +19,7 @@
   "devDependencies": {
     "@npmcli/arborist": "^9.0.0",
     "@npmcli/eslint-config": "^5.0.1",
-    "@npmcli/template-oss": "4.25.1",
+    "@npmcli/template-oss": "4.27.1",
     "mutate-fs": "^2.1.1",
     "tap": "^16.0.1"
   },
@@ -56,7 +56,7 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.25.1",
+    "version": "4.27.1",
     "publish": true
   }
 }
diff --git a/node_modules/npm-pick-manifest/package.json b/node_modules/npm-pick-manifest/package.json
index f1ca18ed32108..5cfafcfb70885 100644
--- a/node_modules/npm-pick-manifest/package.json
+++ b/node_modules/npm-pick-manifest/package.json
@@ -1,6 +1,6 @@
 {
   "name": "npm-pick-manifest",
-  "version": "11.0.1",
+  "version": "11.0.3",
   "description": "Resolves a matching manifest from a package metadata document according to standard npm semver resolution rules.",
   "main": "./lib",
   "files": [
@@ -30,14 +30,14 @@
   "author": "GitHub Inc.",
   "license": "ISC",
   "dependencies": {
-    "npm-install-checks": "^7.1.0",
-    "npm-normalize-package-bin": "^4.0.0",
+    "npm-install-checks": "^8.0.0",
+    "npm-normalize-package-bin": "^5.0.0",
     "npm-package-arg": "^13.0.0",
     "semver": "^7.3.5"
   },
   "devDependencies": {
-    "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.25.0",
+    "@npmcli/eslint-config": "^6.0.0",
+    "@npmcli/template-oss": "4.27.1",
     "tap": "^16.0.1"
   },
   "tap": {
@@ -52,7 +52,7 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.25.0",
+    "version": "4.27.1",
     "publish": true
   }
 }
diff --git a/node_modules/npm-profile/package.json b/node_modules/npm-profile/package.json
index fb4ce118c9cf2..0f97cc1efa193 100644
--- a/node_modules/npm-profile/package.json
+++ b/node_modules/npm-profile/package.json
@@ -1,13 +1,13 @@
 {
   "name": "npm-profile",
-  "version": "12.0.0",
+  "version": "12.0.1",
   "description": "Library for updating an npmjs.com profile",
   "keywords": [],
   "author": "GitHub Inc.",
   "license": "ISC",
   "dependencies": {
     "npm-registry-fetch": "^19.0.0",
-    "proc-log": "^5.0.0"
+    "proc-log": "^6.0.0"
   },
   "main": "./lib/index.js",
   "repository": {
@@ -20,7 +20,7 @@
   ],
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.25.0",
+    "@npmcli/template-oss": "4.27.1",
     "nock": "^13.5.6",
     "tap": "^16.0.1"
   },
@@ -46,7 +46,7 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.25.0",
+    "version": "4.27.1",
     "publish": true
   }
 }
diff --git a/node_modules/npm-registry-fetch/lib/index.js b/node_modules/npm-registry-fetch/lib/index.js
index 898c8125bfe0e..91d450f245f05 100644
--- a/node_modules/npm-registry-fetch/lib/index.js
+++ b/node_modules/npm-registry-fetch/lib/index.js
@@ -130,6 +130,7 @@ function regFetch (uri, /* istanbul ignore next */ opts_ = {}) {
       },
       strictSSL: opts.strictSSL,
       timeout: opts.timeout || 30 * 1000,
+      signal: opts.signal,
     }).then(res => checkResponse({
       method,
       uri,
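The one-line change above threads the caller's AbortSignal through to the underlying make-fetch-happen request, so an in-flight registry call can be cancelled. A minimal sketch of how a consumer might use it, assuming the public npm-registry-fetch entry point simply forwards `opts.signal` as shown in the hunk (deadline and package name are illustrative):

    // Hedged sketch: abort a registry request after a deadline (values illustrative).
    const npmFetch = require('npm-registry-fetch')

    async function getPackument (name, ms = 5000) {
      const ac = new AbortController()
      const timer = setTimeout(() => ac.abort(), ms)
      try {
        // npmFetch.json resolves the path against the configured registry
        return await npmFetch.json(`/${name}`, { signal: ac.signal })
      } finally {
        clearTimeout(timer)
      }
    }
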
diff --git a/node_modules/npm-registry-fetch/package.json b/node_modules/npm-registry-fetch/package.json
index a8e954cdf3c14..6f43ed7036796 100644
--- a/node_modules/npm-registry-fetch/package.json
+++ b/node_modules/npm-registry-fetch/package.json
@@ -1,6 +1,6 @@
 {
   "name": "npm-registry-fetch",
-  "version": "19.0.0",
+  "version": "19.1.1",
   "description": "Fetch-based http client for use with npm registry APIs",
   "main": "lib",
   "files": [
@@ -31,22 +31,22 @@
   "author": "GitHub Inc.",
   "license": "ISC",
   "dependencies": {
-    "@npmcli/redact": "^3.0.0",
+    "@npmcli/redact": "^4.0.0",
     "jsonparse": "^1.3.1",
     "make-fetch-happen": "^15.0.0",
     "minipass": "^7.0.2",
-    "minipass-fetch": "^4.0.0",
+    "minipass-fetch": "^5.0.0",
     "minizlib": "^3.0.1",
     "npm-package-arg": "^13.0.0",
-    "proc-log": "^5.0.0"
+    "proc-log": "^6.0.0"
   },
   "devDependencies": {
-    "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.25.0",
+    "@npmcli/eslint-config": "^6.0.0",
+    "@npmcli/template-oss": "4.28.0",
     "cacache": "^20.0.0",
     "nock": "^13.2.4",
     "require-inject": "^1.4.4",
-    "ssri": "^12.0.0",
+    "ssri": "^13.0.0",
     "tap": "^16.0.1"
   },
   "tap": {
@@ -62,7 +62,7 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.25.0",
+    "version": "4.28.0",
     "publish": "true"
   }
 }
diff --git a/node_modules/pacote/package.json b/node_modules/pacote/package.json
index 3cc141a104796..1837cd09ffc9a 100644
--- a/node_modules/pacote/package.json
+++ b/node_modules/pacote/package.json
@@ -1,6 +1,6 @@
 {
   "name": "pacote",
-  "version": "21.0.3",
+  "version": "21.0.4",
   "description": "JavaScript package downloader",
   "author": "GitHub Inc.",
   "bin": {
@@ -27,8 +27,8 @@
   },
   "devDependencies": {
     "@npmcli/arborist": "^9.0.2",
-    "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.23.4",
+    "@npmcli/eslint-config": "^6.0.0",
+    "@npmcli/template-oss": "4.28.0",
     "hosted-git-info": "^9.0.0",
     "mutate-fs": "^2.1.1",
     "nock": "^13.2.4",
@@ -47,9 +47,9 @@
   ],
   "dependencies": {
     "@npmcli/git": "^7.0.0",
-    "@npmcli/installed-package-contents": "^3.0.0",
+    "@npmcli/installed-package-contents": "^4.0.0",
     "@npmcli/package-json": "^7.0.0",
-    "@npmcli/promise-spawn": "^8.0.0",
+    "@npmcli/promise-spawn": "^9.0.0",
     "@npmcli/run-script": "^10.0.0",
     "cacache": "^20.0.0",
     "fs-minipass": "^3.0.0",
@@ -58,10 +58,10 @@
     "npm-packlist": "^10.0.1",
     "npm-pick-manifest": "^11.0.1",
     "npm-registry-fetch": "^19.0.0",
-    "proc-log": "^5.0.0",
+    "proc-log": "^6.0.0",
     "promise-retry": "^2.0.1",
     "sigstore": "^4.0.0",
-    "ssri": "^12.0.0",
+    "ssri": "^13.0.0",
     "tar": "^7.4.3"
   },
   "engines": {
@@ -73,7 +73,7 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.23.4",
+    "version": "4.28.0",
     "windowsCI": false,
     "publish": "true"
   }
diff --git a/node_modules/parse-conflict-json/package.json b/node_modules/parse-conflict-json/package.json
index 824ca8ed2bd14..1d4bc1021695a 100644
--- a/node_modules/parse-conflict-json/package.json
+++ b/node_modules/parse-conflict-json/package.json
@@ -1,6 +1,6 @@
 {
   "name": "parse-conflict-json",
-  "version": "4.0.0",
+  "version": "5.0.1",
   "description": "Parse a JSON string that has git merge conflicts, resolving if possible",
   "author": "GitHub Inc.",
   "license": "ISC",
@@ -24,11 +24,11 @@
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.23.3",
+    "@npmcli/template-oss": "4.27.1",
     "tap": "^16.0.1"
   },
   "dependencies": {
-    "json-parse-even-better-errors": "^4.0.0",
+    "json-parse-even-better-errors": "^5.0.0",
     "just-diff": "^6.0.0",
     "just-diff-apply": "^5.2.0"
   },
@@ -41,11 +41,11 @@
     "lib/"
   ],
   "engines": {
-    "node": "^18.17.0 || >=20.5.0"
+    "node": "^20.17.0 || >=22.9.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.23.3",
+    "version": "4.27.1",
     "publish": true
   }
 }
diff --git a/node_modules/proc-log/package.json b/node_modules/proc-log/package.json
index 957209d3954e5..afa07dfd9b30d 100644
--- a/node_modules/proc-log/package.json
+++ b/node_modules/proc-log/package.json
@@ -1,6 +1,6 @@
 {
   "name": "proc-log",
-  "version": "5.0.0",
+  "version": "6.0.0",
   "files": [
     "bin/",
     "lib/"
@@ -26,15 +26,15 @@
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.23.3",
+    "@npmcli/template-oss": "4.27.1",
     "tap": "^16.0.1"
   },
   "engines": {
-    "node": "^18.17.0 || >=20.5.0"
+    "node": "^20.17.0 || >=22.9.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.23.3",
+    "version": "4.27.1",
     "publish": true
   },
   "tap": {
diff --git a/node_modules/read-cmd-shim/package.json b/node_modules/read-cmd-shim/package.json
index 3b16802df3ce2..d3ebc40e311bb 100644
--- a/node_modules/read-cmd-shim/package.json
+++ b/node_modules/read-cmd-shim/package.json
@@ -1,11 +1,11 @@
 {
   "name": "read-cmd-shim",
-  "version": "5.0.0",
+  "version": "6.0.0",
   "description": "Figure out what a cmd-shim is pointing at. This acts as the equivalent of fs.readlink.",
   "main": "lib/index.js",
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.23.3",
+    "@npmcli/template-oss": "4.27.1",
     "cmd-shim": "^7.0.0",
     "tap": "^16.0.1"
   },
@@ -38,11 +38,11 @@
   ],
   "author": "GitHub Inc.",
   "engines": {
-    "node": "^18.17.0 || >=20.5.0"
+    "node": "^20.17.0 || >=22.9.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.23.3",
+    "version": "4.27.1",
     "publish": true
   }
 }
diff --git a/node_modules/ssri/lib/index.js b/node_modules/ssri/lib/index.js
index 7d749ed480fb9..9acc432261248 100644
--- a/node_modules/ssri/lib/index.js
+++ b/node_modules/ssri/lib/index.js
@@ -9,7 +9,7 @@ const DEFAULT_ALGORITHMS = ['sha512']
 // TODO: this should really be a hardcoded list of algorithms we support,
 // rather than [a-z0-9].
 const BASE64_REGEX = /^[a-z0-9+/]+(?:=?=?)$/i
-const SRI_REGEX = /^([a-z0-9]+)-([^?]+)([?\S*]*)$/
+const SRI_REGEX = /^([a-z0-9]+)-([^?]+)(\?[?\S*]*)?$/
 const STRICT_SRI_REGEX = /^([a-z0-9]+)-([A-Za-z0-9+/=]{44,88})(\?[\x21-\x7E]*)?$/
 const VCHAR_REGEX = /^[\x21-\x7E]+$/
 
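The loosened SRI_REGEX above now only captures a trailing segment as options when it begins with `?`, and makes that segment optional, which matches the shape of STRICT_SRI_REGEX on the next line. A small illustration of the strings it governs, assuming the usual `ssri.parse()` entry point and an obviously fake digest:

    // Hedged sketch: the digest runs up to the first '?', the rest is option metadata.
    const ssri = require('ssri')

    const parsed = ssri.parse('sha512-deadbeef==?pkg=example?ref=main')
    parsed.sha512[0].digest   // 'deadbeef=='
    parsed.sha512[0].options  // [ 'pkg=example', 'ref=main' ]
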
diff --git a/node_modules/ssri/package.json b/node_modules/ssri/package.json
index 83306cd044ec3..8781bdf5f80c0 100644
--- a/node_modules/ssri/package.json
+++ b/node_modules/ssri/package.json
@@ -1,6 +1,6 @@
 {
   "name": "ssri",
-  "version": "12.0.0",
+  "version": "13.0.0",
   "description": "Standard Subresource Integrity library -- parses, serializes, generates, and verifies integrity metadata according to the SRI spec.",
   "main": "lib/index.js",
   "files": [
@@ -52,15 +52,15 @@
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.23.3",
+    "@npmcli/template-oss": "4.27.1",
     "tap": "^16.0.1"
   },
   "engines": {
-    "node": "^18.17.0 || >=20.5.0"
+    "node": "^20.17.0 || >=22.9.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.23.3",
+    "version": "4.27.1",
     "publish": "true"
   }
 }
diff --git a/node_modules/node-gyp/node_modules/path-scurry/LICENSE.md b/node_modules/tar/LICENSE.md
similarity index 100%
rename from node_modules/node-gyp/node_modules/path-scurry/LICENSE.md
rename to node_modules/tar/LICENSE.md
diff --git a/node_modules/tar/dist/commonjs/header.js b/node_modules/tar/dist/commonjs/header.js
index b3a48037b849a..09d8a3dfa80da 100644
--- a/node_modules/tar/dist/commonjs/header.js
+++ b/node_modules/tar/dist/commonjs/header.js
@@ -68,12 +68,13 @@ class Header {
         if (!buf || !(buf.length >= off + 512)) {
             throw new Error('need 512 bytes for header');
         }
-        this.path = decString(buf, off, 100);
-        this.mode = decNumber(buf, off + 100, 8);
-        this.uid = decNumber(buf, off + 108, 8);
-        this.gid = decNumber(buf, off + 116, 8);
-        this.size = decNumber(buf, off + 124, 12);
-        this.mtime = decDate(buf, off + 136, 12);
+        this.path = ex?.path ?? decString(buf, off, 100);
+        this.mode = ex?.mode ?? gex?.mode ?? decNumber(buf, off + 100, 8);
+        this.uid = ex?.uid ?? gex?.uid ?? decNumber(buf, off + 108, 8);
+        this.gid = ex?.gid ?? gex?.gid ?? decNumber(buf, off + 116, 8);
+        this.size = ex?.size ?? gex?.size ?? decNumber(buf, off + 124, 12);
+        this.mtime =
+            ex?.mtime ?? gex?.mtime ?? decDate(buf, off + 136, 12);
         this.cksum = decNumber(buf, off + 148, 12);
         // if we have extended or global extended headers, apply them now
         // See https://github.com/npm/node-tar/pull/187
@@ -101,11 +102,15 @@ class Header {
         this.linkpath = decString(buf, off + 157, 100);
         if (buf.subarray(off + 257, off + 265).toString() ===
             'ustar\u000000') {
-            this.uname = decString(buf, off + 265, 32);
-            this.gname = decString(buf, off + 297, 32);
             /* c8 ignore start */
-            this.devmaj = decNumber(buf, off + 329, 8) ?? 0;
-            this.devmin = decNumber(buf, off + 337, 8) ?? 0;
+            this.uname =
+                ex?.uname ?? gex?.uname ?? decString(buf, off + 265, 32);
+            this.gname =
+                ex?.gname ?? gex?.gname ?? decString(buf, off + 297, 32);
+            this.devmaj =
+                ex?.devmaj ?? gex?.devmaj ?? decNumber(buf, off + 329, 8) ?? 0;
+            this.devmin =
+                ex?.devmin ?? gex?.devmin ?? decNumber(buf, off + 337, 8) ?? 0;
             /* c8 ignore stop */
             if (buf[off + 475] !== 0) {
                 // definitely a prefix, definitely >130 chars.
@@ -117,8 +122,12 @@ class Header {
                 if (prefix) {
                     this.path = prefix + '/' + this.path;
                 }
-                this.atime = decDate(buf, off + 476, 12);
-                this.ctime = decDate(buf, off + 488, 12);
+                /* c8 ignore start */
+                this.atime =
+                    ex?.atime ?? gex?.atime ?? decDate(buf, off + 476, 12);
+                this.ctime =
+                    ex?.ctime ?? gex?.ctime ?? decDate(buf, off + 488, 12);
+                /* c8 ignore stop */
             }
         }
         let sum = 8 * 0x20;
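In the hunk above, values already parsed from a per-entry pax extended header (`ex`) or a global extended header (`gex`) now take precedence over the raw 512-byte ustar fields at decode time. That matters for entries whose metadata does not fit the fixed-width layout, such as paths longer than 100 bytes or sizes beyond the 12-byte octal field. A minimal sketch of the precedence chain, with `ex` and `gex` standing in for hypothetical parsed pax objects:

    // Hedged sketch: per-entry pax value wins, then the global pax value, then the raw ustar field.
    const ex  = { path: 'a/very/long/path/that/does/not/fit/in/the/100-byte/ustar/name/field.txt' }
    const gex = { mtime: new Date('2024-01-01T00:00:00Z') }

    const path  = ex?.path  ?? gex?.path  ?? 'name-decoded-from-buf'  // -> ex.path
    const mtime = ex?.mtime ?? gex?.mtime ?? new Date(0)              // -> gex.mtime

The same change is mirrored in the esm build of header.js further below.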
diff --git a/node_modules/tar/dist/commonjs/list.js b/node_modules/tar/dist/commonjs/list.js
index 3bc56453f5ed6..5efad803a4cf5 100644
--- a/node_modules/tar/dist/commonjs/list.js
+++ b/node_modules/tar/dist/commonjs/list.js
@@ -82,14 +82,16 @@ const listFileSync = (opt) => {
         const readSize = opt.maxReadSize || 16 * 1024 * 1024;
         if (stat.size < readSize) {
             const buf = Buffer.allocUnsafe(stat.size);
-            node_fs_1.default.readSync(fd, buf, 0, stat.size, 0);
-            p.end(buf);
+            const read = node_fs_1.default.readSync(fd, buf, 0, stat.size, 0);
+            p.end(read === buf.byteLength ? buf : buf.subarray(0, read));
         }
         else {
             let pos = 0;
             const buf = Buffer.allocUnsafe(readSize);
             while (pos < stat.size) {
                 const bytesRead = node_fs_1.default.readSync(fd, buf, 0, readSize, pos);
+                if (bytesRead === 0)
+                    break;
                 pos += bytesRead;
                 p.write(buf.subarray(0, bytesRead));
             }
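Both branches above now honor the byte count that `fs.readSync` actually returns: a short read is trimmed before it is handed to the parser, and a zero-byte read ends the loop instead of spinning forever on a file that is shorter than its reported size. The same pattern in isolation (path and chunk size are illustrative):

    // Hedged sketch: read a file in fixed-size chunks while honoring short and zero-byte reads.
    const fs = require('node:fs')

    function readInChunks (path, chunkSize, onChunk) {
      const fd = fs.openSync(path, 'r')
      try {
        const buf = Buffer.allocUnsafe(chunkSize)
        let pos = 0
        while (true) {
          const bytesRead = fs.readSync(fd, buf, 0, chunkSize, pos)
          if (bytesRead === 0) break           // EOF, or a file that shrank underneath us
          onChunk(buf.subarray(0, bytesRead))  // never hand out uninitialized tail bytes
          pos += bytesRead
        }
      } finally {
        fs.closeSync(fd)
      }
    }
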
diff --git a/node_modules/tar/dist/commonjs/pack.js b/node_modules/tar/dist/commonjs/pack.js
index 07e921ca959bf..237f368e5d225 100644
--- a/node_modules/tar/dist/commonjs/pack.js
+++ b/node_modules/tar/dist/commonjs/pack.js
@@ -135,7 +135,10 @@ class Pack extends minipass_1.Minipass {
         }
         this.portable = !!opt.portable;
         if (opt.gzip || opt.brotli || opt.zstd) {
-            if ((opt.gzip ? 1 : 0) + (opt.brotli ? 1 : 0) + (opt.zstd ? 1 : 0) > 1) {
+            if ((opt.gzip ? 1 : 0) +
+                (opt.brotli ? 1 : 0) +
+                (opt.zstd ? 1 : 0) >
+                1) {
                 throw new TypeError('gzip, brotli, zstd are mutually exclusive');
             }
             if (opt.gzip) {
diff --git a/node_modules/tar/dist/commonjs/parse.js b/node_modules/tar/dist/commonjs/parse.js
index 0222b5547439f..db7b0124a3c05 100644
--- a/node_modules/tar/dist/commonjs/parse.js
+++ b/node_modules/tar/dist/commonjs/parse.js
@@ -449,10 +449,8 @@ class Parser extends events_1.EventEmitter {
                 const ended = this[ENDED];
                 this[ENDED] = false;
                 this[UNZIP] =
-                    this[UNZIP] === undefined ?
-                        new minizlib_1.Unzip({})
-                        : isZstd ?
-                            new minizlib_1.ZstdDecompress({})
+                    this[UNZIP] === undefined ? new minizlib_1.Unzip({})
+                        : isZstd ? new minizlib_1.ZstdDecompress({})
                             : new minizlib_1.BrotliDecompress({});
                 this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk));
                 this[UNZIP].on('error', er => this.abort(er));
diff --git a/node_modules/tar/dist/esm/header.js b/node_modules/tar/dist/esm/header.js
index e15192b14b16e..66db54527d838 100644
--- a/node_modules/tar/dist/esm/header.js
+++ b/node_modules/tar/dist/esm/header.js
@@ -42,12 +42,13 @@ export class Header {
         if (!buf || !(buf.length >= off + 512)) {
             throw new Error('need 512 bytes for header');
         }
-        this.path = decString(buf, off, 100);
-        this.mode = decNumber(buf, off + 100, 8);
-        this.uid = decNumber(buf, off + 108, 8);
-        this.gid = decNumber(buf, off + 116, 8);
-        this.size = decNumber(buf, off + 124, 12);
-        this.mtime = decDate(buf, off + 136, 12);
+        this.path = ex?.path ?? decString(buf, off, 100);
+        this.mode = ex?.mode ?? gex?.mode ?? decNumber(buf, off + 100, 8);
+        this.uid = ex?.uid ?? gex?.uid ?? decNumber(buf, off + 108, 8);
+        this.gid = ex?.gid ?? gex?.gid ?? decNumber(buf, off + 116, 8);
+        this.size = ex?.size ?? gex?.size ?? decNumber(buf, off + 124, 12);
+        this.mtime =
+            ex?.mtime ?? gex?.mtime ?? decDate(buf, off + 136, 12);
         this.cksum = decNumber(buf, off + 148, 12);
         // if we have extended or global extended headers, apply them now
         // See https://github.com/npm/node-tar/pull/187
@@ -75,11 +76,15 @@ export class Header {
         this.linkpath = decString(buf, off + 157, 100);
         if (buf.subarray(off + 257, off + 265).toString() ===
             'ustar\u000000') {
-            this.uname = decString(buf, off + 265, 32);
-            this.gname = decString(buf, off + 297, 32);
             /* c8 ignore start */
-            this.devmaj = decNumber(buf, off + 329, 8) ?? 0;
-            this.devmin = decNumber(buf, off + 337, 8) ?? 0;
+            this.uname =
+                ex?.uname ?? gex?.uname ?? decString(buf, off + 265, 32);
+            this.gname =
+                ex?.gname ?? gex?.gname ?? decString(buf, off + 297, 32);
+            this.devmaj =
+                ex?.devmaj ?? gex?.devmaj ?? decNumber(buf, off + 329, 8) ?? 0;
+            this.devmin =
+                ex?.devmin ?? gex?.devmin ?? decNumber(buf, off + 337, 8) ?? 0;
             /* c8 ignore stop */
             if (buf[off + 475] !== 0) {
                 // definitely a prefix, definitely >130 chars.
@@ -91,8 +96,12 @@ export class Header {
                 if (prefix) {
                     this.path = prefix + '/' + this.path;
                 }
-                this.atime = decDate(buf, off + 476, 12);
-                this.ctime = decDate(buf, off + 488, 12);
+                /* c8 ignore start */
+                this.atime =
+                    ex?.atime ?? gex?.atime ?? decDate(buf, off + 476, 12);
+                this.ctime =
+                    ex?.ctime ?? gex?.ctime ?? decDate(buf, off + 488, 12);
+                /* c8 ignore stop */
             }
         }
         let sum = 8 * 0x20;
diff --git a/node_modules/tar/dist/esm/list.js b/node_modules/tar/dist/esm/list.js
index 489ece51b9fa3..a63a8841769a3 100644
--- a/node_modules/tar/dist/esm/list.js
+++ b/node_modules/tar/dist/esm/list.js
@@ -52,14 +52,16 @@ const listFileSync = (opt) => {
         const readSize = opt.maxReadSize || 16 * 1024 * 1024;
         if (stat.size < readSize) {
             const buf = Buffer.allocUnsafe(stat.size);
-            fs.readSync(fd, buf, 0, stat.size, 0);
-            p.end(buf);
+            const read = fs.readSync(fd, buf, 0, stat.size, 0);
+            p.end(read === buf.byteLength ? buf : buf.subarray(0, read));
         }
         else {
             let pos = 0;
             const buf = Buffer.allocUnsafe(readSize);
             while (pos < stat.size) {
                 const bytesRead = fs.readSync(fd, buf, 0, readSize, pos);
+                if (bytesRead === 0)
+                    break;
                 pos += bytesRead;
                 p.write(buf.subarray(0, bytesRead));
             }
diff --git a/node_modules/tar/dist/esm/pack.js b/node_modules/tar/dist/esm/pack.js
index 14661783455d5..36dc28f8077d4 100644
--- a/node_modules/tar/dist/esm/pack.js
+++ b/node_modules/tar/dist/esm/pack.js
@@ -105,7 +105,10 @@ export class Pack extends Minipass {
         }
         this.portable = !!opt.portable;
         if (opt.gzip || opt.brotli || opt.zstd) {
-            if ((opt.gzip ? 1 : 0) + (opt.brotli ? 1 : 0) + (opt.zstd ? 1 : 0) > 1) {
+            if ((opt.gzip ? 1 : 0) +
+                (opt.brotli ? 1 : 0) +
+                (opt.zstd ? 1 : 0) >
+                1) {
                 throw new TypeError('gzip, brotli, zstd are mutually exclusive');
             }
             if (opt.gzip) {
diff --git a/node_modules/tar/dist/esm/parse.js b/node_modules/tar/dist/esm/parse.js
index 5b6bfe4bc4f15..50592a1dec635 100644
--- a/node_modules/tar/dist/esm/parse.js
+++ b/node_modules/tar/dist/esm/parse.js
@@ -446,10 +446,8 @@ export class Parser extends EE {
                 const ended = this[ENDED];
                 this[ENDED] = false;
                 this[UNZIP] =
-                    this[UNZIP] === undefined ?
-                        new Unzip({})
-                        : isZstd ?
-                            new ZstdDecompress({})
+                    this[UNZIP] === undefined ? new Unzip({})
+                        : isZstd ? new ZstdDecompress({})
                             : new BrotliDecompress({});
                 this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk));
                 this[UNZIP].on('error', er => this.abort(er));
diff --git a/node_modules/tar/package.json b/node_modules/tar/package.json
index be0f1e8fd8000..27368e26b2553 100644
--- a/node_modules/tar/package.json
+++ b/node_modules/tar/package.json
@@ -2,7 +2,7 @@
   "author": "Isaac Z. Schlueter",
   "name": "tar",
   "description": "tar for node",
-  "version": "7.5.1",
+  "version": "7.5.2",
   "repository": {
     "type": "git",
     "url": "https://github.com/isaacs/node-tar.git"
@@ -40,7 +40,7 @@
     "tshy": "^1.13.1",
     "typedoc": "^0.25.13"
   },
-  "license": "ISC",
+  "license": "BlueOak-1.0.0",
   "engines": {
     "node": ">=18"
   },
diff --git a/node_modules/validate-npm-package-name/package.json b/node_modules/validate-npm-package-name/package.json
index e5162f847fe53..87204aaf66398 100644
--- a/node_modules/validate-npm-package-name/package.json
+++ b/node_modules/validate-npm-package-name/package.json
@@ -1,6 +1,6 @@
 {
   "name": "validate-npm-package-name",
-  "version": "6.0.2",
+  "version": "7.0.0",
   "description": "Give me a string and I'll tell you if it's a valid npm package name",
   "main": "lib/",
   "directories": {
@@ -8,7 +8,7 @@
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.25.0",
+    "@npmcli/template-oss": "4.27.1",
     "tap": "^16.0.1"
   },
   "scripts": {
@@ -45,11 +45,11 @@
     "lib/"
   ],
   "engines": {
-    "node": "^18.17.0 || >=20.5.0"
+    "node": "^20.17.0 || >=22.9.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.25.0",
+    "version": "4.27.1",
     "publish": true
   },
   "tap": {
diff --git a/node_modules/which/package.json b/node_modules/which/package.json
index 94184233c61c4..915dc4bd27c3a 100644
--- a/node_modules/which/package.json
+++ b/node_modules/which/package.json
@@ -2,7 +2,7 @@
   "author": "GitHub Inc.",
   "name": "which",
   "description": "Like which(1) unix command. Find the first instance of an executable in the PATH.",
-  "version": "5.0.0",
+  "version": "6.0.0",
   "repository": {
     "type": "git",
     "url": "git+https://github.com/npm/node-which.git"
@@ -17,7 +17,7 @@
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.23.3",
+    "@npmcli/template-oss": "4.27.1",
     "tap": "^16.3.0"
   },
   "scripts": {
@@ -42,11 +42,11 @@
     ]
   },
   "engines": {
-    "node": "^18.17.0 || >=20.5.0"
+    "node": "^20.17.0 || >=22.9.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.23.3",
+    "version": "4.27.1",
     "publish": "true"
   }
 }
diff --git a/node_modules/write-file-atomic/package.json b/node_modules/write-file-atomic/package.json
index 1e88b5b889ec9..c72d839f0e661 100644
--- a/node_modules/write-file-atomic/package.json
+++ b/node_modules/write-file-atomic/package.json
@@ -1,6 +1,6 @@
 {
   "name": "write-file-atomic",
-  "version": "6.0.0",
+  "version": "7.0.0",
   "description": "Write files in an atomic fashion w/configurable ownership",
   "main": "./lib/index.js",
   "scripts": {
@@ -33,7 +33,7 @@
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.23.3",
+    "@npmcli/template-oss": "4.27.1",
     "tap": "^16.0.1"
   },
   "files": [
@@ -41,12 +41,12 @@
     "lib/"
   ],
   "engines": {
-    "node": "^18.17.0 || >=20.5.0"
+    "node": "^20.17.0 || >=22.9.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
     "windowsCI": false,
-    "version": "4.23.3",
+    "version": "4.27.1",
     "publish": "true"
   },
   "tap": {
diff --git a/package-lock.json b/package-lock.json
index 39bb8af70eacd..a9ab59f4dc338 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -13,6 +13,7 @@
         "@npmcli/config",
         "@npmcli/fs",
         "@npmcli/map-workspaces",
+        "@npmcli/metavuln-calculator",
         "@npmcli/package-json",
         "@npmcli/promise-spawn",
         "@npmcli/redact",
@@ -87,13 +88,14 @@
         "@npmcli/arborist": "^9.1.6",
         "@npmcli/config": "^10.4.2",
         "@npmcli/fs": "^4.0.0",
-        "@npmcli/map-workspaces": "^5.0.0",
-        "@npmcli/package-json": "^7.0.1",
-        "@npmcli/promise-spawn": "^8.0.3",
-        "@npmcli/redact": "^3.2.2",
-        "@npmcli/run-script": "^10.0.0",
+        "@npmcli/map-workspaces": "^5.0.1",
+        "@npmcli/metavuln-calculator": "^9.0.3",
+        "@npmcli/package-json": "^7.0.2",
+        "@npmcli/promise-spawn": "^9.0.1",
+        "@npmcli/redact": "^4.0.0",
+        "@npmcli/run-script": "^10.0.3",
         "@sigstore/tuf": "^4.0.0",
-        "abbrev": "^3.0.1",
+        "abbrev": "^4.0.0",
         "archy": "~1.0.0",
         "cacache": "^20.0.1",
         "chalk": "^5.6.2",
@@ -104,10 +106,10 @@
         "glob": "^11.0.3",
         "graceful-fs": "^4.2.11",
         "hosted-git-info": "^9.0.2",
-        "ini": "^5.0.0",
+        "ini": "^6.0.0",
         "init-package-json": "^8.2.2",
         "is-cidr": "^6.0.1",
-        "json-parse-even-better-errors": "^4.0.0",
+        "json-parse-even-better-errors": "^5.0.0",
         "libnpmaccess": "^10.0.3",
         "libnpmdiff": "^8.0.9",
         "libnpmexec": "^10.1.8",
@@ -118,36 +120,36 @@
         "libnpmsearch": "^9.0.1",
         "libnpmteam": "^8.0.2",
         "libnpmversion": "^8.0.2",
-        "make-fetch-happen": "^15.0.2",
-        "minimatch": "^10.0.3",
+        "make-fetch-happen": "^15.0.3",
+        "minimatch": "^10.1.1",
         "minipass": "^7.1.1",
         "minipass-pipeline": "^1.2.4",
         "ms": "^2.1.2",
-        "node-gyp": "^11.4.2",
-        "nopt": "^8.1.0",
+        "node-gyp": "^12.1.0",
+        "nopt": "^9.0.0",
         "npm-audit-report": "^6.0.0",
-        "npm-install-checks": "^7.1.2",
-        "npm-package-arg": "^13.0.1",
-        "npm-pick-manifest": "^11.0.1",
-        "npm-profile": "^12.0.0",
-        "npm-registry-fetch": "^19.0.0",
+        "npm-install-checks": "^8.0.0",
+        "npm-package-arg": "^13.0.2",
+        "npm-pick-manifest": "^11.0.3",
+        "npm-profile": "^12.0.1",
+        "npm-registry-fetch": "^19.1.1",
         "npm-user-validate": "^3.0.0",
         "p-map": "^7.0.3",
-        "pacote": "^21.0.3",
-        "parse-conflict-json": "^4.0.0",
-        "proc-log": "^5.0.0",
+        "pacote": "^21.0.4",
+        "parse-conflict-json": "^5.0.1",
+        "proc-log": "^6.0.0",
         "qrcode-terminal": "^0.12.0",
         "read": "^4.1.0",
         "semver": "^7.7.3",
         "spdx-expression-parse": "^4.0.0",
-        "ssri": "^12.0.0",
+        "ssri": "^13.0.0",
         "supports-color": "^10.2.2",
-        "tar": "^7.5.1",
+        "tar": "^7.5.2",
         "text-table": "~0.2.0",
         "tiny-relative-date": "^2.0.2",
         "treeverse": "^3.0.0",
-        "validate-npm-package-name": "^6.0.2",
-        "which": "^5.0.0"
+        "validate-npm-package-name": "^7.0.0",
+        "which": "^6.0.0"
       },
       "bin": {
         "npm": "bin/npm-cli.js",
@@ -156,7 +158,7 @@
       "devDependencies": {
         "@npmcli/docs": "^1.0.0",
         "@npmcli/eslint-config": "^5.1.0",
-        "@npmcli/git": "^7.0.0",
+        "@npmcli/git": "^7.0.1",
         "@npmcli/mock-globals": "^1.0.0",
         "@npmcli/mock-registry": "^1.0.0",
         "@npmcli/template-oss": "4.25.1",
@@ -167,7 +169,7 @@
         "cli-table3": "^0.6.4",
         "diff": "^8.0.2",
         "nock": "^13.4.0",
-        "npm-packlist": "^10.0.2",
+        "npm-packlist": "^10.0.3",
         "remark": "^15.0.1",
         "remark-gfm": "^4.0.1",
         "remark-github": "^12.0.0",
@@ -1621,6 +1623,22 @@
         "eslint-plugin-promise": "^6.0.0"
       }
     },
+    "node_modules/@npmcli/eslint-config/node_modules/which": {
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/which/-/which-5.0.0.tgz",
+      "integrity": "sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "isexe": "^3.1.1"
+      },
+      "bin": {
+        "node-which": "bin/which.js"
+      },
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
     "node_modules/@npmcli/fs": {
       "version": "4.0.0",
       "inBundle": true,
@@ -1633,44 +1651,50 @@
       }
     },
     "node_modules/@npmcli/git": {
-      "version": "7.0.0",
+      "version": "7.0.1",
+      "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-7.0.1.tgz",
+      "integrity": "sha512-+XTFxK2jJF/EJJ5SoAzXk3qwIDfvFc5/g+bD274LZ7uY7LE8sTfG6Z8rOanPl2ZEvZWqNvmEdtXC25cE54VcoA==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
-        "@npmcli/promise-spawn": "^8.0.0",
-        "ini": "^5.0.0",
+        "@npmcli/promise-spawn": "^9.0.0",
+        "ini": "^6.0.0",
         "lru-cache": "^11.2.1",
         "npm-pick-manifest": "^11.0.1",
-        "proc-log": "^5.0.0",
+        "proc-log": "^6.0.0",
         "promise-retry": "^2.0.1",
         "semver": "^7.3.5",
-        "which": "^5.0.0"
+        "which": "^6.0.0"
       },
       "engines": {
         "node": "^20.17.0 || >=22.9.0"
       }
     },
     "node_modules/@npmcli/installed-package-contents": {
-      "version": "3.0.0",
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/@npmcli/installed-package-contents/-/installed-package-contents-4.0.0.tgz",
+      "integrity": "sha512-yNyAdkBxB72gtZ4GrwXCM0ZUedo9nIbOMKfGjt6Cu6DXf0p8y1PViZAKDC8q8kv/fufx0WTjRBdSlyrvnP7hmA==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
-        "npm-bundled": "^4.0.0",
-        "npm-normalize-package-bin": "^4.0.0"
+        "npm-bundled": "^5.0.0",
+        "npm-normalize-package-bin": "^5.0.0"
       },
       "bin": {
         "installed-package-contents": "bin/index.js"
       },
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
     "node_modules/@npmcli/map-workspaces": {
-      "version": "5.0.0",
+      "version": "5.0.1",
+      "resolved": "https://registry.npmjs.org/@npmcli/map-workspaces/-/map-workspaces-5.0.1.tgz",
+      "integrity": "sha512-LFEh3vY5nyiVI9IY9rko7FtAtS9fjgQySARlccKbnS7BMWFyQF73OT/n8NG22/8xyp57xPIl13gwO/OD63nktg==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
-        "@npmcli/name-from-folder": "^3.0.0",
+        "@npmcli/name-from-folder": "^4.0.0",
         "@npmcli/package-json": "^7.0.0",
         "glob": "^11.0.3",
         "minimatch": "^10.0.3"
@@ -1680,13 +1704,16 @@
       }
     },
     "node_modules/@npmcli/metavuln-calculator": {
-      "version": "9.0.2",
+      "version": "9.0.3",
+      "resolved": "https://registry.npmjs.org/@npmcli/metavuln-calculator/-/metavuln-calculator-9.0.3.tgz",
+      "integrity": "sha512-94GLSYhLXF2t2LAC7pDwLaM4uCARzxShyAQKsirmlNcpidH89VA4/+K1LbJmRMgz5gy65E/QBBWQdUvGLe2Frg==",
+      "inBundle": true,
       "license": "ISC",
       "dependencies": {
         "cacache": "^20.0.0",
-        "json-parse-even-better-errors": "^4.0.0",
+        "json-parse-even-better-errors": "^5.0.0",
         "pacote": "^21.0.0",
-        "proc-log": "^5.0.0",
+        "proc-log": "^6.0.0",
         "semver": "^7.3.5"
       },
       "engines": {
@@ -1702,31 +1729,37 @@
       "link": true
     },
     "node_modules/@npmcli/name-from-folder": {
-      "version": "3.0.0",
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/@npmcli/name-from-folder/-/name-from-folder-4.0.0.tgz",
+      "integrity": "sha512-qfrhVlOSqmKM8i6rkNdZzABj8MKEITGFAY+4teqBziksCQAOLutiAxM1wY2BKEd8KjUSpWmWCYxvXr0y4VTlPg==",
       "inBundle": true,
       "license": "ISC",
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
     "node_modules/@npmcli/node-gyp": {
-      "version": "4.0.0",
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/@npmcli/node-gyp/-/node-gyp-5.0.0.tgz",
+      "integrity": "sha512-uuG5HZFXLfyFKqg8QypsmgLQW7smiRjVc45bqD/ofZZcR/uxEjgQU8qDPv0s9TEeMUiAAU/GC5bR6++UdTirIQ==",
       "inBundle": true,
       "license": "ISC",
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
     "node_modules/@npmcli/package-json": {
-      "version": "7.0.1",
+      "version": "7.0.2",
+      "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-7.0.2.tgz",
+      "integrity": "sha512-0ylN3U5htO1SJTmy2YI78PZZjLkKUGg7EKgukb2CRi0kzyoDr0cfjHAzi7kozVhj2V3SxN1oyKqZ2NSo40z00g==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
         "@npmcli/git": "^7.0.0",
         "glob": "^11.0.3",
         "hosted-git-info": "^9.0.0",
-        "json-parse-even-better-errors": "^4.0.0",
-        "proc-log": "^5.0.0",
+        "json-parse-even-better-errors": "^5.0.0",
+        "proc-log": "^6.0.0",
         "semver": "^7.5.3",
         "validate-npm-package-license": "^3.0.4"
       },
@@ -1735,14 +1768,16 @@
       }
     },
     "node_modules/@npmcli/promise-spawn": {
-      "version": "8.0.3",
+      "version": "9.0.1",
+      "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-9.0.1.tgz",
+      "integrity": "sha512-OLUaoqBuyxeTqUvjA3FZFiXUfYC1alp3Sa99gW3EUDz3tZ3CbXDdcZ7qWKBzicrJleIgucoWamWH1saAmH/l2Q==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
-        "which": "^5.0.0"
+        "which": "^6.0.0"
       },
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
     "node_modules/@npmcli/query": {
@@ -1756,24 +1791,28 @@
       }
     },
     "node_modules/@npmcli/redact": {
-      "version": "3.2.2",
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/@npmcli/redact/-/redact-4.0.0.tgz",
+      "integrity": "sha512-gOBg5YHMfZy+TfHArfVogwgfBeQnKbbGo3pSUyK/gSI0AVu+pEiDVcKlQb0D8Mg1LNRZILZ6XG8I5dJ4KuAd9Q==",
       "inBundle": true,
       "license": "ISC",
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
     "node_modules/@npmcli/run-script": {
-      "version": "10.0.0",
+      "version": "10.0.3",
+      "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-10.0.3.tgz",
+      "integrity": "sha512-ER2N6itRkzWbbtVmZ9WKaWxVlKlOeBFF1/7xx+KA5J1xKa4JjUwBdb6tDpk0v1qA+d+VDwHI9qmLcXSWcmi+Rw==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
-        "@npmcli/node-gyp": "^4.0.0",
+        "@npmcli/node-gyp": "^5.0.0",
         "@npmcli/package-json": "^7.0.0",
-        "@npmcli/promise-spawn": "^8.0.0",
-        "node-gyp": "^11.0.0",
-        "proc-log": "^5.0.0",
-        "which": "^5.0.0"
+        "@npmcli/promise-spawn": "^9.0.0",
+        "node-gyp": "^12.1.0",
+        "proc-log": "^6.0.0",
+        "which": "^6.0.0"
       },
       "engines": {
         "node": "^20.17.0 || >=22.9.0"
@@ -1829,6 +1868,36 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/ini": {
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/ini/-/ini-5.0.0.tgz",
+      "integrity": "sha512-+N0ngpO3e7cRUWOJAS7qw0IZIVc6XPrW4MlFBdD066F2L4k1L6ker3hLqSq7iXxU5tgS4WGkIUElWn5vogAEnw==",
+      "dev": true,
+      "license": "ISC",
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
+    "node_modules/@npmcli/template-oss/node_modules/json-parse-even-better-errors": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-4.0.0.tgz",
+      "integrity": "sha512-lR4MXjGNgkJc7tkQ97kb2nuEMnNCyU//XYVH0MKTGcXEiSudQ5MKGKen3C5QubYy0vmq+JGitUg92uuywGEwIA==",
+      "dev": true,
+      "license": "MIT",
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
+    "node_modules/@npmcli/template-oss/node_modules/proc-log": {
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-5.0.0.tgz",
+      "integrity": "sha512-Azwzvl90HaF0aCz1JrDdXQykFakSSNPaPoiZ9fm5qJIMHioDZEi7OAdRwSm6rSoPtY3Qutnm3L7ogmg3dc+wbQ==",
+      "dev": true,
+      "license": "ISC",
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
     "node_modules/@octokit/auth-token": {
       "version": "6.0.0",
       "dev": true,
@@ -1983,15 +2052,6 @@
         "@octokit/openapi-types": "^26.0.0"
       }
     },
-    "node_modules/@pkgjs/parseargs": {
-      "version": "0.11.0",
-      "inBundle": true,
-      "license": "MIT",
-      "optional": true,
-      "engines": {
-        "node": ">=14"
-      }
-    },
     "node_modules/@rtsao/scc": {
       "version": "1.1.0",
       "dev": true,
@@ -2043,6 +2103,16 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
+    "node_modules/@sigstore/sign/node_modules/proc-log": {
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-5.0.0.tgz",
+      "integrity": "sha512-Azwzvl90HaF0aCz1JrDdXQykFakSSNPaPoiZ9fm5qJIMHioDZEi7OAdRwSm6rSoPtY3Qutnm3L7ogmg3dc+wbQ==",
+      "inBundle": true,
+      "license": "ISC",
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
     "node_modules/@sigstore/tuf": {
       "version": "4.0.0",
       "inBundle": true,
@@ -2214,11 +2284,13 @@
       }
     },
     "node_modules/abbrev": {
-      "version": "3.0.1",
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-4.0.0.tgz",
+      "integrity": "sha512-a1wflyaL0tHtJSmLSOVybYhy22vRih4eduhhrkcjgrWGnRfrZtovJ2FRjxuTtkkj47O/baf0R86QU5OuYpz8fA==",
       "inBundle": true,
       "license": "ISC",
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
     "node_modules/acorn": {
@@ -2720,17 +2792,19 @@
       }
     },
     "node_modules/bin-links": {
-      "version": "5.0.0",
+      "version": "6.0.0",
+      "resolved": "https://registry.npmjs.org/bin-links/-/bin-links-6.0.0.tgz",
+      "integrity": "sha512-X4CiKlcV2GjnCMwnKAfbVWpHa++65th9TuzAEYtZoATiOE2DQKhSp4CJlyLoTqdhBKlXjpXjCTYPNNFS33Fi6w==",
       "license": "ISC",
       "dependencies": {
-        "cmd-shim": "^7.0.0",
-        "npm-normalize-package-bin": "^4.0.0",
-        "proc-log": "^5.0.0",
-        "read-cmd-shim": "^5.0.0",
-        "write-file-atomic": "^6.0.0"
+        "cmd-shim": "^8.0.0",
+        "npm-normalize-package-bin": "^5.0.0",
+        "proc-log": "^6.0.0",
+        "read-cmd-shim": "^6.0.0",
+        "write-file-atomic": "^7.0.0"
       },
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
     "node_modules/binary-extensions": {
@@ -2837,6 +2911,19 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
+    "node_modules/cacache/node_modules/ssri": {
+      "version": "12.0.0",
+      "resolved": "https://registry.npmjs.org/ssri/-/ssri-12.0.0.tgz",
+      "integrity": "sha512-S7iGNosepx9RadX82oimUkvr0Ct7IjJbEbs4mJcTxst8um95J3sDYU1RBEOvdu6oL1Wek2ODI5i4MAw+dZ6cAQ==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "minipass": "^7.0.3"
+      },
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
     "node_modules/caching-transform": {
       "version": "4.0.0",
       "dev": true,
@@ -3170,10 +3257,12 @@
       }
     },
     "node_modules/cmd-shim": {
-      "version": "7.0.0",
+      "version": "8.0.0",
+      "resolved": "https://registry.npmjs.org/cmd-shim/-/cmd-shim-8.0.0.tgz",
+      "integrity": "sha512-Jk/BK6NCapZ58BKUxlSI+ouKRbjH1NLZCgJkYoab+vEHUY3f6OzpNBN9u7HFSv9J6TRDGs4PLOHezoKGaFRSCA==",
       "license": "ISC",
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
     "node_modules/code-suggester": {
@@ -4967,7 +5056,9 @@
       }
     },
     "node_modules/exponential-backoff": {
-      "version": "3.1.2",
+      "version": "3.1.3",
+      "resolved": "https://registry.npmjs.org/exponential-backoff/-/exponential-backoff-3.1.3.tgz",
+      "integrity": "sha512-ZgEeZXj30q+I0EN+CbSSpIyPaJ5HVQD18Z1m+u1FXbAeT94mr1zw50q4q6jiiC447Nl/YTcIYSAftiGqetwXCA==",
       "inBundle": true,
       "license": "Apache-2.0"
     },
@@ -5933,11 +6024,13 @@
       "license": "ISC"
     },
     "node_modules/ini": {
-      "version": "5.0.0",
+      "version": "6.0.0",
+      "resolved": "https://registry.npmjs.org/ini/-/ini-6.0.0.tgz",
+      "integrity": "sha512-IBTdIkzZNOpqm7q3dRqJvMaldXjDHWkEDfrwGEQTs5eaQMWV+djAhR+wahyNNMAa+qpbDUhBMVt4ZKNwpPm7xQ==",
       "inBundle": true,
       "license": "ISC",
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
     "node_modules/init-package-json": {
@@ -5957,6 +6050,16 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
+    "node_modules/init-package-json/node_modules/validate-npm-package-name": {
+      "version": "6.0.2",
+      "resolved": "https://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-6.0.2.tgz",
+      "integrity": "sha512-IUoow1YUtvoBBC06dXs8bR8B9vuA3aJfmQNKMoaPG/OFsPmoQvw8xh+6Ye25Gx9DQhoEom3Pcu9MKHerm/NpUQ==",
+      "inBundle": true,
+      "license": "ISC",
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
     "node_modules/internal-slot": {
       "version": "1.1.0",
       "dev": true,
@@ -6777,11 +6880,13 @@
       "peer": true
     },
     "node_modules/json-parse-even-better-errors": {
-      "version": "4.0.0",
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-5.0.0.tgz",
+      "integrity": "sha512-ZF1nxZ28VhQouRWhUcVlUIN3qwSgPuswK05s/HIaoetAoE/9tngVmCHjSxmSQPav1nd+lPtTL0YZ/2AFdR/iYQ==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
     "node_modules/json-schema-traverse": {
@@ -7137,7 +7242,9 @@
       }
     },
     "node_modules/make-fetch-happen": {
-      "version": "15.0.2",
+      "version": "15.0.3",
+      "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-15.0.3.tgz",
+      "integrity": "sha512-iyyEpDty1mwW3dGlYXAJqC/azFn5PPvgKVwXayOGBSmKLxhKZ9fg4qIan2ePpp1vJIwfFiO34LAPZgq9SZW9Aw==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -7145,13 +7252,13 @@
         "cacache": "^20.0.1",
         "http-cache-semantics": "^4.1.1",
         "minipass": "^7.0.2",
-        "minipass-fetch": "^4.0.0",
+        "minipass-fetch": "^5.0.0",
         "minipass-flush": "^1.0.5",
         "minipass-pipeline": "^1.2.4",
         "negotiator": "^1.0.0",
-        "proc-log": "^5.0.0",
+        "proc-log": "^6.0.0",
         "promise-retry": "^2.0.1",
-        "ssri": "^12.0.0"
+        "ssri": "^13.0.0"
       },
       "engines": {
         "node": "^20.17.0 || >=22.9.0"
@@ -8052,9 +8159,11 @@
       }
     },
     "node_modules/minimatch": {
-      "version": "10.0.3",
+      "version": "10.1.1",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.1.1.tgz",
+      "integrity": "sha512-enIvLvRAFZYXJzkCYG5RKmPfrFArdLv+R+lbQ53BmIMLIry74bjKzX6iHAm8WYamJkhSSEabrWN5D97XnKObjQ==",
       "inBundle": true,
-      "license": "ISC",
+      "license": "BlueOak-1.0.0",
       "dependencies": {
         "@isaacs/brace-expansion": "^5.0.0"
       },
@@ -8114,7 +8223,9 @@
       }
     },
     "node_modules/minipass-fetch": {
-      "version": "4.0.1",
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-5.0.0.tgz",
+      "integrity": "sha512-fiCdUALipqgPWrOVTz9fw0XhcazULXOSU6ie40DDbX1F49p1dBrSRBuswndTx1x3vEb/g0FT7vC4c4C2u/mh3A==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -8123,7 +8234,7 @@
         "minizlib": "^3.0.1"
       },
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
       },
       "optionalDependencies": {
         "encoding": "^0.1.13"
@@ -8316,151 +8427,28 @@
       }
     },
     "node_modules/node-gyp": {
-      "version": "11.4.2",
+      "version": "12.1.0",
+      "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-12.1.0.tgz",
+      "integrity": "sha512-W+RYA8jBnhSr2vrTtlPYPc1K+CSjGpVDRZxcqJcERZ8ND3A1ThWPHRwctTx3qC3oW99jt726jhdz3Y6ky87J4g==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
         "env-paths": "^2.2.0",
         "exponential-backoff": "^3.1.1",
         "graceful-fs": "^4.2.6",
-        "make-fetch-happen": "^14.0.3",
-        "nopt": "^8.0.0",
-        "proc-log": "^5.0.0",
+        "make-fetch-happen": "^15.0.0",
+        "nopt": "^9.0.0",
+        "proc-log": "^6.0.0",
         "semver": "^7.3.5",
-        "tar": "^7.4.3",
+        "tar": "^7.5.2",
         "tinyglobby": "^0.2.12",
-        "which": "^5.0.0"
+        "which": "^6.0.0"
       },
       "bin": {
         "node-gyp": "bin/node-gyp.js"
       },
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
-    "node_modules/node-gyp/node_modules/@npmcli/agent": {
-      "version": "3.0.0",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "agent-base": "^7.1.0",
-        "http-proxy-agent": "^7.0.0",
-        "https-proxy-agent": "^7.0.1",
-        "lru-cache": "^10.0.1",
-        "socks-proxy-agent": "^8.0.3"
-      },
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
-    "node_modules/node-gyp/node_modules/cacache": {
-      "version": "19.0.1",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "@npmcli/fs": "^4.0.0",
-        "fs-minipass": "^3.0.0",
-        "glob": "^10.2.2",
-        "lru-cache": "^10.0.1",
-        "minipass": "^7.0.3",
-        "minipass-collect": "^2.0.1",
-        "minipass-flush": "^1.0.5",
-        "minipass-pipeline": "^1.2.4",
-        "p-map": "^7.0.2",
-        "ssri": "^12.0.0",
-        "tar": "^7.4.3",
-        "unique-filename": "^4.0.0"
-      },
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
-    "node_modules/node-gyp/node_modules/glob": {
-      "version": "10.4.5",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "foreground-child": "^3.1.0",
-        "jackspeak": "^3.1.2",
-        "minimatch": "^9.0.4",
-        "minipass": "^7.1.2",
-        "package-json-from-dist": "^1.0.0",
-        "path-scurry": "^1.11.1"
-      },
-      "bin": {
-        "glob": "dist/esm/bin.mjs"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
-    "node_modules/node-gyp/node_modules/jackspeak": {
-      "version": "3.4.3",
-      "inBundle": true,
-      "license": "BlueOak-1.0.0",
-      "dependencies": {
-        "@isaacs/cliui": "^8.0.2"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      },
-      "optionalDependencies": {
-        "@pkgjs/parseargs": "^0.11.0"
-      }
-    },
-    "node_modules/node-gyp/node_modules/lru-cache": {
-      "version": "10.4.3",
-      "inBundle": true,
-      "license": "ISC"
-    },
-    "node_modules/node-gyp/node_modules/make-fetch-happen": {
-      "version": "14.0.3",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "@npmcli/agent": "^3.0.0",
-        "cacache": "^19.0.1",
-        "http-cache-semantics": "^4.1.1",
-        "minipass": "^7.0.2",
-        "minipass-fetch": "^4.0.0",
-        "minipass-flush": "^1.0.5",
-        "minipass-pipeline": "^1.2.4",
-        "negotiator": "^1.0.0",
-        "proc-log": "^5.0.0",
-        "promise-retry": "^2.0.1",
-        "ssri": "^12.0.0"
-      },
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
-    "node_modules/node-gyp/node_modules/minimatch": {
-      "version": "9.0.5",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "brace-expansion": "^2.0.1"
-      },
-      "engines": {
-        "node": ">=16 || 14 >=14.17"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
-    "node_modules/node-gyp/node_modules/path-scurry": {
-      "version": "1.11.1",
-      "inBundle": true,
-      "license": "BlueOak-1.0.0",
-      "dependencies": {
-        "lru-cache": "^10.2.0",
-        "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0"
-      },
-      "engines": {
-        "node": ">=16 || 14 >=14.18"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
     "node_modules/node-html-parser": {
@@ -8491,17 +8479,19 @@
       "license": "MIT"
     },
     "node_modules/nopt": {
-      "version": "8.1.0",
+      "version": "9.0.0",
+      "resolved": "https://registry.npmjs.org/nopt/-/nopt-9.0.0.tgz",
+      "integrity": "sha512-Zhq3a+yFKrYwSBluL4H9XP3m3y5uvQkB/09CwDruCiRmR/UJYnn9W4R48ry0uGC70aeTPKLynBtscP9efFFcPw==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
-        "abbrev": "^3.0.0"
+        "abbrev": "^4.0.0"
       },
       "bin": {
         "nopt": "bin/nopt.js"
       },
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
     "node_modules/normalize-path": {
@@ -8521,72 +8511,80 @@
       }
     },
     "node_modules/npm-bundled": {
-      "version": "4.0.0",
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-5.0.0.tgz",
+      "integrity": "sha512-JLSpbzh6UUXIEoqPsYBvVNVmyrjVZ1fzEFbqxKkTJQkWBO3xFzFT+KDnSKQWwOQNbuWRwt5LSD6HOTLGIWzfrw==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
-        "npm-normalize-package-bin": "^4.0.0"
+        "npm-normalize-package-bin": "^5.0.0"
       },
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
     "node_modules/npm-install-checks": {
-      "version": "7.1.2",
+      "version": "8.0.0",
+      "resolved": "https://registry.npmjs.org/npm-install-checks/-/npm-install-checks-8.0.0.tgz",
+      "integrity": "sha512-ScAUdMpyzkbpxoNekQ3tNRdFI8SJ86wgKZSQZdUxT+bj0wVFpsEMWnkXP0twVe1gJyNF5apBWDJhhIbgrIViRA==",
       "inBundle": true,
       "license": "BSD-2-Clause",
       "dependencies": {
         "semver": "^7.1.1"
       },
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
     "node_modules/npm-normalize-package-bin": {
-      "version": "4.0.0",
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-5.0.0.tgz",
+      "integrity": "sha512-CJi3OS4JLsNMmr2u07OJlhcrPxCeOeP/4xq67aWNai6TNWWbTrlNDgl8NcFKVlcBKp18GPj+EzbNIgrBfZhsag==",
       "inBundle": true,
       "license": "ISC",
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
     "node_modules/npm-package-arg": {
-      "version": "13.0.1",
-      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-13.0.1.tgz",
-      "integrity": "sha512-6zqls5xFvJbgFjB1B2U6yITtyGBjDBORB7suI4zA4T/sZ1OmkMFlaQSNB/4K0LtXNA1t4OprAFxPisadK5O2ag==",
+      "version": "13.0.2",
+      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-13.0.2.tgz",
+      "integrity": "sha512-IciCE3SY3uE84Ld8WZU23gAPPV9rIYod4F+rc+vJ7h7cwAJt9Vk6TVsK60ry7Uj3SRS3bqRRIGuTp9YVlk6WNA==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
         "hosted-git-info": "^9.0.0",
-        "proc-log": "^5.0.0",
+        "proc-log": "^6.0.0",
         "semver": "^7.3.5",
-        "validate-npm-package-name": "^6.0.0"
+        "validate-npm-package-name": "^7.0.0"
       },
       "engines": {
         "node": "^20.17.0 || >=22.9.0"
       }
     },
     "node_modules/npm-packlist": {
-      "version": "10.0.2",
-      "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-10.0.2.tgz",
-      "integrity": "sha512-DrIWNiWT0FTdDRjGOYfEEZUNe1IzaSZ+up7qBTKnrQDySpdmuOQvytrqQlpK5QrCA4IThMvL4wTumqaa1ZvVIQ==",
+      "version": "10.0.3",
+      "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-10.0.3.tgz",
+      "integrity": "sha512-zPukTwJMOu5X5uvm0fztwS5Zxyvmk38H/LfidkOMt3gbZVCyro2cD/ETzwzVPcWZA3JOyPznfUN/nkyFiyUbxg==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
         "ignore-walk": "^8.0.0",
-        "proc-log": "^5.0.0"
+        "proc-log": "^6.0.0"
       },
       "engines": {
         "node": "^20.17.0 || >=22.9.0"
       }
     },
     "node_modules/npm-pick-manifest": {
-      "version": "11.0.1",
+      "version": "11.0.3",
+      "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-11.0.3.tgz",
+      "integrity": "sha512-buzyCfeoGY/PxKqmBqn1IUJrZnUi1VVJTdSSRPGI60tJdUhUoSQFhs0zycJokDdOznQentgrpf8LayEHyyYlqQ==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
-        "npm-install-checks": "^7.1.0",
-        "npm-normalize-package-bin": "^4.0.0",
+        "npm-install-checks": "^8.0.0",
+        "npm-normalize-package-bin": "^5.0.0",
         "npm-package-arg": "^13.0.0",
         "semver": "^7.3.5"
       },
@@ -8595,30 +8593,34 @@
       }
     },
     "node_modules/npm-profile": {
-      "version": "12.0.0",
+      "version": "12.0.1",
+      "resolved": "https://registry.npmjs.org/npm-profile/-/npm-profile-12.0.1.tgz",
+      "integrity": "sha512-Xs1mejJ1/9IKucCxdFMkiBJUre0xaxfCpbsO7DB7CadITuT4k68eI05HBlw4kj+Em1rsFMgeFNljFPYvPETbVQ==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
         "npm-registry-fetch": "^19.0.0",
-        "proc-log": "^5.0.0"
+        "proc-log": "^6.0.0"
       },
       "engines": {
         "node": "^20.17.0 || >=22.9.0"
       }
     },
     "node_modules/npm-registry-fetch": {
-      "version": "19.0.0",
+      "version": "19.1.1",
+      "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-19.1.1.tgz",
+      "integrity": "sha512-TakBap6OM1w0H73VZVDf44iFXsOS3h+L4wVMXmbWOQroZgFhMch0juN6XSzBNlD965yIKvWg2dfu7NSiaYLxtw==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
-        "@npmcli/redact": "^3.0.0",
+        "@npmcli/redact": "^4.0.0",
         "jsonparse": "^1.3.1",
         "make-fetch-happen": "^15.0.0",
         "minipass": "^7.0.2",
-        "minipass-fetch": "^4.0.0",
+        "minipass-fetch": "^5.0.0",
         "minizlib": "^3.0.1",
         "npm-package-arg": "^13.0.0",
-        "proc-log": "^5.0.0"
+        "proc-log": "^6.0.0"
       },
       "engines": {
         "node": "^20.17.0 || >=22.9.0"
@@ -9116,14 +9118,16 @@
       "license": "BlueOak-1.0.0"
     },
     "node_modules/pacote": {
-      "version": "21.0.3",
+      "version": "21.0.4",
+      "resolved": "https://registry.npmjs.org/pacote/-/pacote-21.0.4.tgz",
+      "integrity": "sha512-RplP/pDW0NNNDh3pnaoIWYPvNenS7UqMbXyvMqJczosiFWTeGGwJC2NQBLqKf4rGLFfwCOnntw1aEp9Jiqm1MA==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
         "@npmcli/git": "^7.0.0",
-        "@npmcli/installed-package-contents": "^3.0.0",
+        "@npmcli/installed-package-contents": "^4.0.0",
         "@npmcli/package-json": "^7.0.0",
-        "@npmcli/promise-spawn": "^8.0.0",
+        "@npmcli/promise-spawn": "^9.0.0",
         "@npmcli/run-script": "^10.0.0",
         "cacache": "^20.0.0",
         "fs-minipass": "^3.0.0",
@@ -9132,10 +9136,10 @@
         "npm-packlist": "^10.0.1",
         "npm-pick-manifest": "^11.0.1",
         "npm-registry-fetch": "^19.0.0",
-        "proc-log": "^5.0.0",
+        "proc-log": "^6.0.0",
         "promise-retry": "^2.0.1",
         "sigstore": "^4.0.0",
-        "ssri": "^12.0.0",
+        "ssri": "^13.0.0",
         "tar": "^7.4.3"
       },
       "bin": {
@@ -9157,16 +9161,18 @@
       }
     },
     "node_modules/parse-conflict-json": {
-      "version": "4.0.0",
+      "version": "5.0.1",
+      "resolved": "https://registry.npmjs.org/parse-conflict-json/-/parse-conflict-json-5.0.1.tgz",
+      "integrity": "sha512-ZHEmNKMq1wyJXNwLxyHnluPfRAFSIliBvbK/UiOceROt4Xh9Pz0fq49NytIaeaCUf5VR86hwQ/34FCcNU5/LKQ==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
-        "json-parse-even-better-errors": "^4.0.0",
+        "json-parse-even-better-errors": "^5.0.0",
         "just-diff": "^6.0.0",
         "just-diff-apply": "^5.2.0"
       },
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
     "node_modules/parse-diff": {
@@ -9404,11 +9410,13 @@
       }
     },
     "node_modules/proc-log": {
-      "version": "5.0.0",
+      "version": "6.0.0",
+      "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-6.0.0.tgz",
+      "integrity": "sha512-KG/XsTDN901PNfPfAMmj6N/Ywg9tM+bHK8pAz+27fS4N4Pcr+4zoYBOcGSBu6ceXYNPxkLpa4ohtfxV1XcLAfA==",
       "inBundle": true,
       "license": "ISC",
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
     "node_modules/process-on-spawn": {
@@ -9571,10 +9579,12 @@
       }
     },
     "node_modules/read-cmd-shim": {
-      "version": "5.0.0",
+      "version": "6.0.0",
+      "resolved": "https://registry.npmjs.org/read-cmd-shim/-/read-cmd-shim-6.0.0.tgz",
+      "integrity": "sha512-1zM5HuOfagXCBWMN83fuFI/x+T/UhZ7k+KIzhrHXcQoeX5+7gmaDYjELQHmmzIodumBHeByBJT4QYS7ufAgs7A==",
       "license": "ISC",
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
     "node_modules/read-pkg": {
@@ -10936,14 +10946,16 @@
       "license": "BSD-3-Clause"
     },
     "node_modules/ssri": {
-      "version": "12.0.0",
+      "version": "13.0.0",
+      "resolved": "https://registry.npmjs.org/ssri/-/ssri-13.0.0.tgz",
+      "integrity": "sha512-yizwGBpbCn4YomB2lzhZqrHLJoqFGXihNbib3ozhqF/cIp5ue+xSmOQrjNasEE62hFxsCcg/V/z23t4n8jMEng==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
         "minipass": "^7.0.3"
       },
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
     "node_modules/stack-utils": {
@@ -13365,11 +13377,11 @@
       }
     },
     "node_modules/tar": {
-      "version": "7.5.1",
-      "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.1.tgz",
-      "integrity": "sha512-nlGpxf+hv0v7GkWBK2V9spgactGOp0qvfWRxUMjqHyzrt3SgwE48DIv/FhqPHJYLHpgW1opq3nERbz5Anq7n1g==",
+      "version": "7.5.2",
+      "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.2.tgz",
+      "integrity": "sha512-7NyxrTE4Anh8km8iEy7o0QYPs+0JKBTj5ZaqHg6B39erLg0qYXN3BijtShwbsNSvQ+LN75+KV+C4QR/f6Gwnpg==",
       "inBundle": true,
-      "license": "ISC",
+      "license": "BlueOak-1.0.0",
       "dependencies": {
         "@isaacs/fs-minipass": "^4.0.0",
         "chownr": "^3.0.0",
@@ -14109,11 +14121,13 @@
       }
     },
     "node_modules/validate-npm-package-name": {
-      "version": "6.0.2",
+      "version": "7.0.0",
+      "resolved": "https://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-7.0.0.tgz",
+      "integrity": "sha512-bwVk/OK+Qu108aJcMAEiU4yavHUI7aN20TgZNBj9MR2iU1zPUl1Z1Otr7771ExfYTPTvfN8ZJ1pbr5Iklgt4xg==",
       "inBundle": true,
       "license": "ISC",
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
     "node_modules/vfile": {
@@ -14216,7 +14230,9 @@
       }
     },
     "node_modules/which": {
-      "version": "5.0.0",
+      "version": "6.0.0",
+      "resolved": "https://registry.npmjs.org/which/-/which-6.0.0.tgz",
+      "integrity": "sha512-f+gEpIKMR9faW/JgAgPK1D7mekkFoqbmiwvNzuhsHetni20QSgzg9Vhn0g2JSJkkfehQnqdUAx7/e15qS1lPxg==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -14226,7 +14242,7 @@
         "node-which": "bin/which.js"
       },
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
     "node_modules/which-boxed-primitive": {
@@ -14432,14 +14448,16 @@
       "license": "ISC"
     },
     "node_modules/write-file-atomic": {
-      "version": "6.0.0",
+      "version": "7.0.0",
+      "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-7.0.0.tgz",
+      "integrity": "sha512-YnlPC6JqnZl6aO4uRc+dx5PHguiR9S6WeoLtpxNT9wIG+BDya7ZNE1q7KOjVgaA73hKhKLpVPgJ5QA9THQ5BRg==",
       "license": "ISC",
       "dependencies": {
         "imurmurhash": "^0.1.4",
         "signal-exit": "^4.0.1"
       },
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
     "node_modules/ws": {
@@ -14559,12 +14577,12 @@
       "devDependencies": {
         "@npmcli/eslint-config": "^5.0.1",
         "@npmcli/mock-registry": "^1.0.0",
-        "@npmcli/promise-spawn": "^8.0.1",
+        "@npmcli/promise-spawn": "^9.0.0",
         "@npmcli/template-oss": "4.25.1",
         "proxy": "^2.1.1",
         "rimraf": "^6.0.1",
         "tap": "^16.3.8",
-        "which": "^5.0.0"
+        "which": "^6.0.0"
       },
       "engines": {
         "node": "^20.17.0 || >=22.9.0"
@@ -14577,16 +14595,16 @@
       "dependencies": {
         "@isaacs/string-locale-compare": "^1.1.0",
         "@npmcli/fs": "^4.0.0",
-        "@npmcli/installed-package-contents": "^3.0.0",
+        "@npmcli/installed-package-contents": "^4.0.0",
         "@npmcli/map-workspaces": "^5.0.0",
         "@npmcli/metavuln-calculator": "^9.0.2",
-        "@npmcli/name-from-folder": "^3.0.0",
-        "@npmcli/node-gyp": "^4.0.0",
+        "@npmcli/name-from-folder": "^4.0.0",
+        "@npmcli/node-gyp": "^5.0.0",
         "@npmcli/package-json": "^7.0.0",
         "@npmcli/query": "^4.0.0",
-        "@npmcli/redact": "^3.0.0",
+        "@npmcli/redact": "^4.0.0",
         "@npmcli/run-script": "^10.0.0",
-        "bin-links": "^5.0.0",
+        "bin-links": "^6.0.0",
         "cacache": "^20.0.1",
         "common-ancestor-path": "^1.0.1",
         "hosted-git-info": "^9.0.0",
@@ -14594,18 +14612,18 @@
         "lru-cache": "^11.2.1",
         "minimatch": "^10.0.3",
         "nopt": "^8.0.0",
-        "npm-install-checks": "^7.1.0",
+        "npm-install-checks": "^8.0.0",
         "npm-package-arg": "^13.0.0",
         "npm-pick-manifest": "^11.0.1",
         "npm-registry-fetch": "^19.0.0",
         "pacote": "^21.0.2",
-        "parse-conflict-json": "^4.0.0",
-        "proc-log": "^5.0.0",
+        "parse-conflict-json": "^5.0.1",
+        "proc-log": "^6.0.0",
         "proggy": "^3.0.0",
         "promise-all-reject-late": "^1.0.0",
         "promise-call-limit": "^3.0.1",
         "semver": "^7.3.7",
-        "ssri": "^12.0.0",
+        "ssri": "^13.0.0",
         "treeverse": "^3.0.0",
         "walk-up-path": "^4.0.0"
       },
@@ -14627,6 +14645,30 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
+    "workspaces/arborist/node_modules/nopt": {
+      "version": "8.1.0",
+      "resolved": "https://registry.npmjs.org/nopt/-/nopt-8.1.0.tgz",
+      "integrity": "sha512-ieGu42u/Qsa4TFktmaKEwM6MQH0pOWnaB3htzh0JRtx84+Mebc0cbZYN5bC+6WTZ4+77xrL9Pn5m7CV6VIkV7A==",
+      "license": "ISC",
+      "dependencies": {
+        "abbrev": "^3.0.0"
+      },
+      "bin": {
+        "nopt": "bin/nopt.js"
+      },
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
+    "workspaces/arborist/node_modules/nopt/node_modules/abbrev": {
+      "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-3.0.1.tgz",
+      "integrity": "sha512-AO2ac6pjRB3SJmGJo+v5/aK6Omggp6fsLrs6wN9bd35ulu4cCwaAU9+7ZhXjeqHVkaHThLuzH0nZr0YpCDhygg==",
+      "license": "ISC",
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
     "workspaces/config": {
       "name": "@npmcli/config",
       "version": "10.4.2",
@@ -14635,9 +14677,9 @@
         "@npmcli/map-workspaces": "^5.0.0",
         "@npmcli/package-json": "^7.0.0",
         "ci-info": "^4.0.0",
-        "ini": "^5.0.0",
+        "ini": "^6.0.0",
         "nopt": "^8.1.0",
-        "proc-log": "^5.0.0",
+        "proc-log": "^6.0.0",
         "semver": "^7.3.5",
         "walk-up-path": "^4.0.0"
       },
@@ -14651,6 +14693,30 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
+    "workspaces/config/node_modules/nopt": {
+      "version": "8.1.0",
+      "resolved": "https://registry.npmjs.org/nopt/-/nopt-8.1.0.tgz",
+      "integrity": "sha512-ieGu42u/Qsa4TFktmaKEwM6MQH0pOWnaB3htzh0JRtx84+Mebc0cbZYN5bC+6WTZ4+77xrL9Pn5m7CV6VIkV7A==",
+      "license": "ISC",
+      "dependencies": {
+        "abbrev": "^3.0.0"
+      },
+      "bin": {
+        "nopt": "bin/nopt.js"
+      },
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
+    "workspaces/config/node_modules/nopt/node_modules/abbrev": {
+      "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-3.0.1.tgz",
+      "integrity": "sha512-AO2ac6pjRB3SJmGJo+v5/aK6Omggp6fsLrs6wN9bd35ulu4cCwaAU9+7ZhXjeqHVkaHThLuzH0nZr0YpCDhygg==",
+      "license": "ISC",
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
     "workspaces/libnpmaccess": {
       "version": "10.0.3",
       "license": "ISC",
@@ -14690,6 +14756,43 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
+    "workspaces/libnpmdiff/node_modules/@npmcli/installed-package-contents": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/@npmcli/installed-package-contents/-/installed-package-contents-3.0.0.tgz",
+      "integrity": "sha512-fkxoPuFGvxyrH+OQzyTkX2LUEamrF4jZSmxjAtPPHHGO0dqsQ8tTKjnIS8SAnPHdk2I03BDtSMR5K/4loKg79Q==",
+      "license": "ISC",
+      "dependencies": {
+        "npm-bundled": "^4.0.0",
+        "npm-normalize-package-bin": "^4.0.0"
+      },
+      "bin": {
+        "installed-package-contents": "bin/index.js"
+      },
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
+    "workspaces/libnpmdiff/node_modules/npm-bundled": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-4.0.0.tgz",
+      "integrity": "sha512-IxaQZDMsqfQ2Lz37VvyyEtKLe8FsRZuysmedy/N06TU1RyVppYKXrO4xIhR0F+7ubIBox6Q7nir6fQI3ej39iA==",
+      "license": "ISC",
+      "dependencies": {
+        "npm-normalize-package-bin": "^4.0.0"
+      },
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
+    "workspaces/libnpmdiff/node_modules/npm-normalize-package-bin": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-4.0.0.tgz",
+      "integrity": "sha512-TZKxPvItzai9kN9H/TkmCtx/ZN/hvr3vUycjlfmH0ootY9yFBzNOpiXAdIn1Iteqsvk4lQn6B5PTrt+n6h8k/w==",
+      "license": "ISC",
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
     "workspaces/libnpmexec": {
       "version": "10.1.8",
       "license": "ISC",
@@ -14700,7 +14803,7 @@
         "ci-info": "^4.0.0",
         "npm-package-arg": "^13.0.0",
         "pacote": "^21.0.2",
-        "proc-log": "^5.0.0",
+        "proc-log": "^6.0.0",
         "promise-retry": "^2.0.1",
         "read": "^4.0.0",
         "semver": "^7.3.7",
@@ -14711,7 +14814,7 @@
         "@npmcli/eslint-config": "^5.0.1",
         "@npmcli/mock-registry": "^1.0.0",
         "@npmcli/template-oss": "4.25.1",
-        "bin-links": "^5.0.0",
+        "bin-links": "^6.0.0",
         "chalk": "^5.2.0",
         "just-extend": "^6.2.0",
         "just-safe-set": "^4.2.1",
@@ -14782,10 +14885,10 @@
         "ci-info": "^4.0.0",
         "npm-package-arg": "^13.0.0",
         "npm-registry-fetch": "^19.0.0",
-        "proc-log": "^5.0.0",
+        "proc-log": "^6.0.0",
         "semver": "^7.3.7",
         "sigstore": "^4.0.0",
-        "ssri": "^12.0.0"
+        "ssri": "^13.0.0"
       },
       "devDependencies": {
         "@npmcli/eslint-config": "^5.0.1",
@@ -14837,8 +14940,8 @@
       "dependencies": {
         "@npmcli/git": "^7.0.0",
         "@npmcli/run-script": "^10.0.0",
-        "json-parse-even-better-errors": "^4.0.0",
-        "proc-log": "^5.0.0",
+        "json-parse-even-better-errors": "^5.0.0",
+        "proc-log": "^6.0.0",
         "semver": "^7.3.7"
       },
       "devDependencies": {
diff --git a/package.json b/package.json
index ba3441726b01c..c4032ae3cd05b 100644
--- a/package.json
+++ b/package.json
@@ -55,13 +55,14 @@
     "@npmcli/arborist": "^9.1.6",
     "@npmcli/config": "^10.4.2",
     "@npmcli/fs": "^4.0.0",
-    "@npmcli/map-workspaces": "^5.0.0",
-    "@npmcli/package-json": "^7.0.1",
-    "@npmcli/promise-spawn": "^8.0.3",
-    "@npmcli/redact": "^3.2.2",
-    "@npmcli/run-script": "^10.0.0",
+    "@npmcli/map-workspaces": "^5.0.1",
+    "@npmcli/metavuln-calculator": "^9.0.3",
+    "@npmcli/package-json": "^7.0.2",
+    "@npmcli/promise-spawn": "^9.0.1",
+    "@npmcli/redact": "^4.0.0",
+    "@npmcli/run-script": "^10.0.3",
     "@sigstore/tuf": "^4.0.0",
-    "abbrev": "^3.0.1",
+    "abbrev": "^4.0.0",
     "archy": "~1.0.0",
     "cacache": "^20.0.1",
     "chalk": "^5.6.2",
@@ -72,10 +73,10 @@
     "glob": "^11.0.3",
     "graceful-fs": "^4.2.11",
     "hosted-git-info": "^9.0.2",
-    "ini": "^5.0.0",
+    "ini": "^6.0.0",
     "init-package-json": "^8.2.2",
     "is-cidr": "^6.0.1",
-    "json-parse-even-better-errors": "^4.0.0",
+    "json-parse-even-better-errors": "^5.0.0",
     "libnpmaccess": "^10.0.3",
     "libnpmdiff": "^8.0.9",
     "libnpmexec": "^10.1.8",
@@ -86,36 +87,36 @@
     "libnpmsearch": "^9.0.1",
     "libnpmteam": "^8.0.2",
     "libnpmversion": "^8.0.2",
-    "make-fetch-happen": "^15.0.2",
-    "minimatch": "^10.0.3",
+    "make-fetch-happen": "^15.0.3",
+    "minimatch": "^10.1.1",
     "minipass": "^7.1.1",
     "minipass-pipeline": "^1.2.4",
     "ms": "^2.1.2",
-    "node-gyp": "^11.4.2",
-    "nopt": "^8.1.0",
+    "node-gyp": "^12.1.0",
+    "nopt": "^9.0.0",
     "npm-audit-report": "^6.0.0",
-    "npm-install-checks": "^7.1.2",
-    "npm-package-arg": "^13.0.1",
-    "npm-pick-manifest": "^11.0.1",
-    "npm-profile": "^12.0.0",
-    "npm-registry-fetch": "^19.0.0",
+    "npm-install-checks": "^8.0.0",
+    "npm-package-arg": "^13.0.2",
+    "npm-pick-manifest": "^11.0.3",
+    "npm-profile": "^12.0.1",
+    "npm-registry-fetch": "^19.1.1",
     "npm-user-validate": "^3.0.0",
     "p-map": "^7.0.3",
-    "pacote": "^21.0.3",
-    "parse-conflict-json": "^4.0.0",
-    "proc-log": "^5.0.0",
+    "pacote": "^21.0.4",
+    "parse-conflict-json": "^5.0.1",
+    "proc-log": "^6.0.0",
     "qrcode-terminal": "^0.12.0",
     "read": "^4.1.0",
     "semver": "^7.7.3",
     "spdx-expression-parse": "^4.0.0",
-    "ssri": "^12.0.0",
+    "ssri": "^13.0.0",
     "supports-color": "^10.2.2",
-    "tar": "^7.5.1",
+    "tar": "^7.5.2",
     "text-table": "~0.2.0",
     "tiny-relative-date": "^2.0.2",
     "treeverse": "^3.0.0",
-    "validate-npm-package-name": "^6.0.2",
-    "which": "^5.0.0"
+    "validate-npm-package-name": "^7.0.0",
+    "which": "^6.0.0"
   },
   "bundleDependencies": [
     "@isaacs/string-locale-compare",
@@ -123,6 +124,7 @@
     "@npmcli/config",
     "@npmcli/fs",
     "@npmcli/map-workspaces",
+    "@npmcli/metavuln-calculator",
     "@npmcli/package-json",
     "@npmcli/promise-spawn",
     "@npmcli/redact",
@@ -187,7 +189,7 @@
   "devDependencies": {
     "@npmcli/docs": "^1.0.0",
     "@npmcli/eslint-config": "^5.1.0",
-    "@npmcli/git": "^7.0.0",
+    "@npmcli/git": "^7.0.1",
     "@npmcli/mock-globals": "^1.0.0",
     "@npmcli/mock-registry": "^1.0.0",
     "@npmcli/template-oss": "4.25.1",
@@ -198,7 +200,7 @@
     "cli-table3": "^0.6.4",
     "diff": "^8.0.2",
     "nock": "^13.4.0",
-    "npm-packlist": "^10.0.2",
+    "npm-packlist": "^10.0.3",
     "remark": "^15.0.1",
     "remark-gfm": "^4.0.1",
     "remark-github": "^12.0.0",
diff --git a/smoke-tests/package.json b/smoke-tests/package.json
index 11d61b66a53d1..02e7f5e1f988e 100644
--- a/smoke-tests/package.json
+++ b/smoke-tests/package.json
@@ -21,12 +21,12 @@
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.1",
     "@npmcli/mock-registry": "^1.0.0",
-    "@npmcli/promise-spawn": "^8.0.1",
+    "@npmcli/promise-spawn": "^9.0.0",
     "@npmcli/template-oss": "4.25.1",
     "proxy": "^2.1.1",
     "rimraf": "^6.0.1",
     "tap": "^16.3.8",
-    "which": "^5.0.0"
+    "which": "^6.0.0"
   },
   "author": "GitHub Inc.",
   "license": "ISC",
diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json
index ed00181eceaec..7a7cea6dee2c5 100644
--- a/workspaces/arborist/package.json
+++ b/workspaces/arborist/package.json
@@ -5,16 +5,16 @@
   "dependencies": {
     "@isaacs/string-locale-compare": "^1.1.0",
     "@npmcli/fs": "^4.0.0",
-    "@npmcli/installed-package-contents": "^3.0.0",
+    "@npmcli/installed-package-contents": "^4.0.0",
     "@npmcli/map-workspaces": "^5.0.0",
     "@npmcli/metavuln-calculator": "^9.0.2",
-    "@npmcli/name-from-folder": "^3.0.0",
-    "@npmcli/node-gyp": "^4.0.0",
+    "@npmcli/name-from-folder": "^4.0.0",
+    "@npmcli/node-gyp": "^5.0.0",
     "@npmcli/package-json": "^7.0.0",
     "@npmcli/query": "^4.0.0",
-    "@npmcli/redact": "^3.0.0",
+    "@npmcli/redact": "^4.0.0",
     "@npmcli/run-script": "^10.0.0",
-    "bin-links": "^5.0.0",
+    "bin-links": "^6.0.0",
     "cacache": "^20.0.1",
     "common-ancestor-path": "^1.0.1",
     "hosted-git-info": "^9.0.0",
@@ -22,18 +22,18 @@
     "lru-cache": "^11.2.1",
     "minimatch": "^10.0.3",
     "nopt": "^8.0.0",
-    "npm-install-checks": "^7.1.0",
+    "npm-install-checks": "^8.0.0",
     "npm-package-arg": "^13.0.0",
     "npm-pick-manifest": "^11.0.1",
     "npm-registry-fetch": "^19.0.0",
     "pacote": "^21.0.2",
-    "parse-conflict-json": "^4.0.0",
-    "proc-log": "^5.0.0",
+    "parse-conflict-json": "^5.0.1",
+    "proc-log": "^6.0.0",
     "proggy": "^3.0.0",
     "promise-all-reject-late": "^1.0.0",
     "promise-call-limit": "^3.0.1",
     "semver": "^7.3.7",
-    "ssri": "^12.0.0",
+    "ssri": "^13.0.0",
     "treeverse": "^3.0.0",
     "walk-up-path": "^4.0.0"
   },
diff --git a/workspaces/config/package.json b/workspaces/config/package.json
index 651e2135893f4..0c046f6a9bc39 100644
--- a/workspaces/config/package.json
+++ b/workspaces/config/package.json
@@ -40,9 +40,9 @@
     "@npmcli/map-workspaces": "^5.0.0",
     "@npmcli/package-json": "^7.0.0",
     "ci-info": "^4.0.0",
-    "ini": "^5.0.0",
+    "ini": "^6.0.0",
     "nopt": "^8.1.0",
-    "proc-log": "^5.0.0",
+    "proc-log": "^6.0.0",
     "semver": "^7.3.5",
     "walk-up-path": "^4.0.0"
   },
diff --git a/workspaces/libnpmexec/package.json b/workspaces/libnpmexec/package.json
index d06081ce21a60..0f459e28942e9 100644
--- a/workspaces/libnpmexec/package.json
+++ b/workspaces/libnpmexec/package.json
@@ -53,7 +53,7 @@
     "@npmcli/eslint-config": "^5.0.1",
     "@npmcli/mock-registry": "^1.0.0",
     "@npmcli/template-oss": "4.25.1",
-    "bin-links": "^5.0.0",
+    "bin-links": "^6.0.0",
     "chalk": "^5.2.0",
     "just-extend": "^6.2.0",
     "just-safe-set": "^4.2.1",
@@ -66,7 +66,7 @@
     "ci-info": "^4.0.0",
     "npm-package-arg": "^13.0.0",
     "pacote": "^21.0.2",
-    "proc-log": "^5.0.0",
+    "proc-log": "^6.0.0",
     "promise-retry": "^2.0.1",
     "read": "^4.0.0",
     "semver": "^7.3.7",
diff --git a/workspaces/libnpmpublish/package.json b/workspaces/libnpmpublish/package.json
index d9f00aaffac6c..a76f41da44bb4 100644
--- a/workspaces/libnpmpublish/package.json
+++ b/workspaces/libnpmpublish/package.json
@@ -42,10 +42,10 @@
     "ci-info": "^4.0.0",
     "npm-package-arg": "^13.0.0",
     "npm-registry-fetch": "^19.0.0",
-    "proc-log": "^5.0.0",
+    "proc-log": "^6.0.0",
     "semver": "^7.3.7",
     "sigstore": "^4.0.0",
-    "ssri": "^12.0.0"
+    "ssri": "^13.0.0"
   },
   "engines": {
     "node": "^20.17.0 || >=22.9.0"
diff --git a/workspaces/libnpmversion/package.json b/workspaces/libnpmversion/package.json
index db1538b5721cc..61e7c6412a2c1 100644
--- a/workspaces/libnpmversion/package.json
+++ b/workspaces/libnpmversion/package.json
@@ -40,8 +40,8 @@
   "dependencies": {
     "@npmcli/git": "^7.0.0",
     "@npmcli/run-script": "^10.0.0",
-    "json-parse-even-better-errors": "^4.0.0",
-    "proc-log": "^5.0.0",
+    "json-parse-even-better-errors": "^5.0.0",
+    "proc-log": "^6.0.0",
     "semver": "^7.3.7"
   },
   "engines": {